From 6477ed2a91b929544cae1a80c27a2295fe1d51ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Thu, 14 Aug 2025 13:31:39 -0400 Subject: [PATCH 01/10] Add typscript target --- codegen/src/file_gen_config.rs | 2 + codegen/src/file_generator.rs | 9 +- codegen/src/jinja_environment_builder.rs | 5 + codegen/src/type_builder.rs | 32 ++-- .../templates/typescript:postgres/config.json | 1 + .../typescript:postgres/model.jinja2 | 32 ++++ .../typescript:postgres/model_init.jinja2 | 25 +++ .../typescript:postgres/query.jinja2 | 150 ++++++++++++++++++ .../templates/typescript:postgres/types.json | 65 ++++++++ migrations/foo.sql | 7 - pgc.yaml | 5 +- 11 files changed, 306 insertions(+), 27 deletions(-) create mode 100644 codegen/templates/typescript:postgres/config.json create mode 100644 codegen/templates/typescript:postgres/model.jinja2 create mode 100644 codegen/templates/typescript:postgres/model_init.jinja2 create mode 100644 codegen/templates/typescript:postgres/query.jinja2 create mode 100644 codegen/templates/typescript:postgres/types.json delete mode 100644 migrations/foo.sql diff --git a/codegen/src/file_gen_config.rs b/codegen/src/file_gen_config.rs index a72a3fc..aa60eff 100644 --- a/codegen/src/file_gen_config.rs +++ b/codegen/src/file_gen_config.rs @@ -6,6 +6,7 @@ use crate::{error::Error, request::Request}; pub struct FileGenConfig { pub extension: String, pub directory_entrypoint: Option, + pub model_dir_entrypoint: Option, } impl FileGenConfig { @@ -14,6 +15,7 @@ impl FileGenConfig { let json = match &**target { "python:asyncpg" => include_str!("../templates/python:asyncpg/config.json"), "python:psycopg" => include_str!("../templates/python:psycopg/config.json"), + "typescript:postgres" => include_str!("../templates/typescript:postgres/config.json"), _ => return Err(Error::NotSupportedLanguage(target.clone())), }; Ok(serde_json::from_str(json).unwrap()) diff --git a/codegen/src/file_generator.rs b/codegen/src/file_generator.rs index dfb21fe..e50e1c4 100644 --- a/codegen/src/file_generator.rs +++ b/codegen/src/file_generator.rs @@ -73,7 +73,12 @@ impl FileGenerator { } fn model_dir_entrypoint(&self) -> Result, Error> { - let Some(filename) = self.config.directory_entrypoint.clone() else { + let Some(filename) = self + .config + .directory_entrypoint + .clone() + .or(self.config.model_dir_entrypoint.clone()) + else { return Ok(None); }; @@ -136,6 +141,6 @@ impl FileGenerator { if let Some(entrypoint) = self.config.directory_entrypoint.as_ref() { return entrypoint.clone(); } - return format!("query.{}", self.config.extension); + return "query".into(); } } diff --git a/codegen/src/jinja_environment_builder.rs b/codegen/src/jinja_environment_builder.rs index 28b4310..bc8b572 100644 --- a/codegen/src/jinja_environment_builder.rs +++ b/codegen/src/jinja_environment_builder.rs @@ -22,6 +22,7 @@ impl JinjaEnvironmentBuilder { Ok(match &*self.target { "python:asyncpg" => include_str!("../templates/python:asyncpg/query.py.jinja2"), "python:psycopg" => include_str!("../templates/python:psycopg/query.py.jinja2"), + "typescript:postgres" => include_str!("../templates/typescript:postgres/query.jinja2"), _ => return Err(Error::NotSupportedLanguage(self.target.clone())), }) } @@ -30,6 +31,7 @@ impl JinjaEnvironmentBuilder { Ok(match &*self.target { "python:asyncpg" => include_str!("../templates/python:asyncpg/model.py.jinja2"), "python:psycopg" => include_str!("../templates/python:psycopg/model.py.jinja2"), + "typescript:postgres" => 
include_str!("../templates/typescript:postgres/model.jinja2"), _ => return Err(Error::NotSupportedLanguage(self.target.clone())), }) } @@ -38,6 +40,9 @@ impl JinjaEnvironmentBuilder { Ok(match &*self.target { "python:asyncpg" => include_str!("../templates/python:asyncpg/model_init.py.jinja2"), "python:psycopg" => include_str!("../templates/python:psycopg/model_init.py.jinja2"), + "typescript:postgres" => { + include_str!("../templates/typescript:postgres/model_init.jinja2") + } _ => return Err(Error::NotSupportedLanguage(self.target.clone())), }) } diff --git a/codegen/src/type_builder.rs b/codegen/src/type_builder.rs index 41bc1d4..392a285 100644 --- a/codegen/src/type_builder.rs +++ b/codegen/src/type_builder.rs @@ -45,22 +45,20 @@ impl TypeBuilder { .map(move |enum_| (schema.name.clone(), enum_.name.clone())) }) .collect(); - let resolver = match &*lang { - "python:asyncpg" => TypeBuilder { - type_overrides, - enums, - catalog: request.catalog.clone(), - type_map: PYTHON_ASYNCPG.clone(), - }, - "python:psycopg" => TypeBuilder { - type_overrides, - enums, - catalog: request.catalog.clone(), - type_map: PYTHON_PSYCOPG.clone(), - }, + + let type_map = match &*lang { + "python:asyncpg" => PYTHON_ASYNCPG.clone(), + "python:psycopg" => PYTHON_PSYCOPG.clone(), + "typescript:postgres" => TYPESCRIPT_POSTGRES.clone(), _ => return Err(Error::NotSupportedLanguage(lang)), }; - Ok(resolver) + + Ok(TypeBuilder { + type_overrides, + enums, + catalog: request.catalog.clone(), + type_map, + }) } pub fn declared(&self, name: &str) -> Type { @@ -240,3 +238,9 @@ const PYTHON_PSYCOPG: LazyLock = LazyLock::new(|| { let json = include_str!("../templates/python:psycopg/types.json"); return serde_json::from_str(json).expect("failed to deserialize python:psycopg/types.json "); }); + +const TYPESCRIPT_POSTGRES: LazyLock = LazyLock::new(|| { + let json = include_str!("../templates/typescript:postgres/types.json"); + return serde_json::from_str(json) + .expect("failed to deserialize typescript:postgres/types.json "); +}); diff --git a/codegen/templates/typescript:postgres/config.json b/codegen/templates/typescript:postgres/config.json new file mode 100644 index 0000000..56e73aa --- /dev/null +++ b/codegen/templates/typescript:postgres/config.json @@ -0,0 +1 @@ +{ "extension": "ts", "model_dir_entrypoint": "models" } diff --git a/codegen/templates/typescript:postgres/model.jinja2 b/codegen/templates/typescript:postgres/model.jinja2 new file mode 100644 index 0000000..ac41e7d --- /dev/null +++ b/codegen/templates/typescript:postgres/model.jinja2 @@ -0,0 +1,32 @@ + +{%- if enums %} + +{%- endif %} +{%- for import in imports %} +{{import}} +{%- endfor %} +import type * as models from "./models.ts" + +{%- for enum in enums %} + +export enum {{enum.name | to_pascal_case }} { + {%- for value in enum.values %} + {{ value | to_screaming_snake_case }} = {{ value | to_c_string }}, + {%- endfor %} + {% endfor %} +} + +{%- for model in models %} + +export interface {{model.type.declaration}} { + {%- for field, type in model.fields %} + {{field | to_camel_case }}: {% if type.annotation | starts_with("models." + schema) -%} + {{ type.annotation | strip_prefix("models." 
+ schema + ".") }} + {%- elif type.annotation | starts_with("models.") -%} + {{ type.annotation }}; + {%- else -%} + {{ type.annotation }}; + {%- endif %} + {%- endfor %} +} +{%- endfor %} diff --git a/codegen/templates/typescript:postgres/model_init.jinja2 b/codegen/templates/typescript:postgres/model_init.jinja2 new file mode 100644 index 0000000..f7739c0 --- /dev/null +++ b/codegen/templates/typescript:postgres/model_init.jinja2 @@ -0,0 +1,25 @@ +{%- set reserved = ["abstract","arguments","await","boolean", +"break","byte","case","catch", +"char","class","const","continue", +"debugger","default","delete","do", +"double","else","enum","eval", +"export","extends","false","final", +"finally","float","for","function", +"goto","if","implements","import", +"in","instanceof","int","interface", +"let","long","native","new", +"null","package","private","protected", +"public","return","short","static", +"super","switch","synchronized","this", +"throw","throws","transient","true", +"try","typeof","var","void", +"volatile","while","with","yield"] -%} +{%- for module in model_modules -%} +{%- set source = module -%} +{%- if module in reserved -%} +{%- set module = "_" + module %} +{%- endif -%} +export type * as {{module}} from "./{{source}}.ts"; +{% endfor -%} + +export type * from "./public.ts"; diff --git a/codegen/templates/typescript:postgres/query.jinja2 b/codegen/templates/typescript:postgres/query.jinja2 new file mode 100644 index 0000000..d70ded3 --- /dev/null +++ b/codegen/templates/typescript:postgres/query.jinja2 @@ -0,0 +1,150 @@ +/* This file was automatically generated by pgc */ +{%- for import in imports %} +{{import}}; +{%- endfor %} +import postgres from "postgres"; +import type * as models from "{{request.config.codegen.options.import_path | default(request.config.codegen.out)}}/models/models.ts"; +{%- for subnamespace in query_namespace.subnamespaces %} +import * as {{subnamespace}} from "./{{subnamespace}}.ts" +{%- endfor %} + +{%- for method in query_namespace.methods %} + +export const {{ method.query.name | to_screaming_snake_case }} = ` +{{ method.query.query }} +` +{%- endfor %} +{{"\n"}} +{%- for method in query_namespace.methods %} +{%- if method.output_model != None %} + +export interface {{method.output_model.type.declaration | to_pascal_case }} { + {%- for field, type in method.output_model.fields | items %} + {{field | to_camel_case }}: {{type.annotation}}; + {%- endfor %} +} + +{% endif %} +{%- for _, input_model in method.input_models | items %} +export interface {{ input_model.type.declaration | to_pascal_case }} { + {%- for field, type in input_model.fields | items %} + {{field | to_camel_case }}: {{type.annotation}} + {%- endfor %} +} +{% endfor %} +{%- endfor %} + + +export class {{ query_namespace.name | to_pascal_case }}Queries { + {%- for subnamespace in query_namespace.subnamespaces %} + {{subnamespace}}: {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries; + {%- endfor %} + + constructor(readonly sql: postgres.Sql,) { + {%- for subnamespace in query_namespace.subnamespaces %} + this.{{subnamespace}} = new {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(sql); + {%- endfor %} + } + {% for method in query_namespace.methods %} + {%- if method.query.annotations.not_null_result -%} + {%- set HANDLE_NONE = 'assert row is not None, "a query marked with @not_null_result has returned null."' %} + {%- set OR_NONE = '' %} + {% else %} + {%- set HANDLE_NONE = 'if (rows.length === 0) return null;' %} + {%- set OR_NONE = ' | null' %} + {%- endif %} + + 
{%- if method.query.command == 'one' %} + + {%- if method.query.output | length == 1 %} + async {{ method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} + {%- endfor -%} + ): Promise<{{method.output_type.annotation}}{{OR_NONE}}> { + const rows = await this.sql.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}{% if not loop.last %}, {% endif %} + {%- endfor %}], { prepare: true } + ); + {{HANDLE_NONE}} + return Object.values(rows[0])[0]; + } + {%- else %} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} + {%- endfor -%} + ): Promise<{{method.output_type.annotation}}{{OR_NONE}}> { + const rows = await this.sql.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}{% if not loop.last %}, {% endif %} + {%- endfor %}], { prepare: true } + ); + {{HANDLE_NONE}} + return rows[0] as {{method.output_type.annotation}}; + } + {%- endif %} + {%- elif method.query.command == 'many' %} + {%- if method.query.output | length == 1 %} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} + {%- endfor -%} + ): Promise> { + const rows = await this.sql.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}{% if not loop.last %}, {% endif %} + {%- endfor %}], { prepare: true } + ) + return rows.map(row => Object.values(row[0])[0] as {{method.output_type.annotation}}); + } + {%- else%} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} + {%- endfor -%} + ): Promise> { + const rows = await this.sql.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}{% if not loop.last %}, {% endif %} + {%- endfor %}], { prepare: true } + ); + return rows as Array<{{method.output_type.annotation}}>; + } + {%- endif %} + {%- elif method.query.command == 'val' %} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} + {%- endfor -%} + ): Promise<{{method.output_type.annotation}}{{OR_NONE}}> { + const rows = await this.sql.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}{% if not loop.last %}, {% endif %} + {%- endfor %}], { prepare: true } + ) + {{HANDLE_NONE}} + return Object.values(rows[0])[0]; + } + {%- else %} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} + {%- endfor -%} + ) { + return await this.sql.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}{% if not loop.last %}, {% endif %} + {%- endfor %}], { prepare: true } + ); + } + {%- endif %} + {%- endfor %} +} diff --git a/codegen/templates/typescript:postgres/types.json b/codegen/templates/typescript:postgres/types.json new file mode 100644 index 0000000..dc51f6c --- 
/dev/null +++ b/codegen/templates/typescript:postgres/types.json @@ -0,0 +1,65 @@ +{ + "new_type_case": "{{ name | to_pascal_case }}", + "array": { + "constructor": "Array", + "annotation": "Array<{{type.annotation}}>" + }, + "null": { + "declaration": "{{type.declaration}}", + "constructor": "{{type.constructor}}", + "annotation": "{{type.annotation}} | null" + }, + "composite": { + "declaration": "{{ type_name | to_pascal_case }}", + "constructor": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", + "annotation": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", + "import": [] + }, + "wildcard": { + "name": "any" + }, + "schema": { + "pg_catalog": { + "bool": { "name": "boolean" }, + "bytea": { "name": "bytes" }, + "char": { "name": "string" }, + "int8": { "name": "number" }, + "int2": { "name": "number" }, + "int4": { "name": "number" }, + "text": { "name": "string" }, + "json": { "name": "any" }, + "point": { "name": "string" }, + "box": { "name": "string" }, + "polygon": { "name": "string" }, + "line": { "name": "string" }, + + "float4": { "name": "number" }, + "float8": { "name": "number" }, + "unknown": { "name": "unknown" }, + "circle": { "name": "string" }, + "varchar": { "name": "string" }, + "date": { "name": "Date" }, + "time": { "name": "string" }, + "timestamp": { "name": "Date" }, + "timestamptz": { "name": "Date" }, + "interval": { "name": "string" }, + "timetz": { "name": "string" }, + "numeric": { "name": "string" }, + "record": { "name": "string" }, + "any": { "name": "any" }, + "anyarray": { "name": "any[]" }, + "anyelement": { "name": "any" }, + "anynonarray": { "name": "any" }, + "uuid": { "name": "string" }, + "anyenum": { "name": "string" }, + "anyrange": { "name": "string" }, + "jsonb": { "name": "any" }, + "int4range": { "name": "string" }, + "numrange": { "name": "string" }, + "tsrange": { "name": "string" }, + "tstzrange": { "name": "string" }, + "daterange": { "name": "string" }, + "int8range": { "name": "string" } + } + } +} diff --git a/migrations/foo.sql b/migrations/foo.sql deleted file mode 100644 index 7e3cd89..0000000 --- a/migrations/foo.sql +++ /dev/null @@ -1,7 +0,0 @@ -create table user_role (id text not null primary key); - -create table "user" ( - id uuid primary key, - email text not null unique, - role text not null references user_role(id) -); diff --git a/pgc.yaml b/pgc.yaml index 8c76d03..063c661 100644 --- a/pgc.yaml +++ b/pgc.yaml @@ -12,12 +12,9 @@ queries: - "author.sql" - "queries.sql" codegen: - target: python:psycopg + target: typescript:postgres out: ./queries - types: - pg_catalog.uuid: - name: str enums: - "genre" options: From 271cd05835b0684a7f86daf512668cb0ae9edaff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Fri, 22 Aug 2025 09:59:43 -0400 Subject: [PATCH 02/10] merge --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index e2ad50d..ba204e8 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ pgc.yaml author.sql schema.sql queries/* +**/.DS_Store \ No newline at end of file From 3edd925533ef418d7c44f10bde05038184922fdf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Fri, 22 Aug 2025 13:57:50 -0400 Subject: [PATCH 03/10] refactor code to make it more extensible --- codegen/src/error.rs | 3 + codegen/src/file_generator.rs | 301 +++++++++--------- codegen/src/ir/method_service.rs | 118 +++++++ codegen/src/ir/mod.rs | 15 + codegen/src/ir/model_modules/mod.rs | 12 + 
codegen/src/ir/model_modules/model.rs | 23 ++ codegen/src/ir/model_modules/model_module.rs | 22 ++ codegen/src/ir/model_service.rs | 60 ++++ .../query_namespace}/method/method_builder.rs | 0 .../{ => ir/query_namespace}/method/mod.rs | 22 +- codegen/src/{ => ir}/query_namespace/mod.rs | 19 +- .../query_namespace_service.rs} | 9 +- codegen/src/ir/type.rs | 128 ++++++++ codegen/src/ir/type_service.rs | 163 ++++++++++ codegen/src/lib.rs | 14 - codegen/src/main.rs | 6 +- codegen/src/model_modules.rs | 100 ------ codegen/src/presentation/mod.rs | 9 + codegen/src/presentation/python/driver.rs | 5 + .../src/presentation/python/file_generator.rs | 5 + codegen/src/presentation/python/mod.rs | 3 + .../src/presentation/python/python_type.rs | 6 + .../python/templates/asyncpg/config.json | 1 + .../python/templates/asyncpg/model.py.jinja2 | 32 ++ .../templates/asyncpg/model_init.py.jinja2 | 12 + .../python/templates/asyncpg/query.py.jinja2 | 172 ++++++++++ .../python/templates/asyncpg/types.json | 108 +++++++ codegen/src/presentation/typescript/driver.rs | 5 + .../presentation/typescript/file_generator.rs | 0 codegen/src/presentation/typescript/mod.rs | 1 + .../typescript/typescript_type.rs | 6 + codegen/src/request.rs | 4 +- codegen/src/template_context.rs | 8 - codegen/src/type_builder.rs | 2 +- .../typescript:postgres/model_init.jinja2 | 238 ++++++++++++++ .../templates/typescript:postgres/parser.ts | 242 ++++++++++++++ pgc.yaml | 3 - schema.sql | 1 - 38 files changed, 1569 insertions(+), 309 deletions(-) create mode 100644 codegen/src/ir/method_service.rs create mode 100644 codegen/src/ir/mod.rs create mode 100644 codegen/src/ir/model_modules/mod.rs create mode 100644 codegen/src/ir/model_modules/model.rs create mode 100644 codegen/src/ir/model_modules/model_module.rs create mode 100644 codegen/src/ir/model_service.rs rename codegen/src/{ => ir/query_namespace}/method/method_builder.rs (100%) rename codegen/src/{ => ir/query_namespace}/method/mod.rs (53%) rename codegen/src/{ => ir}/query_namespace/mod.rs (77%) rename codegen/src/{query_namespace/query_namespace_builder.rs => ir/query_namespace_service.rs} (84%) create mode 100644 codegen/src/ir/type.rs create mode 100644 codegen/src/ir/type_service.rs delete mode 100644 codegen/src/lib.rs delete mode 100644 codegen/src/model_modules.rs create mode 100644 codegen/src/presentation/mod.rs create mode 100644 codegen/src/presentation/python/driver.rs create mode 100644 codegen/src/presentation/python/file_generator.rs create mode 100644 codegen/src/presentation/python/mod.rs create mode 100644 codegen/src/presentation/python/python_type.rs create mode 100644 codegen/src/presentation/python/templates/asyncpg/config.json create mode 100644 codegen/src/presentation/python/templates/asyncpg/model.py.jinja2 create mode 100644 codegen/src/presentation/python/templates/asyncpg/model_init.py.jinja2 create mode 100644 codegen/src/presentation/python/templates/asyncpg/query.py.jinja2 create mode 100644 codegen/src/presentation/python/templates/asyncpg/types.json create mode 100644 codegen/src/presentation/typescript/driver.rs create mode 100644 codegen/src/presentation/typescript/file_generator.rs create mode 100644 codegen/src/presentation/typescript/mod.rs create mode 100644 codegen/src/presentation/typescript/typescript_type.rs delete mode 100644 codegen/src/template_context.rs create mode 100644 codegen/templates/typescript:postgres/parser.ts diff --git a/codegen/src/error.rs b/codegen/src/error.rs index 7b5e85d..b515927 100644 --- a/codegen/src/error.rs +++ 
b/codegen/src/error.rs @@ -9,6 +9,9 @@ pub enum Error { #[error("language {0} is not supported.")] NotSupportedLanguage(Rc), + #[error("the language {language} requires the configuration option codegen.options.{option} to be present.")] + MissingConfigurationOption { language: Rc, option: Rc }, + #[error("failed to render or parse a template: {0}.\nThis is a bug in pgc, please report the issue at \"https://github.com/tvallotton/pgc\".")] TemplateError(#[from] minijinja::Error), } diff --git a/codegen/src/file_generator.rs b/codegen/src/file_generator.rs index 22d58f8..a88cef2 100644 --- a/codegen/src/file_generator.rs +++ b/codegen/src/file_generator.rs @@ -1,151 +1,150 @@ -use minijinja::{context, Environment}; -use serde_json::json; -use std::collections::BTreeSet; -use std::path::{Path, PathBuf}; - -use crate::{ - error::Error, - file_gen_config::FileGenConfig, - jinja_environment_builder::{ - JinjaEnvironmentBuilder, MODELS_DIR_ENTRYPOINT, MODEL_SCHEMA_FILE, QUERY, - }, - model_modules::{ModelModule, ModelModules}, - query_namespace::{QueryNamespace, QueryNamespaceBuilder}, - request::Request, - response::File, -}; - -pub struct FileGenerator { - pub environment: Environment<'static>, - pub config: FileGenConfig, - pub model_modules: ModelModules, - pub namespace: QueryNamespace, - pub request: Request, -} - -impl FileGenerator { - pub fn new(request: &Request) -> Result { - let environment = JinjaEnvironmentBuilder::new(request).build()?; - let config = FileGenConfig::new(request)?; - let model_modules = ModelModules::new(request)?; - let namespace = QueryNamespace::from_request(request)?; - - Ok(FileGenerator { - environment, - config, - namespace, - model_modules, - request: request.clone(), - }) - } - - pub fn render_files(&self) -> Result, Error> { - let mut files = self.model_module_files()?; - files.extend(self.model_dir_entrypoint()?); - self.query_files(&mut files)?; - - Ok(files) - } - - fn model_module_files(&self) -> Result, Error> { - let mut files = vec![]; - for (name, module) in self.model_modules.model_modules.iter() { - let filename = format!("models/{}.{}", name, &self.config.extension); - - let content = self - .environment - .get_template(MODEL_SCHEMA_FILE)? - .render(context! { - imports => module.imports(), - schema => name, - models => &module.classes, - enums => &module.enums, - request => &self.request, - })?; - - let file = File { - path: filename, - content, - }; - files.push(file); - } - Ok(files) - } - - fn model_dir_entrypoint(&self) -> Result, Error> { - let Some(filename) = self - .config - .directory_entrypoint - .clone() - .or(self.config.model_dir_entrypoint.clone()) - else { - return Ok(None); - }; - - let content = self - .environment - .get_template(MODELS_DIR_ENTRYPOINT)? 
- .render(context!( - model_modules=> &self.model_modules.model_modules, - request => &self.request, - ))?; - let path = format!("models/{filename}.{}", self.config.extension); - Ok(Some(File { path, content })) - } - - fn query_files(&self, files: &mut Vec) -> Result<(), Error> { - let pathbuf = PathBuf::from("./"); - self._query_files(pathbuf.as_path(), &self.namespace, files)?; - Ok(()) - } - - fn _query_files( - &self, - dir_path: &Path, - namespace: &QueryNamespace, - files: &mut Vec, - ) -> Result<(), Error> { - let entrypoint = self.directory_entrypoint(); - if namespace.subnamespaces.is_empty() { - let name = if namespace.name.is_empty() { - &entrypoint - } else { - &namespace.name - }; - let path = dir_path.join(&name); - let file = self.render_query_file(&path, namespace)?; - files.push(file); - } else { - let path = dir_path.join(&namespace.name).join(entrypoint); - let file = self.render_query_file(&path, namespace)?; - files.push(file); - } - - for subnamespace in namespace.subnamespaces.values() { - self._query_files(&dir_path.join(&namespace.name), subnamespace, files)?; - } - - Ok(()) - } - - fn render_query_file(&self, path: &Path, namespace: &QueryNamespace) -> Result { - let content = self.environment.get_template(QUERY)?.render(&context! ( - query_namespace => namespace, - imports => namespace.imports(), - request => &self.request, - model_modules => self.model_modules.model_modules, - ))?; - - Ok(File { - path: format!("{}.{}", path.to_str().unwrap(), self.config.extension), - content, - }) - } - - fn directory_entrypoint(&self) -> String { - if let Some(entrypoint) = self.config.directory_entrypoint.as_ref() { - return entrypoint.clone(); - } - return "query".into(); - } -} +// use minijinja::{context, Environment}; +// use serde_json::json; +// use std::collections::BTreeSet; +// use std::path::{Path, PathBuf}; + +// use crate::{ +// error::Error, +// file_gen_config::FileGenConfig, +// jinja_environment_builder::{ +// JinjaEnvironmentBuilder, MODELS_DIR_ENTRYPOINT, MODEL_SCHEMA_FILE, QUERY, +// }, +// model_modules::{ModelModule, ModelModules}, +// request::Request, +// response::File, +// }; + +// pub struct FileGenerator { +// pub environment: Environment<'static>, +// pub config: FileGenConfig, +// pub model_modules: ModelModules, +// pub namespace: QueryNamespace, +// pub request: Request, +// } + +// impl FileGenerator { +// pub fn new(request: &Request) -> Result { +// let environment = JinjaEnvironmentBuilder::new(request).build()?; +// let config = FileGenConfig::new(request)?; +// let model_modules = ModelModules::new(request)?; +// let namespace = QueryNamespace::from_request(request)?; + +// Ok(FileGenerator { +// environment, +// config, +// namespace, +// model_modules, +// request: request.clone(), +// }) +// } + +// pub fn render_files(&self) -> Result, Error> { +// let mut files = self.model_module_files()?; +// files.extend(self.model_dir_entrypoint()?); +// self.query_files(&mut files)?; + +// Ok(files) +// } + +// fn model_module_files(&self) -> Result, Error> { +// let mut files = vec![]; +// for (name, module) in self.model_modules.model_modules.iter() { +// let filename = format!("models/{}.{}", name, &self.config.extension); + +// let content = self +// .environment +// .get_template(MODEL_SCHEMA_FILE)? +// .render(context! 
{ +// imports => module.imports(), +// schema => name, +// models => &module.classes, +// enums => &module.enums, +// request => &self.request, +// })?; + +// let file = File { +// path: filename, +// content, +// }; +// files.push(file); +// } +// Ok(files) +// } + +// fn model_dir_entrypoint(&self) -> Result, Error> { +// let Some(filename) = self +// .config +// .directory_entrypoint +// .clone() +// .or(self.config.model_dir_entrypoint.clone()) +// else { +// return Ok(None); +// }; + +// let content = self +// .environment +// .get_template(MODELS_DIR_ENTRYPOINT)? +// .render(context!( +// model_modules=> &self.model_modules.model_modules, +// request => &self.request, +// ))?; +// let path = format!("models/{filename}.{}", self.config.extension); +// Ok(Some(File { path, content })) +// } + +// fn query_files(&self, files: &mut Vec) -> Result<(), Error> { +// let pathbuf = PathBuf::from("./"); +// self._query_files(pathbuf.as_path(), &self.namespace, files)?; +// Ok(()) +// } + +// fn _query_files( +// &self, +// dir_path: &Path, +// namespace: &QueryNamespace, +// files: &mut Vec, +// ) -> Result<(), Error> { +// let entrypoint = self.directory_entrypoint(); +// if namespace.subnamespaces.is_empty() { +// let name = if namespace.name.is_empty() { +// &entrypoint +// } else { +// &namespace.name +// }; +// let path = dir_path.join(&name); +// let file = self.render_query_file(&path, namespace)?; +// files.push(file); +// } else { +// let path = dir_path.join(&namespace.name).join(entrypoint); +// let file = self.render_query_file(&path, namespace)?; +// files.push(file); +// } + +// for subnamespace in namespace.subnamespaces.values() { +// self._query_files(&dir_path.join(&namespace.name), subnamespace, files)?; +// } + +// Ok(()) +// } + +// fn render_query_file(&self, path: &Path, namespace: &QueryNamespace) -> Result { +// let content = self.environment.get_template(QUERY)?.render(&context! 
( +// query_namespace => namespace, +// imports => namespace.imports(), +// request => &self.request, +// model_modules => self.model_modules.model_modules, +// ))?; + +// Ok(File { +// path: format!("{}.{}", path.to_str().unwrap(), self.config.extension), +// content, +// }) +// } + +// fn directory_entrypoint(&self) -> String { +// if let Some(entrypoint) = self.config.directory_entrypoint.as_ref() { +// return entrypoint.clone(); +// } +// return "query".into(); +// } +// } diff --git a/codegen/src/ir/method_service.rs b/codegen/src/ir/method_service.rs new file mode 100644 index 0000000..1c0a89f --- /dev/null +++ b/codegen/src/ir/method_service.rs @@ -0,0 +1,118 @@ +use std::{collections::BTreeMap, mem::take, rc::Rc}; + +use indexmap::IndexMap; + +use crate::{ + ir::query_namespace::{Method, MethodModel}, + r#type::Type, + request::Query, + type_builder::TypeBuilder, +}; + +pub struct MethodService { + type_builder: TypeBuilder, + arguments: IndexMap, Type>, + input_models: BTreeMap, MethodModel>, +} + +impl MethodService { + pub fn new(type_builder: TypeBuilder) -> Self { + MethodService { + type_builder, + arguments: Default::default(), + input_models: Default::default(), + } + } + + pub fn build(&mut self, query: &Query) -> Method { + self.init_input_models(query); + Method { + query: query.clone(), + arguments: take(&mut self.arguments), + input_models: take(&mut self.input_models), + output_type: self.output_type(query), + output_model: self.output_model(query), + } + } + + pub fn create_method(&self, query: &Query) -> Method { + Method { + query: query.clone(), + arguments: self.gather_arguments(query), + input_models: self.gather_input_models(query), + output_type: self.output_type(query), + output_model: self.output_type_model(query), + } + } + + pub fn init_input_models(&mut self, query: &Query) { + for param in query.parameters.iter() { + let mut ty = self.type_builder.from_output_type(¶m.type_); + + if !param.not_null { + ty = self.type_builder.null(&ty); + } + + if let Some((record, field)) = param.name.split_once('.') { + self.include_input_model(record, field, ty, query); + + continue; + }; + + self.arguments.insert(param.name.clone(), ty); + } + } + + pub fn include_input_model(&mut self, record: &str, field: &str, ty: Type, query: &Query) { + let query_name = query.name.clone(); + let type_builder = self.type_builder.clone(); + let entry = self.input_models.entry(record.into()); + + let query_model = entry.or_insert_with(|| MethodModel { + r#type: type_builder.declared(&format!("{}_{}", query_name, record)), + fields: IndexMap::default(), + }); + + query_model.fields.insert(field.into(), ty); + + self.arguments + .insert(record.into(), query_model.r#type.clone()); + } + + fn output_type(&self, query: &Query) -> Option { + if &*query.command == "exec" { + return None; + } + + if query.output.len() == 0 { + return None; + } + + if query.output.len() == 1 { + let pg_type = &query.output[0].type_; + let output_type = self.type_builder.from_output_type(&pg_type); + return Some(output_type); + } + + Some(self.type_builder.declared(&format!("{}_row", query.name))) + } + + fn output_model(&self, query: &Query) -> Option { + if query.output.len() < 2 { + return None; + } + let columns = query + .output + .iter() + .map(|column| { + let type_ = self.type_builder.from_output_type(&column.type_); + (column.name.clone(), type_) + }) + .collect(); + + Some(MethodModel { + r#type: self.output_type(query)?, + fields: columns, + }) + } +} diff --git a/codegen/src/ir/mod.rs 
b/codegen/src/ir/mod.rs new file mode 100644 index 0000000..413c657 --- /dev/null +++ b/codegen/src/ir/mod.rs @@ -0,0 +1,15 @@ +use crate::{error::Error, request::Request}; + +mod method_service; +mod model_modules; +mod model_service; +mod query_namespace; +mod query_namespace_service; +mod r#type; +mod type_service; + +pub struct Ir { + request: Request, + query_namespace: query_namespace::QueryNamespace, + model_modules: model_modules::ModelModules, +} diff --git a/codegen/src/ir/model_modules/mod.rs b/codegen/src/ir/model_modules/mod.rs new file mode 100644 index 0000000..3344055 --- /dev/null +++ b/codegen/src/ir/model_modules/mod.rs @@ -0,0 +1,12 @@ +pub use model::Model; +pub use model::ModelField; +pub use model_module::ModelModule; +use serde::Serialize; +use std::{collections::BTreeMap, rc::Rc}; +mod model; +mod model_module; + +#[derive(Clone, Serialize, Default)] +pub struct ModelModules { + pub model_modules: BTreeMap, ModelModule>, +} diff --git a/codegen/src/ir/model_modules/model.rs b/codegen/src/ir/model_modules/model.rs new file mode 100644 index 0000000..0bef00f --- /dev/null +++ b/codegen/src/ir/model_modules/model.rs @@ -0,0 +1,23 @@ +use std::rc::Rc; + +use serde::{Deserialize, Serialize}; + +use crate::{ + ir::r#type::Type, + request::{Column, Record}, +}; + +#[derive(Clone, Serialize, Deserialize)] +pub struct Model { + pub record: Record, + pub module_name: Rc, + pub name: Rc, + pub fields: Vec, +} + +#[derive(Clone, Serialize, Deserialize)] +pub struct ModelField { + pub name: Rc, + pub r#type: Type, + pub default_value: Option>, +} diff --git a/codegen/src/ir/model_modules/model_module.rs b/codegen/src/ir/model_modules/model_module.rs new file mode 100644 index 0000000..0ed5064 --- /dev/null +++ b/codegen/src/ir/model_modules/model_module.rs @@ -0,0 +1,22 @@ +use std::rc::Rc; + +use serde::Serialize; + +use crate::{ir::model_modules::Model, request::Enum}; + +#[derive(Clone, Serialize)] +pub struct ModelModule { + pub name: Rc, + pub models: Vec, + pub enums: Rc<[Enum]>, +} + +impl ModelModule { + pub fn new(name: &Rc) -> Self { + ModelModule { + name: name.clone(), + models: vec![], + enums: Default::default(), + } + } +} diff --git a/codegen/src/ir/model_service.rs b/codegen/src/ir/model_service.rs new file mode 100644 index 0000000..c29f61c --- /dev/null +++ b/codegen/src/ir/model_service.rs @@ -0,0 +1,60 @@ +use std::rc::Rc; + +use crate::{ + ir::{ + model_modules::{Model, ModelField, ModelModule, ModelModules}, + type_service::TypeService, + }, + request::{Catalog, Column, Record, Schema}, +}; + +pub struct ModelService { + pub type_service: TypeService, +} + +impl ModelService { + pub fn create_model_modules(&self, catalog: &Catalog) { + let mut modules = ModelModules::default(); + + for schema in catalog.schemas.iter() { + let module = self.create_model_module(schema); + modules.model_modules.insert(schema.name.clone(), module); + } + } + + fn create_model_module(&self, schema: &Schema) -> ModelModule { + let mut module = ModelModule::new(&schema.name); + + for record in schema.records.iter() { + let model = self.create_model_from_record(&schema.name, record); + module.models.push(model); + } + + module.enums = schema.enums.clone(); + + return module; + } + + fn create_model_from_record(&self, module_name: &Rc, record: &Record) -> Model { + let mut model = Model { + record: record.clone(), + module_name: module_name.clone(), + name: record.name.clone(), + fields: vec![], + }; + for column in record.columns.iter() { + let field = 
self.create_model_field_from_column(column); + model.fields.push(field); + } + return model; + } + + fn create_model_field_from_column(&self, column: &Column) -> ModelField { + let r#type = self.type_service.from_column(&column); + ModelField { + name: column.name.clone(), + r#type, + default_value: column.default.clone(), + } + } +} diff --git a/codegen/src/method/method_builder.rs b/codegen/src/ir/query_namespace/method/method_builder.rs similarity index 100% rename from codegen/src/method/method_builder.rs rename to codegen/src/ir/query_namespace/method/method_builder.rs diff --git a/codegen/src/method/mod.rs b/codegen/src/ir/query_namespace/method/mod.rs similarity index 53% rename from codegen/src/method/mod.rs rename to codegen/src/ir/query_namespace/method/mod.rs index 1fd79b4..d2c9c21 100644 --- a/codegen/src/method/mod.rs +++ b/codegen/src/ir/query_namespace/method/mod.rs @@ -6,35 +6,31 @@ use std::{ use indexmap::IndexMap; use serde::{Deserialize, Serialize}; -use crate::{r#type::Type, request::Query}; - -pub use method_builder::MethodBuilder; -mod method_builder; +use crate::{ir::r#type::Type, request::Query}; #[derive(Deserialize, Serialize, Clone, Debug)] pub struct Method { - query: Query, - arguments: IndexMap, Type>, - input_models: BTreeMap, MethodModel>, + pub query: Query, + pub arguments: IndexMap, Type>, + pub input_models: BTreeMap, MethodModel>, pub output_type: Option, - output_model: Option, + pub output_model: Option, } #[derive(Deserialize, Serialize, Clone, Debug)] pub struct MethodModel { - r#type: Type, + name: Rc, fields: IndexMap, Type>, } impl Method { - pub fn imports(&self) -> impl Iterator + '_ { - let argument_imports = self.arguments.values().flat_map(|ty| ty.import.iter()); + pub fn used_types(&self) -> impl Iterator + '_ { + let argument_imports = self.arguments.values().cloned(); self.input_models .values() .chain(self.output_model.as_ref()) .flat_map(|model| model.fields.iter()) - .flat_map(|field| field.1.import.iter()) + .map(|field| field.1.clone()) .chain(argument_imports) - .map(|v| &**v) } } diff --git a/codegen/src/query_namespace/mod.rs b/codegen/src/ir/query_namespace/mod.rs similarity index 77% rename from codegen/src/query_namespace/mod.rs rename to codegen/src/ir/query_namespace/mod.rs index 76aa263..7d43ee3 100644 --- a/codegen/src/query_namespace/mod.rs +++ b/codegen/src/ir/query_namespace/mod.rs @@ -2,11 +2,16 @@ use std::{ collections::{BTreeMap, BTreeSet}, rc::Rc, }; - -use crate::{error::Error, method::Method, request::Request, utils::to_pascal_case}; -pub use query_namespace_builder::QueryNamespaceBuilder; +mod method; +use crate::{ + error::Error, + ir::{query_namespace_service::QueryNamespaceBuilder, r#type::Type}, + request::Request, + utils::to_pascal_case, +}; +pub use method::Method; +pub use method::MethodModel; use serde::{Deserialize, Serialize}; -mod query_namespace_builder; #[derive(Serialize, Deserialize)] pub struct QueryNamespace { @@ -20,7 +25,7 @@ impl QueryNamespace { Ok(QueryNamespaceBuilder::new(request)?.build()) } - fn root() -> QueryNamespace { + pub fn root() -> QueryNamespace { QueryNamespace { name: String::new(), subnamespaces: Default::default(), @@ -28,10 +33,10 @@ impl QueryNamespace { } } - pub fn imports(&self) -> BTreeSet<&str> { + pub fn used_types(&self) -> BTreeSet { self.methods .iter() - .flat_map(|method| method.imports()) + .flat_map(|method| method.used_types()) .collect() } diff --git a/codegen/src/query_namespace/query_namespace_builder.rs b/codegen/src/ir/query_namespace_service.rs 
similarity index 84% rename from codegen/src/query_namespace/query_namespace_builder.rs rename to codegen/src/ir/query_namespace_service.rs index 53aa1a8..d0e0905 100644 --- a/codegen/src/query_namespace/query_namespace_builder.rs +++ b/codegen/src/ir/query_namespace_service.rs @@ -1,14 +1,13 @@ use crate::{ error::Error, - method::MethodBuilder, - query_namespace::QueryNamespace, + ir::{method_service::MethodService, query_namespace::QueryNamespace}, request::{Query, Request}, type_builder::TypeBuilder, }; pub struct QueryNamespaceBuilder { request: Request, - method_builder: MethodBuilder, + method_service: MethodService, namespace: QueryNamespace, } @@ -17,7 +16,7 @@ impl QueryNamespaceBuilder { let type_builder = TypeBuilder::new(request.clone())?; Ok(QueryNamespaceBuilder { request: request.clone(), - method_builder: MethodBuilder::new(type_builder.clone()), + method_service: MethodService::new(type_builder.clone()), namespace: QueryNamespace::root(), }) } @@ -33,7 +32,7 @@ impl QueryNamespaceBuilder { pub fn include_query(&mut self, query: &Query) { let name = query.namespace(); let namespace = self.namespace.resolve(name); - namespace.methods.push(self.method_builder.build(query)); + namespace.methods.push(self.method_service.build(query)); } } diff --git a/codegen/src/ir/type.rs b/codegen/src/ir/type.rs new file mode 100644 index 0000000..d3cf7b8 --- /dev/null +++ b/codegen/src/ir/type.rs @@ -0,0 +1,128 @@ +use std::rc::Rc; + +use serde::{Deserialize, Serialize}; + +use crate::{ + ir::model_modules::Model, + request::{Column, Enum, Record}, +}; + +#[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Debug, Serialize, Deserialize)] +pub enum Type { + // A type not matching any of these + Other { + schema: Rc, + name: Rc, + }, + + // Uncategorized + Bool, + Uuid, + + // Text + Text, + VarChar, + BpChar, + Bytea, + + // Numeric types + Int2, + Int4, + Int8, + Serial2, + Serial4, + Serial8, + Decimal, + Numeric, + Money, + Float4, + Float8, + + // Time types + Timestamp, + Date, + Time, + TimestampTz, + DateTz, + TimeTz, + Range, + Interval, + + // Range types + Int4Range, + Int8Range, + NumRange, + TsRange, + TsTzRange, + DateRange, + DateMultiRange, + Int4MultiRange, + Int8MultiRange, + NumMultiRange, + TsMultiRange, + TsTzMultiRange, + + // Geometric types + Point, + Line, + LSeg, + Box, + Path, + Polygon, + Circle, + + // Generic types + Nullable(Rc), + Array { + r#type: Rc, + dim: i64, + }, + + // User defined types + UserDefined { + module_path: Rc<[Rc]>, + name: Rc, + }, + + // Networking types + Cid, + Cidr, + Inet, + MacAddr, + MacAddr8, + + // Bit string types + Bit, + BitVarying, + + // Text Seach types + TsVector, + TsQuery, + + // Encoding types + Xml, + Json, + Jsonb, + JsonPath, + + // PseudoTypes + Any, + AnyArray, + AnyElement, + AnyNonArray, + AnyEnum, + AnyRange, + AnyMultiRange, + AnyCompatible, + AnyCompatibleArray, + AnyCompatibleMultiRange, + AnyCompatibleNonArray, + AnycompatibleRange, + Cstring, + Internal, + Record, + Void, + Unknown, +} + +impl Type {} diff --git a/codegen/src/ir/type_service.rs b/codegen/src/ir/type_service.rs new file mode 100644 index 0000000..a01b6b4 --- /dev/null +++ b/codegen/src/ir/type_service.rs @@ -0,0 +1,163 @@ +use std::rc::Rc; + +use super::r#type::Type; +use crate::{ + ir::model_modules::{Model, ModelModules}, + request::{Catalog, Column, Record, Request, Schema}, +}; + +pub struct TypeService { + catalog: Catalog, + model_modules: ModelModules, +} + +impl TypeService { + pub fn from_column(&self, column: &Column) -> Type { + let 
schema_name = &column.type_field.schema_name; + let column_name = &column.type_field.name; + + let mut r#type = self.resolve_from_catalog(schema_name, column_name); + + if let Some(r#type_) = self.find_table_backed_enum(column) { + r#type = r#type_; + } + + if column.type_field.is_array { + r#type = Type::Array { + r#type: Rc::new(r#type), + dim: column.type_field.array_dimensions, + }; + } + + if column.is_nullable { + r#type = Type::Nullable(Rc::new(r#type)); + } + + return r#type; + } + + fn find_table_backed_enum(&self, column: &Column) -> Option { + let schema = self.get_schema(column.foreign_table_schema.as_deref()?)?; + self.resolve_enum(schema, column.foreign_table_name.as_ref()?) + } + + fn resolve_from_catalog(&self, schema_name: &Rc, name: &Rc) -> Type { + if &**schema_name == "pg_catalog" { + return self.from_pg_catalog(&name); + } + self.from_user_defined_catalog(schema_name, name) + .unwrap_or(Type::Any) + } + + fn from_user_defined_catalog(&self, schema_name: &Rc, name: &Rc) -> Option { + let schema = self.get_schema(schema_name)?; + + self.resolve_record(schema, name) + .or_else(|| self.resolve_enum(schema, name)) + } + + fn resolve_enum(&self, schema: &Schema, name: &Rc) -> Option { + schema.enums.iter().find(|enum_| enum_.name == *name)?; + Some(self.user_defined_model(schema, name)) + } + + fn resolve_record(&self, schema: &Schema, name: &Rc) -> Option { + schema.records.iter().find(|record| record.name == *name)?; + Some(self.user_defined_model(schema, name)) + } + + fn user_defined_model(&self, schema: &Schema, name: &Rc) -> Type { + let module_path = Rc::new(["models".into(), schema.name.clone()]); + Type::UserDefined { + module_path, + name: name.clone(), + } + } + + fn get_schema(&self, schema_name: &str) -> Option<&Schema> { + self.catalog + .schemas + .iter() + .find(|schema| &*schema.name == schema_name) + } + + fn from_pg_catalog(&self, name: &str) -> Type { + match name { + "bool" => Type::Bool, + "uuid" => Type::Uuid, + "text" => Type::Text, + "varchar" => Type::VarChar, + "bpchar" => Type::BpChar, + "bytea" => Type::Bytea, + "int2" => Type::Int2, + "int4" => Type::Int4, + "int8" => Type::Int8, + "serial2" => Type::Serial2, + "serial4" => Type::Serial4, + "serial8" => Type::Serial8, + "decimal" => Type::Decimal, + "numeric" => Type::Numeric, + "money" => Type::Money, + "float4" => Type::Float4, + "float8" => Type::Float8, + "timestamp" => Type::Timestamp, + "date" => Type::Date, + "time" => Type::Time, + "timestamptz" => Type::TimestampTz, + "datetz" => Type::DateTz, + "timetz" => Type::TimeTz, + "range" => Type::Range, + "interval" => Type::Interval, + "int4range" => Type::Int4Range, + "int8range" => Type::Int8Range, + "numrange" => Type::NumRange, + "tsrange" => Type::TsRange, + "tstzrange" => Type::TsTzRange, + "daterange" => Type::DateRange, + "datemultirange" => Type::DateMultiRange, + "int4multirange" => Type::Int4MultiRange, + "int8multirange" => Type::Int8MultiRange, + "nummultirange" => Type::NumMultiRange, + "tsmultirange" => Type::TsMultiRange, + "tstzmultirange" => Type::TsTzMultiRange, + "point" => Type::Point, + "line" => Type::Line, + "lseg" => Type::LSeg, + "box" => Type::Box, + "path" => Type::Path, + "polygon" => Type::Polygon, + "circle" => Type::Circle, + "cid" => Type::Cid, + "cidr" => Type::Cidr, + "inet" => Type::Inet, + "macaddr" => Type::MacAddr, + "macaddr8" => Type::MacAddr8, + "bit" => Type::Bit, + "bitvarying" => Type::BitVarying, + "tsvector" => Type::TsVector, + "tsquery" => Type::TsQuery, + "xml" => Type::Xml, + "json" => 
Type::Json, + "jsonb" => Type::Jsonb, + "jsonpath" => Type::JsonPath, + "any" => Type::Any, + "anyarray" => Type::AnyArray, + "anyelement" => Type::AnyElement, + "anynonarray" => Type::AnyNonArray, + "anyenum" => Type::AnyEnum, + "anyrange" => Type::AnyRange, + "anymultirange" => Type::AnyMultiRange, + "anycompatible" => Type::AnyCompatible, + "anycompatiblearray" => Type::AnyCompatibleArray, + "anycompatiblemultirange" => Type::AnyCompatibleMultiRange, + "anycompatiblenonarray" => Type::AnyCompatibleNonArray, + "anycompatiblerange" => Type::AnycompatibleRange, + "cstring" => Type::Cstring, + "internal" => Type::Internal, + "record" => Type::Record, + "void" => Type::Void, + "unknown" => Type::Unknown, + _ => Type::Any, + } + } +} diff --git a/codegen/src/lib.rs b/codegen/src/lib.rs deleted file mode 100644 index d462e37..0000000 --- a/codegen/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub mod error; -pub mod file_gen_config; -pub mod file_generator; -pub mod jinja_environment_builder; -pub mod method; -pub mod mock; -pub mod model_modules; -pub mod query_namespace; -pub mod request; -pub mod response; -pub mod template_context; -pub mod r#type; -pub mod type_builder; -mod utils; diff --git a/codegen/src/main.rs b/codegen/src/main.rs index 3a88b0a..caee672 100644 --- a/codegen/src/main.rs +++ b/codegen/src/main.rs @@ -10,14 +10,12 @@ use std::{slice, sync::atomic::AtomicU64}; pub mod error; pub mod file_gen_config; pub mod file_generator; +pub mod ir; pub mod jinja_environment_builder; -pub mod method; pub mod mock; -pub mod model_modules; -pub mod query_namespace; +pub mod presentation; pub mod request; pub mod response; -pub mod template_context; pub mod r#type; pub mod type_builder; diff --git a/codegen/src/model_modules.rs b/codegen/src/model_modules.rs deleted file mode 100644 index ea3d23f..0000000 --- a/codegen/src/model_modules.rs +++ /dev/null @@ -1,100 +0,0 @@ -use std::{ - collections::{BTreeMap, BTreeSet}, - rc::Rc, -}; - -use serde::Serialize; - -use crate::{ - error::Error, - r#type::Type, - request::{Enum, Query, Request, Schema}, - type_builder::{self, TypeBuilder}, -}; - -#[derive(Clone, Serialize)] -pub struct ModelModules { - type_builder: TypeBuilder, - pub model_modules: BTreeMap, ModelModule>, -} - -#[derive(Clone, Serialize)] -pub struct ModelModule { - type_builder: TypeBuilder, - pub imports: Vec>, - pub classes: Vec, - pub enums: Rc<[Enum]>, -} - -#[derive(Clone, Serialize)] -pub struct ModelClass { - r#type: Type, - fields: Vec<(Rc, Type)>, -} - -impl ModelModule { - fn new(type_builder: TypeBuilder) -> Self { - ModelModule { - type_builder, - imports: vec![], - classes: vec![], - enums: Default::default(), - } - } -} - -impl ModelClass { - fn imports(&self) -> impl Iterator> + '_ { - self.fields - .iter() - .map(|(_, ty)| ty.import.iter()) - .flatten() - .cloned() - } -} - -impl ModelModule { - pub fn imports(&self) -> BTreeSet> { - self.classes - .iter() - .flat_map(|class| class.imports()) - .collect() - } -} - -impl ModelModules { - pub fn new(request: &Request) -> Result { - let type_builder = TypeBuilder::new(request.clone())?; - - let mut modules = ModelModules { - type_builder, - model_modules: Default::default(), - }; - - for schema in request.catalog.schemas.iter() { - modules.add_schema(schema); - } - - Ok(modules) - } - - pub fn add_schema(&mut self, schema: &Schema) { - let mut module = ModelModule::new(self.type_builder.clone()); - - for model in schema.models.iter() { - let model_class = ModelClass { - r#type: 
self.type_builder.resolve(&schema.name, &model.name), - fields: model - .columns - .iter() - .map(|column| (column.name.clone(), self.type_builder.from_col(column))) - .collect::>(), - }; - module.classes.push(model_class); - } - - module.enums = schema.enums.clone(); - - self.model_modules.insert(schema.name.clone(), module); - } -} diff --git a/codegen/src/presentation/mod.rs b/codegen/src/presentation/mod.rs new file mode 100644 index 0000000..6ee5a0a --- /dev/null +++ b/codegen/src/presentation/mod.rs @@ -0,0 +1,9 @@ +// use crate::{ir::Ir, request::Request}; + +// mod python; +// mod typescript; + +// pub trait FileGenerator { +// fn new(ir: Ir) -> Self; +// fn generate_files(self) -> Vec; +// } diff --git a/codegen/src/presentation/python/driver.rs b/codegen/src/presentation/python/driver.rs new file mode 100644 index 0000000..96aad86 --- /dev/null +++ b/codegen/src/presentation/python/driver.rs @@ -0,0 +1,5 @@ +pub enum PythonDriver { + Aysncpg, + Psycopg, + Psycopg2, +} diff --git a/codegen/src/presentation/python/file_generator.rs b/codegen/src/presentation/python/file_generator.rs new file mode 100644 index 0000000..e0e398d --- /dev/null +++ b/codegen/src/presentation/python/file_generator.rs @@ -0,0 +1,5 @@ +pub struct PythonFileGenerator {} + +impl PythonFileGenerator { + fn new() {} +} diff --git a/codegen/src/presentation/python/mod.rs b/codegen/src/presentation/python/mod.rs new file mode 100644 index 0000000..e46effa --- /dev/null +++ b/codegen/src/presentation/python/mod.rs @@ -0,0 +1,3 @@ +pub(super) mod driver; +pub mod file_generator; +pub mod python_type; diff --git a/codegen/src/presentation/python/python_type.rs b/codegen/src/presentation/python/python_type.rs new file mode 100644 index 0000000..e71d40f --- /dev/null +++ b/codegen/src/presentation/python/python_type.rs @@ -0,0 +1,6 @@ +pub struct PythonType { + module: Rc, + import: Rc, + annotation: Rc, + name: Rc, +} diff --git a/codegen/src/presentation/python/templates/asyncpg/config.json b/codegen/src/presentation/python/templates/asyncpg/config.json new file mode 100644 index 0000000..2a1fdaf --- /dev/null +++ b/codegen/src/presentation/python/templates/asyncpg/config.json @@ -0,0 +1 @@ +{ "extension": "py", "directory_entrypoint": "__init__" } diff --git a/codegen/src/presentation/python/templates/asyncpg/model.py.jinja2 b/codegen/src/presentation/python/templates/asyncpg/model.py.jinja2 new file mode 100644 index 0000000..78addbb --- /dev/null +++ b/codegen/src/presentation/python/templates/asyncpg/model.py.jinja2 @@ -0,0 +1,32 @@ +import dataclasses +{%- if enums %} +import enum +{%- endif %} +{%- for import in imports %} +import {{import}} +{%- endfor %} +from {{request.config.codegen.options.package}} import models + +{%- for enum in enums %} + +class {{enum.name | to_pascal_case }}(enum.StrEnum): + {%- for value in enum.values %} + {{ value | to_screaming_snake_case }} = {{ value | to_c_string }} + {%- endfor %} +{% endfor %} + +{%- for model in models %} + + +@dataclasses.dataclass +class {{model.type.declaration}}: + {%- for field, type in model.fields %} + {{field}}: {% if type.annotation | starts_with("models." + schema) -%} + {{ type.annotation | strip_prefix("models." 
+ schema + ".") }} + {%- elif type.annotation | starts_with("models.") -%} + {{ type.annotation | to_c_string }} + {%- else -%} + {{ type.annotation }} + {%- endif %} + {%- endfor %} +{%- endfor %} diff --git a/codegen/src/presentation/python/templates/asyncpg/model_init.py.jinja2 b/codegen/src/presentation/python/templates/asyncpg/model_init.py.jinja2 new file mode 100644 index 0000000..3a811fc --- /dev/null +++ b/codegen/src/presentation/python/templates/asyncpg/model_init.py.jinja2 @@ -0,0 +1,12 @@ +{%- for module in model_modules -%} +from . import {{module}} +{% endfor -%} + + +{%- if model_modules["public"] -%} +from .public import ( +{%- for model_class in model_modules["public"].classes %} + {{model_class.type.declaration}}, +{%- endfor %} +) +{% endif %} diff --git a/codegen/src/presentation/python/templates/asyncpg/query.py.jinja2 b/codegen/src/presentation/python/templates/asyncpg/query.py.jinja2 new file mode 100644 index 0000000..03e45a5 --- /dev/null +++ b/codegen/src/presentation/python/templates/asyncpg/query.py.jinja2 @@ -0,0 +1,172 @@ +# This file was automatically generated by pgc +# flake8: noqa +# pylint: disable=unused-import +{%- for import in imports %} +import {{import}} +{%- endfor %} +import asyncpg +import typing +import dataclasses +from {{request.config.codegen.options.package}} import models +{%- for subnamespace in query_namespace.subnamespaces %} +from . import {{subnamespace}} +{%- endfor %} + +{%- for method in query_namespace.methods %} + +{{ method.query.name | to_screaming_snake_case }} = """ +{{ method.query.query }} +""" +{%- endfor %} +{{"\n"}} +{%- for method in query_namespace.methods %} +{%- if method.output_model != None %} +@dataclasses.dataclass +class {{method.output_model.type.declaration | to_pascal_case }}: + {%- for field, type in method.output_model.fields | items %} + {{field}}: {{type.annotation}} + {%- endfor %} + +{% endif %} +{%- for _, input_model in method.input_models | items %} +{%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} +@dataclasses.dataclass +class {{ input_model.type.declaration | to_pascal_case }}: + {%- for field, type in input_model.fields | items %} + {{field}}: {{type.annotation}} + {%- endfor %} + +{%- else %} +class {{ input_model.type.declaration | to_pascal_case }}(typing.Protocol): + {%- for field, type in input_model.fields | items %} + @property + def {{field}}(self) -> {{type.annotation}}: ... 
+ {%- endfor %} + +{%- endif %} +{% endfor %} +{%- endfor %} + +@dataclasses.dataclass +class {{ query_namespace.name | to_pascal_case }}Queries: + def __init__(self, connection: asyncpg.Connection): + self.connection = connection + {%- for subnamespace in query_namespace.subnamespaces %} + self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) + {%- endfor %} + + {% for method in query_namespace.methods%} + {%- if method.query.annotations.not_null_result -%} + {%- set HANDLE_NONE = 'assert row is not None, "a query marked with @not_null_result has returned null."' %} + {%- set OR_NONE = '' %} + {% else %} + {%- set HANDLE_NONE = 'if row is None: return None' %} + {%- set OR_NONE = ' | None' %} + {%- endif %} + + {%- if method.query.command == 'one' %} + + {%- if method.query.output | length == 1 %} + async def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> {{method.output_type.annotation}}{{OR_NONE}}: + row = await self.connection.fetchrow( + {{method.query.name | to_screaming_snake_case }} + {%- for parameter in method.query.parameters -%} + , {{parameter.name}} + {%- endfor %} + ) + {{HANDLE_NONE}} + return row[0] + {%- else %} + async def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> {{method.output_type.annotation}}{{OR_NONE}}: + row = await self.connection.fetchrow( + {{method.query.name | to_screaming_snake_case }} + {%- for parameter in method.query.parameters -%} + , {{parameter.name}} + {%- endfor %} + ) + {{HANDLE_NONE}} + return {{method.output_type.annotation}}(**row) + {%- endif %} + {%- elif method.query.command == 'many' %} + {%- if method.query.output | length == 1 %} + async def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> list[{{method.output_type.annotation}}]: + rows = await self.connection.fetch( + {{method.query.name | to_screaming_snake_case }} + {%- for parameter in method.query.parameters -%} + , {{parameter.name}} + {%- endfor %} + ) + return [row[0] for row in rows] + {%- else%} + async def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> list[{{method.output_type.annotation}}]: + rows = await self.connection.fetch( + {{method.query.name | to_screaming_snake_case }} + {%- for parameter in method.query.parameters -%} + , {{parameter.name}} + {%- endfor %} + ) + return [{{method.output_type.annotation}}(**row) for row in rows] + {%- endif %} + {%- elif method.query.command == 'val' %} + async def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> {{method.output_type.annotation}}{{OR_NONE}}: + row = await self.connection.fetchval( + {{method.query.name | to_screaming_snake_case }} + {%- for parameter in method.query.parameters -%} + , {{parameter.name}} + {%- endfor %} + ) + {{HANDLE_NONE}} + return row + {%- else %} + async def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ): + return await self.connection.execute( + {{method.query.name | to_screaming_snake_case }} + {%- for parameter in method.query.parameters -%} + , {{parameter.name}} + {%- 
endfor %} + ) + {%- endif %} + + {% endfor %} + + + +{%- if query_namespace.name == "" %} +async def init_connection(conn: asyncpg.Connection): + {%- for _, model_module in model_modules | items %} + {%- for model in model_module.classes %} + + await conn.set_type_codec( + {{model.type.pgtype_name | to_c_string }}, + encoder=lambda model: ({% for name, _ in model.fields %}model.{{name}}{% if not loop.last %}, {% endif %}{%endfor%}), + decoder=lambda row: {{model.type.constructor}}(*row), + schema={{model.type.pgtype_schema | to_c_string }}, + format="tuple", + ) + {%- endfor %} + {% endfor %} +{% endif -%} diff --git a/codegen/src/presentation/python/templates/asyncpg/types.json b/codegen/src/presentation/python/templates/asyncpg/types.json new file mode 100644 index 0000000..4389b8f --- /dev/null +++ b/codegen/src/presentation/python/templates/asyncpg/types.json @@ -0,0 +1,108 @@ +{ + "new_type_case": "{{ name | to_pascal_case }}", + "array": { + "constructor": "list", + "annotation": "list[{{type.annotation}}]" + }, + "null": { + "declaration": "{{type.declaration}}", + "constructor": "{{type.constructor}}", + "annotation": "{{type.annotation}} | None" + }, + "composite": { + "declaration": "{{ type_name | to_pascal_case }}", + "constructor": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", + "annotation": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", + "import": [] + }, + "wildcard": { + "name": "typing.Any", + "import": ["typing"] + }, + "schema": { + "pg_catalog": { + "bool": { "name": "bool" }, + "bytea": { "name": "bytes" }, + "char": { "name": "str" }, + "int8": { "name": "int" }, + "int2": { "name": "int" }, + "int4": { "name": "int" }, + "text": { "name": "str" }, + "json": { "name": "str" }, + "point": { "name": "asyncpg.types.Point", "import": ["asyncpg"] }, + "box": { "name": "asyncpg.pgproto.types.Box", "import": ["asyncpg"] }, + "polygon": { + "name": "asyncpg.pgproto.types.Polygon", + "import": ["asyncpg"] + }, + "line": { + "name": "asyncpg.pgproto.types.Line", + "import": ["asyncpg"] + }, + + "float4": { "name": "float" }, + "float8": { "name": "float" }, + "unknown": { "name": "typing.Any", "import": ["typing"] }, + "circle": { + "name": "asyncpg.pgproto.types.Circle", + "import": ["asyncpg"] + }, + "varchar": { "name": "str" }, + "date": { "name": "datetime.date", "import": ["datetime"] }, + "time": { "name": "datetime.time", "import": [] }, + "timestamp": { + "name": "datetime.datetime", + "import": ["datetime"] + }, + "timestamptz": { + "name": "datetime.datetime", + "import": ["datetime"] + }, + "interval": { + "name": "datatime.timedelta", + "import": ["datetime"] + }, + "timetz": { "name": "datetime.time", "import": ["datetime"] }, + "numeric": { "name": "decimal.Decimal", "import": ["decimal"] }, + "record": { "name": "asyncpg.Record", "import": ["asyncpg"] }, + "any": { "name": "typing.Any", "import": ["typing"] }, + "anyarray": { "name": "list[typing.Any]", "import": ["typing"] }, + "anyelement": { "name": "typing.Any", "import": ["typing"] }, + "anynonarray": { "name": "typing.Any", "import": ["typing"] }, + "uuid": { "name": "uuid.UUID", "import": ["uuid"] }, + "anyenum": { "name": "str" }, + "anyrange": { "name": "asyncpg.Range", "import": ["asyncpg"] }, + "jsonb": { "name": "str" }, + "int4range": { + "name": "asyncpg.types.Range", + "annotation": "asyncpg.types.Range[int]", + "import": ["asyncpg"] + }, + "numrange": { + "name": "asyncpg.types.Range", + "annotation": "asyncpg.types.Range[float]", + 
"import": ["asyncpg"] + }, + "tsrange": { + "name": "asyncpg.types.Range", + "annotation": "asyncpg.types.Range[datetime.datetime]", + "import": ["asyncpg", "datetime"] + }, + "tstzrange": { + "name": "tstzrange", + "annotation": "asyncpg.types.Range[datetime.datetime]", + "import": ["asyncpg", "datetime"] + }, + "daterange": { + "name": "asyncpg.types.Range", + "annotation": "asyncpg.types.Range[datetime.date]", + "import": ["asyncpg", "datetime"] + }, + "int8range": { + "name": "asyncpg.types.Range", + "annotation": "asyncpg.types.Range[int]", + "import": ["asyncpg"] + } + } + } +} diff --git a/codegen/src/presentation/typescript/driver.rs b/codegen/src/presentation/typescript/driver.rs new file mode 100644 index 0000000..6caa197 --- /dev/null +++ b/codegen/src/presentation/typescript/driver.rs @@ -0,0 +1,5 @@ +pub enum TypescriptDriver { + PGlite, + Postgres, + Pg, +} diff --git a/codegen/src/presentation/typescript/file_generator.rs b/codegen/src/presentation/typescript/file_generator.rs new file mode 100644 index 0000000..e69de29 diff --git a/codegen/src/presentation/typescript/mod.rs b/codegen/src/presentation/typescript/mod.rs new file mode 100644 index 0000000..6723df8 --- /dev/null +++ b/codegen/src/presentation/typescript/mod.rs @@ -0,0 +1 @@ +pub(super) mod driver; diff --git a/codegen/src/presentation/typescript/typescript_type.rs b/codegen/src/presentation/typescript/typescript_type.rs new file mode 100644 index 0000000..f259c60 --- /dev/null +++ b/codegen/src/presentation/typescript/typescript_type.rs @@ -0,0 +1,6 @@ +pub struct TypescriptType { + module: Rc, + import: Rc, + annotation: Rc, + name: Rc, +} diff --git a/codegen/src/request.rs b/codegen/src/request.rs index 44ddf44..e03dee3 100644 --- a/codegen/src/request.rs +++ b/codegen/src/request.rs @@ -23,7 +23,7 @@ pub struct Catalog { pub struct Schema { pub name: Rc, pub enums: Rc<[Enum]>, - pub models: Rc<[Model]>, + pub records: Rc<[Record]>, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] @@ -33,7 +33,7 @@ pub struct Enum { } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] -pub struct Model { +pub struct Record { pub kind: Rc, pub name: Rc, pub columns: Rc<[Column]>, diff --git a/codegen/src/template_context.rs b/codegen/src/template_context.rs deleted file mode 100644 index 5f738a4..0000000 --- a/codegen/src/template_context.rs +++ /dev/null @@ -1,8 +0,0 @@ -use crate::{ - query_namespace::QueryNamespace, - request::{Model, Query}, -}; - -pub struct TemplateContext { - namespaces: QueryNamespace, -} diff --git a/codegen/src/type_builder.rs b/codegen/src/type_builder.rs index 853afa0..559f584 100644 --- a/codegen/src/type_builder.rs +++ b/codegen/src/type_builder.rs @@ -211,7 +211,7 @@ impl TypeBuilder { .find(|schema| &schema.name == type_schema)?; schema - .models + .records .iter() .find(|model| &model.name == type_name)?; diff --git a/codegen/templates/typescript:postgres/model_init.jinja2 b/codegen/templates/typescript:postgres/model_init.jinja2 index f7739c0..3340b24 100644 --- a/codegen/templates/typescript:postgres/model_init.jinja2 +++ b/codegen/templates/typescript:postgres/model_init.jinja2 @@ -23,3 +23,241 @@ export type * as {{module}} from "./{{source}}.ts"; {% endfor -%} export type * from "./public.ts"; + + +type Step = (cell: string) => any; + +function trimOuter(str: string, open: string, close: string) { + const s = str.trim(); + if (s.startsWith(open) && s.endsWith(close)) return s.slice(1, -1); + return s; +} + +function unquote(s: string): string { + 
const t = s.trim(); + if (t.length >= 2 && t.startsWith('"') && t.endsWith('"')) { + // Remove surrounding quotes and unescape \" and \\ (good enough for most PG cases) + return t + .slice(1, -1) + .replace(/\\(["\\])/g, "$1"); + } + return t; +} + +function splitTopLevel( + s: string, + separator: string, + { respectQuotes = true, parens = true, braces = true }: { + respectQuotes?: boolean; + parens?: boolean; + braces?: boolean; + } = {}, +): string[] { + const out: string[] = []; + let buf = ""; + let inQuotes = false; + let parenDepth = 0; + let braceDepth = 0; + + const flush = () => { + out.push(buf); + buf = ""; + }; + + for (let i = 0; i < s.length; i++) { + const ch = s[i]; + + if (respectQuotes && ch === '"' && s[i - 1] !== "\\") { + inQuotes = !inQuotes; + buf += ch; + continue; + } + if (!inQuotes) { + if (parens && (ch === "(" || ch === ")")) { + if (ch === "(") parenDepth++; + else parenDepth--; + buf += ch; + continue; + } + if (braces && (ch === "{" || ch === "}")) { + if (ch === "{") braceDepth++; + else braceDepth--; + buf += ch; + continue; + } + if (parenDepth === 0 && braceDepth === 0 && ch === separator) { + flush(); + continue; + } + } + buf += ch; + } + flush(); + return out.map((t) => t.trim()); +} + +function parsePgRowToCells(row: string): string[] { + const inner = trimOuter(row.trim(), "(", ")"); + if (inner === "") return []; + // Note: allow parentheses/braces in cells; split only at top-level commas + return splitTopLevel(inner, ","); +} + +function parsePgArrayToElements(arr: string): string[] { + const inner = trimOuter(arr.trim(), "{", "}"); + if (inner === "") return []; + // In arrays, elements can be quoted (including quoted rows "(...)") + return splitTopLevel(inner, ",", { + respectQuotes: true, + parens: true, + braces: true, + }); +} + +// ---- Scalar parsers ---- +function parseNumber(cell: string): number { + const t = cell.trim(); + if (t.toUpperCase() === "NULL" || t === "") return NaN; // choose your null policy + const q = unquote(t); + const v = Number(q); + if (Number.isNaN(v)) throw new Error(`Invalid number: ${cell}`); + return v; +} + +function parseString(cell: string): string | null { + const t = cell.trim(); + if (cell == "") return null as any; + return unquote(t).replaceAll(/""/g, '"'); +} + +function parseDate(cell: string): Date { + const t = unquote(cell.trim()); + const d = new Date(t); + if (Number.isNaN(d.getTime())) throw new Error(`Invalid date: ${cell}`); + return d; +} + +function parseBoolean(cell: string): boolean { + const t = unquote(cell.trim()); + if (!["t", "f"].includes(t)) { + throw new Error(`Invalid boolean: ${cell}. Expected "t" or "f".`); + } + return t == "t"; +} + +class ArrayParser { + constructor(readonly map: (_: string) => T) {} + parse(array: string): T[] { + const unquoted = unquote(array.trim()); + return parsePgArrayToElements(unquoted).map(this.map); + } + + arrayOfThis() { + return new ArrayParser((e) => this.parse(e)); + } +} + +export class RowParser { + private steps: Step[]; + private mapFun: (_: T) => V; + constructor(steps: Step[] = [], map?: (_: T) => V) { + this.steps = steps; + this.mapFun = map ?? 
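+    // no map supplied: fall back to an identity cast so parse() returns the raw tuple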
((row: T) => row as unknown as V); + } + + number(): RowParser<[...T, number]> { + return new RowParser<[...T, number]>([...this.steps, parseNumber]); + } + + string(): RowParser<[...T, string]> { + return new RowParser<[...T, string]>([...this.steps, parseString]); + } + + date(): RowParser<[...T, Date]> { + return new RowParser<[...T, Date]>([...this.steps, parseDate]); + } + + boolean(): RowParser<[...T, boolean]> { + return new RowParser<[...T, boolean]>([...this.steps, parseBoolean]); + } + + row(sub: RowParser): RowParser<[...T, U]> { + const step: Step = (cell: string) => { + const raw = unquote(cell.trim()); // nested rows are often quoted inside rows/arrays + return sub.parse(raw); + }; + return new RowParser<[...T, U]>([...this.steps, step]); + } + + arrayOfNumber(): RowParser<[...T, number[]]> { + const step: Step = (cell: string) => { + return new ArrayParser(parseNumber).parse(cell); + }; + return new RowParser<[...T, number[]]>([...this.steps, step]); + } + + arrayOfDate(): RowParser<[...T, Date[]]> { + const step: Step = (cell: string) => { + return new ArrayParser(parseDate).parse(cell); + }; + return new RowParser<[...T, Date[]]>([...this.steps, step]); + } + + arrayOfRow(sub: RowParser): RowParser<[...T, U]> { + const step: Step = (cell: string) => { + // Each element is typically a quoted row string "(...)" + return sub.arrayOfThis().parse(cell); + }; + return new RowParser<[...T, U]>([...this.steps, step]); + } + + arrayOfThis(): ArrayParser { + return new ArrayParser((e) => this.parse(unquote(e))); + } + + parse(input: string): V { + const trimmed = input.trim(); + // Accept either full row "(a,b,...)" or a bare CSV (we’ll try row first) + const cells = trimmed.startsWith("(") + ? parsePgRowToCells(trimmed) + : splitTopLevel(trimmed, ","); + if (cells.length !== this.steps.length) { + throw new Error( + `Arity mismatch: expected ${this.steps.length} fields, got ${cells.length} (${ + JSON.stringify(cells) + })`, + ); + } + const out = this.steps.map((fn, i) => fn(cells[i])) as T; + return this.mapFun(out); + } + + map(fun: (_: V) => U): RowParser { + const newMap = (row: T) => fun(this.mapFun(row)); + return new RowParser(this.steps, newMap); + } +} + + +export const parser = { +{% for module_name, module in model_modules | items %} + {{module_name | to_camel_case }}: { + {% for model in module.classes %} + {{model.type.declaration | to_camel_case }}() { + return new RowParser() + {% for field_name, field_type in model.fields -%} + {% if field_type.annotation == 'Array' -%} + .arrayOfString() + {% elif field_type.annotation == 'Array' -%} + .arrayOfDate() + {% elif field_type.annotation == 'Array' -%} + .arrayOfDate() + {% else -%} + .{{field_type.constructor | to_camel_case }}() + {% endif -%} + {% endfor -%} + }, + {%- endfor %} + } +{% endfor %} +}; diff --git a/codegen/templates/typescript:postgres/parser.ts b/codegen/templates/typescript:postgres/parser.ts new file mode 100644 index 0000000..94e6c58 --- /dev/null +++ b/codegen/templates/typescript:postgres/parser.ts @@ -0,0 +1,242 @@ +import { PGlite } from "@electric-sql/pglite"; +const pg = new PGlite(); +type Step = (cell: string) => any; + +function trimOuter(str: string, open: string, close: string) { + const s = str.trim(); + if (s.startsWith(open) && s.endsWith(close)) return s.slice(1, -1); + return s; +} + +function unquote(s: string): string { + const t = s.trim(); + if (t.length >= 2 && t.startsWith('"') && t.endsWith('"')) { + // Remove surrounding quotes and unescape \" and \\ (good enough for most PG 
cases) + return t + .slice(1, -1) + .replace(/\\(["\\])/g, "$1"); + } + return t; +} + +function splitTopLevel( + s: string, + separator: string, + { respectQuotes = true, parens = true, braces = true }: { + respectQuotes?: boolean; + parens?: boolean; + braces?: boolean; + } = {}, +): string[] { + const out: string[] = []; + let buf = ""; + let inQuotes = false; + let parenDepth = 0; + let braceDepth = 0; + + const flush = () => { + out.push(buf); + buf = ""; + }; + + for (let i = 0; i < s.length; i++) { + const ch = s[i]; + + if (respectQuotes && ch === '"' && s[i - 1] !== "\\") { + inQuotes = !inQuotes; + buf += ch; + continue; + } + if (!inQuotes) { + if (parens && (ch === "(" || ch === ")")) { + if (ch === "(") parenDepth++; + else parenDepth--; + buf += ch; + continue; + } + if (braces && (ch === "{" || ch === "}")) { + if (ch === "{") braceDepth++; + else braceDepth--; + buf += ch; + continue; + } + if (parenDepth === 0 && braceDepth === 0 && ch === separator) { + flush(); + continue; + } + } + buf += ch; + } + flush(); + return out.map((t) => t.trim()); +} + +function parsePgRowToCells(row: string): string[] { + const inner = trimOuter(row.trim(), "(", ")"); + if (inner === "") return []; + // Note: allow parentheses/braces in cells; split only at top-level commas + return splitTopLevel(inner, ","); +} + +function parsePgArrayToElements(arr: string): string[] { + const inner = trimOuter(arr.trim(), "{", "}"); + if (inner === "") return []; + // In arrays, elements can be quoted (including quoted rows "(...)") + return splitTopLevel(inner, ",", { + respectQuotes: true, + parens: true, + braces: true, + }); +} + +// ---- Scalar parsers ---- +function parseNumber(cell: string): number { + const t = cell.trim(); + if (t.toUpperCase() === "NULL" || t === "") return NaN; // choose your null policy + const q = unquote(t); + const v = Number(q); + if (Number.isNaN(v)) throw new Error(`Invalid number: ${cell}`); + return v; +} + +function parseString(cell: string): string | null { + const t = cell.trim(); + if (cell == "") return null as any; + return unquote(t).replaceAll(/""/g, '"'); +} + +function parseDate(cell: string): Date { + const t = unquote(cell.trim()); + const d = new Date(t); + if (Number.isNaN(d.getTime())) throw new Error(`Invalid date: ${cell}`); + return d; +} + +function parseBoolean(cell: string): boolean { + const t = unquote(cell.trim()); + if (!["t", "f"].includes(t)) { + throw new Error(`Invalid boolean: ${cell}. Expected "t" or "f".`); + } + return t == "t"; +} + +class ArrayParser { + constructor(readonly map: (_: string) => T) {} + parse(array: string): T[] { + const unquoted = unquote(array.trim()); + return parsePgArrayToElements(unquoted).map(this.map); + } + + arrayOfThis() { + return new ArrayParser((e) => this.parse(e)); + } +} + +export class RowParser { + private steps: Step[]; + private mapFun: (_: T) => V; + constructor(steps: Step[] = [], map?: (_: T) => V) { + this.steps = steps; + this.mapFun = map ?? 
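+    // no map supplied: fall back to an identity cast so parse() returns the raw tuple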
((row: T) => row as unknown as V); + } + + number(): RowParser<[...T, number]> { + return new RowParser<[...T, number]>([...this.steps, parseNumber]); + } + + string(): RowParser<[...T, string]> { + return new RowParser<[...T, string]>([...this.steps, parseString]); + } + + date(): RowParser<[...T, Date]> { + return new RowParser<[...T, Date]>([...this.steps, parseDate]); + } + + boolean(): RowParser<[...T, boolean]> { + return new RowParser<[...T, boolean]>([...this.steps, parseBoolean]); + } + + row(sub: RowParser): RowParser<[...T, U]> { + const step: Step = (cell: string) => { + const raw = unquote(cell.trim()); // nested rows are often quoted inside rows/arrays + return sub.parse(raw); + }; + return new RowParser<[...T, U]>([...this.steps, step]); + } + + arrayOfNumber(): RowParser<[...T, number[]]> { + const step: Step = (cell: string) => { + return new ArrayParser(parseNumber).parse(cell); + }; + return new RowParser<[...T, number[]]>([...this.steps, step]); + } + + arrayOfDate(): RowParser<[...T, Date[]]> { + const step: Step = (cell: string) => { + return new ArrayParser(parseDate).parse(cell); + }; + return new RowParser<[...T, Date[]]>([...this.steps, step]); + } + + arrayOfRow(sub: RowParser): RowParser<[...T, U]> { + const step: Step = (cell: string) => { + // Each element is typically a quoted row string "(...)" + return sub.arrayOfThis().parse(cell); + }; + return new RowParser<[...T, U]>([...this.steps, step]); + } + + arrayOfThis(): ArrayParser { + return new ArrayParser((e) => this.parse(unquote(e))); + } + + parse(input: string): V { + const trimmed = input.trim(); + // Accept either full row "(a,b,...)" or a bare CSV (we’ll try row first) + const cells = trimmed.startsWith("(") + ? parsePgRowToCells(trimmed) + : splitTopLevel(trimmed, ","); + if (cells.length !== this.steps.length) { + throw new Error( + `Arity mismatch: expected ${this.steps.length} fields, got ${cells.length} (${ + JSON.stringify(cells) + })`, + ); + } + const out = this.steps.map((fn, i) => fn(cells[i])) as T; + return this.mapFun(out); + } + + map(fun: (_: V) => U): RowParser { + const newMap = (row: T) => fun(this.mapFun(row)); + return new RowParser(this.steps, newMap); + } +} + +const authorParser = new RowParser() + .number() + .string() + .string() + .map(([id, firstName, lastName]) => ({ + id, + firstName, + lastName, + })); + +const parser = { + author: new RowParser() + .number() + .string() + .string() + .map(([id, firstName, lastName]) => ({ + id, + firstName, + lastName, + })), +}; + +const { rows } = await pg.query( + `select array[row(true, 1, null, 'asd""')] as row`, +); + +authorParser.arrayOfThis().parse(rows[0].row); diff --git a/pgc.yaml b/pgc.yaml index 957adbc..464d49b 100644 --- a/pgc.yaml +++ b/pgc.yaml @@ -4,9 +4,6 @@ database: migrations: - schema.sql - pglite: - extensions: - pg_trgm: "@electric-sql/pglite/contrib/pg_trgm" queries: - "book.sql" - "author.sql" diff --git a/schema.sql b/schema.sql index defca25..b5a93c0 100644 --- a/schema.sql +++ b/schema.sql @@ -1,4 +1,3 @@ --- create extension pg_trgm; create table author ( id uuid primary key default gen_random_uuid(), name text not null, From e8c7ee5c96ee707ffc5c54bd6b8179e310a04de6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Fri, 22 Aug 2025 14:15:58 -0400 Subject: [PATCH 04/10] refactor --- codegen/src/ir/method_service.rs | 46 ++++++++++---------- codegen/src/ir/query_namespace/method/mod.rs | 4 +- codegen/src/ir/query_namespace_service.rs | 12 +++-- codegen/src/ir/type_service.rs | 18 
++++++-- codegen/src/main.rs | 11 ++--- 5 files changed, 54 insertions(+), 37 deletions(-) diff --git a/codegen/src/ir/method_service.rs b/codegen/src/ir/method_service.rs index 1c0a89f..8352247 100644 --- a/codegen/src/ir/method_service.rs +++ b/codegen/src/ir/method_service.rs @@ -3,22 +3,25 @@ use std::{collections::BTreeMap, mem::take, rc::Rc}; use indexmap::IndexMap; use crate::{ - ir::query_namespace::{Method, MethodModel}, - r#type::Type, + ir::{ + query_namespace::{Method, MethodModel}, + r#type::Type, + type_service::TypeService, + }, request::Query, type_builder::TypeBuilder, }; pub struct MethodService { - type_builder: TypeBuilder, + type_service: TypeService, arguments: IndexMap, Type>, input_models: BTreeMap, MethodModel>, } impl MethodService { - pub fn new(type_builder: TypeBuilder) -> Self { + pub fn new(type_service: TypeService) -> Self { MethodService { - type_builder, + type_service, arguments: Default::default(), input_models: Default::default(), } @@ -35,22 +38,12 @@ impl MethodService { } } - pub fn create_method(&self, query: &Query) -> Method { - Method { - query: query.clone(), - arguments: self.gather_arguments(query), - input_models: self.gather_input_models(query), - output_type: self.output_type(query), - output_model: self.output_type_model(query), - } - } - pub fn init_input_models(&mut self, query: &Query) { for param in query.parameters.iter() { - let mut ty = self.type_builder.from_output_type(¶m.type_); + let mut ty = self.type_service.resolve_from_output(¶m.type_); if !param.not_null { - ty = self.type_builder.null(&ty); + ty = Type::Nullable(Rc::new(ty)); } if let Some((record, field)) = param.name.split_once('.') { @@ -65,11 +58,15 @@ impl MethodService { pub fn include_input_model(&mut self, record: &str, field: &str, ty: Type, query: &Query) { let query_name = query.name.clone(); - let type_builder = self.type_builder.clone(); let entry = self.input_models.entry(record.into()); + let r#type = self.type_service.user_defined( + query.namespace().split('.'), + &format!("{}_{}", query_name, record), + ); + let query_model = entry.or_insert_with(|| MethodModel { - r#type: type_builder.declared(&format!("{}_{}", query_name, record)), + r#type, fields: IndexMap::default(), }); @@ -90,11 +87,14 @@ impl MethodService { if query.output.len() == 1 { let pg_type = &query.output[0].type_; - let output_type = self.type_builder.from_output_type(&pg_type); + let output_type = self.type_service.resolve_from_output(&pg_type); return Some(output_type); } - - Some(self.type_builder.declared(&format!("{}_row", query.name))) + let module_path = query.namespace().split('.'); + Some( + self.type_service + .user_defined(module_path, &format!("{}_row", query.name)), + ) } fn output_model(&self, query: &Query) -> Option { @@ -105,7 +105,7 @@ impl MethodService { .output .iter() .map(|column| { - let type_ = self.type_builder.from_output_type(&column.type_); + let type_ = self.type_service.resolve_from_output(&column.type_); (column.name.clone(), type_) }) .collect(); diff --git a/codegen/src/ir/query_namespace/method/mod.rs b/codegen/src/ir/query_namespace/method/mod.rs index d2c9c21..d572124 100644 --- a/codegen/src/ir/query_namespace/method/mod.rs +++ b/codegen/src/ir/query_namespace/method/mod.rs @@ -19,8 +19,8 @@ pub struct Method { #[derive(Deserialize, Serialize, Clone, Debug)] pub struct MethodModel { - name: Rc, - fields: IndexMap, Type>, + pub r#type: Type, + pub fields: IndexMap, Type>, } impl Method { diff --git a/codegen/src/ir/query_namespace_service.rs 
b/codegen/src/ir/query_namespace_service.rs index d0e0905..2bfe589 100644 --- a/codegen/src/ir/query_namespace_service.rs +++ b/codegen/src/ir/query_namespace_service.rs @@ -1,6 +1,10 @@ use crate::{ error::Error, - ir::{method_service::MethodService, query_namespace::QueryNamespace}, + ir::{ + method_service::MethodService, + query_namespace::QueryNamespace, + type_service::{self, TypeService}, + }, request::{Query, Request}, type_builder::TypeBuilder, }; @@ -13,10 +17,12 @@ pub struct QueryNamespaceBuilder { impl QueryNamespaceBuilder { pub fn new(request: &Request) -> Result { - let type_builder = TypeBuilder::new(request.clone())?; + let type_service = TypeService { + catalog: request.catalog.clone(), + }; Ok(QueryNamespaceBuilder { request: request.clone(), - method_service: MethodService::new(type_builder.clone()), + method_service: MethodService::new(type_service.clone()), namespace: QueryNamespace::root(), }) } diff --git a/codegen/src/ir/type_service.rs b/codegen/src/ir/type_service.rs index a01b6b4..0fad55b 100644 --- a/codegen/src/ir/type_service.rs +++ b/codegen/src/ir/type_service.rs @@ -3,15 +3,25 @@ use std::rc::Rc; use super::r#type::Type; use crate::{ ir::model_modules::{Model, ModelModules}, - request::{Catalog, Column, Record, Request, Schema}, + request::{Catalog, Column, OutputType, Record, Request, Schema}, }; - +#[derive(Clone)] pub struct TypeService { - catalog: Catalog, - model_modules: ModelModules, + pub catalog: Catalog, } impl TypeService { + pub fn user_defined<'a>(&self, module_path: impl Iterator, name: &str) -> Type { + Type::UserDefined { + module_path: module_path.map(|str| str.into()).collect(), + name: name.into(), + } + } + + pub fn resolve_from_output(&self, ty: &OutputType) -> Type { + self.resolve_from_catalog(&ty.schema, &ty.name) + } + pub fn from_column(&self, column: &Column) -> Type { let schema_name = &column.type_field.schema_name; let column_name = &column.type_field.name; diff --git a/codegen/src/main.rs b/codegen/src/main.rs index caee672..8f52efe 100644 --- a/codegen/src/main.rs +++ b/codegen/src/main.rs @@ -1,4 +1,3 @@ -use crate::file_generator::FileGenerator; use crate::request::Request; use crate::response::{File, Response}; use error::Error; @@ -39,10 +38,12 @@ pub extern "C" fn build(ptr: *mut u8, size: usize) -> *const u8 { fn try_build(ptr: *mut u8, size: usize) -> Result { let request = load_request(ptr, size)?; - let generator = FileGenerator::new(&request)?; - Ok(Response { - files: generator.render_files()?, - }) + todo!(); + Ok(0) + // let generator = FileGenerator::new(&request)?; + // Ok(Response { + // files: generator.render_files()?, + // }) } static RESPONSE_LENGTH: AtomicU64 = AtomicU64::new(0); From 1db1f7461437a04a0aac7b6e379c9df2005fee72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Sun, 24 Aug 2025 20:49:01 -0400 Subject: [PATCH 05/10] more changes --- codegen/Cargo.toml | 2 +- codegen/src/error.rs | 9 +- codegen/src/file_gen_config.rs | 23 -- codegen/src/file_generator.rs | 150 ----------- codegen/src/ir/method_service.rs | 9 +- codegen/src/ir/mod.rs | 13 +- codegen/src/ir/model_modules/mod.rs | 4 +- codegen/src/ir/model_modules/model.rs | 10 +- codegen/src/ir/model_modules/model_module.rs | 21 +- codegen/src/ir/model_service.rs | 4 +- .../query_namespace/method/method_builder.rs | 6 +- codegen/src/ir/query_namespace/method/mod.rs | 8 +- codegen/src/ir/query_namespace/mod.rs | 4 +- codegen/src/ir/query_namespace_service.rs | 1 - codegen/src/ir/type.rs | 106 +++++++- 
codegen/src/ir/type_service.rs | 102 ++------ codegen/src/jinja_environment_builder.rs | 57 ---- codegen/src/main.rs | 10 +- codegen/src/presentation/environment.rs | 100 +++++++ .../presentation/file_generation_config.rs | 5 + codegen/src/presentation/file_generator.rs | 111 ++++++++ codegen/src/presentation/mod.rs | 19 +- .../python/file_generation_config.rs | 5 + .../src/presentation/python/file_generator.rs | 101 ++++++- codegen/src/presentation/python/mod.rs | 7 +- .../src/presentation/python/python_type.rs | 6 - .../python/templates/asyncpg/config.json | 1 - .../asyncpg/{model.py.jinja2 => model.j2} | 0 .../{model_init.py.jinja2 => model_init.j2} | 0 .../asyncpg/{query.py.jinja2 => query.j2} | 0 .../python/templates/psycopg/model.py.jinja2 | 32 +++ .../templates/psycopg/model_init.py.jinja2 | 12 + .../python/templates/psycopg/query.py.jinja2 | 174 +++++++++++++ .../python/templates/psycopg/types.json | 108 ++++++++ .../presentation/python/type_map_service.rs | 136 ++++++++++ codegen/src/presentation/type_map.rs | 1 + .../src/presentation/type_mapping_service.rs | 76 ++++++ .../typescript/typescript_type.rs | 8 +- codegen/src/request.rs | 75 +++--- codegen/src/type.rs | 14 +- codegen/src/type_builder.rs | 246 ------------------ codegen/src/utils.rs | 2 +- codegen/tests/models.rs | 15 -- 43 files changed, 1096 insertions(+), 697 deletions(-) delete mode 100644 codegen/src/file_gen_config.rs delete mode 100644 codegen/src/file_generator.rs delete mode 100644 codegen/src/jinja_environment_builder.rs create mode 100644 codegen/src/presentation/environment.rs create mode 100644 codegen/src/presentation/file_generation_config.rs create mode 100644 codegen/src/presentation/file_generator.rs create mode 100644 codegen/src/presentation/python/file_generation_config.rs delete mode 100644 codegen/src/presentation/python/python_type.rs delete mode 100644 codegen/src/presentation/python/templates/asyncpg/config.json rename codegen/src/presentation/python/templates/asyncpg/{model.py.jinja2 => model.j2} (100%) rename codegen/src/presentation/python/templates/asyncpg/{model_init.py.jinja2 => model_init.j2} (100%) rename codegen/src/presentation/python/templates/asyncpg/{query.py.jinja2 => query.j2} (100%) create mode 100644 codegen/src/presentation/python/templates/psycopg/model.py.jinja2 create mode 100644 codegen/src/presentation/python/templates/psycopg/model_init.py.jinja2 create mode 100644 codegen/src/presentation/python/templates/psycopg/query.py.jinja2 create mode 100644 codegen/src/presentation/python/templates/psycopg/types.json create mode 100644 codegen/src/presentation/python/type_map_service.rs create mode 100644 codegen/src/presentation/type_map.rs create mode 100644 codegen/src/presentation/type_mapping_service.rs delete mode 100644 codegen/src/type_builder.rs delete mode 100644 codegen/tests/models.rs diff --git a/codegen/Cargo.toml b/codegen/Cargo.toml index 0c3fbb3..be8e750 100644 --- a/codegen/Cargo.toml +++ b/codegen/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "pgc-codegen" version = "0.1.0" -edition = "2018" +edition = "2024" [profile.release] lto = true diff --git a/codegen/src/error.rs b/codegen/src/error.rs index b515927..4662cc1 100644 --- a/codegen/src/error.rs +++ b/codegen/src/error.rs @@ -1,4 +1,4 @@ -use std::rc::Rc; +use std::sync::Arc; use thiserror::Error; #[derive(Error, Debug)] @@ -7,10 +7,13 @@ pub enum Error { RequestDeserialization(#[from] serde_json::Error), #[error("language {0} is not supported.")] - NotSupportedLanguage(Rc), + NotSupportedLanguage(Arc), 
#[error("the language {language} requires the configuration option codegen.options.{option} to be present.")] - MissingConfigurationOption { language: Rc, option: Rc }, + MissingConfigurationOption { + language: Arc, + option: Arc, + }, #[error("failed to render or parse a template: {0}.\nThis is a bug in pgc, please report the issue at \"https://github.com/tvallotton/pgc\".")] TemplateError(#[from] minijinja::Error), diff --git a/codegen/src/file_gen_config.rs b/codegen/src/file_gen_config.rs deleted file mode 100644 index aa60eff..0000000 --- a/codegen/src/file_gen_config.rs +++ /dev/null @@ -1,23 +0,0 @@ -use serde::{Deserialize, Serialize}; - -use crate::{error::Error, request::Request}; - -#[derive(Deserialize, Serialize, Clone)] -pub struct FileGenConfig { - pub extension: String, - pub directory_entrypoint: Option, - pub model_dir_entrypoint: Option, -} - -impl FileGenConfig { - pub fn new(request: &Request) -> Result { - let target = &request.config.codegen.target; - let json = match &**target { - "python:asyncpg" => include_str!("../templates/python:asyncpg/config.json"), - "python:psycopg" => include_str!("../templates/python:psycopg/config.json"), - "typescript:postgres" => include_str!("../templates/typescript:postgres/config.json"), - _ => return Err(Error::NotSupportedLanguage(target.clone())), - }; - Ok(serde_json::from_str(json).unwrap()) - } -} diff --git a/codegen/src/file_generator.rs b/codegen/src/file_generator.rs deleted file mode 100644 index a88cef2..0000000 --- a/codegen/src/file_generator.rs +++ /dev/null @@ -1,150 +0,0 @@ -// use minijinja::{context, Environment}; -// use serde_json::json; -// use std::collections::BTreeSet; -// use std::path::{Path, PathBuf}; - -// use crate::{ -// error::Error, -// file_gen_config::FileGenConfig, -// jinja_environment_builder::{ -// JinjaEnvironmentBuilder, MODELS_DIR_ENTRYPOINT, MODEL_SCHEMA_FILE, QUERY, -// }, -// model_modules::{ModelModule, ModelModules}, -// request::Request, -// response::File, -// }; - -// pub struct FileGenerator { -// pub environment: Environment<'static>, -// pub config: FileGenConfig, -// pub model_modules: ModelModules, -// pub namespace: QueryNamespace, -// pub request: Request, -// } - -// impl FileGenerator { -// pub fn new(request: &Request) -> Result { -// let environment = JinjaEnvironmentBuilder::new(request).build()?; -// let config = FileGenConfig::new(request)?; -// let model_modules = ModelModules::new(request)?; -// let namespace = QueryNamespace::from_request(request)?; - -// Ok(FileGenerator { -// environment, -// config, -// namespace, -// model_modules, -// request: request.clone(), -// }) -// } - -// pub fn render_files(&self) -> Result, Error> { -// let mut files = self.model_module_files()?; -// files.extend(self.model_dir_entrypoint()?); -// self.query_files(&mut files)?; - -// Ok(files) -// } - -// fn model_module_files(&self) -> Result, Error> { -// let mut files = vec![]; -// for (name, module) in self.model_modules.model_modules.iter() { -// let filename = format!("models/{}.{}", name, &self.config.extension); - -// let content = self -// .environment -// .get_template(MODEL_SCHEMA_FILE)? -// .render(context! 
{ -// imports => module.imports(), -// schema => name, -// models => &module.classes, -// enums => &module.enums, -// request => &self.request, -// })?; - -// let file = File { -// path: filename, -// content, -// }; -// files.push(file); -// } -// Ok(files) -// } - -// fn model_dir_entrypoint(&self) -> Result, Error> { -// let Some(filename) = self -// .config -// .directory_entrypoint -// .clone() -// .or(self.config.model_dir_entrypoint.clone()) -// else { -// return Ok(None); -// }; - -// let content = self -// .environment -// .get_template(MODELS_DIR_ENTRYPOINT)? -// .render(context!( -// model_modules=> &self.model_modules.model_modules, -// request => &self.request, -// ))?; -// let path = format!("models/{filename}.{}", self.config.extension); -// Ok(Some(File { path, content })) -// } - -// fn query_files(&self, files: &mut Vec) -> Result<(), Error> { -// let pathbuf = PathBuf::from("./"); -// self._query_files(pathbuf.as_path(), &self.namespace, files)?; -// Ok(()) -// } - -// fn _query_files( -// &self, -// dir_path: &Path, -// namespace: &QueryNamespace, -// files: &mut Vec, -// ) -> Result<(), Error> { -// let entrypoint = self.directory_entrypoint(); -// if namespace.subnamespaces.is_empty() { -// let name = if namespace.name.is_empty() { -// &entrypoint -// } else { -// &namespace.name -// }; -// let path = dir_path.join(&name); -// let file = self.render_query_file(&path, namespace)?; -// files.push(file); -// } else { -// let path = dir_path.join(&namespace.name).join(entrypoint); -// let file = self.render_query_file(&path, namespace)?; -// files.push(file); -// } - -// for subnamespace in namespace.subnamespaces.values() { -// self._query_files(&dir_path.join(&namespace.name), subnamespace, files)?; -// } - -// Ok(()) -// } - -// fn render_query_file(&self, path: &Path, namespace: &QueryNamespace) -> Result { -// let content = self.environment.get_template(QUERY)?.render(&context! 
( -// query_namespace => namespace, -// imports => namespace.imports(), -// request => &self.request, -// model_modules => self.model_modules.model_modules, -// ))?; - -// Ok(File { -// path: format!("{}.{}", path.to_str().unwrap(), self.config.extension), -// content, -// }) -// } - -// fn directory_entrypoint(&self) -> String { -// if let Some(entrypoint) = self.config.directory_entrypoint.as_ref() { -// return entrypoint.clone(); -// } -// return "query".into(); -// } -// } diff --git a/codegen/src/ir/method_service.rs b/codegen/src/ir/method_service.rs index 8352247..a826373 100644 --- a/codegen/src/ir/method_service.rs +++ b/codegen/src/ir/method_service.rs @@ -1,4 +1,4 @@ -use std::{collections::BTreeMap, mem::take, rc::Rc}; +use std::{collections::BTreeMap, mem::take, sync::Arc}; use indexmap::IndexMap; @@ -9,13 +9,12 @@ use crate::{ type_service::TypeService, }, request::Query, - type_builder::TypeBuilder, }; pub struct MethodService { type_service: TypeService, - arguments: IndexMap, Type>, - input_models: BTreeMap, MethodModel>, + arguments: IndexMap, Type>, + input_models: BTreeMap, MethodModel>, } impl MethodService { @@ -43,7 +42,7 @@ impl MethodService { let mut ty = self.type_service.resolve_from_output(¶m.type_); if !param.not_null { - ty = Type::Nullable(Rc::new(ty)); + ty = Type::Nullable(Arc::new(ty)); } if let Some((record, field)) = param.name.split_once('.') { diff --git a/codegen/src/ir/mod.rs b/codegen/src/ir/mod.rs index 413c657..7aeac89 100644 --- a/codegen/src/ir/mod.rs +++ b/codegen/src/ir/mod.rs @@ -1,5 +1,9 @@ use crate::{error::Error, request::Request}; - +pub use model_modules::*; +pub use query_namespace::*; +pub use r#type::Type; +use serde::Serialize; +pub use type_service::TypeService; mod method_service; mod model_modules; mod model_service; @@ -8,8 +12,9 @@ mod query_namespace_service; mod r#type; mod type_service; +#[derive(Serialize)] pub struct Ir { - request: Request, - query_namespace: query_namespace::QueryNamespace, - model_modules: model_modules::ModelModules, + pub request: Request, + pub query_namespace: query_namespace::QueryNamespace, + pub model_modules: model_modules::ModelModules, } diff --git a/codegen/src/ir/model_modules/mod.rs b/codegen/src/ir/model_modules/mod.rs index 3344055..feae83a 100644 --- a/codegen/src/ir/model_modules/mod.rs +++ b/codegen/src/ir/model_modules/mod.rs @@ -2,11 +2,11 @@ pub use model::Model; pub use model::ModelField; pub use model_module::ModelModule; use serde::Serialize; -use std::{collections::BTreeMap, rc::Rc}; +use std::{collections::BTreeMap, sync::Arc}; mod model; mod model_module; #[derive(Clone, Serialize, Default)] pub struct ModelModules { - pub model_modules: BTreeMap, ModelModule>, + pub model_modules: BTreeMap, ModelModule>, } diff --git a/codegen/src/ir/model_modules/model.rs b/codegen/src/ir/model_modules/model.rs index 0bef00f..ae7acea 100644 --- a/codegen/src/ir/model_modules/model.rs +++ b/codegen/src/ir/model_modules/model.rs @@ -1,4 +1,4 @@ -use std::rc::Rc; +use std::sync::Arc; use serde::{Deserialize, Serialize}; @@ -10,14 +10,14 @@ use crate::{ #[derive(Clone, Serialize, Deserialize)] pub struct Model { pub record: Record, - pub module_name: Rc, - pub name: Rc, + pub module_name: Arc, + pub name: Arc, pub fields: Vec, } #[derive(Clone, Serialize, Deserialize)] pub struct ModelField { - pub name: Rc, + pub name: Arc, pub r#type: Type, - pub default_value: Option>, + pub default_value: Option>, } diff --git a/codegen/src/ir/model_modules/model_module.rs 
b/codegen/src/ir/model_modules/model_module.rs index 0ed5064..407da56 100644 --- a/codegen/src/ir/model_modules/model_module.rs +++ b/codegen/src/ir/model_modules/model_module.rs @@ -1,22 +1,33 @@ -use std::rc::Rc; +use std::{collections::BTreeSet, sync::Arc}; use serde::Serialize; -use crate::{ir::model_modules::Model, request::Enum}; +use crate::{ + ir::{model_modules::Model, Type}, + request::Enum, +}; #[derive(Clone, Serialize)] pub struct ModelModule { - pub name: Rc, + pub name: Arc, pub models: Vec, - pub enums: Rc<[Enum]>, + pub enums: Arc<[Enum]>, } impl ModelModule { - pub fn new(name: &Rc) -> Self { + pub fn new(name: &Arc) -> Self { ModelModule { name: name.clone(), models: vec![], enums: Default::default(), } } + + pub fn used_types(&self) -> BTreeSet { + self.models + .iter() + .flat_map(|field| field.fields.iter()) + .map(|ty| ty.r#type.clone()) + .collect() + } } diff --git a/codegen/src/ir/model_service.rs b/codegen/src/ir/model_service.rs index c29f61c..885f024 100644 --- a/codegen/src/ir/model_service.rs +++ b/codegen/src/ir/model_service.rs @@ -1,4 +1,4 @@ -use std::rc::Rc; +use std::sync::Arc; use crate::{ ir::{ @@ -35,7 +35,7 @@ impl ModelService { return module; } - fn create_model_from_record(&self, module_name: &Rc, record: &Record) -> Model { + fn create_model_from_record(&self, module_name: &Arc, record: &Record) -> Model { let mut model = Model { record: record.clone(), module_name: module_name.clone(), diff --git a/codegen/src/ir/query_namespace/method/method_builder.rs b/codegen/src/ir/query_namespace/method/method_builder.rs index 900fb41..e99f150 100644 --- a/codegen/src/ir/query_namespace/method/method_builder.rs +++ b/codegen/src/ir/query_namespace/method/method_builder.rs @@ -1,4 +1,4 @@ -use std::{collections::BTreeMap, mem::take, rc::Rc}; +use std::{collections::BTreeMap, mem::take, sync::Arc}; use indexmap::IndexMap; @@ -11,8 +11,8 @@ use crate::{ pub struct MethodBuilder { type_builder: TypeBuilder, - arguments: IndexMap, Type>, - input_models: BTreeMap, MethodModel>, + arguments: IndexMap, Type>, + input_models: BTreeMap, MethodModel>, } impl MethodBuilder { diff --git a/codegen/src/ir/query_namespace/method/mod.rs b/codegen/src/ir/query_namespace/method/mod.rs index d572124..f8fc79c 100644 --- a/codegen/src/ir/query_namespace/method/mod.rs +++ b/codegen/src/ir/query_namespace/method/mod.rs @@ -1,6 +1,6 @@ use std::{ collections::{btree_map::Entry, BTreeMap}, - rc::Rc, + sync::Arc, }; use indexmap::IndexMap; @@ -11,8 +11,8 @@ use crate::{ir::r#type::Type, request::Query}; #[derive(Deserialize, Serialize, Clone, Debug)] pub struct Method { pub query: Query, - pub arguments: IndexMap, Type>, - pub input_models: BTreeMap, MethodModel>, + pub arguments: IndexMap, Type>, + pub input_models: BTreeMap, MethodModel>, pub output_type: Option, pub output_model: Option, } @@ -20,7 +20,7 @@ pub struct Method { #[derive(Deserialize, Serialize, Clone, Debug)] pub struct MethodModel { pub r#type: Type, - pub fields: IndexMap, Type>, + pub fields: IndexMap, Type>, } impl Method { diff --git a/codegen/src/ir/query_namespace/mod.rs b/codegen/src/ir/query_namespace/mod.rs index 7d43ee3..7999e4f 100644 --- a/codegen/src/ir/query_namespace/mod.rs +++ b/codegen/src/ir/query_namespace/mod.rs @@ -1,6 +1,6 @@ use std::{ collections::{BTreeMap, BTreeSet}, - rc::Rc, + sync::Arc, }; mod method; use crate::{ @@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize)] pub struct QueryNamespace { pub name: String, - pub subnamespaces: BTreeMap, 
QueryNamespace>, + pub subnamespaces: BTreeMap, QueryNamespace>, pub methods: Vec, } diff --git a/codegen/src/ir/query_namespace_service.rs b/codegen/src/ir/query_namespace_service.rs index 2bfe589..6e38a11 100644 --- a/codegen/src/ir/query_namespace_service.rs +++ b/codegen/src/ir/query_namespace_service.rs @@ -6,7 +6,6 @@ use crate::{ type_service::{self, TypeService}, }, request::{Query, Request}, - type_builder::TypeBuilder, }; pub struct QueryNamespaceBuilder { diff --git a/codegen/src/ir/type.rs b/codegen/src/ir/type.rs index d3cf7b8..afb8bfd 100644 --- a/codegen/src/ir/type.rs +++ b/codegen/src/ir/type.rs @@ -1,5 +1,6 @@ -use std::rc::Rc; +use std::sync::Arc; +use minijinja::value::{Object, ObjectRepr}; use serde::{Deserialize, Serialize}; use crate::{ @@ -11,8 +12,8 @@ use crate::{ pub enum Type { // A type not matching any of these Other { - schema: Rc, - name: Rc, + schema: Arc, + name: Arc, }, // Uncategorized @@ -72,16 +73,16 @@ pub enum Type { Circle, // Generic types - Nullable(Rc), + Nullable(Arc), Array { - r#type: Rc, + r#type: Arc, dim: i64, }, // User defined types UserDefined { - module_path: Rc<[Rc]>, - name: Rc, + module_path: Arc<[Arc]>, + name: Arc, }, // Networking types @@ -119,10 +120,97 @@ pub enum Type { AnyCompatibleNonArray, AnycompatibleRange, Cstring, - Internal, Record, Void, Unknown, } -impl Type {} +impl Type { + #[rustfmt::skip] + pub const NAMES: &[(&'static str, &'static str, Type)] = &[ + ("any", "pg_catalog.any", Type::Any), + ("anyarray", "pg_catalog.anyarray", Type::AnyArray), + ("anycompatible", "pg_catalog.anycompatible", Type::AnyCompatible), + ("anycompatiblearray", "pg_catalog.anycompatiblearray", Type::AnyCompatibleArray), + ("anycompatiblemultirange", "pg_catalog.anycompatiblemultirange", Type::AnyCompatibleMultiRange), + ("anycompatiblenonarray", "pg_catalog.anycompatiblenonarray", Type::AnyCompatibleNonArray), + ("anycompatiblerange", "pg_catalog.anycompatiblerange", Type::AnycompatibleRange), + ("anyelement", "pg_catalog.anyelement", Type::AnyElement), + ("anyenum", "pg_catalog.anyenum", Type::AnyEnum), + ("anymultirange", "pg_catalog.anymultirange", Type::AnyMultiRange), + ("anynonarray", "pg_catalog.anynonarray", Type::AnyNonArray), + ("anyrange", "pg_catalog.anyrange", Type::AnyRange), + ("bit", "pg_catalog.bit", Type::Bit), + ("bitvarying", "pg_catalog.bitvarying", Type::BitVarying), + ("bool", "pg_catalog.bool", Type::Bool), + ("box", "pg_catalog.box", Type::Box), + ("bpchar", "pg_catalog.bpchar", Type::BpChar), + ("bytea", "pg_catalog.bytea", Type::Bytea), + ("cid", "pg_catalog.cid", Type::Cid), + ("cidr", "pg_catalog.cidr", Type::Cidr), + ("circle", "pg_catalog.circle", Type::Circle), + ("cstring", "pg_catalog.cstring", Type::Cstring), + ("date", "pg_catalog.date", Type::Date), + ("datemultirange", "pg_catalog.datemultirange", Type::DateMultiRange), + ("daterange", "pg_catalog.daterange", Type::DateRange), + ("datetz", "pg_catalog.datetz", Type::DateTz), + ("decimal", "pg_catalog.decimal", Type::Decimal), + ("float4", "pg_catalog.float4", Type::Float4), + ("float8", "pg_catalog.float8", Type::Float8), + ("inet", "pg_catalog.inet", Type::Inet), + ("int2", "pg_catalog.int2", Type::Int2), + ("int4", "pg_catalog.int4", Type::Int4), + ("int4multirange", "pg_catalog.int4multirange", Type::Int4MultiRange), + ("int4range", "pg_catalog.int4range", Type::Int4Range), + ("int8", "pg_catalog.int8", Type::Int8), + ("int8multirange", "pg_catalog.int8multirange", Type::Int8MultiRange), + ("int8range", "pg_catalog.int8range", Type::Int8Range), + 
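+        // Each entry is (type name, schema-qualified name, Type variant); the table is kept sorted by type name, as asserted by the array_is_sorted test below.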
("interval", "pg_catalog.interval", Type::Interval), + ("json", "pg_catalog.json", Type::Json), + ("jsonb", "pg_catalog.jsonb", Type::Jsonb), + ("jsonpath", "pg_catalog.jsonpath", Type::JsonPath), + ("line", "pg_catalog.line", Type::Line), + ("lseg", "pg_catalog.lseg", Type::LSeg), + ("macaddr", "pg_catalog.macaddr", Type::MacAddr), + ("macaddr8", "pg_catalog.macaddr8", Type::MacAddr8), + ("money", "pg_catalog.money", Type::Money), + ("numeric", "pg_catalog.numeric", Type::Numeric), + ("nummultirange", "pg_catalog.nummultirange", Type::NumMultiRange), + ("numrange", "pg_catalog.numrange", Type::NumRange), + ("path", "pg_catalog.path", Type::Path), + ("point", "pg_catalog.point", Type::Point), + ("polygon", "pg_catalog.polygon", Type::Polygon), + ("range", "pg_catalog.range", Type::Range), + ("record", "pg_catalog.record", Type::Record), + ("serial2", "pg_catalog.serial2", Type::Serial2), + ("serial4", "pg_catalog.serial4", Type::Serial4), + ("serial8", "pg_catalog.serial8", Type::Serial8), + ("text", "pg_catalog.text", Type::Text), + ("time", "pg_catalog.time", Type::Time), + ("timestamp", "pg_catalog.timestamp", Type::Timestamp), + ("timestamptz", "pg_catalog.timestamptz", Type::TimestampTz), + ("timetz", "pg_catalog.timetz", Type::TimeTz), + ("tsmultirange", "pg_catalog.tsmultirange", Type::TsMultiRange), + ("tsquery", "pg_catalog.tsquery", Type::TsQuery), + ("tsrange", "pg_catalog.tsrange", Type::TsRange), + ("tstzmultirange", "pg_catalog.tstzmultirange", Type::TsTzMultiRange), + ("tstzrange", "pg_catalog.tstzrange", Type::TsTzRange), + ("tsvector", "pg_catalog.tsvector", Type::TsVector), + ("unknown", "pg_catalog.unknown", Type::Unknown), + ("uuid", "pg_catalog.uuid", Type::Uuid), + ("varchar", "pg_catalog.varchar", Type::VarChar), + ("void", "pg_catalog.void", Type::Void), + ("xml", "pg_catalog.xml", Type::Xml), + ]; +} + +impl Object for Type { + fn repr(self: &Arc) -> minijinja::value::ObjectRepr { + ObjectRepr::Plain + } +} + +#[test] +fn array_is_sorted() { + assert!(Type::NAMES.is_sorted()) +} diff --git a/codegen/src/ir/type_service.rs b/codegen/src/ir/type_service.rs index 0fad55b..3ea7afb 100644 --- a/codegen/src/ir/type_service.rs +++ b/codegen/src/ir/type_service.rs @@ -1,4 +1,4 @@ -use std::rc::Rc; +use std::sync::Arc; use super::r#type::Type; use crate::{ @@ -34,13 +34,13 @@ impl TypeService { if column.type_field.is_array { r#type = Type::Array { - r#type: Rc::new(r#type), + r#type: Arc::new(r#type), dim: column.type_field.array_dimensions, }; } if column.is_nullable { - r#type = Type::Nullable(Rc::new(r#type)); + r#type = Type::Nullable(Arc::new(r#type)); } return r#type; @@ -51,7 +51,7 @@ impl TypeService { self.resolve_enum(schema, column.foreign_table_name.as_ref()?) 
} - fn resolve_from_catalog(&self, schema_name: &Rc, name: &Rc) -> Type { + fn resolve_from_catalog(&self, schema_name: &Arc, name: &Arc) -> Type { if &**schema_name == "pg_catalog" { return self.from_pg_catalog(&name); } @@ -59,25 +59,25 @@ impl TypeService { .unwrap_or(Type::Any) } - fn from_user_defined_catalog(&self, schema_name: &Rc, name: &Rc) -> Option { + fn from_user_defined_catalog(&self, schema_name: &Arc, name: &Arc) -> Option { let schema = self.get_schema(schema_name)?; self.resolve_record(schema, name) .or_else(|| self.resolve_enum(schema, name)) } - fn resolve_enum(&self, schema: &Schema, name: &Rc) -> Option { + fn resolve_enum(&self, schema: &Schema, name: &Arc) -> Option { schema.enums.iter().find(|enum_| enum_.name == *name)?; Some(self.user_defined_model(schema, name)) } - fn resolve_record(&self, schema: &Schema, name: &Rc) -> Option { + fn resolve_record(&self, schema: &Schema, name: &Arc) -> Option { schema.records.iter().find(|record| record.name == *name)?; Some(self.user_defined_model(schema, name)) } - fn user_defined_model(&self, schema: &Schema, name: &Rc) -> Type { - let module_path = Rc::new(["models".into(), schema.name.clone()]); + fn user_defined_model(&self, schema: &Schema, name: &Arc) -> Type { + let module_path = Arc::new(["models".into(), schema.name.clone()]); Type::UserDefined { module_path, name: name.clone(), @@ -91,83 +91,11 @@ impl TypeService { .find(|schema| &*schema.name == schema_name) } - fn from_pg_catalog(&self, name: &str) -> Type { - match name { - "bool" => Type::Bool, - "uuid" => Type::Uuid, - "text" => Type::Text, - "varchar" => Type::VarChar, - "bpchar" => Type::BpChar, - "bytea" => Type::Bytea, - "int2" => Type::Int2, - "int4" => Type::Int4, - "int8" => Type::Int8, - "serial2" => Type::Serial2, - "serial4" => Type::Serial4, - "serial8" => Type::Serial8, - "decimal" => Type::Decimal, - "numeric" => Type::Numeric, - "money" => Type::Money, - "float4" => Type::Float4, - "float8" => Type::Float8, - "timestamp" => Type::Timestamp, - "date" => Type::Date, - "time" => Type::Time, - "timestamptz" => Type::TimestampTz, - "datetz" => Type::DateTz, - "timetz" => Type::TimeTz, - "range" => Type::Range, - "interval" => Type::Interval, - "int4range" => Type::Int4Range, - "int8range" => Type::Int8Range, - "numrange" => Type::NumRange, - "tsrange" => Type::TsRange, - "tstzrange" => Type::TsTzRange, - "daterange" => Type::DateRange, - "datemultirange" => Type::DateMultiRange, - "int4multirange" => Type::Int4MultiRange, - "int8multirange" => Type::Int8MultiRange, - "nummultirange" => Type::NumMultiRange, - "tsmultirange" => Type::TsMultiRange, - "tstzmultirange" => Type::TsTzMultiRange, - "point" => Type::Point, - "line" => Type::Line, - "lseg" => Type::LSeg, - "box" => Type::Box, - "path" => Type::Path, - "polygon" => Type::Polygon, - "circle" => Type::Circle, - "cid" => Type::Cid, - "cidr" => Type::Cidr, - "inet" => Type::Inet, - "macaddr" => Type::MacAddr, - "macaddr8" => Type::MacAddr8, - "bit" => Type::Bit, - "bitvarying" => Type::BitVarying, - "tsvector" => Type::TsVector, - "tsquery" => Type::TsQuery, - "xml" => Type::Xml, - "json" => Type::Json, - "jsonb" => Type::Jsonb, - "jsonpath" => Type::JsonPath, - "any" => Type::Any, - "anyarray" => Type::AnyArray, - "anyelement" => Type::AnyElement, - "anynonarray" => Type::AnyNonArray, - "anyenum" => Type::AnyEnum, - "anyrange" => Type::AnyRange, - "anymultirange" => Type::AnyMultiRange, - "anycompatible" => Type::AnyCompatible, - "anycompatiblearray" => Type::AnyCompatibleArray, - 
"anycompatiblemultirange" => Type::AnyCompatibleMultiRange, - "anycompatiblenonarray" => Type::AnyCompatibleNonArray, - "anycompatiblerange" => Type::AnycompatibleRange, - "cstring" => Type::Cstring, - "internal" => Type::Internal, - "record" => Type::Record, - "void" => Type::Void, - "unknown" => Type::Unknown, - _ => Type::Any, - } + fn from_pg_catalog(&self, type_name: &str) -> Type { + Type::NAMES + .iter() + .find(|(name, _, _)| *name == type_name) + .map(|(_, _, ty)| ty.clone()) + .unwrap_or(Type::Any) } } diff --git a/codegen/src/jinja_environment_builder.rs b/codegen/src/jinja_environment_builder.rs deleted file mode 100644 index bc8b572..0000000 --- a/codegen/src/jinja_environment_builder.rs +++ /dev/null @@ -1,57 +0,0 @@ -use std::rc::Rc; - -use minijinja::Environment; - -use crate::{error::Error, request::Request, utils}; - -pub struct JinjaEnvironmentBuilder { - pub target: Rc, -} -pub const QUERY: &'static str = "query"; -pub const MODEL_SCHEMA_FILE: &'static str = "model_schema_file"; -pub const MODELS_DIR_ENTRYPOINT: &'static str = "model_dir_entrypoint"; - -impl JinjaEnvironmentBuilder { - pub fn new(request: &Request) -> Self { - Self { - target: request.config.codegen.target.clone(), - } - } - - fn query_template(&self) -> Result<&'static str, Error> { - Ok(match &*self.target { - "python:asyncpg" => include_str!("../templates/python:asyncpg/query.py.jinja2"), - "python:psycopg" => include_str!("../templates/python:psycopg/query.py.jinja2"), - "typescript:postgres" => include_str!("../templates/typescript:postgres/query.jinja2"), - _ => return Err(Error::NotSupportedLanguage(self.target.clone())), - }) - } - - fn model_template(&self) -> Result<&'static str, Error> { - Ok(match &*self.target { - "python:asyncpg" => include_str!("../templates/python:asyncpg/model.py.jinja2"), - "python:psycopg" => include_str!("../templates/python:psycopg/model.py.jinja2"), - "typescript:postgres" => include_str!("../templates/typescript:postgres/model.jinja2"), - _ => return Err(Error::NotSupportedLanguage(self.target.clone())), - }) - } - - fn model_dir_entrypoint_template(&self) -> Result<&'static str, Error> { - Ok(match &*self.target { - "python:asyncpg" => include_str!("../templates/python:asyncpg/model_init.py.jinja2"), - "python:psycopg" => include_str!("../templates/python:psycopg/model_init.py.jinja2"), - "typescript:postgres" => { - include_str!("../templates/typescript:postgres/model_init.jinja2") - } - _ => return Err(Error::NotSupportedLanguage(self.target.clone())), - }) - } - - pub fn build(self) -> Result, Error> { - let mut environment = utils::env(); - environment.add_template(QUERY, self.query_template()?)?; - environment.add_template(MODEL_SCHEMA_FILE, self.model_template()?)?; - environment.add_template(MODELS_DIR_ENTRYPOINT, self.model_dir_entrypoint_template()?)?; - Ok(environment) - } -} diff --git a/codegen/src/main.rs b/codegen/src/main.rs index 8f52efe..cdc74ba 100644 --- a/codegen/src/main.rs +++ b/codegen/src/main.rs @@ -7,20 +7,16 @@ use std::sync::atomic::Ordering::Relaxed; use std::{slice, sync::atomic::AtomicU64}; pub mod error; -pub mod file_gen_config; -pub mod file_generator; pub mod ir; -pub mod jinja_environment_builder; pub mod mock; pub mod presentation; pub mod request; pub mod response; pub mod r#type; -pub mod type_builder; mod utils; -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn alloc(size: usize) -> *mut u8 { let mut buffer = Vec::with_capacity(size); let ptr = buffer.as_mut_ptr(); @@ -28,7 +24,7 @@ pub extern "C" fn alloc(size: usize) 
-> *mut u8 { ptr } -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn build(ptr: *mut u8, size: usize) -> *const u8 { match try_build(ptr, size) { Ok(value) => write_response(value), @@ -55,7 +51,7 @@ fn write_response(response: T) -> *const u8 { buffer.leak().as_bytes().as_ptr() } -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn response_length() -> u64 { RESPONSE_LENGTH.load(Relaxed) } diff --git a/codegen/src/presentation/environment.rs b/codegen/src/presentation/environment.rs new file mode 100644 index 0000000..f282589 --- /dev/null +++ b/codegen/src/presentation/environment.rs @@ -0,0 +1,100 @@ +use std::{ + collections::BTreeMap, + sync::{Arc, Mutex}, +}; + +use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase}; +use minijinja::{Environment, State, Value}; +use regex::bytes::Regex; + +use crate::{ir::Type, presentation::type_mapping_service::TypeMapService}; + +pub fn env(service: Arc) -> Environment<'static> { + let mut env = minijinja::Environment::new(); + let service_ = service.clone(); + env.add_filter("annotation", move |state: &State, ty: &Type| -> Arc { + service_.get(module_path(state), ty).annotation + }); + let service_ = service.clone(); + env.add_filter( + "name", + move |state: &State, ty: &Type| -> Option> { + service_.get(module_path(state), ty).name + }, + ); + let service_ = service.clone(); + env.add_filter("import", move |state: &State, ty: &Type| -> Vec> { + service_.get(module_path(state), ty).import + }); + let service_ = service.clone(); + env.add_filter( + "type_module", + move |state: &State, ty: &Type| -> Option> { + service_.get(module_path(state), ty).module + }, + ); + env.add_filter("to_camel_case", to_camel_case); + env.add_filter("to_pascal_case", to_pascal_case); + env.add_filter("to_snake_case", to_snake_case); + env.add_filter("to_kebab_case", to_kebab_case); + env.add_filter("to_screaming_snake_case", to_screaming_snake_case); + env.add_filter("to_c_string", to_c_string); + env.add_filter("starts_with", starts_with); + env.add_filter("strip_prefix", strip_prefix); + env.add_filter("regex_replace", regex_replace); + env +} + +pub fn module_path<'a>(state: &State<'_, 'a>) -> Arc<[Arc]> { + state + .lookup("module_path") + .unwrap() + .downcast_object_ref::]>>() + .unwrap() + .clone() +} + +pub fn regex_replace(text: &str, pattern: &str, replacement: &str) -> String { + static REGEXES: Mutex> = Mutex::new(BTreeMap::new()); + let mut guard = REGEXES.lock().unwrap(); + let entry = guard.entry(pattern.into()); + let regex = entry.or_insert_with(|| Regex::new(pattern).unwrap()); + String::from_utf8( + regex + .replace_all(text.as_bytes(), replacement.as_bytes()) + .into(), + ) + .unwrap() +} + +pub fn to_c_string(s: &str) -> String { + format!("{:?}", s) +} + +pub fn strip_prefix<'a>(text: &'a str, pattern: &str) -> String { + text.strip_prefix(pattern).unwrap_or(text).to_string() +} + +pub fn starts_with(text: &str, pattern: &str) -> bool { + text.starts_with(pattern) +} + +pub fn to_camel_case(s: &str) -> String { + s.to_lower_camel_case() +} + +pub fn to_pascal_case(s: &str) -> String { + s.to_upper_camel_case() +} + +pub fn to_snake_case(s: &str) -> String { + s.to_snake_case() +} + +pub fn to_screaming_snake_case(s: &str) -> String { + s.to_shouty_snake_case() +} + +pub fn to_kebab_case(s: &str) -> String { + s.to_kebab_case() +} diff --git a/codegen/src/presentation/file_generation_config.rs b/codegen/src/presentation/file_generation_config.rs new file mode 100644 index 0000000..8fc777e --- /dev/null 
+++ b/codegen/src/presentation/file_generation_config.rs @@ -0,0 +1,5 @@ +pub struct FileGenerationConfig { + pub query_directory_entrypoint: &'static str, + pub model_directory_entrypoint: &'static str, + pub file_extension: &'static str, +} diff --git a/codegen/src/presentation/file_generator.rs b/codegen/src/presentation/file_generator.rs new file mode 100644 index 0000000..e1ebc22 --- /dev/null +++ b/codegen/src/presentation/file_generator.rs @@ -0,0 +1,111 @@ +use std::sync::Arc; + +use minijinja::{context, Environment}; + +use crate::{ + error::Error, + ir::{Ir, ModelModule, QueryNamespace}, + presentation::python::file_generation_config::FileGenerationConfig, + response::File, +}; + +pub struct FileGeneratorService { + pub ir: Ir, + pub config: FileGenerationConfig, + pub environment: Environment<'static>, +} + +impl FileGeneratorService { + fn files(&self) -> Result, Error> { + let mut files = self.model_module_files()?; + self.add_query_files(&mut files); + files.push(self.add_model_entrypoint()?); + return Ok(files); + } + + fn model_module_files(&self) -> Result, Error> { + let mut files = vec![]; + for module in self.ir.model_modules.model_modules.values() { + self.add_model_module_file(&mut files, module)?; + } + Ok(files) + } + + pub fn add_model_module_file( + &self, + files: &mut Vec, + module: &ModelModule, + ) -> Result<(), Error> { + let filename = format!("models/{}.{}", module.name, &self.config.file_extension); + + let content = self.environment.get_template("model")?.render(context! { + path => ["models", &module.name], + used_types => module.used_types(), + module => module, + })?; + + files.push(File { + path: filename, + content, + }); + Ok(()) + } + + fn add_model_entrypoint(&self) -> Result { + let content = self + .environment + .get_template("model_dir")? + .render(context!( + ir => self.ir, + ))?; + let path = format!("models/{}", self.config.model_directory_entrypoint); + Ok(File { path, content }) + } + + pub fn add_query_files(&self, files: &mut Vec) { + let namespace = &self.ir.query_namespace; + self.add_query_namespaces_recursively(files, &vec![], &namespace); + } + + fn add_query_namespaces_recursively( + &self, + files: &mut Vec, + path: &Vec>, + namespace: &QueryNamespace, + ) -> Result<(), Error> { + self.add_query_namespace(files, path, namespace)?; + + for (name, subnamespace) in namespace.subnamespaces.iter() { + let mut path = path.clone(); + path.push(name.clone()); + self.add_query_namespaces_recursively(files, &path, subnamespace); + } + Ok(()) + } + + pub fn add_query_namespace( + &self, + files: &mut Vec, + path: &Vec>, + namespace: &QueryNamespace, + ) -> Result<(), Error> { + let content = self + .environment + .get_template("query") + .unwrap() + .render(context! 
{ + query_namespace => namespace, + path => path, + ir => self.ir, + })?; + + let path = format!( + "{}.{}", + path.join("/"), + self.config.query_directory_entrypoint + ); + + files.push(File { path, content }); + Ok(()) + } +} diff --git a/codegen/src/presentation/mod.rs b/codegen/src/presentation/mod.rs index 6ee5a0a..3272245 100644 --- a/codegen/src/presentation/mod.rs +++ b/codegen/src/presentation/mod.rs @@ -1,9 +1,16 @@ -// use crate::{ir::Ir, request::Request}; +use crate::{ir::Ir, request::Request}; -// mod python; -// mod typescript; +mod python; +mod typescript; -// pub trait FileGenerator { -// fn new(ir: Ir) -> Self; -// fn generate_files(self) -> Vec; +mod file_generation_config; +mod file_generator; +mod type_mapping_service; + +mod environment; + +// pub fn generate_files(ir: Ir) { +// match ir.request.config.codegen { +// "python" => +// } // } diff --git a/codegen/src/presentation/python/file_generation_config.rs b/codegen/src/presentation/python/file_generation_config.rs new file mode 100644 index 0000000..8fc777e --- /dev/null +++ b/codegen/src/presentation/python/file_generation_config.rs @@ -0,0 +1,5 @@ +pub struct FileGenerationConfig { + pub query_directory_entrypoint: &'static str, + pub model_directory_entrypoint: &'static str, + pub file_extension: &'static str, +} diff --git a/codegen/src/presentation/python/file_generator.rs b/codegen/src/presentation/python/file_generator.rs index e0e398d..5a295bf 100644 --- a/codegen/src/presentation/python/file_generator.rs +++ b/codegen/src/presentation/python/file_generator.rs @@ -1,5 +1,100 @@ -pub struct PythonFileGenerator {} +use std::sync::Arc; -impl PythonFileGenerator { - fn new() {} +use minijinja::{context, Environment}; + +use crate::{ + error::Error, + ir::{Ir, ModelModule, QueryNamespace}, + presentation::python::file_generation_config::FileGenerationConfig, + response::File, +}; + +pub struct FileGeneratorService { + pub ir: Ir, + pub config: FileGenerationConfig, + pub environment: Environment<'static>, +} + +impl FileGeneratorService { + fn files(&self) -> Result, Error> { + let mut files = self.model_module_files()?; + self.add_query_files(&mut files); + return Ok(files); + } + + fn model_module_files(&self) -> Result, Error> { + let mut files = vec![]; + for module in self.ir.model_modules.model_modules.values() { + self.add_model_module_file(&mut files, module)?; + } + + Ok(files) + } + + pub fn add_model_module_file( + &self, + files: &mut Vec, + module: &ModelModule, + ) -> Result<(), Error> { + let filename = format!("models/{}.{}", module.name, &self.config.file_extension); + + let content = self.environment.get_template("model")?.render(context! 
{ + path => ["models", &module.name], + used_types => module.used_types(), + module => module, + })?; + + files.push(File { + path: filename, + content, + }); + Ok(()) + } + + pub fn add_query_files(&self, files: &mut Vec) { + let namespace = &self.ir.query_namespace; + self.add_query_namespaces_recursively(files, &vec![], &namespace); + } + + fn add_query_namespaces_recursively( + &self, + files: &mut Vec, + path: &Vec>, + namespace: &QueryNamespace, + ) -> Result<(), Error> { + self.add_query_namespace(files, path, namespace)?; + + for (name, subnamespace) in namespace.subnamespaces.iter() { + let mut path = path.clone(); + path.push(name.clone()); + self.add_query_namespaces_recursively(files, &path, subnamespace); + } + Ok(()) + } + + pub fn add_query_namespace( + &self, + files: &mut Vec, + path: &Vec>, + namespace: &QueryNamespace, + ) -> Result<(), Error> { + let content = self + .environment + .get_template("query") + .unwrap() + .render(context! { + query_namespace => namespace, + path => path, + ir => self.ir, + })?; + + let path = format!( + "{}.{}", + path.join("/"), + self.config.query_directory_entrypoint + ); + + files.push(File { path, content }); + Ok(()) + } } diff --git a/codegen/src/presentation/python/mod.rs b/codegen/src/presentation/python/mod.rs index e46effa..0a51b9e 100644 --- a/codegen/src/presentation/python/mod.rs +++ b/codegen/src/presentation/python/mod.rs @@ -1,3 +1,8 @@ pub(super) mod driver; + +pub mod file_generation_config; pub mod file_generator; -pub mod python_type; +pub mod type_map_service; +pub struct PythonFileGenerator {} + +impl PythonFileGenerator {} diff --git a/codegen/src/presentation/python/python_type.rs b/codegen/src/presentation/python/python_type.rs deleted file mode 100644 index e71d40f..0000000 --- a/codegen/src/presentation/python/python_type.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub struct PythonType { - module: Rc, - import: Rc, - annotation: Rc, - name: Rc, -} diff --git a/codegen/src/presentation/python/templates/asyncpg/config.json b/codegen/src/presentation/python/templates/asyncpg/config.json deleted file mode 100644 index 2a1fdaf..0000000 --- a/codegen/src/presentation/python/templates/asyncpg/config.json +++ /dev/null @@ -1 +0,0 @@ -{ "extension": "py", "directory_entrypoint": "__init__" } diff --git a/codegen/src/presentation/python/templates/asyncpg/model.py.jinja2 b/codegen/src/presentation/python/templates/asyncpg/model.j2 similarity index 100% rename from codegen/src/presentation/python/templates/asyncpg/model.py.jinja2 rename to codegen/src/presentation/python/templates/asyncpg/model.j2 diff --git a/codegen/src/presentation/python/templates/asyncpg/model_init.py.jinja2 b/codegen/src/presentation/python/templates/asyncpg/model_init.j2 similarity index 100% rename from codegen/src/presentation/python/templates/asyncpg/model_init.py.jinja2 rename to codegen/src/presentation/python/templates/asyncpg/model_init.j2 diff --git a/codegen/src/presentation/python/templates/asyncpg/query.py.jinja2 b/codegen/src/presentation/python/templates/asyncpg/query.j2 similarity index 100% rename from codegen/src/presentation/python/templates/asyncpg/query.py.jinja2 rename to codegen/src/presentation/python/templates/asyncpg/query.j2 diff --git a/codegen/src/presentation/python/templates/psycopg/model.py.jinja2 b/codegen/src/presentation/python/templates/psycopg/model.py.jinja2 new file mode 100644 index 0000000..78addbb --- /dev/null +++ b/codegen/src/presentation/python/templates/psycopg/model.py.jinja2 @@ -0,0 +1,32 @@ +import dataclasses 
+{%- if enums %} +import enum +{%- endif %} +{%- for import in imports %} +import {{import}} +{%- endfor %} +from {{request.config.codegen.options.package}} import models + +{%- for enum in enums %} + +class {{enum.name | to_pascal_case }}(enum.StrEnum): + {%- for value in enum.values %} + {{ value | to_screaming_snake_case }} = {{ value | to_c_string }} + {%- endfor %} +{% endfor %} + +{%- for model in models %} + + +@dataclasses.dataclass +class {{model.type.declaration}}: + {%- for field, type in model.fields %} + {{field}}: {% if type.annotation | starts_with("models." + schema) -%} + {{ type.annotation | strip_prefix("models." + schema + ".") }} + {%- elif type.annotation | starts_with("models.") -%} + {{ type.annotation | to_c_string }} + {%- else -%} + {{ type.annotation }} + {%- endif %} + {%- endfor %} +{%- endfor %} diff --git a/codegen/src/presentation/python/templates/psycopg/model_init.py.jinja2 b/codegen/src/presentation/python/templates/psycopg/model_init.py.jinja2 new file mode 100644 index 0000000..3a811fc --- /dev/null +++ b/codegen/src/presentation/python/templates/psycopg/model_init.py.jinja2 @@ -0,0 +1,12 @@ +{%- for module in model_modules -%} +from . import {{module}} +{% endfor -%} + + +{%- if model_modules["public"] -%} +from .public import ( +{%- for model_class in model_modules["public"].classes %} + {{model_class.type.declaration}}, +{%- endfor %} +) +{% endif %} diff --git a/codegen/src/presentation/python/templates/psycopg/query.py.jinja2 b/codegen/src/presentation/python/templates/psycopg/query.py.jinja2 new file mode 100644 index 0000000..8128a88 --- /dev/null +++ b/codegen/src/presentation/python/templates/psycopg/query.py.jinja2 @@ -0,0 +1,174 @@ +# This file was automatically generated by pgc +# flake8: noqa +# pylint: disable=unused-import +{%- for import in imports %} +import {{import}} +{%- endfor %} +import psycopg +import typing +import dataclasses +from psycopg.rows import dict_row +{%- if query_namespace.name == "" %} +from psycopg.types.composite import CompositeInfo, register_composite +{%- endif %} +from {{request.config.codegen.options.package}} import models +{%- for subnamespace in query_namespace.subnamespaces %} +from . import {{subnamespace}} +{%- endfor %} + +{%- for method in query_namespace.methods %} + +{{ method.query.name | to_screaming_snake_case }} = """ +{{ method.query.query | regex_replace('\\$(\\d+)', '%(p$1)s') }} +""" +{%- endfor %} +{{"\n"}} +{%- for method in query_namespace.methods %} +{%- if method.output_model != None %} +@dataclasses.dataclass +class {{method.output_model.type.declaration | to_pascal_case }}: + {%- for field, type in method.output_model.fields | items %} + {{field}}: {{type.annotation}} + {%- endfor %} + +{% endif %} +{%- for _, input_model in method.input_models | items %} +{%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} +@dataclasses.dataclass +class {{ input_model.type.declaration | to_pascal_case }}: + {%- for field, type in input_model.fields | items %} + {{field}}: {{type.annotation}} + {%- endfor %} + +{%- else %} +class {{ input_model.type.declaration | to_pascal_case }}(typing.Protocol): + {%- for field, type in input_model.fields | items %} + @property + def {{field}}(self) -> {{type.annotation}}: ... 
+ {%- endfor %} + +{%- endif %} +{% endfor %} +{%- endfor %} + +@dataclasses.dataclass +class {{ query_namespace.name | to_pascal_case }}Queries: + def __init__(self, connection: psycopg.Connection): + self.connection = connection + {%- for subnamespace in query_namespace.subnamespaces %} + self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) + {%- endfor %} + + {% for method in query_namespace.methods%} + {%- if method.query.annotations.not_null_result -%} + {%- set HANDLE_NONE = 'assert row is not None, "a query marked with @not_null_result has returned null."' %} + {%- set OR_NONE = '' %} + {% else %} + {%- set HANDLE_NONE = 'if row is None: return None' %} + {%- set OR_NONE = ' | None' %} + {%- endif %} + + {%- if method.query.command == 'one' %} + + {%- if method.query.output | length == 1 %} + def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> {{method.output_type.annotation}}{{OR_NONE}}: + row = self.connection.execute( + {{method.query.name | to_screaming_snake_case }}, { + {%- for parameter in method.query.parameters -%} + "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} + {%- endfor %}} + ).fetchone() + {{HANDLE_NONE}} + return row[0] + {%- else %} + def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> {{method.output_type.annotation}}{{OR_NONE}}: + row = self.connection.cursor(row_factory=dict_row).execute( + {{method.query.name | to_screaming_snake_case }}, { + {%- for parameter in method.query.parameters -%} + "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} + {%- endfor %}} + ).fetchone() + {{HANDLE_NONE}} + return {{method.output_type.annotation}}(**row) + + {%- endif %} + {%- elif method.query.command == 'many' %} + {%- if method.query.output | length == 1 %} + def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> list[{{method.output_type.annotation}}]: + rows = self.connection.execute( + {{method.query.name | to_screaming_snake_case }}, { + {%- for parameter in method.query.parameters -%} + "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} + {%- endfor %}} + ).fetchall() + return [row[0] for row in rows] + {%- else %} + def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> list[{{method.output_type.annotation}}]: + rows = self.connection.cursor(row_factory=dict_row).execute( + {{method.query.name | to_screaming_snake_case }}, { + {%- for parameter in method.query.parameters -%} + "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} + {%- endfor %}} + ).fetchall() + return [{{method.output_type.annotation}}(**row) for row in rows] + + {%- endif %} + {%- elif method.query.command == 'val' %} + def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ) -> {{method.output_type.annotation}}{{OR_NONE}}: + row = self.connection.execute( + {{method.query.name | to_screaming_snake_case }}, { + {%- for parameter in method.query.parameters -%} + "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} + {%- endfor %}} + ).fetchone() + {{HANDLE_NONE}} 
+ return row[0] + {%- else %} + def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type.annotation}} + {%- endfor -%} + ): + return self.connection.execute( + {{method.query.name | to_screaming_snake_case }}, { + {%- for parameter in method.query.parameters -%} + "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} + {%- endfor %}} + ) + {%- endif %} + + {% endfor %} + + + +{%- if query_namespace.name == "" %} +def init_connection(conn: psycopg.Connection): + {%- for _, model_module in model_modules | items %} + {%- for model in model_module.classes %} + + register_composite( + CompositeInfo.fetch(conn, "\"{{model.type.pgtype_schema}}\".\"{{model.type.pgtype_name }}\""), conn, {{model.type.constructor}} # type: ignore + ) + {%- endfor %} + {% endfor %} +{% endif -%} diff --git a/codegen/src/presentation/python/templates/psycopg/types.json b/codegen/src/presentation/python/templates/psycopg/types.json new file mode 100644 index 0000000..8bdc19e --- /dev/null +++ b/codegen/src/presentation/python/templates/psycopg/types.json @@ -0,0 +1,108 @@ +{ + "new_type_case": "{{ name | to_pascal_case }}", + "array": { + "constructor": "list", + "annotation": "list[{{type.annotation}}]" + }, + "null": { + "declaration": "{{type.declaration}}", + "constructor": "{{type.constructor}}", + "annotation": "{{type.annotation}} | None" + }, + "composite": { + "declaration": "{{ type_name | to_pascal_case }}", + "constructor": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", + "annotation": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", + "import": [] + }, + "wildcard": { + "annotation": "typing.Any", + "import": ["typing"] + }, + "schema": { + "pg_catalog": { + "bool": { "annotation": "bool" }, + "bytea": { "annotation": "bytes" }, + "char": { "annotation": "str" }, + "int8": { "annotation": "int" }, + "int2": { "annotation": "int" }, + "int4": { "annotation": "int" }, + "text": { "annotation": "str" }, + "json": { "annotation": "dict" }, + "point": { "annotation": "asyncpg.types.Point", "import": ["asyncpg"] }, + "box": { + "annotation": "asyncpg.pgproto.types.Box", + "import": ["asyncpg"] + }, + "polygon": { + "annotation": "asyncpg.pgproto.types.Polygon", + "import": ["asyncpg"] + }, + "line": { + "annotation": "asyncpg.pgproto.types.Line", + "import": ["asyncpg"] + }, + + "float4": { "annotation": "float" }, + "float8": { "annotation": "float" }, + "unknown": { "annotation": "typing.Any", "import": ["typing"] }, + "circle": { + "annotation": "asyncpg.pgproto.types.Circle", + "import": ["asyncpg"] + }, + "varchar": { "annotation": "str" }, + "date": { "annotation": "datetime.date", "import": ["datetime"] }, + "time": { "annotation": "datetime.time", "import": ["datetime"] }, + "timestamp": { + "annotation": "datetime.datetime", + "import": ["datetime"] + }, + "timestamptz": { + "annotation": "datetime.datetime", + "import": ["datetime"] + }, + "interval": { + "annotation": "datetime.timedelta", + "import": ["datetime"] + }, + "timetz": { "annotation": "datetime.time", "import": ["datetime"] }, + "numeric": { "annotation": "decimal.Decimal", "import": ["decimal"] }, + "record": { "annotation": "str" }, + "any": { "annotation": "typing.Any", "import": ["typing"] }, + "anyarray": { "annotation": "list[typing.Any]", "import": ["typing"] }, + "anyelement": { "annotation": "typing.Any", "import": ["typing"] }, + "anynonarray": { "annotation": "typing.Any", "import": ["typing"] }, + 
"uuid": { "annotation": "uuid.UUID", "import": ["uuid"] }, + "anyenum": { "annotation": "str" }, + "anyrange": { + "annotation": "psycopg.types.range.Range", + "import": ["psycopg.types.range"] + }, + "jsonb": { "annotation": "dict" }, + "int4range": { + "annotation": "psycopg.types.range.Range[int]", + "import": ["psycopg.types.range"] + }, + "numrange": { + "annotation": "psycopg.types.range.Range[float]", + "import": ["psycopg.types.range"] + }, + "tsrange": { + "annotation": "psycopg.types.range.Range[datetime.datetime]", + "import": ["psycopg.types.range", "datetime"] + }, + "tstzrange": { + "annotation": "psycopg.types.range.Range[datetime.datetime]", + "import": ["psycopg.types.range", "datetime"] + }, + "daterange": { + "annotation": "psycopg.types.range.Range[datetime.date]", + "import": ["psycopg.types.range", "datetime"] + }, + "int8range": { + "annotation": "psycopg.types.range.Range[int]", + "import": ["psycopg.types.range"] + } + } + } +} diff --git a/codegen/src/presentation/python/type_map_service.rs b/codegen/src/presentation/python/type_map_service.rs new file mode 100644 index 0000000..0026b0f --- /dev/null +++ b/codegen/src/presentation/python/type_map_service.rs @@ -0,0 +1,136 @@ +use crate::{ + ir::Type, + presentation::type_mapping_service::{LanguageType, TypeMapService}, +}; +use std::sync::Arc; + +struct AsyncpgTypeMapService; +pub struct PsycopgTypeMapService; + +impl TypeMapService for PsycopgTypeMapService { + #[rustfmt::skip] + fn get(&self, module: Arc<[Arc<str>]>, r#type: &Type) -> LanguageType { + match r#type { + Type::Bit + | Type::BitVarying + | Type::Record + | Type::Line + | Type::LSeg + | Type::Point + | Type::Path + | Type::Polygon + | Type::Circle + | Type::Box => LanguageType::annotation("str"), + Type::AnyMultiRange | Type::AnyCompatibleMultiRange => LanguageType::annotation("list[psycopg.types.range.Range]").import(["import psycopg.types.range"]), + Type::TsMultiRange | Type::TsTzMultiRange => LanguageType::annotation("list[psycopg.types.range.Range[datetime.datetime]]").import(["import psycopg.types.range", "import datetime"]), + Type::DateMultiRange => LanguageType::annotation("list[psycopg.types.range.Range[datetime.date]]").import(["import psycopg.types.range", "import datetime"]), + Type::DateRange => LanguageType::annotation("psycopg.types.range.Range[datetime.date]").import(["import psycopg.types.range", "import datetime"]), + Type::TsRange | Type::TsTzRange => LanguageType::annotation("psycopg.types.range.Range[datetime.datetime]").import(["import psycopg.types.range", "import datetime"]), + Type::NumMultiRange => LanguageType::annotation("list[psycopg.types.range.Range[decimal.Decimal]]").import(["import psycopg.types.range", "import decimal"]), + Type::Int4Range | Type::Int8Range => LanguageType::annotation("psycopg.types.range.Range[int]").import(["import psycopg.types.range"]), + Type::Range | Type::AnyRange | Type::AnycompatibleRange => LanguageType::annotation("psycopg.types.range.Range").import(["import psycopg.types.range"]), + Type::NumRange => LanguageType::annotation("psycopg.types.range.Range[decimal.Decimal]").import(["import psycopg.types.range", "import decimal"]), + Type::Int4MultiRange | Type::Int8MultiRange => LanguageType::annotation("list[psycopg.types.range.Range[int]]").import(["import psycopg.types.range"]), + _ => return AsyncpgTypeMapService.get(module, r#type), + } + } +} + +impl TypeMapService for AsyncpgTypeMapService { + #[rustfmt::skip] + fn get(&self, current_module: Arc<[Arc<str>]>, r#type: &crate::ir::Type) -> LanguageType { + 
match r#type { + Type::UserDefined { module_path, name } => { + let module: Arc<_> = module_path.join(".").into(); + LanguageType { name: Some(name.clone()), annotation: format!("{module}.{name}").into(), import: vec![format!("import {}", module).into()], module: Some(module) } + }, + Type::Nullable(r#type) => { + let r#type = self.get(current_module, r#type); + LanguageType { + name: r#type.name, + annotation: format!("{} | None", r#type.annotation).into(), + import: r#type.import, + module: r#type.module + } + } + Type::Array { r#type, dim } => { + let r#type = self.get(current_module, r#type); + let mut annotation = r#type.annotation; + for _ in 0..*dim { + annotation = format!("list[{}]", annotation).into(); + } + LanguageType { + name: None, + annotation, + import: r#type.import, + module: r#type.module + } + } + Type::AnyArray | Type::AnyCompatibleArray => LanguageType::annotation("list"), + Type::Void => LanguageType::annotation("None"), + Type::Bool => LanguageType::annotation("bool"), + Type::Bytea => LanguageType::annotation("bytes"), + Type::Cidr => LanguageType::annotation("ipaddress.IPv4Network | ipaddress.IPv6Network").import(["import ipaddress"]), + Type::Inet => LanguageType::annotation("ipaddress.IPv4Interface | ipaddress.IPv6Interface").import(["import ipaddress"]), + Type::Date | Type::DateTz => LanguageType::annotation("datetime.date").import(["import datetime"]), + Type::Time | Type::TimeTz=> LanguageType::annotation("datetime.time").import(["import datetime"]), + Type::Timestamp | Type::TimestampTz => LanguageType::annotation("datetime.datetime").import(["import datetime"]), + Type::Interval => LanguageType::annotation("datetime.timedelta").import(["import datetime"]), + Type::Float4 | Type::Float8 => LanguageType::annotation("float"), + Type::Uuid => LanguageType::annotation("uuid.UUID").import(["import uuid"]), + Type::Record => LanguageType::annotation("asyncpg.Record").import(["import asyncpg"]), + Type::Bit | Type::BitVarying => LanguageType::annotation("asyncpg.BitString").import(["import asyncpg"]), + Type::Box => LanguageType::annotation("asyncpg.Box").import(["import asyncpg"]), + Type::Int4Range | Type::Int8Range => LanguageType::annotation("asyncpg.Range[int]").import(["import asyncpg"]), + Type::NumRange => LanguageType::annotation("asyncpg.Range[decimal.Decimal]").import(["import asyncpg", "import decimal"]), + Type::Int4MultiRange | Type::Int8MultiRange => LanguageType::annotation("list[asyncpg.Range[int]]").import(["import asyncpg"]), + Type::NumMultiRange => LanguageType::annotation("list[asyncpg.Range[decimal.Decimal]]").import(["import asyncpg", "import decimal"]), + Type::Circle => LanguageType::annotation("asyncpg.Circle").import(["import asyncpg"]), + Type::Line => LanguageType::annotation("asyncpg.Line").import(["import asyncpg"]), + Type::LSeg => LanguageType::annotation("asyncpg.LineSegment").import(["import asyncpg"]), + Type::Path => LanguageType::annotation("asyncpg.Path").import(["import asyncpg"]), + Type::Point => LanguageType::annotation("asyncpg.Point").import(["import asyncpg"]), + Type::Polygon => LanguageType::annotation("asyncpg.Polygon").import(["import asyncpg"]), + Type::AnyRange + | Type::TsRange + | Type::TsTzRange + | Type::DateRange + | Type::AnycompatibleRange + | Type::Range => LanguageType::annotation("asyncpg.Range").import(["import asyncpg"]), + Type::AnyMultiRange + | Type::AnyCompatibleMultiRange + | Type::TsMultiRange + | Type::TsTzMultiRange + | Type::DateMultiRange => 
LanguageType::annotation("list[asyncpg.Range]").import(["import asyncpg"]), + Type::Any + | Type::Unknown + | Type::AnyElement + | Type::AnyNonArray + | Type::AnyCompatibleNonArray + | Type::AnyCompatible => LanguageType::annotation("typing.Any").import(["import typing"]), + Type::BpChar + | Type::VarChar + | Type::Text + | Type::Xml + | Type::Json + | Type::Jsonb + | Type::Cstring + | Type::Money + | Type::AnyEnum + | Type::JsonPath + | Type::Cid + | Type::MacAddr + | Type::MacAddr8 + | Type::TsVector + | Type::TsQuery + | Type::Other {..} => LanguageType::annotation("str"), + Type::Int2 + | Type::Int4 + | Type::Int8 + | Type::Serial2 + | Type::Serial4 + | Type::Serial8 => LanguageType::annotation("int"), + Type::Numeric | Type::Decimal => LanguageType::annotation("decimal.Decimal").import(["import decimal"]), + } + } +} diff --git a/codegen/src/presentation/type_map.rs b/codegen/src/presentation/type_map.rs new file mode 100644 index 0000000..a40e630 --- /dev/null +++ b/codegen/src/presentation/type_map.rs @@ -0,0 +1 @@ +pub struct TypeMap {} diff --git a/codegen/src/presentation/type_mapping_service.rs b/codegen/src/presentation/type_mapping_service.rs new file mode 100644 index 0000000..e1671e5 --- /dev/null +++ b/codegen/src/presentation/type_mapping_service.rs @@ -0,0 +1,76 @@ +use std::{collections::BTreeMap, sync::Arc}; + +use minijinja::value::DynObject; +use serde::{Deserialize, Serialize}; + +use crate::{ + ir::{Type, TypeService}, + request::TypeConfig, +}; + +pub trait TypeMapService: Send + Sync + 'static { + fn get(&self, module: Arc<[Arc]>, r#type: &Type) -> LanguageType; +} + +#[derive(Deserialize, Clone, Debug, PartialEq, Eq)] +pub struct LanguageType { + pub name: Option>, + pub annotation: Arc, + pub import: Vec>, + pub module: Option>, +} + +struct OverriddenTypeMapService { + service: Box, + overrides: Arc, TypeConfig>>, +} + +impl TypeMapService for OverriddenTypeMapService { + fn get(&self, module: Arc<[Arc]>, r#type: &Type) -> LanguageType { + let Ok(ty) = Type::NAMES.binary_search_by(|(_, _, ty)| ty.cmp(r#type)) else { + return self.service.get(module, r#type); + }; + let (_, name, _) = Type::NAMES[ty]; + + let Some(type_config) = self.overrides.get(name) else { + return self.service.get(module, r#type); + }; + + return LanguageType { + name: None, + annotation: type_config.annotation.clone(), + import: type_config.import.clone(), + module: None, + }; + } +} + +impl LanguageType { + pub fn annotation(annotation: &str) -> Self { + LanguageType { + annotation: annotation.into(), + name: None, + import: vec![], + module: None, + } + } + + pub fn name(self, name: &str) -> Self { + Self { + name: Some(name.into()), + ..self + } + } + + pub fn import(self, import: [&str; N]) -> Self { + let import: Vec> = import.into_iter().map(Into::into).collect(); + Self { import, ..self } + } + + pub fn module(self, module: &str) -> Self { + Self { + module: Some(module.into()), + ..self + } + } +} diff --git a/codegen/src/presentation/typescript/typescript_type.rs b/codegen/src/presentation/typescript/typescript_type.rs index f259c60..f5d19f4 100644 --- a/codegen/src/presentation/typescript/typescript_type.rs +++ b/codegen/src/presentation/typescript/typescript_type.rs @@ -1,6 +1,6 @@ pub struct TypescriptType { - module: Rc, - import: Rc, - annotation: Rc, - name: Rc, + module: Arc, + import: Arc, + annotation: Arc, + name: Arc, } diff --git a/codegen/src/request.rs b/codegen/src/request.rs index e03dee3..dd7d735 100644 --- a/codegen/src/request.rs +++ b/codegen/src/request.rs @@ 
-1,7 +1,7 @@ use std::collections::BTreeMap; use std::collections::HashMap; use std::iter::Map; -use std::rc::Rc; +use std::sync::Arc; use serde::Deserialize; use serde::Serialize; @@ -10,93 +10,93 @@ use serde_json::Value; #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Request { pub catalog: Catalog, - pub queries: Rc<[Query]>, + pub queries: Arc<[Query]>, pub config: Config, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Catalog { - pub schemas: Rc<[Schema]>, + pub schemas: Arc<[Schema]>, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Schema { - pub name: Rc, - pub enums: Rc<[Enum]>, - pub records: Rc<[Record]>, + pub name: Arc, + pub enums: Arc<[Enum]>, + pub records: Arc<[Record]>, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Enum { - pub name: Rc, - pub values: Rc<[Rc]>, + pub name: Arc, + pub values: Arc<[Arc]>, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Record { - pub kind: Rc, - pub name: Rc, - pub columns: Rc<[Column]>, + pub kind: Arc, + pub name: Arc, + pub columns: Arc<[Column]>, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Column { - pub name: Rc, + pub name: Arc, #[serde(rename = "type")] pub type_field: ColumnType, - pub default: Option>, + pub default: Option>, pub is_unique: bool, pub is_nullable: bool, pub is_foreign_key: bool, pub is_primary_key: bool, - pub foreign_table_name: Option>, - pub foreign_table_schema: Option>, + pub foreign_table_name: Option>, + pub foreign_table_schema: Option>, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct ColumnType { - pub name: Rc, - pub display: Rc, + pub name: Arc, + pub display: Arc, pub is_array: bool, - pub schema_name: Rc, + pub schema_name: Arc, pub is_composite: bool, pub array_dimensions: i64, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Query { - pub query: Rc, - pub name: Rc, - pub command: Rc, - pub path: Rc, - pub annotations: Rc>, - pub output: Rc<[OutputColumn]>, - pub parameters: Rc<[Parameter]>, + pub query: Arc, + pub name: Arc, + pub command: Arc, + pub path: Arc, + pub annotations: Arc>, + pub output: Arc<[OutputColumn]>, + pub parameters: Arc<[Parameter]>, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Annotation { - pub value: Option>, + pub value: Option>, pub line: i64, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct OutputColumn { - pub name: Rc, + pub name: Arc, #[serde(rename = "type")] pub type_: OutputType, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct OutputType { - pub schema: Rc, - pub name: Rc, + pub schema: Arc, + pub name: Arc, pub id: i64, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Parameter { - pub name: Rc, + pub name: Arc, #[serde(rename = "type")] pub type_: OutputType, pub not_null: bool, @@ -104,23 +104,24 @@ pub struct Parameter { #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Config { - pub version: Rc, - pub queries: Rc<[Rc]>, + pub version: Arc, + pub queries: Arc<[Arc]>, pub codegen: Codegen, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Codegen { - pub out: Rc, - pub target: Rc, + pub out: Arc, + pub language: Arc, + pub driver: Arc, #[serde(default)] - pub types: Rc, TypeConfig>>, + pub types: Arc, 
TypeConfig>>, pub options: Value, } #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct TypeConfig { - pub annotation: Rc, + pub annotation: Arc, #[serde(default)] - pub import: Rc<[Rc]>, + pub import: Vec>, } diff --git a/codegen/src/type.rs b/codegen/src/type.rs index c9da673..107a56a 100644 --- a/codegen/src/type.rs +++ b/codegen/src/type.rs @@ -1,4 +1,4 @@ -use std::{collections::BTreeMap, rc::Rc, sync::LazyLock}; +use std::{collections::BTreeMap, sync::Arc, sync::LazyLock}; use serde::Deserialize; use serde_json::json; @@ -12,13 +12,13 @@ use crate::{ #[derive(serde::Serialize, Deserialize, Clone, Debug)] pub struct Type { #[serde(default)] - pub declaration: Rc, + pub declaration: Arc, #[serde(default)] - pub annotation: Rc, + pub annotation: Arc, #[serde(default)] - pub constructor: Rc, + pub constructor: Arc, #[serde(default)] - pub import: Rc<[Rc]>, - pub pgtype_name: Option>, - pub pgtype_schema: Option>, + pub import: Arc<[Arc]>, + pub pgtype_name: Option>, + pub pgtype_schema: Option>, } diff --git a/codegen/src/type_builder.rs b/codegen/src/type_builder.rs deleted file mode 100644 index 559f584..0000000 --- a/codegen/src/type_builder.rs +++ /dev/null @@ -1,246 +0,0 @@ -use std::{collections::BTreeMap, rc::Rc, sync::LazyLock}; - -use minijinja::context; -use serde::{Deserialize, Serialize}; - -use crate::{ - error::Error, - r#type::Type, - request::{Catalog, Column, ColumnType, OutputType, Request, TypeConfig}, - utils::render, -}; - -/// It creates instances of `Type`, either of newly declared types -/// or by transforming an SQL type into a `Type`. -#[derive(Deserialize, Serialize, Clone)] -pub struct TypeBuilder { - type_overrides: Rc, TypeConfig>>, - enums: Vec<(Rc, Rc)>, - catalog: Catalog, - type_map: TypeMap, -} - -#[derive(Deserialize, Serialize, Clone)] -pub struct TypeMap { - new_type_case: String, - null: Type, - array: Type, - composite: Type, - wildcard: TypeConfig, - schema: BTreeMap>, -} - -impl TypeBuilder { - pub fn new(request: Request) -> Result { - let type_overrides = request.config.codegen.types.clone(); - let lang = request.config.codegen.target.clone(); - let enums: Vec<_> = request - .catalog - .schemas - .iter() - .flat_map(|schema| { - schema - .enums - .iter() - .map(move |enum_| (schema.name.clone(), enum_.name.clone())) - }) - .collect(); - - let type_map = match &*lang { - "python:asyncpg" => PYTHON_ASYNCPG.clone(), - "python:psycopg" => PYTHON_PSYCOPG.clone(), - "typescript:postgres" => TYPESCRIPT_POSTGRES.clone(), - _ => return Err(Error::NotSupportedLanguage(lang)), - }; - - Ok(TypeBuilder { - type_overrides, - enums, - catalog: request.catalog.clone(), - type_map, - }) - } - - pub fn declared(&self, name: &str) -> Type { - let name: Rc = render(&self.type_map.new_type_case, context!(name=>name)).into(); - Type { - declaration: name.clone(), - annotation: name.clone(), - constructor: name.clone(), - import: Default::default(), - pgtype_name: None, - pgtype_schema: None, - } - } - - pub fn composite(&self, type_schema: &Rc, type_name: &Rc) -> Type { - let ctx = &context! 
{type_schema => type_schema, type_name => type_name}; - let composite = &self.type_map.composite; - Type { - declaration: render(&composite.declaration, ctx).into(), - annotation: render(&composite.annotation, ctx).into(), - constructor: render(&composite.constructor, ctx).into(), - import: composite - .import - .iter() - .map(|import| render(import, ctx).into()) - .collect(), - pgtype_name: Some(type_name.clone()), - pgtype_schema: Some(type_schema.clone()), - } - } - - pub fn from_col(&self, column: &Column) -> Type { - let mut type_ = self.from_column_type(&column.type_field); - - type_ = self.array(type_, column.type_field.array_dimensions); - - if let Some(enum_type) = self.try_enum(&column) { - type_ = enum_type; - } - - if column.is_nullable { - return self.null(&type_); - } - - return type_; - } - - pub fn null(&self, type_: &Type) -> Type { - let map = &self.type_map; - let cx = context!(type=> type_); - Type { - annotation: render(&map.null.annotation, &cx).into(), - declaration: render(&map.null.declaration, &cx).into(), - constructor: render(&map.null.constructor, &cx).into(), - import: type_.import.clone(), - pgtype_name: type_.pgtype_name.clone(), - pgtype_schema: type_.pgtype_schema.clone(), - } - } - - pub fn try_enum(&self, column: &Column) -> Option { - let type_name = column.foreign_table_name.clone()?; - let schema_name = column.foreign_table_schema.clone()?; - let full_name = (schema_name.clone(), type_name.clone()); - - if self.enums.contains(&full_name) { - return Some(self.composite(&schema_name, &type_name)); - } - return None; - } - - pub fn array(&self, mut type_: Type, dim: i64) -> Type { - let map = &self.type_map; - if dim == 0 { - return type_; - } - - for _ in 0..dim { - let cx = context!(type => type_); - type_ = Type { - annotation: render(&map.array.annotation, &cx).into(), - declaration: render(&map.array.declaration, &cx).into(), - constructor: render(&map.array.constructor, &cx).into(), - import: type_.import.clone(), - pgtype_name: type_.pgtype_name.clone(), - pgtype_schema: type_.pgtype_schema.clone(), - }; - } - - return Type { - pgtype_name: type_.pgtype_name.map(|ty| format!("_{}", ty).into()), - pgtype_schema: type_.pgtype_schema.map(|ty| format!("_{}", ty).into()), - ..type_ - }; - } - - pub fn from_column_type(&self, ty: &ColumnType) -> Type { - self.resolve(&ty.schema_name, &ty.name) - } - - pub fn from_output_type(&self, ty: &OutputType) -> Type { - return self.resolve(&ty.schema, &ty.name); - } - - pub fn resolve(&self, type_schema: &Rc, type_name: &Rc) -> Type { - if let Some(ty) = self.resolve_from_catalog(type_schema, type_name) { - return ty; - }; - - if !type_name.starts_with('_') { - return self.resolve_non_array(type_schema, type_name); - } - - let type_name: Rc = type_name.strip_prefix('_').unwrap().into(); - return self.array(self.resolve_non_array(&type_schema, &type_name), 1); - } - - pub fn resolve_non_array(&self, type_schema: &Rc, type_name: &Rc) -> Type { - if let Some(ty) = self.resolve_from_catalog(type_schema, type_name) { - return ty; - }; - - let ty = self.resolve_type_config(&type_schema, &type_name); - - Type { - declaration: Default::default(), - constructor: Default::default(), - annotation: ty.annotation.clone(), - import: ty.import.clone(), - pgtype_name: Some(type_name.clone()), - pgtype_schema: Some(type_schema.clone()), - } - } - - fn resolve_type_config(&self, type_schema: &str, type_name: &str) -> TypeConfig { - let name = format!("{}.{}", type_schema, type_name); - - if let Some(value) = 
self.type_overrides.get(&*name) { - return value.clone(); - } - - self.default_type_resolution(type_schema, type_name) - } - - fn resolve_from_catalog(&self, type_schema: &Rc, type_name: &Rc) -> Option { - let schema = self - .catalog - .schemas - .iter() - .find(|schema| &schema.name == type_schema)?; - - schema - .records - .iter() - .find(|model| &model.name == type_name)?; - - Some(Self::composite(&self, type_schema, type_name)) - } - - fn default_type_resolution(&self, type_schema: &str, type_name: &str) -> TypeConfig { - self.type_map - .schema - .get(&*type_schema) - .map(|schema| schema.get(&*type_name)) - .flatten() - .cloned() - .unwrap_or_else(|| self.type_map.wildcard.clone()) - } -} - -const PYTHON_ASYNCPG: LazyLock = LazyLock::new(|| { - let json = include_str!("../templates/python:asyncpg/types.json"); - return serde_json::from_str(json).expect("failed to deserialize python:asyncpg/types.json "); -}); - -const PYTHON_PSYCOPG: LazyLock = LazyLock::new(|| { - let json = include_str!("../templates/python:psycopg/types.json"); - return serde_json::from_str(json).expect("failed to deserialize python:psycopg/types.json "); -}); - -const TYPESCRIPT_POSTGRES: LazyLock = LazyLock::new(|| { - let json = include_str!("../templates/typescript:postgres/types.json"); - return serde_json::from_str(json) - .expect("failed to deserialize typescript:postgres/types.json "); -}); diff --git a/codegen/src/utils.rs b/codegen/src/utils.rs index 12d5c57..a7bfb33 100644 --- a/codegen/src/utils.rs +++ b/codegen/src/utils.rs @@ -5,7 +5,7 @@ use std::{ }; use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase}; -use minijinja::{Environment, Template}; +use minijinja::{Environment, State, Template, Value}; use regex::bytes::Regex; use serde::Serialize; diff --git a/codegen/tests/models.rs b/codegen/tests/models.rs deleted file mode 100644 index e819fb3..0000000 --- a/codegen/tests/models.rs +++ /dev/null @@ -1,15 +0,0 @@ -use pgc_codegen::{model_modules::ModelModules, request::Request}; - -#[test] -fn load_models() { - // let model_modules = ModelModules::new(request); - let file_generator = pgc_codegen::file_generator::FileGenerator::new(&request()).unwrap(); - let x = file_generator.render_files().unwrap(); - dbg!(x); -} - -fn request() -> Request { - serde_json::from_str(REQUEST).unwrap() -} - -const REQUEST: &str = 
r#"{"catalog":{"schemas":[{"name":"public","enums":[],"models":[{"kind":"table","name":"author","columns":[{"name":"id","type":{"name":"uuid","display":"uuid","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":"gen_random_uuid()","is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":true,"foreign_table_name":null,"foreign_table_schema":null},{"name":"name","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"birthday","type":{"name":"date","display":"date","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":true,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null}]},{"kind":"table","name":"book","columns":[{"name":"id","type":{"name":"uuid","display":"uuid","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":"gen_random_uuid()","is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":true,"foreign_table_name":null,"foreign_table_schema":null},{"name":"title","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"author_id","type":{"name":"uuid","display":"uuid","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":true,"is_primary_key":false,"foreign_table_name":"author","foreign_table_schema":"public"},{"name":"year","type":{"name":"int4","display":"integer","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"isbn","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":true,"is_nullable":false,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"is_best_seller","type":{"name":"bool","display":"boolean","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":"false","is_unique":false,"is_nullable":true,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"genre","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":true,"is_primary_key":false,"foreign_table_name":"genre","foreign_table_schema":"public"}]},{"kind":"table","name":"genre","columns":[{"name":"id","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":true,"foreign_table_name":null,"foreign_table_schema":null}]}]}]},"queries":[{"query":"select book from book where 
id = $1;","name":"fetch_by_id","command":"one","path":"book.sql","annotations":{"name":{"value":"fetch_by_id :one","line":2}},"output":[{"name":"book","type":{"schema":"public","name":"book","id":16401}}],"parameters":[{"name":"id","not_null":true,"type":{"schema":"pg_catalog","name":"uuid","id":2950}}]},{"query":"select author, book from author\njoin book on book.author_id = author.id\nwhere author.id = $1;","name":"fetch_by_author_id","command":"many","path":"book.sql","annotations":{"name":{"value":"fetch_by_author_id :many","line":5},"namespace":{"value":"book.author","line":6}},"output":[{"name":"author","type":{"schema":"public","name":"author","id":16386}},{"name":"book","type":{"schema":"public","name":"book","id":16401}}],"parameters":[{"name":"id","not_null":true,"type":{"schema":"pg_catalog","name":"uuid","id":2950}}]},{"query":"insert into book (\n title,\n author_id,\n year,\n isbn,\n genre,\n is_best_seller\n)\nvalues (\n $1,\n $2,\n $3,\n $4,\n $5,\n coalesce($6, false)\n)\non conflict (id) do update set\n title = $1,\n author_id = $2,\n year = $3,\n isbn = $4,\n is_best_seller = coalesce($6, false),\n genre = $5\nreturning book;","name":"upsert","command":"one","path":"book.sql","annotations":{"name":{"value":"upsert :one","line":12}},"output":[{"name":"book","type":{"schema":"public","name":"book","id":16401}}],"parameters":[{"name":"book.title","not_null":true,"type":{"schema":"pg_catalog","name":"text","id":25}},{"name":"book.author_id","not_null":true,"type":{"schema":"pg_catalog","name":"uuid","id":2950}},{"name":"book.year","not_null":true,"type":{"schema":"pg_catalog","name":"int4","id":23}},{"name":"book.isbn","not_null":true,"type":{"schema":"pg_catalog","name":"text","id":25}},{"name":"book.genre","not_null":true,"type":{"schema":"pg_catalog","name":"text","id":25}},{"name":"book.is_best_seller","not_null":false,"type":{"schema":"pg_catalog","name":"bool","id":16}}]},{"query":"select author from author where id = $1;","name":"fetch_by_id","command":"one","path":"author.sql","annotations":{"name":{"value":"fetch_by_id :one","line":2}},"output":[{"name":"author","type":{"schema":"public","name":"author","id":16386}}],"parameters":[{"name":"id","not_null":true,"type":{"schema":"pg_catalog","name":"uuid","id":2950}}]},{"query":"select * from author;","name":"fetch_all","command":"many","path":"author.sql","annotations":{"name":{"value":"fetch_all :many","line":5}},"output":[{"name":"id","type":{"schema":"pg_catalog","name":"uuid","id":2950}},{"name":"name","type":{"schema":"pg_catalog","name":"text","id":25}},{"name":"birthday","type":{"schema":"pg_catalog","name":"date","id":1082}}],"parameters":[]},{"query":"select book\nfrom author\njoin book on author.id = book.author_id\nwhere author.id = $1;","name":"fetch_books","command":"many","path":"author.sql","annotations":{"name":{"value":"fetch_books :many","line":8}},"output":[{"name":"book","type":{"schema":"public","name":"book","id":16401}}],"parameters":[{"name":"id","not_null":true,"type":{"schema":"pg_catalog","name":"uuid","id":2950}}]},{"query":"select count(*) from author;","name":"count","command":"val","path":"author.sql","annotations":{"name":{"value":"count 
:val","line":15}},"output":[{"name":"count","type":{"schema":"pg_catalog","name":"int8","id":20}}],"parameters":[]}],"config":{"version":"1","queries":["book.sql","author.sql","queries.sql"],"disable_cache":false,"database":{"migrations":"schema.sql","pglite":{"extensions":{"vector":"@electric-sql/pglite/vector"}}},"codegen":{"out":"./src/queries","target":"python:asyncpg","options":null},"env_file":[".env"]}}"#; From dc0eb06ef16723dd410cca4eb6df4a5b0f5a14bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Mon, 25 Aug 2025 23:39:14 -0400 Subject: [PATCH 06/10] refactor --- codegen/Cargo.lock | 4 +- codegen/foo.json | 55 --- codegen/src/error.rs | 24 +- codegen/src/ir/mod.rs | 42 ++- codegen/src/ir/model_modules/model.rs | 2 +- codegen/src/ir/model_service.rs | 7 +- codegen/src/ir/query_namespace/method/mod.rs | 2 +- codegen/src/ir/query_namespace/mod.rs | 7 +- codegen/src/ir/query_namespace_service.rs | 12 +- codegen/src/ir/type.rs | 12 +- codegen/src/ir/type_service.rs | 45 ++- codegen/src/main.rs | 33 +- codegen/src/mock.rs | 35 ++ codegen/src/presentation/environment.rs | 103 +++++- .../presentation/file_generation_config.rs | 10 +- codegen/src/presentation/mod.rs | 56 ++- .../python/file_generation_config.rs | 5 - .../src/presentation/python/file_generator.rs | 100 ----- codegen/src/presentation/python/mod.rs | 49 ++- .../python/templates/asyncpg/model.j2 | 26 +- .../python/templates/asyncpg/model_init.j2 | 8 +- .../python/templates/asyncpg/query.j2 | 61 ++-- .../python/templates/asyncpg/types.json | 108 ------ .../python/templates/psycopg/model.j2 | 28 ++ .../python/templates/psycopg/model.py.jinja2 | 32 -- .../python/templates/psycopg/model_init.j2 | 12 + .../templates/psycopg/model_init.py.jinja2 | 12 - .../python/templates/psycopg/query.j2} | 63 ++-- .../python/templates/psycopg/query.py.jinja2 | 174 --------- .../python/templates/psycopg/types.json | 108 ------ .../presentation/python/type_map_service.rs | 26 +- ...ile_generator.rs => templating_service.rs} | 73 ++-- .../src/presentation/type_mapping_service.rs | 54 +-- codegen/src/request.rs | 2 - codegen/src/type.rs | 24 -- codegen/src/utils.rs | 7 +- codegen/templates/python:asyncpg/config.json | 1 - .../templates/python:asyncpg/model.py.jinja2 | 32 -- .../python:asyncpg/model_init.py.jinja2 | 12 - .../templates/python:asyncpg/query.py.jinja2 | 172 --------- codegen/templates/python:asyncpg/types.json | 105 ------ codegen/templates/python:psycopg/config.json | 1 - .../templates/python:psycopg/model.py.jinja2 | 32 -- .../python:psycopg/model_init.py.jinja2 | 12 - codegen/templates/python:psycopg/types.json | 108 ------ .../templates/typescript:postgres/config.json | 1 - .../typescript:postgres/model.jinja2 | 32 -- .../typescript:postgres/model_init.jinja2 | 263 -------------- .../templates/typescript:postgres/parser.ts | 242 ------------- .../typescript:postgres/query.jinja2 | 150 -------- .../templates/typescript:postgres/types.json | 65 ---- codegen/tests/request.json | 341 ++++++++++++++++++ pgc.yaml | 3 +- src/build/build.service.ts | 2 +- src/codegen/codegen.service.ts | 1 + src/config/config.types.ts | 3 +- src/main.ts | 1 + src/schema_service/enum.service.ts | 2 +- src/schema_service/excluder.service.ts | 2 +- src/schema_service/schema.service.ts | 2 +- src/schema_service/schema.types.ts | 2 +- 61 files changed, 943 insertions(+), 2065 deletions(-) delete mode 100644 codegen/foo.json delete mode 100644 codegen/src/presentation/python/file_generation_config.rs delete mode 100644 
codegen/src/presentation/python/file_generator.rs delete mode 100644 codegen/src/presentation/python/templates/asyncpg/types.json create mode 100644 codegen/src/presentation/python/templates/psycopg/model.j2 delete mode 100644 codegen/src/presentation/python/templates/psycopg/model.py.jinja2 create mode 100644 codegen/src/presentation/python/templates/psycopg/model_init.j2 delete mode 100644 codegen/src/presentation/python/templates/psycopg/model_init.py.jinja2 rename codegen/{templates/python:psycopg/query.py.jinja2 => src/presentation/python/templates/psycopg/query.j2} (75%) delete mode 100644 codegen/src/presentation/python/templates/psycopg/query.py.jinja2 delete mode 100644 codegen/src/presentation/python/templates/psycopg/types.json rename codegen/src/presentation/{file_generator.rs => templating_service.rs} (55%) delete mode 100644 codegen/src/type.rs delete mode 100644 codegen/templates/python:asyncpg/config.json delete mode 100644 codegen/templates/python:asyncpg/model.py.jinja2 delete mode 100644 codegen/templates/python:asyncpg/model_init.py.jinja2 delete mode 100644 codegen/templates/python:asyncpg/query.py.jinja2 delete mode 100644 codegen/templates/python:asyncpg/types.json delete mode 100644 codegen/templates/python:psycopg/config.json delete mode 100644 codegen/templates/python:psycopg/model.py.jinja2 delete mode 100644 codegen/templates/python:psycopg/model_init.py.jinja2 delete mode 100644 codegen/templates/python:psycopg/types.json delete mode 100644 codegen/templates/typescript:postgres/config.json delete mode 100644 codegen/templates/typescript:postgres/model.jinja2 delete mode 100644 codegen/templates/typescript:postgres/model_init.jinja2 delete mode 100644 codegen/templates/typescript:postgres/parser.ts delete mode 100644 codegen/templates/typescript:postgres/query.jinja2 delete mode 100644 codegen/templates/typescript:postgres/types.json create mode 100644 codegen/tests/request.json diff --git a/codegen/Cargo.lock b/codegen/Cargo.lock index 4743361..9060d38 100644 --- a/codegen/Cargo.lock +++ b/codegen/Cargo.lock @@ -161,9 +161,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.104" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", diff --git a/codegen/foo.json b/codegen/foo.json deleted file mode 100644 index 7aa821d..0000000 --- a/codegen/foo.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "rows": [ - { "schema": "pg_catalog", "name": "bool", "id": 16 }, - { "schema": "pg_catalog", "name": "bytea", "id": 17 }, - { "schema": "pg_catalog", "name": "char", "id": 18 }, - { "schema": "pg_catalog", "name": "int8", "id": 20 }, - { "schema": "pg_catalog", "name": "int2", "id": 21 }, - { "schema": "pg_catalog", "name": "int2vector", "id": 22 }, - { "schema": "pg_catalog", "name": "int4", "id": 23 }, - { "schema": "pg_catalog", "name": "text", "id": 25 }, - { "schema": "pg_catalog", "name": "json", "id": 114 }, - { "schema": "pg_catalog", "name": "point", "id": 600 }, - { "schema": "pg_catalog", "name": "path", "id": 602 }, - { "schema": "pg_catalog", "name": "box", "id": 603 }, - { "schema": "pg_catalog", "name": "polygon", "id": 604 }, - { "schema": "pg_catalog", "name": "line", "id": 628 }, - { "schema": "pg_catalog", "name": "cidr", "id": 650 }, - { "schema": "pg_catalog", "name": "float4", "id": 700 }, - { "schema": "pg_catalog", 
"name": "float8", "id": 701 }, - { "schema": "pg_catalog", "name": "unknown", "id": 705 }, - { "schema": "pg_catalog", "name": "circle", "id": 718 }, - { "schema": "pg_catalog", "name": "macaddr8", "id": 774 }, - { "schema": "pg_catalog", "name": "money", "id": 790 }, - { "schema": "pg_catalog", "name": "macaddr", "id": 829 }, - { "schema": "pg_catalog", "name": "inet", "id": 869 }, - { "schema": "pg_catalog", "name": "aclitem", "id": 1033 }, - { "schema": "pg_catalog", "name": "varchar", "id": 1043 }, - { "schema": "pg_catalog", "name": "date", "id": 1082 }, - { "schema": "pg_catalog", "name": "time", "id": 1083 }, - { "schema": "pg_catalog", "name": "timestamp", "id": 1114 }, - { "schema": "pg_catalog", "name": "timestamptz", "id": 1184 }, - { "schema": "pg_catalog", "name": "interval", "id": 1186 }, - { "schema": "pg_catalog", "name": "timetz", "id": 1266 }, - { "schema": "pg_catalog", "name": "numeric", "id": 1700 }, - { "schema": "pg_catalog", "name": "record", "id": 2249 }, - { "schema": "pg_catalog", "name": "any", "id": 2276 }, - { "schema": "pg_catalog", "name": "anyarray", "id": 2277 }, - { "schema": "pg_catalog", "name": "anyelement", "id": 2283 }, - { "schema": "pg_catalog", "name": "anynonarray", "id": 2776 }, - { "schema": "pg_catalog", "name": "uuid", "id": 2950 }, - { "schema": "pg_catalog", "name": "anyenum", "id": 3500 }, - { "schema": "pg_catalog", "name": "anyrange", "id": 3831 }, - { "schema": "pg_catalog", "name": "jsonb", "id": 3802 }, - { "schema": "pg_catalog", "name": "int4range", "id": 3904 }, - { "schema": "pg_catalog", "name": "numrange", "id": 3906 }, - { "schema": "pg_catalog", "name": "tsrange", "id": 3908 }, - { "schema": "pg_catalog", "name": "tstzrange", "id": 3910 }, - { "schema": "pg_catalog", "name": "daterange", "id": 3912 }, - { "schema": "pg_catalog", "name": "int8range", "id": 3926 }, - { "schema": "pg_catalog", "name": "anycompatible", "id": 5077 }, - { "schema": "pg_catalog", "name": "anycompatiblearray", "id": 5078 }, - { "schema": "pg_catalog", "name": "anycompatiblenonarray", "id": 5079 }, - { "schema": "pg_catalog", "name": "anycompatiblerange", "id": 5080 } - ] -} diff --git a/codegen/src/error.rs b/codegen/src/error.rs index 4662cc1..0c731ec 100644 --- a/codegen/src/error.rs +++ b/codegen/src/error.rs @@ -3,18 +3,30 @@ use std::sync::Arc; use thiserror::Error; #[derive(Error, Debug)] pub enum Error { - #[error("failed to deserialize request: {0}.\nThis may be a versioning issue between pgc and the codegen plugin being used.")] + #[error( + "failed to deserialize request: {0}.\nThis may be a versioning issue between pgc and the codegen plugin being used." + )] RequestDeserialization(#[from] serde_json::Error), #[error("language {0} is not supported.")] - NotSupportedLanguage(Arc), + UnsupportedLanguage(Arc), - #[error("the language {language} requires the configuration option codegen.options.{option} to be present.")] - MissingConfigurationOption { + #[error("driver {driver} is not supported for {language}.")] + UnsupportedDriver { + driver: Arc, language: Arc, - option: Arc, }, - #[error("failed to render or parse a template: {0}.\nThis is a bug in pgc, please report the issue at \"https://github.com/tvallotton/pgc\".")] + #[error( + "the language {language} requires the configuration option codegen.options.{option} to be present." 
+ )] + MissingConfigurationOption { + language: &'static str, + option: &'static str, + }, + + #[error( + "failed to render or parse a template: {0}.\nThis is a bug in pgc, please report the issue at \"https://github.com/tvallotton/pgc\"." + )] TemplateError(#[from] minijinja::Error), } diff --git a/codegen/src/ir/mod.rs b/codegen/src/ir/mod.rs index 7aeac89..7bbacd5 100644 --- a/codegen/src/ir/mod.rs +++ b/codegen/src/ir/mod.rs @@ -1,8 +1,12 @@ -use crate::{error::Error, request::Request}; +use crate::{ + error::Error, + ir::{model_service::ModelService, query_namespace_service::QueryNamespaceService}, + request::{Catalog, Request}, +}; pub use model_modules::*; pub use query_namespace::*; -pub use r#type::Type; use serde::Serialize; +pub use r#type::Type; pub use type_service::TypeService; mod method_service; mod model_modules; @@ -12,9 +16,41 @@ mod query_namespace_service; mod r#type; mod type_service; -#[derive(Serialize)] +#[derive(Serialize, Clone)] pub struct Ir { pub request: Request, pub query_namespace: query_namespace::QueryNamespace, pub model_modules: model_modules::ModelModules, } + +pub struct IrService { + query_namespace_service: QueryNamespaceService, + model_service: ModelService, +} + +impl IrService { + pub fn new(request: Request) -> Result { + let type_service = TypeService { + catalog: request.catalog.clone(), + }; + let query_namespace_service = QueryNamespaceService::new(&request)?; + let model_service = ModelService { + type_service, + catalog: request.catalog.clone(), + }; + Ok(IrService { + query_namespace_service, + model_service, + }) + } + + pub fn build(&mut self, request: Request) -> Ir { + let model_modules = self.model_service.create_model_modules(); + let query_namespace = self.query_namespace_service.build(); + Ir { + model_modules, + query_namespace, + request, + } + } +} diff --git a/codegen/src/ir/model_modules/model.rs b/codegen/src/ir/model_modules/model.rs index ae7acea..d8afa04 100644 --- a/codegen/src/ir/model_modules/model.rs +++ b/codegen/src/ir/model_modules/model.rs @@ -4,7 +4,7 @@ use serde::{Deserialize, Serialize}; use crate::{ ir::r#type::Type, - request::{Column, Record}, + request::Record, }; #[derive(Clone, Serialize, Deserialize)] diff --git a/codegen/src/ir/model_service.rs b/codegen/src/ir/model_service.rs index 885f024..3d9f40e 100644 --- a/codegen/src/ir/model_service.rs +++ b/codegen/src/ir/model_service.rs @@ -10,16 +10,19 @@ use crate::{ pub struct ModelService { pub type_service: TypeService, + pub catalog: Catalog, } impl ModelService { - pub fn create_model_modules(&self, catalog: &Catalog) { + pub fn create_model_modules(&self) -> ModelModules { let mut modules = ModelModules::default(); - for schema in catalog.schemas.iter() { + for schema in self.catalog.schemas.iter() { let module = self.create_model_module(schema); modules.model_modules.insert(schema.name.clone(), module); } + + modules } fn create_model_module(&self, schema: &Schema) -> ModelModule { diff --git a/codegen/src/ir/query_namespace/method/mod.rs b/codegen/src/ir/query_namespace/method/mod.rs index f8fc79c..7cac8f5 100644 --- a/codegen/src/ir/query_namespace/method/mod.rs +++ b/codegen/src/ir/query_namespace/method/mod.rs @@ -1,5 +1,5 @@ use std::{ - collections::{btree_map::Entry, BTreeMap}, + collections::BTreeMap, sync::Arc, }; diff --git a/codegen/src/ir/query_namespace/mod.rs b/codegen/src/ir/query_namespace/mod.rs index 7999e4f..795144b 100644 --- a/codegen/src/ir/query_namespace/mod.rs +++ b/codegen/src/ir/query_namespace/mod.rs @@ -5,15 +5,14 @@ 
use std::{ mod method; use crate::{ error::Error, - ir::{query_namespace_service::QueryNamespaceBuilder, r#type::Type}, + ir::{query_namespace_service::QueryNamespaceService, r#type::Type}, request::Request, - utils::to_pascal_case, }; pub use method::Method; pub use method::MethodModel; use serde::{Deserialize, Serialize}; -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, Clone)] pub struct QueryNamespace { pub name: String, pub subnamespaces: BTreeMap, QueryNamespace>, @@ -22,7 +21,7 @@ pub struct QueryNamespace { impl QueryNamespace { pub fn from_request(request: &Request) -> Result { - Ok(QueryNamespaceBuilder::new(request)?.build()) + Ok(QueryNamespaceService::new(request)?.build()) } pub fn root() -> QueryNamespace { diff --git a/codegen/src/ir/query_namespace_service.rs b/codegen/src/ir/query_namespace_service.rs index 6e38a11..cfec2ba 100644 --- a/codegen/src/ir/query_namespace_service.rs +++ b/codegen/src/ir/query_namespace_service.rs @@ -1,25 +1,23 @@ use crate::{ error::Error, ir::{ - method_service::MethodService, - query_namespace::QueryNamespace, - type_service::{self, TypeService}, + method_service::MethodService, query_namespace::QueryNamespace, type_service::TypeService, }, request::{Query, Request}, }; -pub struct QueryNamespaceBuilder { +pub struct QueryNamespaceService { request: Request, method_service: MethodService, namespace: QueryNamespace, } -impl QueryNamespaceBuilder { - pub fn new(request: &Request) -> Result { +impl QueryNamespaceService { + pub fn new(request: &Request) -> Result { let type_service = TypeService { catalog: request.catalog.clone(), }; - Ok(QueryNamespaceBuilder { + Ok(QueryNamespaceService { request: request.clone(), method_service: MethodService::new(type_service.clone()), namespace: QueryNamespace::root(), diff --git a/codegen/src/ir/type.rs b/codegen/src/ir/type.rs index afb8bfd..c313561 100644 --- a/codegen/src/ir/type.rs +++ b/codegen/src/ir/type.rs @@ -1,14 +1,10 @@ use std::sync::Arc; -use minijinja::value::{Object, ObjectRepr}; +use minijinja::value::{Enumerator, Object, ObjectRepr}; use serde::{Deserialize, Serialize}; -use crate::{ - ir::model_modules::Model, - request::{Column, Enum, Record}, -}; - #[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Debug, Serialize, Deserialize)] +#[serde(tag = "t", content = "c")] pub enum Type { // A type not matching any of these Other { @@ -208,6 +204,10 @@ impl Object for Type { fn repr(self: &Arc) -> minijinja::value::ObjectRepr { ObjectRepr::Plain } + + // fn enumerate(self: &Arc) -> minijinja::value::Enumerator { + // Enumerator::Str(&[]) + // } } #[test] diff --git a/codegen/src/ir/type_service.rs b/codegen/src/ir/type_service.rs index 3ea7afb..e37ec87 100644 --- a/codegen/src/ir/type_service.rs +++ b/codegen/src/ir/type_service.rs @@ -1,10 +1,7 @@ use std::sync::Arc; use super::r#type::Type; -use crate::{ - ir::model_modules::{Model, ModelModules}, - request::{Catalog, Column, OutputType, Record, Request, Schema}, -}; +use crate::request::{Catalog, Column, OutputType, Schema}; #[derive(Clone)] pub struct TypeService { pub catalog: Catalog, @@ -99,3 +96,43 @@ impl TypeService { .unwrap_or(Type::Any) } } + +#[cfg(test)] +mod test { + use crate::{ + ir::{Type, TypeService}, + mock::{self, records, schema}, + }; + + fn type_service() -> TypeService { + TypeService { + catalog: mock::catalog(), + } + } + + #[test] + fn type_service_user_defined_model() { + let type_service = type_service(); + let schema = schema(); + let record = &schema.records[0]; + let user_defined = 
type_service.user_defined_model(&schema, &record.name); + let Type::UserDefined { name, .. } = user_defined else { + unreachable!() + }; + assert_eq!(name, record.name); + } + + #[test] + fn type_service_get_schema() { + let type_service = type_service(); + let name = "public"; + let schema = type_service.get_schema(name).unwrap(); + assert_eq!(&*schema.name, name); + } + + #[test] + fn type_service_from_pg_catalog() { + let type_service = type_service(); + assert_eq!(type_service.from_pg_catalog("int4range"), Type::Int4Range) + } +} diff --git a/codegen/src/main.rs b/codegen/src/main.rs index cdc74ba..79deb47 100644 --- a/codegen/src/main.rs +++ b/codegen/src/main.rs @@ -1,5 +1,7 @@ +use crate::ir::{Ir, IrService}; +use crate::presentation::PresentationService; use crate::request::Request; -use crate::response::{File, Response}; +use crate::response::Response; use error::Error; use serde::Serialize; use serde_json::json; @@ -12,7 +14,6 @@ pub mod mock; pub mod presentation; pub mod request; pub mod response; -pub mod r#type; mod utils; @@ -32,14 +33,16 @@ pub extern "C" fn build(ptr: *mut u8, size: usize) -> *const u8 { } } -fn try_build(ptr: *mut u8, size: usize) -> Result { +fn try_build(ptr: *mut u8, size: usize) -> Result { let request = load_request(ptr, size)?; - todo!(); - Ok(0) - // let generator = FileGenerator::new(&request)?; - // Ok(Response { - // files: generator.render_files()?, - // }) + + let ir = IrService::new(request.clone())?.build(request); + + let presentation_service = PresentationService { ir }; + + Ok(Response { + files: presentation_service.generate()?, + }) } static RESPONSE_LENGTH: AtomicU64 = AtomicU64::new(0); @@ -64,3 +67,15 @@ fn load_request(ptr: *mut u8, size: usize) -> Result { } fn main() {} + +#[test] +fn test_from_catalog() { + let contents = include_str!("../tests/request.json"); + + println!( + "{:?}", + try_build(contents.as_ptr() as _, contents.len()) + .unwrap() + .files + ); +} diff --git a/codegen/src/mock.rs b/codegen/src/mock.rs index 8b13789..09e3561 100644 --- a/codegen/src/mock.rs +++ b/codegen/src/mock.rs @@ -1 +1,36 @@ +use crate::request::{Catalog, Request}; +use crate::{ + ir::TypeService, + request::{Enum, Record, Schema}, +}; +use std::sync::Arc; + +pub fn enums() -> [Enum; 1] { + [Enum { + name: "myenum".into(), + values: [].into(), + }] +} + +pub fn records() -> [Record; 1] { + [Record { + kind: "table".into(), + name: "".into(), + columns: Arc::default(), + }] +} + +pub fn schema() -> Schema { + Schema { + name: "public".into(), + enums: enums().into(), + records: records().into(), + } +} + +pub fn catalog() -> Catalog { + Catalog { + schemas: [schema()].into(), + } +} diff --git a/codegen/src/presentation/environment.rs b/codegen/src/presentation/environment.rs index f282589..78b9bcb 100644 --- a/codegen/src/presentation/environment.rs +++ b/codegen/src/presentation/environment.rs @@ -1,38 +1,78 @@ use std::{ collections::BTreeMap, + panic::{AssertUnwindSafe, catch_unwind}, sync::{Arc, Mutex}, }; use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase}; -use minijinja::{Environment, State, Value}; +use indexmap::map::serde_seq::deserialize; +use minijinja::{Environment, State, Value, context, value::Object}; use regex::bytes::Regex; +use serde::{Deserialize, Deserializer, Serialize, de::IntoDeserializer}; -use crate::{ir::Type, presentation::type_mapping_service::TypeMapService}; +use crate::{ + error::Error, + ir::{Ir, Type}, + presentation::{ + file_generation_config::TemplateGenConfig, + 
type_mapping_service::{OverriddenTypeMapService, TypeMapService}, + }, +}; -pub fn env(service: Arc) -> Environment<'static> { +pub fn env(ir: Ir, config: TemplateGenConfig) -> Result, Error> { let mut env = minijinja::Environment::new(); + + add_templates(&mut env, config)?; + add_string_filters(&mut env); + add_type_filters(&mut env, ir, config); + + Ok(env) +} + +pub fn add_type_filters(env: &mut Environment<'static>, ir: Ir, config: TemplateGenConfig) { + let service = Arc::new(OverriddenTypeMapService::new(ir, config.type_map_service)); + let service_ = service.clone(); - env.add_filter("annotation", move |state: &State, ty: &Type| -> Arc { - service_.get(module_path(state), ty).annotation + + env.add_filter("annotation", move |state: &State, ty: Value| -> Arc { + service_.get(module_path(state), &as_type(ty)).annotation }); + let service_ = service.clone(); + env.add_filter( "name", - move |state: &State, ty: &Type| -> Option> { - service_.get(module_path(state), ty).name + move |state: &State, ty: Value| -> Option> { + service_.get(module_path(state), &as_type(ty)).name }, ); let service_ = service.clone(); - env.add_filter("import", move |state: &State, ty: &Type| -> Vec> { - service_.get(module_path(state), ty).import - }); + env.add_filter( + "imports", + move |state: &State, ty: Value| -> Vec> { + service_.get(module_path(state), &as_type(ty)).import + }, + ); let service_ = service.clone(); env.add_filter( "type_module", - move |state: &State, ty: &Type| -> Option> { - service_.get(module_path(state), ty).module + move |state: &State, ty: Value| -> Option> { + service_.get(module_path(state), &as_type(ty)).module }, ); +} + +pub fn add_templates( + env: &mut Environment<'static>, + config: TemplateGenConfig, +) -> Result<(), Error> { + env.add_template("query", config.query_template)?; + env.add_template("model", config.model_template)?; + env.add_template("model_init", config.model_init_template)?; + Ok(()) +} + +pub fn add_string_filters(env: &mut Environment<'static>) { env.add_filter("to_camel_case", to_camel_case); env.add_filter("to_pascal_case", to_pascal_case); env.add_filter("to_snake_case", to_snake_case); @@ -42,16 +82,26 @@ pub fn env(service: Arc) -> Environment<'static> { env.add_filter("starts_with", starts_with); env.add_filter("strip_prefix", strip_prefix); env.add_filter("regex_replace", regex_replace); - env } -pub fn module_path<'a>(state: &State<'_, 'a>) -> Arc<[Arc]> { - state - .lookup("module_path") - .unwrap() - .downcast_object_ref::]>>() - .unwrap() - .clone() +pub fn module_path<'a>(state: &State<'_, 'a>) -> Vec { + use serde::de::value::SeqDeserializer; + let this_module = state.lookup("this_module").unwrap(); + return >::deserialize(SeqDeserializer::new(this_module.try_iter().unwrap())) + .unwrap(); +} + +pub fn as_type(value: Value) -> Type { + let Ok(value) = serde_json::to_value(value) else { + return Type::AnyEnum; + }; + match serde_json::from_value(value) { + Ok(ty) => ty, + Err(err) => Type::Other { + schema: format!("{err:?}").into(), + name: "failed".into(), + }, + } } pub fn regex_replace(text: &str, pattern: &str, replacement: &str) -> String { @@ -98,3 +148,16 @@ pub fn to_screaming_snake_case(s: &str) -> String { pub fn to_kebab_case(s: &str) -> String { s.to_kebab_case() } + +#[test] +fn foo() { + let mut env = Environment::new(); + + env.add_filter("foo", |ty: &Value| { + let ty: Type = serde_json::from_value(serde_json::to_value(ty).unwrap()).unwrap(); + "works" + }); + + let content = env.render_str("{{ x | foo }}", context! 
{ x => Type::MacAddr}); + assert!(content.unwrap().contains("works")); +} diff --git a/codegen/src/presentation/file_generation_config.rs b/codegen/src/presentation/file_generation_config.rs index 8fc777e..1772600 100644 --- a/codegen/src/presentation/file_generation_config.rs +++ b/codegen/src/presentation/file_generation_config.rs @@ -1,5 +1,13 @@ -pub struct FileGenerationConfig { +use crate::{presentation::type_mapping_service::TypeMapService, response::File}; + +#[derive(Clone, Copy)] +pub struct TemplateGenConfig { pub query_directory_entrypoint: &'static str, pub model_directory_entrypoint: &'static str, pub file_extension: &'static str, + pub query_template: &'static str, + pub model_template: &'static str, + pub model_init_template: &'static str, + pub type_map_service: &'static dyn TypeMapService, + pub static_files: &'static [File], } diff --git a/codegen/src/presentation/mod.rs b/codegen/src/presentation/mod.rs index 3272245..aff4022 100644 --- a/codegen/src/presentation/mod.rs +++ b/codegen/src/presentation/mod.rs @@ -1,16 +1,58 @@ -use crate::{ir::Ir, request::Request}; +use crate::{ + error::Error, + ir::Ir, + presentation::{templating_service::TemplatingService, type_mapping_service::TypeMapService}, + request::Codegen, + response::File, +}; mod python; mod typescript; mod file_generation_config; -mod file_generator; +mod templating_service; mod type_mapping_service; mod environment; -// pub fn generate_files(ir: Ir) { -// match ir.request.config.codegen { -// "python" => -// } -// } +pub struct PresentationService { + pub ir: Ir, +} + +trait FileGeneratorService { + fn generate(&self) -> Result, Error>; +} + +impl PresentationService { + pub fn generate(&self) -> Result, Error> { + self.templating_service()?.generate() + } + + fn templating_service(&self) -> Result { + let Codegen { + language, driver, .. + } = self.ir.request.config.codegen.clone(); + + let config = match (&*language, &*driver) { + ("python", "asyncpg") => python::asyncpg(&self.ir)?, + ("python", "psycopg") => python::psycopg(&self.ir)?, + ("python", _) => return Err(Error::UnsupportedLanguage(language)), + _ => return Err(Error::UnsupportedLanguage(language)), + }; + + TemplatingService::new(self.ir.clone(), config) + } + + pub fn type_map_service(&self) -> Result<&'static dyn TypeMapService, Error> { + let Codegen { + language, driver, .. 
+ } = self.ir.request.config.codegen.clone(); + + match (&*language, &*driver) { + ("python", "asyncpg") => Ok(&python::AsyncpgTypeMapService), + ("python", "psycopg") => Ok(&python::PsycopgTypeMapService), + ("python", _) => return Err(Error::UnsupportedLanguage(language)), + _ => return Err(Error::UnsupportedLanguage(language)), + } + } +} diff --git a/codegen/src/presentation/python/file_generation_config.rs b/codegen/src/presentation/python/file_generation_config.rs deleted file mode 100644 index 8fc777e..0000000 --- a/codegen/src/presentation/python/file_generation_config.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub struct FileGenerationConfig { - pub query_directory_entrypoint: &'static str, - pub model_directory_entrypoint: &'static str, - pub file_extension: &'static str, -} diff --git a/codegen/src/presentation/python/file_generator.rs b/codegen/src/presentation/python/file_generator.rs deleted file mode 100644 index 5a295bf..0000000 --- a/codegen/src/presentation/python/file_generator.rs +++ /dev/null @@ -1,100 +0,0 @@ -use std::sync::Arc; - -use minijinja::{context, Environment}; - -use crate::{ - error::Error, - ir::{Ir, ModelModule, QueryNamespace}, - presentation::python::file_generation_config::FileGenerationConfig, - response::File, -}; - -pub struct FileGeneratorService { - pub ir: Ir, - pub config: FileGenerationConfig, - pub environment: Environment<'static>, -} - -impl FileGeneratorService { - fn files(&self) -> Result, Error> { - let mut files = self.model_module_files()?; - self.add_query_files(&mut files); - return Ok(files); - } - - fn model_module_files(&self) -> Result, Error> { - let mut files = vec![]; - for module in self.ir.model_modules.model_modules.values() { - self.add_model_module_file(&mut files, module)?; - } - - Ok(files) - } - - pub fn add_model_module_file( - &self, - files: &mut Vec, - module: &ModelModule, - ) -> Result<(), Error> { - let filename = format!("models/{}.{}", module.name, &self.config.file_extension); - - let content = self.environment.get_template("model")?.render(context! { - path => ["models", &module.name], - used_types => module.used_types(), - module => module, - })?; - - files.push(File { - path: filename, - content, - }); - Ok(()) - } - - pub fn add_query_files(&self, files: &mut Vec) { - let namespace = &self.ir.query_namespace; - self.add_query_namespaces_recursively(files, &vec![], &namespace); - } - - fn add_query_namespaces_recursively( - &self, - files: &mut Vec, - path: &Vec>, - namespace: &QueryNamespace, - ) -> Result<(), Error> { - self.add_query_namespace(files, path, namespace)?; - - for (name, subnamespace) in namespace.subnamespaces.iter() { - let mut path = path.clone(); - path.push(name.clone()); - self.add_query_namespaces_recursively(files, &path, subnamespace); - } - Ok(()) - } - - pub fn add_query_namespace( - &self, - files: &mut Vec, - path: &Vec>, - namespace: &QueryNamespace, - ) -> Result<(), Error> { - let content = self - .environment - .get_template("query") - .unwrap() - .render(context! 
{ - query_namespace => namespace, - path => path, - ir => self.ir, - })?; - - let path = format!( - "{}.{}", - path.join("/"), - self.config.query_directory_entrypoint - ); - - files.push(File { path, content }); - Ok(()) - } -} diff --git a/codegen/src/presentation/python/mod.rs b/codegen/src/presentation/python/mod.rs index 0a51b9e..038eeb7 100644 --- a/codegen/src/presentation/python/mod.rs +++ b/codegen/src/presentation/python/mod.rs @@ -1,8 +1,47 @@ -pub(super) mod driver; +pub use type_map_service::{AsyncpgTypeMapService, PsycopgTypeMapService}; + +use crate::{error::Error, ir::Ir, presentation::file_generation_config::TemplateGenConfig}; -pub mod file_generation_config; -pub mod file_generator; +pub(super) mod driver; pub mod type_map_service; -pub struct PythonFileGenerator {} -impl PythonFileGenerator {} +pub fn asyncpg(ir: &Ir) -> Result { + check_required_options(&ir)?; + Ok(TemplateGenConfig { + query_directory_entrypoint: "__init__.py", + model_directory_entrypoint: "__init__.py", + file_extension: "py", + query_template: include_str!("./templates/asyncpg/query.j2"), + model_template: include_str!("./templates/asyncpg/model.j2"), + model_init_template: include_str!("./templates/asyncpg/model_init.j2"), + type_map_service: &AsyncpgTypeMapService, + static_files: &[], + }) +} + +pub fn psycopg(ir: &Ir) -> Result { + check_required_options(&ir)?; + Ok(TemplateGenConfig { + query_directory_entrypoint: "__init__.py", + model_directory_entrypoint: "__init__.py", + file_extension: "py", + query_template: include_str!("./templates/psycopg/query.j2"), + model_template: include_str!("./templates/psycopg/model.j2"), + model_init_template: include_str!("./templates/psycopg/model_init.j2"), + type_map_service: &PsycopgTypeMapService, + static_files: &[], + }) +} + +pub fn check_required_options(ir: &Ir) -> Result<(), Error> { + ir.request + .config + .codegen + .options + .get("package") + .ok_or(Error::MissingConfigurationOption { + language: "python", + option: "package", + })?; + Ok(()) +} diff --git a/codegen/src/presentation/python/templates/asyncpg/model.j2 b/codegen/src/presentation/python/templates/asyncpg/model.j2 index 78addbb..6e5279f 100644 --- a/codegen/src/presentation/python/templates/asyncpg/model.j2 +++ b/codegen/src/presentation/python/templates/asyncpg/model.j2 @@ -1,13 +1,15 @@ import dataclasses -{%- if enums %} +{%- if model_module.enums %} import enum {%- endif %} -{%- for import in imports %} -import {{import}} +{%- for type in used_types %} +{%- for import in (type | imports) %} +{{import}} {%- endfor %} -from {{request.config.codegen.options.package}} import models +{%- endfor %} +from {{ir.request.config.codegen.options.package}} import models -{%- for enum in enums %} +{%- for enum in model_module.enums %} class {{enum.name | to_pascal_case }}(enum.StrEnum): {%- for value in enum.values %} @@ -15,18 +17,12 @@ class {{enum.name | to_pascal_case }}(enum.StrEnum): {%- endfor %} {% endfor %} -{%- for model in models %} +{%- for model in model_module.models %} @dataclasses.dataclass -class {{model.type.declaration}}: - {%- for field, type in model.fields %} - {{field}}: {% if type.annotation | starts_with("models." + schema) -%} - {{ type.annotation | strip_prefix("models." 
+ schema + ".") }} - {%- elif type.annotation | starts_with("models.") -%} - {{ type.annotation | to_c_string }} - {%- else -%} - {{ type.annotation }} - {%- endif %} +class {{ model.name | to_pascal_case }}: + {%- for field in model.fields %} + {{field.name}}: {{ field.type | annotation }} {%- endfor %} {%- endfor %} diff --git a/codegen/src/presentation/python/templates/asyncpg/model_init.j2 b/codegen/src/presentation/python/templates/asyncpg/model_init.j2 index 3a811fc..6fc9c54 100644 --- a/codegen/src/presentation/python/templates/asyncpg/model_init.j2 +++ b/codegen/src/presentation/python/templates/asyncpg/model_init.j2 @@ -1,12 +1,12 @@ -{%- for module in model_modules -%} +{%- for module in ir.model_modules.model_modules -%} from . import {{module}} {% endfor -%} -{%- if model_modules["public"] -%} +{%- if ir.model_modules.model_modules["public"] -%} from .public import ( -{%- for model_class in model_modules["public"].classes %} - {{model_class.type.declaration}}, +{%- for model in ir.model_modules.model_modules["public"].models %} + {{ model.name | to_pascal_case }}, {%- endfor %} ) {% endif %} diff --git a/codegen/src/presentation/python/templates/asyncpg/query.j2 b/codegen/src/presentation/python/templates/asyncpg/query.j2 index 03e45a5..86a52cb 100644 --- a/codegen/src/presentation/python/templates/asyncpg/query.j2 +++ b/codegen/src/presentation/python/templates/asyncpg/query.j2 @@ -1,13 +1,14 @@ # This file was automatically generated by pgc -# flake8: noqa -# pylint: disable=unused-import -{%- for import in imports %} -import {{import}} +{%- for type in used_types %} +{% for import in (type | imports) %} +{%- if import != ""%} +{{import}} +{%- endif %} +{%- endfor %} {%- endfor %} -import asyncpg -import typing import dataclasses -from {{request.config.codegen.options.package}} import models +from {{ir.request.config.codegen.options.package}} import models +from asyncpg import Connection {%- for subnamespace in query_namespace.subnamespaces %} from . import {{subnamespace}} {%- endfor %} @@ -22,25 +23,25 @@ from . import {{subnamespace}} {%- for method in query_namespace.methods %} {%- if method.output_model != None %} @dataclasses.dataclass -class {{method.output_model.type.declaration | to_pascal_case }}: +class {{method.output_model.type | name | to_pascal_case }}: {%- for field, type in method.output_model.fields | items %} - {{field}}: {{type.annotation}} + {{field}}: {{ type | annotation }} {%- endfor %} {% endif %} {%- for _, input_model in method.input_models | items %} {%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} @dataclasses.dataclass -class {{ input_model.type.declaration | to_pascal_case }}: +class {{ input_model.type | name | to_pascal_case }}: {%- for field, type in input_model.fields | items %} - {{field}}: {{type.annotation}} + {{field}}: {{type | annotation}} {%- endfor %} {%- else %} -class {{ input_model.type.declaration | to_pascal_case }}(typing.Protocol): +class {{ input_model.type | name }}(typing.Protocol): {%- for field, type in input_model.fields | items %} @property - def {{field}}(self) -> {{type.annotation}}: ... + def {{field}}(self) -> {{type | annotation}}: ... 
{%- endfor %} {%- endif %} @@ -49,7 +50,7 @@ class {{ input_model.type.declaration | to_pascal_case }}(typing.Protocol): @dataclasses.dataclass class {{ query_namespace.name | to_pascal_case }}Queries: - def __init__(self, connection: asyncpg.Connection): + def __init__(self, connection: Connection): self.connection = connection {%- for subnamespace in query_namespace.subnamespaces %} self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) @@ -69,9 +70,9 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- if method.query.output | length == 1 %} async def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: + ) -> {{method.output_type | annotation}}{{OR_NONE}}: row = await self.connection.fetchrow( {{method.query.name | to_screaming_snake_case }} {%- for parameter in method.query.parameters -%} @@ -83,9 +84,9 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- else %} async def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: + ) -> {{method.output_type | annotation}}{{OR_NONE}}: row = await self.connection.fetchrow( {{method.query.name | to_screaming_snake_case }} {%- for parameter in method.query.parameters -%} @@ -93,15 +94,15 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- endfor %} ) {{HANDLE_NONE}} - return {{method.output_type.annotation}}(**row) + return {{method.output_type | annotation}}(**row) {%- endif %} {%- elif method.query.command == 'many' %} {%- if method.query.output | length == 1 %} async def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> list[{{method.output_type.annotation}}]: + ) -> list[{{method.output_type | annotation}}]: rows = await self.connection.fetch( {{method.query.name | to_screaming_snake_case }} {%- for parameter in method.query.parameters -%} @@ -112,23 +113,23 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- else%} async def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> list[{{method.output_type.annotation}}]: + ) -> list[{{method.output_type | annotation}}]: rows = await self.connection.fetch( {{method.query.name | to_screaming_snake_case }} {%- for parameter in method.query.parameters -%} , {{parameter.name}} {%- endfor %} ) - return [{{method.output_type.annotation}}(**row) for row in rows] + return [{{method.output_type | annotation}}(**row) for row in rows] {%- endif %} {%- elif method.query.command == 'val' %} async def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: + ) -> {{method.output_type | annotation}}{{OR_NONE}}: row = await self.connection.fetchval( {{method.query.name | to_screaming_snake_case }} {%- for parameter in method.query.parameters -%} @@ -140,7 +141,7 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- else %} async def 
{{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} ): return await self.connection.execute( @@ -155,9 +156,9 @@ class {{ query_namespace.name | to_pascal_case }}Queries: -{%- if query_namespace.name == "" %} -async def init_connection(conn: asyncpg.Connection): - {%- for _, model_module in model_modules | items %} +{%- if this_module == [] %} +async def init_connection(conn: Connection): + {%- for _, model_module in ir.model_modules.model_modules | items %} {%- for model in model_module.classes %} await conn.set_type_codec( diff --git a/codegen/src/presentation/python/templates/asyncpg/types.json b/codegen/src/presentation/python/templates/asyncpg/types.json deleted file mode 100644 index 4389b8f..0000000 --- a/codegen/src/presentation/python/templates/asyncpg/types.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "new_type_case": "{{ name | to_pascal_case }}", - "array": { - "constructor": "list", - "annotation": "list[{{type.annotation}}]" - }, - "null": { - "declaration": "{{type.declaration}}", - "constructor": "{{type.constructor}}", - "annotation": "{{type.annotation}} | None" - }, - "composite": { - "declaration": "{{ type_name | to_pascal_case }}", - "constructor": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "annotation": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "import": [] - }, - "wildcard": { - "name": "typing.Any", - "import": ["typing"] - }, - "schema": { - "pg_catalog": { - "bool": { "name": "bool" }, - "bytea": { "name": "bytes" }, - "char": { "name": "str" }, - "int8": { "name": "int" }, - "int2": { "name": "int" }, - "int4": { "name": "int" }, - "text": { "name": "str" }, - "json": { "name": "str" }, - "point": { "name": "asyncpg.types.Point", "import": ["asyncpg"] }, - "box": { "name": "asyncpg.pgproto.types.Box", "import": ["asyncpg"] }, - "polygon": { - "name": "asyncpg.pgproto.types.Polygon", - "import": ["asyncpg"] - }, - "line": { - "name": "asyncpg.pgproto.types.Line", - "import": ["asyncpg"] - }, - - "float4": { "name": "float" }, - "float8": { "name": "float" }, - "unknown": { "name": "typing.Any", "import": ["typing"] }, - "circle": { - "name": "asyncpg.pgproto.types.Circle", - "import": ["asyncpg"] - }, - "varchar": { "name": "str" }, - "date": { "name": "datetime.date", "import": ["datetime"] }, - "time": { "name": "datetime.time", "import": [] }, - "timestamp": { - "name": "datetime.datetime", - "import": ["datetime"] - }, - "timestamptz": { - "name": "datetime.datetime", - "import": ["datetime"] - }, - "interval": { - "name": "datatime.timedelta", - "import": ["datetime"] - }, - "timetz": { "name": "datetime.time", "import": ["datetime"] }, - "numeric": { "name": "decimal.Decimal", "import": ["decimal"] }, - "record": { "name": "asyncpg.Record", "import": ["asyncpg"] }, - "any": { "name": "typing.Any", "import": ["typing"] }, - "anyarray": { "name": "list[typing.Any]", "import": ["typing"] }, - "anyelement": { "name": "typing.Any", "import": ["typing"] }, - "anynonarray": { "name": "typing.Any", "import": ["typing"] }, - "uuid": { "name": "uuid.UUID", "import": ["uuid"] }, - "anyenum": { "name": "str" }, - "anyrange": { "name": "asyncpg.Range", "import": ["asyncpg"] }, - "jsonb": { "name": "str" }, - "int4range": { - "name": "asyncpg.types.Range", - "annotation": "asyncpg.types.Range[int]", - "import": ["asyncpg"] - }, - "numrange": { - "name": "asyncpg.types.Range", - 
"annotation": "asyncpg.types.Range[float]", - "import": ["asyncpg"] - }, - "tsrange": { - "name": "asyncpg.types.Range", - "annotation": "asyncpg.types.Range[datetime.datetime]", - "import": ["asyncpg", "datetime"] - }, - "tstzrange": { - "name": "tstzrange", - "annotation": "asyncpg.types.Range[datetime.datetime]", - "import": ["asyncpg", "datetime"] - }, - "daterange": { - "name": "asyncpg.types.Range", - "annotation": "asyncpg.types.Range[datetime.date]", - "import": ["asyncpg", "datetime"] - }, - "int8range": { - "name": "asyncpg.types.Range", - "annotation": "asyncpg.types.Range[int]", - "import": ["asyncpg"] - } - } - } -} diff --git a/codegen/src/presentation/python/templates/psycopg/model.j2 b/codegen/src/presentation/python/templates/psycopg/model.j2 new file mode 100644 index 0000000..6e5279f --- /dev/null +++ b/codegen/src/presentation/python/templates/psycopg/model.j2 @@ -0,0 +1,28 @@ +import dataclasses +{%- if model_module.enums %} +import enum +{%- endif %} +{%- for type in used_types %} +{%- for import in (type | imports) %} +{{import}} +{%- endfor %} +{%- endfor %} +from {{ir.request.config.codegen.options.package}} import models + +{%- for enum in model_module.enums %} + +class {{enum.name | to_pascal_case }}(enum.StrEnum): + {%- for value in enum.values %} + {{ value | to_screaming_snake_case }} = {{ value | to_c_string }} + {%- endfor %} +{% endfor %} + +{%- for model in model_module.models %} + + +@dataclasses.dataclass +class {{ model.name | to_pascal_case }}: + {%- for field in model.fields %} + {{field.name}}: {{ field.type | annotation }} + {%- endfor %} +{%- endfor %} diff --git a/codegen/src/presentation/python/templates/psycopg/model.py.jinja2 b/codegen/src/presentation/python/templates/psycopg/model.py.jinja2 deleted file mode 100644 index 78addbb..0000000 --- a/codegen/src/presentation/python/templates/psycopg/model.py.jinja2 +++ /dev/null @@ -1,32 +0,0 @@ -import dataclasses -{%- if enums %} -import enum -{%- endif %} -{%- for import in imports %} -import {{import}} -{%- endfor %} -from {{request.config.codegen.options.package}} import models - -{%- for enum in enums %} - -class {{enum.name | to_pascal_case }}(enum.StrEnum): - {%- for value in enum.values %} - {{ value | to_screaming_snake_case }} = {{ value | to_c_string }} - {%- endfor %} -{% endfor %} - -{%- for model in models %} - - -@dataclasses.dataclass -class {{model.type.declaration}}: - {%- for field, type in model.fields %} - {{field}}: {% if type.annotation | starts_with("models." + schema) -%} - {{ type.annotation | strip_prefix("models." + schema + ".") }} - {%- elif type.annotation | starts_with("models.") -%} - {{ type.annotation | to_c_string }} - {%- else -%} - {{ type.annotation }} - {%- endif %} - {%- endfor %} -{%- endfor %} diff --git a/codegen/src/presentation/python/templates/psycopg/model_init.j2 b/codegen/src/presentation/python/templates/psycopg/model_init.j2 new file mode 100644 index 0000000..6fc9c54 --- /dev/null +++ b/codegen/src/presentation/python/templates/psycopg/model_init.j2 @@ -0,0 +1,12 @@ +{%- for module in ir.model_modules.model_modules -%} +from . 
import {{module}} +{% endfor -%} + + +{%- if ir.model_modules.model_modules["public"] -%} +from .public import ( +{%- for model in ir.model_modules.model_modules["public"].models %} + {{ model.name | to_pascal_case }}, +{%- endfor %} +) +{% endif %} diff --git a/codegen/src/presentation/python/templates/psycopg/model_init.py.jinja2 b/codegen/src/presentation/python/templates/psycopg/model_init.py.jinja2 deleted file mode 100644 index 3a811fc..0000000 --- a/codegen/src/presentation/python/templates/psycopg/model_init.py.jinja2 +++ /dev/null @@ -1,12 +0,0 @@ -{%- for module in model_modules -%} -from . import {{module}} -{% endfor -%} - - -{%- if model_modules["public"] -%} -from .public import ( -{%- for model_class in model_modules["public"].classes %} - {{model_class.type.declaration}}, -{%- endfor %} -) -{% endif %} diff --git a/codegen/templates/python:psycopg/query.py.jinja2 b/codegen/src/presentation/python/templates/psycopg/query.j2 similarity index 75% rename from codegen/templates/python:psycopg/query.py.jinja2 rename to codegen/src/presentation/python/templates/psycopg/query.j2 index 8128a88..433add9 100644 --- a/codegen/templates/python:psycopg/query.py.jinja2 +++ b/codegen/src/presentation/python/templates/psycopg/query.j2 @@ -1,8 +1,10 @@ # This file was automatically generated by pgc -# flake8: noqa -# pylint: disable=unused-import -{%- for import in imports %} -import {{import}} +{%- for type in used_types %} +{% for import in (type | imports) %} +{%- if import != ""%} +{{import}} +{%- endif %} +{%- endfor %} {%- endfor %} import psycopg import typing @@ -11,7 +13,7 @@ from psycopg.rows import dict_row {%- if query_namespace.name == "" %} from psycopg.types.composite import CompositeInfo, register_composite {%- endif %} -from {{request.config.codegen.options.package}} import models +from {{ir.request.config.codegen.options.package}} import models {%- for subnamespace in query_namespace.subnamespaces %} from . import {{subnamespace}} {%- endfor %} @@ -26,25 +28,25 @@ from . import {{subnamespace}} {%- for method in query_namespace.methods %} {%- if method.output_model != None %} @dataclasses.dataclass -class {{method.output_model.type.declaration | to_pascal_case }}: +class {{method.output_model.type | name }}: {%- for field, type in method.output_model.fields | items %} - {{field}}: {{type.annotation}} + {{field}}: {{type | annotation}} {%- endfor %} {% endif %} {%- for _, input_model in method.input_models | items %} {%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} @dataclasses.dataclass -class {{ input_model.type.declaration | to_pascal_case }}: +class {{ input_model.type | name }}: {%- for field, type in input_model.fields | items %} - {{field}}: {{type.annotation}} + {{field}}: {{type | annotation}} {%- endfor %} {%- else %} -class {{ input_model.type.declaration | to_pascal_case }}(typing.Protocol): +class {{ input_model.type | name }}(typing.Protocol): {%- for field, type in input_model.fields | items %} @property - def {{field}}(self) -> {{type.annotation}}: ... + def {{field}}(self) -> {{type | annotation}}: ... 
{%- endfor %} {%- endif %} @@ -55,7 +57,7 @@ class {{ input_model.type.declaration | to_pascal_case }}(typing.Protocol): class {{ query_namespace.name | to_pascal_case }}Queries: def __init__(self, connection: psycopg.Connection): self.connection = connection - {%- for subnamespace in query_namespace.subnamespaces %} + {%- for subnamespace in query_namespace.subnamespaces %} self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) {%- endfor %} @@ -73,9 +75,9 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- if method.query.output | length == 1 %} def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: + ) -> {{method.output_type | annotation}}{{OR_NONE}}: row = self.connection.execute( {{method.query.name | to_screaming_snake_case }}, { {%- for parameter in method.query.parameters -%} @@ -87,9 +89,9 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- else %} def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: + ) -> {{method.output_type | annotation}}{{OR_NONE}}: row = self.connection.cursor(row_factory=dict_row).execute( {{method.query.name | to_screaming_snake_case }}, { {%- for parameter in method.query.parameters -%} @@ -97,16 +99,16 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- endfor %}} ).fetchone() {{HANDLE_NONE}} - return {{method.output_type.annotation}}(**row) + return {{method.output_type | annotation}}(**row) {%- endif %} {%- elif method.query.command == 'many' %} {%- if method.query.output | length == 1 %} def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> list[{{method.output_type.annotation}}]: + ) -> list[{{method.output_type | annotation}}]: rows = self.connection.execute( {{method.query.name | to_screaming_snake_case }}, { {%- for parameter in method.query.parameters -%} @@ -117,24 +119,24 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- else %} def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> list[{{method.output_type.annotation}}]: + ) -> list[{{method.output_type | annotation}}]: rows = self.connection.cursor(row_factory=dict_row).execute( {{method.query.name | to_screaming_snake_case }}, { {%- for parameter in method.query.parameters -%} "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} {%- endfor %}} ).fetchall() - return [{{method.output_type.annotation}}(**row) for row in rows] + return [{{method.output_type | annotation}}(**row) for row in rows] {%- endif %} {%- elif method.query.command == 'val' %} def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: + ) -> {{method.output_type | annotation}}{{OR_NONE}}: row = self.connection.execute( {{method.query.name | to_screaming_snake_case }}, { {%- for parameter in method.query.parameters -%} @@ -146,7 
+148,7 @@ class {{ query_namespace.name | to_pascal_case }}Queries: {%- else %} def {{method.query.name}}(self {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} + , {{argument}}: {{type | annotation}} {%- endfor -%} ): return self.connection.execute( @@ -161,13 +163,16 @@ class {{ query_namespace.name | to_pascal_case }}Queries: -{%- if query_namespace.name == "" %} +{%- if this_module == [] %} def init_connection(conn: psycopg.Connection): - {%- for _, model_module in model_modules | items %} - {%- for model in model_module.classes %} + {%- for _, model_module in ir.model_modules.model_modules | items %} + {%- for model in model_module.models %} + info = CompositeInfo.fetch(conn, "\"{{model_module.name}}\".\"{{model.name}}\"") + assert info is not None, "The table \"{{model_module.name}}\".\"{{model.name}}\" was not found." register_composite( - CompositeInfo.fetch(conn, "\"{{model.type.pgtype_schema}}\".\"{{model.type.pgtype_name }}\""), conn, {{model.type.constructor}} # type: ignore + + info, conn, models.{{model_module.name}}.{{model.name | to_pascal_case }} ) {%- endfor %} {% endfor %} diff --git a/codegen/src/presentation/python/templates/psycopg/query.py.jinja2 b/codegen/src/presentation/python/templates/psycopg/query.py.jinja2 deleted file mode 100644 index 8128a88..0000000 --- a/codegen/src/presentation/python/templates/psycopg/query.py.jinja2 +++ /dev/null @@ -1,174 +0,0 @@ -# This file was automatically generated by pgc -# flake8: noqa -# pylint: disable=unused-import -{%- for import in imports %} -import {{import}} -{%- endfor %} -import psycopg -import typing -import dataclasses -from psycopg.rows import dict_row -{%- if query_namespace.name == "" %} -from psycopg.types.composite import CompositeInfo, register_composite -{%- endif %} -from {{request.config.codegen.options.package}} import models -{%- for subnamespace in query_namespace.subnamespaces %} -from . import {{subnamespace}} -{%- endfor %} - -{%- for method in query_namespace.methods %} - -{{ method.query.name | to_screaming_snake_case }} = """ -{{ method.query.query | regex_replace('\\$(\\d+)', '%(p$1)s') }} -""" -{%- endfor %} -{{"\n"}} -{%- for method in query_namespace.methods %} -{%- if method.output_model != None %} -@dataclasses.dataclass -class {{method.output_model.type.declaration | to_pascal_case }}: - {%- for field, type in method.output_model.fields | items %} - {{field}}: {{type.annotation}} - {%- endfor %} - -{% endif %} -{%- for _, input_model in method.input_models | items %} -{%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} -@dataclasses.dataclass -class {{ input_model.type.declaration | to_pascal_case }}: - {%- for field, type in input_model.fields | items %} - {{field}}: {{type.annotation}} - {%- endfor %} - -{%- else %} -class {{ input_model.type.declaration | to_pascal_case }}(typing.Protocol): - {%- for field, type in input_model.fields | items %} - @property - def {{field}}(self) -> {{type.annotation}}: ... 
- {%- endfor %} - -{%- endif %} -{% endfor %} -{%- endfor %} - -@dataclasses.dataclass -class {{ query_namespace.name | to_pascal_case }}Queries: - def __init__(self, connection: psycopg.Connection): - self.connection = connection - {%- for subnamespace in query_namespace.subnamespaces %} - self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) - {%- endfor %} - - {% for method in query_namespace.methods%} - {%- if method.query.annotations.not_null_result -%} - {%- set HANDLE_NONE = 'assert row is not None, "a query marked with @not_null_result has returned null."' %} - {%- set OR_NONE = '' %} - {% else %} - {%- set HANDLE_NONE = 'if row is None: return None' %} - {%- set OR_NONE = ' | None' %} - {%- endif %} - - {%- if method.query.command == 'one' %} - - {%- if method.query.output | length == 1 %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: - row = self.connection.execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchone() - {{HANDLE_NONE}} - return row[0] - {%- else %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: - row = self.connection.cursor(row_factory=dict_row).execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchone() - {{HANDLE_NONE}} - return {{method.output_type.annotation}}(**row) - - {%- endif %} - {%- elif method.query.command == 'many' %} - {%- if method.query.output | length == 1 %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> list[{{method.output_type.annotation}}]: - rows = self.connection.execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchall() - return [row[0] for row in rows] - {%- else %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> list[{{method.output_type.annotation}}]: - rows = self.connection.cursor(row_factory=dict_row).execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchall() - return [{{method.output_type.annotation}}(**row) for row in rows] - - {%- endif %} - {%- elif method.query.command == 'val' %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: - row = self.connection.execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchone() - {{HANDLE_NONE}} 
- return row[0] - {%- else %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ): - return self.connection.execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ) - {%- endif %} - - {% endfor %} - - - -{%- if query_namespace.name == "" %} -def init_connection(conn: psycopg.Connection): - {%- for _, model_module in model_modules | items %} - {%- for model in model_module.classes %} - - register_composite( - CompositeInfo.fetch(conn, "\"{{model.type.pgtype_schema}}\".\"{{model.type.pgtype_name }}\""), conn, {{model.type.constructor}} # type: ignore - ) - {%- endfor %} - {% endfor %} -{% endif -%} diff --git a/codegen/src/presentation/python/templates/psycopg/types.json b/codegen/src/presentation/python/templates/psycopg/types.json deleted file mode 100644 index 8bdc19e..0000000 --- a/codegen/src/presentation/python/templates/psycopg/types.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "new_type_case": "{{ name | to_pascal_case }}", - "array": { - "constructor": "list", - "annotation": "list[{{type.annotation}}]" - }, - "null": { - "declaration": "{{type.declaration}}", - "constructor": "{{type.constructor}}", - "annotation": "{{type.annotation}} | None" - }, - "composite": { - "declaration": "{{ type_name | to_pascal_case }}", - "constructor": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "annotation": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "import": [] - }, - "wildcard": { - "annotation": "typing.Any", - "import": ["typing"] - }, - "schema": { - "pg_catalog": { - "bool": { "annotation": "bool" }, - "bytea": { "annotation": "bytes" }, - "char": { "annotation": "str" }, - "int8": { "annotation": "int" }, - "int2": { "annotation": "int" }, - "int4": { "annotation": "int" }, - "text": { "annotation": "str" }, - "json": { "annotation": "dict" }, - "point": { "annotation": "asyncpg.types.Point", "import": ["asyncpg"] }, - "box": { - "annotation": "asyncpg.pgproto.types.Box", - "import": ["asyncpg"] - }, - "polygon": { - "annotation": "asyncpg.pgproto.types.Polygon", - "import": ["asyncpg"] - }, - "line": { - "annotation": "asyncpg.pgproto.types.Line", - "import": ["asyncpg"] - }, - - "float4": { "annotation": "float" }, - "float8": { "annotation": "float" }, - "unknown": { "annotation": "typing.Any", "import": ["typing"] }, - "circle": { - "annotation": "asyncpg.pgproto.types.Circle", - "import": ["asyncpg"] - }, - "varchar": { "annotation": "str" }, - "date": { "annotation": "datetime.date", "import": ["datetime"] }, - "time": { "annotation": "datetime.time", "import": [] }, - "timestamp": { - "annotation": "datetime.datetime", - "import": ["datetime"] - }, - "timestamptz": { - "annotation": "datetime.datetime", - "import": ["datetime"] - }, - "interval": { - "annotation": "datatime.timedelta", - "import": ["datetime"] - }, - "timetz": { "annotation": "datetime.time", "import": ["datetime"] }, - "numeric": { "annotation": "decimal.Decimal", "import": ["decimal"] }, - "record": { "annotation": "str" }, - "any": { "annotation": "typing.Any", "import": ["typing"] }, - "anyarray": { "annotation": "list[typing.Any]", "import": ["typing"] }, - "anyelement": { "annotation": "typing.Any", "import": ["typing"] }, - "anynonarray": { "annotation": "typing.Any", "import": ["typing"] }, - 
"uuid": { "annotation": "uuid.UUID", "import": ["uuid"] }, - "anyenum": { "annotation": "str" }, - "anyrange": { - "annotation": "psycopg.types.range.Range", - "import": ["psycopg.types.range"] - }, - "jsonb": { "annotation": "dict" }, - "int4range": { - "annotation": "psycopg.types.range.Range[int]", - "import": ["psycopg.types.range"] - }, - "numrange": { - "annotation": "asyncpg.types.Range[float]", - "import": ["psycopg.types.range"] - }, - "tsrange": { - "annotation": "asyncpg.types.Range[datetime.datetime]", - "import": ["psycopg.types.range.Range", "datetime"] - }, - "tstzrange": { - "annotation": "psycopg.types.range.Range[datetime.datetime]", - "import": ["psycopg.types.range", "datetime"] - }, - "daterange": { - "annotation": "asyncpg.types.Range[datetime.date]", - "import": ["psycopg.types.range", "datetime"] - }, - "int8range": { - "annotation": "psycopg.types.range.Range[int]", - "import": ["psycopg.types.range"] - } - } - } -} diff --git a/codegen/src/presentation/python/type_map_service.rs b/codegen/src/presentation/python/type_map_service.rs index 0026b0f..aae5e28 100644 --- a/codegen/src/presentation/python/type_map_service.rs +++ b/codegen/src/presentation/python/type_map_service.rs @@ -1,15 +1,19 @@ use crate::{ ir::Type, presentation::type_mapping_service::{LanguageType, TypeMapService}, + utils::to_pascal_case, }; use std::sync::Arc; -struct AsyncpgTypeMapService; +#[derive(Clone, Copy)] +pub struct AsyncpgTypeMapService; + +#[derive(Clone, Copy)] pub struct PsycopgTypeMapService; impl TypeMapService for PsycopgTypeMapService { #[rustfmt::skip] - fn get(&self, module: Arc<[Arc]>, r#type: &Type) -> LanguageType { + fn get(&self, module: Vec, r#type: &Type) -> LanguageType { match r#type { Type::Bit | Type::BitVarying @@ -21,7 +25,7 @@ impl TypeMapService for PsycopgTypeMapService { | Type::Polygon | Type::Circle | Type::Box => LanguageType::annotation("str"), - Type::AnyMultiRange | Type::AnyCompatibleMultiRange => LanguageType::annotation("list[psycopg.types.range.Range]").import(["import psycopg.types.range"]), + Type::AnyMultiRange | Type::AnyCompatibleMultiRange => LanguageType::annotation("list[psycopg.types.range.Range]"). 
import(["import psycopg.types.range"]), Type::TsMultiRange | Type::TsTzMultiRange => LanguageType::annotation("list[psycopg.types.range.Range[datetime.datetime]]").import(["import psycopg.types.range", "import datetime"]), Type::DateMultiRange => LanguageType::annotation("list[psycopg.types.range.Range[datetime.date]]").import(["import psycopg.types.range", "import datetime"]), Type::DateRange => LanguageType::annotation("psycopg.types.range.Range[datetime.date]").import(["import psycopg.types.range", "import datetime"]), @@ -38,11 +42,23 @@ impl TypeMapService for PsycopgTypeMapService { impl TypeMapService for AsyncpgTypeMapService { #[rustfmt::skip] - fn get(&self, current_module: Arc<[Arc]>, r#type: &crate::ir::Type) -> LanguageType { + fn get(&self, current_module: Vec, r#type: &crate::ir::Type) -> LanguageType { match r#type { Type::UserDefined { module_path, name } => { + let name: Arc = to_pascal_case(&name).into(); let module: Arc<_> = module_path.join(".").into(); - LanguageType { name: Some(name.clone()), annotation: format!("{module}.{name}").into(), import: vec![format!("import {}", module).into()], module: Some(module) } + let mut annotation = format!("{module}.{name}").into(); + let same_module = current_module.iter().map(|s|&**s).eq(module_path.iter().map(|s|&**s)); + if same_module { + annotation = name.clone(); + } + + LanguageType { + name: Some(name.clone()), + annotation, + import: vec![], + module: Some(module) + } }, Type::Nullable(r#type) => { let r#type = self.get(current_module, r#type); diff --git a/codegen/src/presentation/file_generator.rs b/codegen/src/presentation/templating_service.rs similarity index 55% rename from codegen/src/presentation/file_generator.rs rename to codegen/src/presentation/templating_service.rs index e1ebc22..295c4fc 100644 --- a/codegen/src/presentation/file_generator.rs +++ b/codegen/src/presentation/templating_service.rs @@ -1,27 +1,41 @@ use std::sync::Arc; -use minijinja::{context, Environment}; +use minijinja::{Environment, context}; use crate::{ error::Error, ir::{Ir, ModelModule, QueryNamespace}, - presentation::python::file_generation_config::FileGenerationConfig, + presentation::{ + FileGeneratorService, environment::env, file_generation_config::TemplateGenConfig, + }, response::File, }; -pub struct FileGeneratorService { +pub struct TemplatingService { pub ir: Ir, - pub config: FileGenerationConfig, + pub config: TemplateGenConfig, pub environment: Environment<'static>, } -impl FileGeneratorService { - fn files(&self) -> Result, Error> { +impl FileGeneratorService for TemplatingService { + fn generate(&self) -> Result, Error> { let mut files = self.model_module_files()?; - self.add_query_files(&mut files); + self.add_query_files(&mut files)?; files.push(self.add_model_entrypoint()?); return Ok(files); } +} + +impl TemplatingService { + pub fn new(ir: Ir, config: TemplateGenConfig) -> Result { + let environment = env(ir.clone(), config)?; + + Ok(TemplatingService { + ir, + config, + environment, + }) + } fn model_module_files(&self) -> Result, Error> { let mut files = vec![]; @@ -39,9 +53,10 @@ impl FileGeneratorService { let filename = format!("models/{}.{}", module.name, &self.config.file_extension); let content = self.environment.get_template("model")?.render(context! 
{ - path => ["models", &module.name], + this_module => ["models", &module.name], used_types => module.used_types(), - module => module, + model_module => module, + ir => self.ir, })?; files.push(File { @@ -54,31 +69,34 @@ impl FileGeneratorService { fn add_model_entrypoint(&self) -> Result { let content = self .environment - .get_template("model_dir")? + .get_template("model_init")? .render(context!( ir => self.ir, + this_module => ["models", self.config.model_directory_entrypoint] ))?; let path = format!("models/{}", self.config.model_directory_entrypoint); Ok(File { path, content }) } - pub fn add_query_files(&self, files: &mut Vec) { + pub fn add_query_files(&self, files: &mut Vec) -> Result<(), Error> { let namespace = &self.ir.query_namespace; - self.add_query_namespaces_recursively(files, &vec![], &namespace); + let mut path = vec![]; + self.add_query_namespaces_recursively(files, &mut path, &namespace)?; + Ok(()) } fn add_query_namespaces_recursively( &self, files: &mut Vec, - path: &Vec>, + path: &mut Vec>, namespace: &QueryNamespace, ) -> Result<(), Error> { self.add_query_namespace(files, path, namespace)?; for (name, subnamespace) in namespace.subnamespaces.iter() { - let mut path = path.clone(); path.push(name.clone()); - self.add_query_namespaces_recursively(files, &path, subnamespace); + self.add_query_namespaces_recursively(files, path, subnamespace)?; + path.pop(); } Ok(()) } @@ -86,7 +104,7 @@ impl FileGeneratorService { pub fn add_query_namespace( &self, files: &mut Vec, - path: &Vec>, + module_segments: &Vec>, namespace: &QueryNamespace, ) -> Result<(), Error> { let content = self @@ -95,15 +113,26 @@ impl FileGeneratorService { .unwrap() .render(context! { query_namespace => namespace, - path => path, + this_module => module_segments, ir => self.ir, + used_types => namespace.used_types(), })?; - let path = format!( - "{}.{}", - path.join("/"), - self.config.query_directory_entrypoint - ); + let path; + + if namespace.subnamespaces.len() == 0 { + path = format!( + "{}.{}", + module_segments.join("/"), + self.config.file_extension + ); + } else { + path = format!( + "{}/{}", + module_segments.join("/"), + self.config.query_directory_entrypoint + ); + } files.push(File { path, content }); Ok(()) diff --git a/codegen/src/presentation/type_mapping_service.rs b/codegen/src/presentation/type_mapping_service.rs index e1671e5..a4599fc 100644 --- a/codegen/src/presentation/type_mapping_service.rs +++ b/codegen/src/presentation/type_mapping_service.rs @@ -1,17 +1,12 @@ use std::{collections::BTreeMap, sync::Arc}; -use minijinja::value::DynObject; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use crate::{ - ir::{Type, TypeService}, + ir::{Ir, Type}, request::TypeConfig, }; -pub trait TypeMapService: Send + Sync + 'static { - fn get(&self, module: Arc<[Arc]>, r#type: &Type) -> LanguageType; -} - #[derive(Deserialize, Clone, Debug, PartialEq, Eq)] pub struct LanguageType { pub name: Option>, @@ -20,28 +15,41 @@ pub struct LanguageType { pub module: Option>, } -struct OverriddenTypeMapService { - service: Box, +pub trait TypeMapService: Send + Sync + 'static { + fn get(&self, module: Vec, r#type: &Type) -> LanguageType; +} + +pub struct OverriddenTypeMapService { + service: &'static dyn TypeMapService, overrides: Arc, TypeConfig>>, } +impl OverriddenTypeMapService { + pub fn new(ir: Ir, service: &'static dyn TypeMapService) -> Self { + Self { + service, + overrides: ir.request.config.codegen.types.clone(), + } + } +} + impl TypeMapService for OverriddenTypeMapService 
{ - fn get(&self, module: Arc<[Arc]>, r#type: &Type) -> LanguageType { - let Ok(ty) = Type::NAMES.binary_search_by(|(_, _, ty)| ty.cmp(r#type)) else { - return self.service.get(module, r#type); - }; - let (_, name, _) = Type::NAMES[ty]; + fn get(&self, module: Vec, r#type: &Type) -> LanguageType { + return self.service.get(module, r#type); + // let Ok(ty) = Type::NAMES.binary_search_by(|(_, _, ty)| ty.cmp(r#type)) else { + // }; + // let (_, name, _) = Type::NAMES[ty]; - let Some(type_config) = self.overrides.get(name) else { - return self.service.get(module, r#type); - }; + // let Some(type_config) = self.overrides.get(name) else { + // return self.service.get(module, r#type); + // }; - return LanguageType { - name: None, - annotation: type_config.annotation.clone(), - import: type_config.import.clone(), - module: None, - }; + // return LanguageType { + // name: None, + // annotation: type_config.annotation.clone(), + // import: type_config.import.clone(), + // module: None, + // }; } } diff --git a/codegen/src/request.rs b/codegen/src/request.rs index dd7d735..734e01f 100644 --- a/codegen/src/request.rs +++ b/codegen/src/request.rs @@ -1,6 +1,4 @@ use std::collections::BTreeMap; -use std::collections::HashMap; -use std::iter::Map; use std::sync::Arc; use serde::Deserialize; diff --git a/codegen/src/type.rs b/codegen/src/type.rs deleted file mode 100644 index 107a56a..0000000 --- a/codegen/src/type.rs +++ /dev/null @@ -1,24 +0,0 @@ -use std::{collections::BTreeMap, sync::Arc, sync::LazyLock}; - -use serde::Deserialize; -use serde_json::json; - -use crate::{ - error::Error, - request::{self, Column, OutputType, TypeConfig}, - utils::render, -}; - -#[derive(serde::Serialize, Deserialize, Clone, Debug)] -pub struct Type { - #[serde(default)] - pub declaration: Arc, - #[serde(default)] - pub annotation: Arc, - #[serde(default)] - pub constructor: Arc, - #[serde(default)] - pub import: Arc<[Arc]>, - pub pgtype_name: Option>, - pub pgtype_schema: Option>, -} diff --git a/codegen/src/utils.rs b/codegen/src/utils.rs index a7bfb33..ccba0b3 100644 --- a/codegen/src/utils.rs +++ b/codegen/src/utils.rs @@ -1,11 +1,10 @@ use std::{ - collections::{BTreeMap, HashMap}, - fmt::format, - sync::{LazyLock, Mutex}, + collections::BTreeMap, + sync::Mutex, }; use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase}; -use minijinja::{Environment, State, Template, Value}; +use minijinja::Environment; use regex::bytes::Regex; use serde::Serialize; diff --git a/codegen/templates/python:asyncpg/config.json b/codegen/templates/python:asyncpg/config.json deleted file mode 100644 index 2a1fdaf..0000000 --- a/codegen/templates/python:asyncpg/config.json +++ /dev/null @@ -1 +0,0 @@ -{ "extension": "py", "directory_entrypoint": "__init__" } diff --git a/codegen/templates/python:asyncpg/model.py.jinja2 b/codegen/templates/python:asyncpg/model.py.jinja2 deleted file mode 100644 index 78addbb..0000000 --- a/codegen/templates/python:asyncpg/model.py.jinja2 +++ /dev/null @@ -1,32 +0,0 @@ -import dataclasses -{%- if enums %} -import enum -{%- endif %} -{%- for import in imports %} -import {{import}} -{%- endfor %} -from {{request.config.codegen.options.package}} import models - -{%- for enum in enums %} - -class {{enum.name | to_pascal_case }}(enum.StrEnum): - {%- for value in enum.values %} - {{ value | to_screaming_snake_case }} = {{ value | to_c_string }} - {%- endfor %} -{% endfor %} - -{%- for model in models %} - - -@dataclasses.dataclass -class {{model.type.declaration}}: - {%- for 
field, type in model.fields %} - {{field}}: {% if type.annotation | starts_with("models." + schema) -%} - {{ type.annotation | strip_prefix("models." + schema + ".") }} - {%- elif type.annotation | starts_with("models.") -%} - {{ type.annotation | to_c_string }} - {%- else -%} - {{ type.annotation }} - {%- endif %} - {%- endfor %} -{%- endfor %} diff --git a/codegen/templates/python:asyncpg/model_init.py.jinja2 b/codegen/templates/python:asyncpg/model_init.py.jinja2 deleted file mode 100644 index 3a811fc..0000000 --- a/codegen/templates/python:asyncpg/model_init.py.jinja2 +++ /dev/null @@ -1,12 +0,0 @@ -{%- for module in model_modules -%} -from . import {{module}} -{% endfor -%} - - -{%- if model_modules["public"] -%} -from .public import ( -{%- for model_class in model_modules["public"].classes %} - {{model_class.type.declaration}}, -{%- endfor %} -) -{% endif %} diff --git a/codegen/templates/python:asyncpg/query.py.jinja2 b/codegen/templates/python:asyncpg/query.py.jinja2 deleted file mode 100644 index 03e45a5..0000000 --- a/codegen/templates/python:asyncpg/query.py.jinja2 +++ /dev/null @@ -1,172 +0,0 @@ -# This file was automatically generated by pgc -# flake8: noqa -# pylint: disable=unused-import -{%- for import in imports %} -import {{import}} -{%- endfor %} -import asyncpg -import typing -import dataclasses -from {{request.config.codegen.options.package}} import models -{%- for subnamespace in query_namespace.subnamespaces %} -from . import {{subnamespace}} -{%- endfor %} - -{%- for method in query_namespace.methods %} - -{{ method.query.name | to_screaming_snake_case }} = """ -{{ method.query.query }} -""" -{%- endfor %} -{{"\n"}} -{%- for method in query_namespace.methods %} -{%- if method.output_model != None %} -@dataclasses.dataclass -class {{method.output_model.type.declaration | to_pascal_case }}: - {%- for field, type in method.output_model.fields | items %} - {{field}}: {{type.annotation}} - {%- endfor %} - -{% endif %} -{%- for _, input_model in method.input_models | items %} -{%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} -@dataclasses.dataclass -class {{ input_model.type.declaration | to_pascal_case }}: - {%- for field, type in input_model.fields | items %} - {{field}}: {{type.annotation}} - {%- endfor %} - -{%- else %} -class {{ input_model.type.declaration | to_pascal_case }}(typing.Protocol): - {%- for field, type in input_model.fields | items %} - @property - def {{field}}(self) -> {{type.annotation}}: ... 
- {%- endfor %} - -{%- endif %} -{% endfor %} -{%- endfor %} - -@dataclasses.dataclass -class {{ query_namespace.name | to_pascal_case }}Queries: - def __init__(self, connection: asyncpg.Connection): - self.connection = connection - {%- for subnamespace in query_namespace.subnamespaces %} - self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) - {%- endfor %} - - {% for method in query_namespace.methods%} - {%- if method.query.annotations.not_null_result -%} - {%- set HANDLE_NONE = 'assert row is not None, "a query marked with @not_null_result has returned null."' %} - {%- set OR_NONE = '' %} - {% else %} - {%- set HANDLE_NONE = 'if row is None: return None' %} - {%- set OR_NONE = ' | None' %} - {%- endif %} - - {%- if method.query.command == 'one' %} - - {%- if method.query.output | length == 1 %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: - row = await self.connection.fetchrow( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - {{HANDLE_NONE}} - return row[0] - {%- else %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: - row = await self.connection.fetchrow( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - {{HANDLE_NONE}} - return {{method.output_type.annotation}}(**row) - {%- endif %} - {%- elif method.query.command == 'many' %} - {%- if method.query.output | length == 1 %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> list[{{method.output_type.annotation}}]: - rows = await self.connection.fetch( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - return [row[0] for row in rows] - {%- else%} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> list[{{method.output_type.annotation}}]: - rows = await self.connection.fetch( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - return [{{method.output_type.annotation}}(**row) for row in rows] - {%- endif %} - {%- elif method.query.command == 'val' %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ) -> {{method.output_type.annotation}}{{OR_NONE}}: - row = await self.connection.fetchval( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - {{HANDLE_NONE}} - return row - {%- else %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type.annotation}} - {%- endfor -%} - ): - return await self.connection.execute( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- 
endfor %} - ) - {%- endif %} - - {% endfor %} - - - -{%- if query_namespace.name == "" %} -async def init_connection(conn: asyncpg.Connection): - {%- for _, model_module in model_modules | items %} - {%- for model in model_module.classes %} - - await conn.set_type_codec( - {{model.type.pgtype_name | to_c_string }}, - encoder=lambda model: ({% for name, _ in model.fields %}model.{{name}}{% if not loop.last %}, {% endif %}{%endfor%}), - decoder=lambda row: {{model.type.constructor}}(*row), - schema={{model.type.pgtype_schema | to_c_string }}, - format="tuple", - ) - {%- endfor %} - {% endfor %} -{% endif -%} diff --git a/codegen/templates/python:asyncpg/types.json b/codegen/templates/python:asyncpg/types.json deleted file mode 100644 index 6209ebf..0000000 --- a/codegen/templates/python:asyncpg/types.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "new_type_case": "{{ name | to_pascal_case }}", - "array": { - "constructor": "list", - "annotation": "list[{{type.annotation}}]" - }, - "null": { - "declaration": "{{type.declaration}}", - "constructor": "{{type.constructor}}", - "annotation": "{{type.annotation}} | None" - }, - "composite": { - "declaration": "{{ type_name | to_pascal_case }}", - "constructor": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "annotation": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "import": [] - }, - "wildcard": { - "annotation": "typing.Any", - "import": ["typing"] - }, - "schema": { - "pg_catalog": { - "bool": { "annotation": "bool" }, - "bytea": { "annotation": "bytes" }, - "char": { "annotation": "str" }, - "int8": { "annotation": "int" }, - "int2": { "annotation": "int" }, - "int4": { "annotation": "int" }, - "text": { "annotation": "str" }, - "json": { "annotation": "str" }, - "point": { "annotation": "asyncpg.types.Point", "import": ["asyncpg"] }, - "box": { - "annotation": "asyncpg.pgproto.types.Box", - "import": ["asyncpg"] - }, - "polygon": { - "annotation": "asyncpg.pgproto.types.Polygon", - "import": ["asyncpg"] - }, - "line": { - "annotation": "asyncpg.pgproto.types.Line", - "import": ["asyncpg"] - }, - - "float4": { "annotation": "float" }, - "float8": { "annotation": "float" }, - "unknown": { "annotation": "typing.Any", "import": ["typing"] }, - "circle": { - "annotation": "asyncpg.pgproto.types.Circle", - "import": ["asyncpg"] - }, - "varchar": { "annotation": "str" }, - "date": { "annotation": "datetime.date", "import": ["datetime"] }, - "time": { "annotation": "datetime.time", "import": [] }, - "timestamp": { - "annotation": "datetime.datetime", - "import": ["datetime"] - }, - "timestamptz": { - "annotation": "datetime.datetime", - "import": ["datetime"] - }, - "interval": { - "annotation": "datatime.timedelta", - "import": ["datetime"] - }, - "timetz": { "annotation": "datetime.time", "import": ["datetime"] }, - "numeric": { "annotation": "decimal.Decimal", "import": ["decimal"] }, - "record": { "annotation": "asyncpg.Record", "import": ["asyncpg"] }, - "any": { "annotation": "typing.Any", "import": ["typing"] }, - "anyarray": { "annotation": "list[typing.Any]", "import": ["typing"] }, - "anyelement": { "annotation": "typing.Any", "import": ["typing"] }, - "anynonarray": { "annotation": "typing.Any", "import": ["typing"] }, - "uuid": { "annotation": "uuid.UUID", "import": ["uuid"] }, - "anyenum": { "annotation": "str" }, - "anyrange": { "annotation": "asyncpg.Range", "import": ["asyncpg"] }, - "jsonb": { "annotation": "str" }, - "int4range": { - "annotation": 
"asyncpg.types.Range[int]", - "import": ["asyncpg"] - }, - "numrange": { - "annotation": "asyncpg.types.Range[float]", - "import": ["asyncpg"] - }, - "tsrange": { - "annotation": "asyncpg.types.Range[datetime.datetime]", - "import": ["asyncpg", "datetime"] - }, - "tstzrange": { - "annotation": "asyncpg.types.Range[datetime.datetime]", - "import": ["asyncpg", "datetime"] - }, - "daterange": { - "annotation": "asyncpg.types.Range[datetime.date]", - "import": ["asyncpg", "datetime"] - }, - "int8range": { - "annotation": "asyncpg.types.Range[int]", - "import": ["asyncpg"] - } - } - } -} diff --git a/codegen/templates/python:psycopg/config.json b/codegen/templates/python:psycopg/config.json deleted file mode 100644 index 2a1fdaf..0000000 --- a/codegen/templates/python:psycopg/config.json +++ /dev/null @@ -1 +0,0 @@ -{ "extension": "py", "directory_entrypoint": "__init__" } diff --git a/codegen/templates/python:psycopg/model.py.jinja2 b/codegen/templates/python:psycopg/model.py.jinja2 deleted file mode 100644 index 78addbb..0000000 --- a/codegen/templates/python:psycopg/model.py.jinja2 +++ /dev/null @@ -1,32 +0,0 @@ -import dataclasses -{%- if enums %} -import enum -{%- endif %} -{%- for import in imports %} -import {{import}} -{%- endfor %} -from {{request.config.codegen.options.package}} import models - -{%- for enum in enums %} - -class {{enum.name | to_pascal_case }}(enum.StrEnum): - {%- for value in enum.values %} - {{ value | to_screaming_snake_case }} = {{ value | to_c_string }} - {%- endfor %} -{% endfor %} - -{%- for model in models %} - - -@dataclasses.dataclass -class {{model.type.declaration}}: - {%- for field, type in model.fields %} - {{field}}: {% if type.annotation | starts_with("models." + schema) -%} - {{ type.annotation | strip_prefix("models." + schema + ".") }} - {%- elif type.annotation | starts_with("models.") -%} - {{ type.annotation | to_c_string }} - {%- else -%} - {{ type.annotation }} - {%- endif %} - {%- endfor %} -{%- endfor %} diff --git a/codegen/templates/python:psycopg/model_init.py.jinja2 b/codegen/templates/python:psycopg/model_init.py.jinja2 deleted file mode 100644 index 3a811fc..0000000 --- a/codegen/templates/python:psycopg/model_init.py.jinja2 +++ /dev/null @@ -1,12 +0,0 @@ -{%- for module in model_modules -%} -from . 
import {{module}} -{% endfor -%} - - -{%- if model_modules["public"] -%} -from .public import ( -{%- for model_class in model_modules["public"].classes %} - {{model_class.type.declaration}}, -{%- endfor %} -) -{% endif %} diff --git a/codegen/templates/python:psycopg/types.json b/codegen/templates/python:psycopg/types.json deleted file mode 100644 index 8bdc19e..0000000 --- a/codegen/templates/python:psycopg/types.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "new_type_case": "{{ name | to_pascal_case }}", - "array": { - "constructor": "list", - "annotation": "list[{{type.annotation}}]" - }, - "null": { - "declaration": "{{type.declaration}}", - "constructor": "{{type.constructor}}", - "annotation": "{{type.annotation}} | None" - }, - "composite": { - "declaration": "{{ type_name | to_pascal_case }}", - "constructor": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "annotation": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "import": [] - }, - "wildcard": { - "annotation": "typing.Any", - "import": ["typing"] - }, - "schema": { - "pg_catalog": { - "bool": { "annotation": "bool" }, - "bytea": { "annotation": "bytes" }, - "char": { "annotation": "str" }, - "int8": { "annotation": "int" }, - "int2": { "annotation": "int" }, - "int4": { "annotation": "int" }, - "text": { "annotation": "str" }, - "json": { "annotation": "dict" }, - "point": { "annotation": "asyncpg.types.Point", "import": ["asyncpg"] }, - "box": { - "annotation": "asyncpg.pgproto.types.Box", - "import": ["asyncpg"] - }, - "polygon": { - "annotation": "asyncpg.pgproto.types.Polygon", - "import": ["asyncpg"] - }, - "line": { - "annotation": "asyncpg.pgproto.types.Line", - "import": ["asyncpg"] - }, - - "float4": { "annotation": "float" }, - "float8": { "annotation": "float" }, - "unknown": { "annotation": "typing.Any", "import": ["typing"] }, - "circle": { - "annotation": "asyncpg.pgproto.types.Circle", - "import": ["asyncpg"] - }, - "varchar": { "annotation": "str" }, - "date": { "annotation": "datetime.date", "import": ["datetime"] }, - "time": { "annotation": "datetime.time", "import": [] }, - "timestamp": { - "annotation": "datetime.datetime", - "import": ["datetime"] - }, - "timestamptz": { - "annotation": "datetime.datetime", - "import": ["datetime"] - }, - "interval": { - "annotation": "datatime.timedelta", - "import": ["datetime"] - }, - "timetz": { "annotation": "datetime.time", "import": ["datetime"] }, - "numeric": { "annotation": "decimal.Decimal", "import": ["decimal"] }, - "record": { "annotation": "str" }, - "any": { "annotation": "typing.Any", "import": ["typing"] }, - "anyarray": { "annotation": "list[typing.Any]", "import": ["typing"] }, - "anyelement": { "annotation": "typing.Any", "import": ["typing"] }, - "anynonarray": { "annotation": "typing.Any", "import": ["typing"] }, - "uuid": { "annotation": "uuid.UUID", "import": ["uuid"] }, - "anyenum": { "annotation": "str" }, - "anyrange": { - "annotation": "psycopg.types.range.Range", - "import": ["psycopg.types.range"] - }, - "jsonb": { "annotation": "dict" }, - "int4range": { - "annotation": "psycopg.types.range.Range[int]", - "import": ["psycopg.types.range"] - }, - "numrange": { - "annotation": "asyncpg.types.Range[float]", - "import": ["psycopg.types.range"] - }, - "tsrange": { - "annotation": "asyncpg.types.Range[datetime.datetime]", - "import": ["psycopg.types.range.Range", "datetime"] - }, - "tstzrange": { - "annotation": "psycopg.types.range.Range[datetime.datetime]", - "import": 
["psycopg.types.range", "datetime"] - }, - "daterange": { - "annotation": "asyncpg.types.Range[datetime.date]", - "import": ["psycopg.types.range", "datetime"] - }, - "int8range": { - "annotation": "psycopg.types.range.Range[int]", - "import": ["psycopg.types.range"] - } - } - } -} diff --git a/codegen/templates/typescript:postgres/config.json b/codegen/templates/typescript:postgres/config.json deleted file mode 100644 index 56e73aa..0000000 --- a/codegen/templates/typescript:postgres/config.json +++ /dev/null @@ -1 +0,0 @@ -{ "extension": "ts", "model_dir_entrypoint": "models" } diff --git a/codegen/templates/typescript:postgres/model.jinja2 b/codegen/templates/typescript:postgres/model.jinja2 deleted file mode 100644 index ac41e7d..0000000 --- a/codegen/templates/typescript:postgres/model.jinja2 +++ /dev/null @@ -1,32 +0,0 @@ - -{%- if enums %} - -{%- endif %} -{%- for import in imports %} -{{import}} -{%- endfor %} -import type * as models from "./models.ts" - -{%- for enum in enums %} - -export enum {{enum.name | to_pascal_case }} { - {%- for value in enum.values %} - {{ value | to_screaming_snake_case }} = {{ value | to_c_string }}, - {%- endfor %} - {% endfor %} -} - -{%- for model in models %} - -export interface {{model.type.declaration}} { - {%- for field, type in model.fields %} - {{field | to_camel_case }}: {% if type.annotation | starts_with("models." + schema) -%} - {{ type.annotation | strip_prefix("models." + schema + ".") }} - {%- elif type.annotation | starts_with("models.") -%} - {{ type.annotation }}; - {%- else -%} - {{ type.annotation }}; - {%- endif %} - {%- endfor %} -} -{%- endfor %} diff --git a/codegen/templates/typescript:postgres/model_init.jinja2 b/codegen/templates/typescript:postgres/model_init.jinja2 deleted file mode 100644 index 3340b24..0000000 --- a/codegen/templates/typescript:postgres/model_init.jinja2 +++ /dev/null @@ -1,263 +0,0 @@ -{%- set reserved = ["abstract","arguments","await","boolean", -"break","byte","case","catch", -"char","class","const","continue", -"debugger","default","delete","do", -"double","else","enum","eval", -"export","extends","false","final", -"finally","float","for","function", -"goto","if","implements","import", -"in","instanceof","int","interface", -"let","long","native","new", -"null","package","private","protected", -"public","return","short","static", -"super","switch","synchronized","this", -"throw","throws","transient","true", -"try","typeof","var","void", -"volatile","while","with","yield"] -%} -{%- for module in model_modules -%} -{%- set source = module -%} -{%- if module in reserved -%} -{%- set module = "_" + module %} -{%- endif -%} -export type * as {{module}} from "./{{source}}.ts"; -{% endfor -%} - -export type * from "./public.ts"; - - -type Step = (cell: string) => any; - -function trimOuter(str: string, open: string, close: string) { - const s = str.trim(); - if (s.startsWith(open) && s.endsWith(close)) return s.slice(1, -1); - return s; -} - -function unquote(s: string): string { - const t = s.trim(); - if (t.length >= 2 && t.startsWith('"') && t.endsWith('"')) { - // Remove surrounding quotes and unescape \" and \\ (good enough for most PG cases) - return t - .slice(1, -1) - .replace(/\\(["\\])/g, "$1"); - } - return t; -} - -function splitTopLevel( - s: string, - separator: string, - { respectQuotes = true, parens = true, braces = true }: { - respectQuotes?: boolean; - parens?: boolean; - braces?: boolean; - } = {}, -): string[] { - const out: string[] = []; - let buf = ""; - let inQuotes = false; - let 
parenDepth = 0; - let braceDepth = 0; - - const flush = () => { - out.push(buf); - buf = ""; - }; - - for (let i = 0; i < s.length; i++) { - const ch = s[i]; - - if (respectQuotes && ch === '"' && s[i - 1] !== "\\") { - inQuotes = !inQuotes; - buf += ch; - continue; - } - if (!inQuotes) { - if (parens && (ch === "(" || ch === ")")) { - if (ch === "(") parenDepth++; - else parenDepth--; - buf += ch; - continue; - } - if (braces && (ch === "{" || ch === "}")) { - if (ch === "{") braceDepth++; - else braceDepth--; - buf += ch; - continue; - } - if (parenDepth === 0 && braceDepth === 0 && ch === separator) { - flush(); - continue; - } - } - buf += ch; - } - flush(); - return out.map((t) => t.trim()); -} - -function parsePgRowToCells(row: string): string[] { - const inner = trimOuter(row.trim(), "(", ")"); - if (inner === "") return []; - // Note: allow parentheses/braces in cells; split only at top-level commas - return splitTopLevel(inner, ","); -} - -function parsePgArrayToElements(arr: string): string[] { - const inner = trimOuter(arr.trim(), "{", "}"); - if (inner === "") return []; - // In arrays, elements can be quoted (including quoted rows "(...)") - return splitTopLevel(inner, ",", { - respectQuotes: true, - parens: true, - braces: true, - }); -} - -// ---- Scalar parsers ---- -function parseNumber(cell: string): number { - const t = cell.trim(); - if (t.toUpperCase() === "NULL" || t === "") return NaN; // choose your null policy - const q = unquote(t); - const v = Number(q); - if (Number.isNaN(v)) throw new Error(`Invalid number: ${cell}`); - return v; -} - -function parseString(cell: string): string | null { - const t = cell.trim(); - if (cell == "") return null as any; - return unquote(t).replaceAll(/""/g, '"'); -} - -function parseDate(cell: string): Date { - const t = unquote(cell.trim()); - const d = new Date(t); - if (Number.isNaN(d.getTime())) throw new Error(`Invalid date: ${cell}`); - return d; -} - -function parseBoolean(cell: string): boolean { - const t = unquote(cell.trim()); - if (!["t", "f"].includes(t)) { - throw new Error(`Invalid boolean: ${cell}. Expected "t" or "f".`); - } - return t == "t"; -} - -class ArrayParser { - constructor(readonly map: (_: string) => T) {} - parse(array: string): T[] { - const unquoted = unquote(array.trim()); - return parsePgArrayToElements(unquoted).map(this.map); - } - - arrayOfThis() { - return new ArrayParser((e) => this.parse(e)); - } -} - -export class RowParser { - private steps: Step[]; - private mapFun: (_: T) => V; - constructor(steps: Step[] = [], map?: (_: T) => V) { - this.steps = steps; - this.mapFun = map ?? 
((row: T) => row as unknown as V); - } - - number(): RowParser<[...T, number]> { - return new RowParser<[...T, number]>([...this.steps, parseNumber]); - } - - string(): RowParser<[...T, string]> { - return new RowParser<[...T, string]>([...this.steps, parseString]); - } - - date(): RowParser<[...T, Date]> { - return new RowParser<[...T, Date]>([...this.steps, parseDate]); - } - - boolean(): RowParser<[...T, boolean]> { - return new RowParser<[...T, boolean]>([...this.steps, parseBoolean]); - } - - row(sub: RowParser): RowParser<[...T, U]> { - const step: Step = (cell: string) => { - const raw = unquote(cell.trim()); // nested rows are often quoted inside rows/arrays - return sub.parse(raw); - }; - return new RowParser<[...T, U]>([...this.steps, step]); - } - - arrayOfNumber(): RowParser<[...T, number[]]> { - const step: Step = (cell: string) => { - return new ArrayParser(parseNumber).parse(cell); - }; - return new RowParser<[...T, number[]]>([...this.steps, step]); - } - - arrayOfDate(): RowParser<[...T, Date[]]> { - const step: Step = (cell: string) => { - return new ArrayParser(parseDate).parse(cell); - }; - return new RowParser<[...T, Date[]]>([...this.steps, step]); - } - - arrayOfRow(sub: RowParser): RowParser<[...T, U]> { - const step: Step = (cell: string) => { - // Each element is typically a quoted row string "(...)" - return sub.arrayOfThis().parse(cell); - }; - return new RowParser<[...T, U]>([...this.steps, step]); - } - - arrayOfThis(): ArrayParser { - return new ArrayParser((e) => this.parse(unquote(e))); - } - - parse(input: string): V { - const trimmed = input.trim(); - // Accept either full row "(a,b,...)" or a bare CSV (we’ll try row first) - const cells = trimmed.startsWith("(") - ? parsePgRowToCells(trimmed) - : splitTopLevel(trimmed, ","); - if (cells.length !== this.steps.length) { - throw new Error( - `Arity mismatch: expected ${this.steps.length} fields, got ${cells.length} (${ - JSON.stringify(cells) - })`, - ); - } - const out = this.steps.map((fn, i) => fn(cells[i])) as T; - return this.mapFun(out); - } - - map(fun: (_: V) => U): RowParser { - const newMap = (row: T) => fun(this.mapFun(row)); - return new RowParser(this.steps, newMap); - } -} - - -export const parser = { -{% for module_name, module in model_modules | items %} - {{module_name | to_camel_case }}: { - {% for model in module.classes %} - {{model.type.declaration | to_camel_case }}() { - return new RowParser() - {% for field_name, field_type in model.fields -%} - {% if field_type.annotation == 'Array' -%} - .arrayOfString() - {% elif field_type.annotation == 'Array' -%} - .arrayOfDate() - {% elif field_type.annotation == 'Array' -%} - .arrayOfDate() - {% else -%} - .{{field_type.constructor | to_camel_case }}() - {% endif -%} - {% endfor -%} - }, - {%- endfor %} - } -{% endfor %} -}; diff --git a/codegen/templates/typescript:postgres/parser.ts b/codegen/templates/typescript:postgres/parser.ts deleted file mode 100644 index 94e6c58..0000000 --- a/codegen/templates/typescript:postgres/parser.ts +++ /dev/null @@ -1,242 +0,0 @@ -import { PGlite } from "@electric-sql/pglite"; -const pg = new PGlite(); -type Step = (cell: string) => any; - -function trimOuter(str: string, open: string, close: string) { - const s = str.trim(); - if (s.startsWith(open) && s.endsWith(close)) return s.slice(1, -1); - return s; -} - -function unquote(s: string): string { - const t = s.trim(); - if (t.length >= 2 && t.startsWith('"') && t.endsWith('"')) { - // Remove surrounding quotes and unescape \" and \\ (good enough for most 
PG cases) - return t - .slice(1, -1) - .replace(/\\(["\\])/g, "$1"); - } - return t; -} - -function splitTopLevel( - s: string, - separator: string, - { respectQuotes = true, parens = true, braces = true }: { - respectQuotes?: boolean; - parens?: boolean; - braces?: boolean; - } = {}, -): string[] { - const out: string[] = []; - let buf = ""; - let inQuotes = false; - let parenDepth = 0; - let braceDepth = 0; - - const flush = () => { - out.push(buf); - buf = ""; - }; - - for (let i = 0; i < s.length; i++) { - const ch = s[i]; - - if (respectQuotes && ch === '"' && s[i - 1] !== "\\") { - inQuotes = !inQuotes; - buf += ch; - continue; - } - if (!inQuotes) { - if (parens && (ch === "(" || ch === ")")) { - if (ch === "(") parenDepth++; - else parenDepth--; - buf += ch; - continue; - } - if (braces && (ch === "{" || ch === "}")) { - if (ch === "{") braceDepth++; - else braceDepth--; - buf += ch; - continue; - } - if (parenDepth === 0 && braceDepth === 0 && ch === separator) { - flush(); - continue; - } - } - buf += ch; - } - flush(); - return out.map((t) => t.trim()); -} - -function parsePgRowToCells(row: string): string[] { - const inner = trimOuter(row.trim(), "(", ")"); - if (inner === "") return []; - // Note: allow parentheses/braces in cells; split only at top-level commas - return splitTopLevel(inner, ","); -} - -function parsePgArrayToElements(arr: string): string[] { - const inner = trimOuter(arr.trim(), "{", "}"); - if (inner === "") return []; - // In arrays, elements can be quoted (including quoted rows "(...)") - return splitTopLevel(inner, ",", { - respectQuotes: true, - parens: true, - braces: true, - }); -} - -// ---- Scalar parsers ---- -function parseNumber(cell: string): number { - const t = cell.trim(); - if (t.toUpperCase() === "NULL" || t === "") return NaN; // choose your null policy - const q = unquote(t); - const v = Number(q); - if (Number.isNaN(v)) throw new Error(`Invalid number: ${cell}`); - return v; -} - -function parseString(cell: string): string | null { - const t = cell.trim(); - if (cell == "") return null as any; - return unquote(t).replaceAll(/""/g, '"'); -} - -function parseDate(cell: string): Date { - const t = unquote(cell.trim()); - const d = new Date(t); - if (Number.isNaN(d.getTime())) throw new Error(`Invalid date: ${cell}`); - return d; -} - -function parseBoolean(cell: string): boolean { - const t = unquote(cell.trim()); - if (!["t", "f"].includes(t)) { - throw new Error(`Invalid boolean: ${cell}. Expected "t" or "f".`); - } - return t == "t"; -} - -class ArrayParser { - constructor(readonly map: (_: string) => T) {} - parse(array: string): T[] { - const unquoted = unquote(array.trim()); - return parsePgArrayToElements(unquoted).map(this.map); - } - - arrayOfThis() { - return new ArrayParser((e) => this.parse(e)); - } -} - -export class RowParser { - private steps: Step[]; - private mapFun: (_: T) => V; - constructor(steps: Step[] = [], map?: (_: T) => V) { - this.steps = steps; - this.mapFun = map ?? 
((row: T) => row as unknown as V); - } - - number(): RowParser<[...T, number]> { - return new RowParser<[...T, number]>([...this.steps, parseNumber]); - } - - string(): RowParser<[...T, string]> { - return new RowParser<[...T, string]>([...this.steps, parseString]); - } - - date(): RowParser<[...T, Date]> { - return new RowParser<[...T, Date]>([...this.steps, parseDate]); - } - - boolean(): RowParser<[...T, boolean]> { - return new RowParser<[...T, boolean]>([...this.steps, parseBoolean]); - } - - row(sub: RowParser): RowParser<[...T, U]> { - const step: Step = (cell: string) => { - const raw = unquote(cell.trim()); // nested rows are often quoted inside rows/arrays - return sub.parse(raw); - }; - return new RowParser<[...T, U]>([...this.steps, step]); - } - - arrayOfNumber(): RowParser<[...T, number[]]> { - const step: Step = (cell: string) => { - return new ArrayParser(parseNumber).parse(cell); - }; - return new RowParser<[...T, number[]]>([...this.steps, step]); - } - - arrayOfDate(): RowParser<[...T, Date[]]> { - const step: Step = (cell: string) => { - return new ArrayParser(parseDate).parse(cell); - }; - return new RowParser<[...T, Date[]]>([...this.steps, step]); - } - - arrayOfRow(sub: RowParser): RowParser<[...T, U]> { - const step: Step = (cell: string) => { - // Each element is typically a quoted row string "(...)" - return sub.arrayOfThis().parse(cell); - }; - return new RowParser<[...T, U]>([...this.steps, step]); - } - - arrayOfThis(): ArrayParser { - return new ArrayParser((e) => this.parse(unquote(e))); - } - - parse(input: string): V { - const trimmed = input.trim(); - // Accept either full row "(a,b,...)" or a bare CSV (we’ll try row first) - const cells = trimmed.startsWith("(") - ? parsePgRowToCells(trimmed) - : splitTopLevel(trimmed, ","); - if (cells.length !== this.steps.length) { - throw new Error( - `Arity mismatch: expected ${this.steps.length} fields, got ${cells.length} (${ - JSON.stringify(cells) - })`, - ); - } - const out = this.steps.map((fn, i) => fn(cells[i])) as T; - return this.mapFun(out); - } - - map(fun: (_: V) => U): RowParser { - const newMap = (row: T) => fun(this.mapFun(row)); - return new RowParser(this.steps, newMap); - } -} - -const authorParser = new RowParser() - .number() - .string() - .string() - .map(([id, firstName, lastName]) => ({ - id, - firstName, - lastName, - })); - -const parser = { - author: new RowParser() - .number() - .string() - .string() - .map(([id, firstName, lastName]) => ({ - id, - firstName, - lastName, - })), -}; - -const { rows } = await pg.query( - `select array[row(true, 1, null, 'asd""')] as row`, -); - -authorParser.arrayOfThis().parse(rows[0].row); diff --git a/codegen/templates/typescript:postgres/query.jinja2 b/codegen/templates/typescript:postgres/query.jinja2 deleted file mode 100644 index d70ded3..0000000 --- a/codegen/templates/typescript:postgres/query.jinja2 +++ /dev/null @@ -1,150 +0,0 @@ -/* This file was automatically generated by pgc */ -{%- for import in imports %} -{{import}}; -{%- endfor %} -import postgres from "postgres"; -import type * as models from "{{request.config.codegen.options.import_path | default(request.config.codegen.out)}}/models/models.ts"; -{%- for subnamespace in query_namespace.subnamespaces %} -import * as {{subnamespace}} from "./{{subnamespace}}.ts" -{%- endfor %} - -{%- for method in query_namespace.methods %} - -export const {{ method.query.name | to_screaming_snake_case }} = ` -{{ method.query.query }} -` -{%- endfor %} -{{"\n"}} -{%- for method in query_namespace.methods %} 
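
For reference, a minimal sketch of how the parser map emitted by the model_init template above would be consumed. It assumes a public.author(id uuid, name text, birthday date) table, that the driver hands the composite column back in its Postgres text form, and an illustrative import path; per the template, parser.public.author() chains .string().string().date(), so parse() yields a tuple of column values rather than an object.

    import postgres from "postgres";
    // Generated models entrypoint (path is an assumption).
    import { parser } from "./queries/models/models.ts";

    const sql = postgres("postgres://localhost:5432/library");

    // `select author from author` returns the composite as a row literal,
    // e.g. `(9f1c...,"Jane Austen",1775-12-16)`; the generated RowParser
    // chain splits the cells and converts each one.
    const rows = await sql.unsafe("select author from author limit 1", []);
    const [id, name, birthday] = parser.public.author().parse(rows[0].author);
    // id: string, name: string, birthday: Date
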
-{%- if method.output_model != None %} - -export interface {{method.output_model.type.declaration | to_pascal_case }} { - {%- for field, type in method.output_model.fields | items %} - {{field | to_camel_case }}: {{type.annotation}}; - {%- endfor %} -} - -{% endif %} -{%- for _, input_model in method.input_models | items %} -export interface {{ input_model.type.declaration | to_pascal_case }} { - {%- for field, type in input_model.fields | items %} - {{field | to_camel_case }}: {{type.annotation}} - {%- endfor %} -} -{% endfor %} -{%- endfor %} - - -export class {{ query_namespace.name | to_pascal_case }}Queries { - {%- for subnamespace in query_namespace.subnamespaces %} - {{subnamespace}}: {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries; - {%- endfor %} - - constructor(readonly sql: postgres.Sql,) { - {%- for subnamespace in query_namespace.subnamespaces %} - this.{{subnamespace}} = new {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(sql); - {%- endfor %} - } - {% for method in query_namespace.methods %} - {%- if method.query.annotations.not_null_result -%} - {%- set HANDLE_NONE = 'assert row is not None, "a query marked with @not_null_result has returned null."' %} - {%- set OR_NONE = '' %} - {% else %} - {%- set HANDLE_NONE = 'if (rows.length === 0) return null;' %} - {%- set OR_NONE = ' | null' %} - {%- endif %} - - {%- if method.query.command == 'one' %} - - {%- if method.query.output | length == 1 %} - async {{ method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} - {%- endfor -%} - ): Promise<{{method.output_type.annotation}}{{OR_NONE}}> { - const rows = await this.sql.unsafe( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}}{% if not loop.last %}, {% endif %} - {%- endfor %}], { prepare: true } - ); - {{HANDLE_NONE}} - return Object.values(rows[0])[0]; - } - {%- else %} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} - {%- endfor -%} - ): Promise<{{method.output_type.annotation}}{{OR_NONE}}> { - const rows = await this.sql.unsafe( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}}{% if not loop.last %}, {% endif %} - {%- endfor %}], { prepare: true } - ); - {{HANDLE_NONE}} - return rows[0] as {{method.output_type.annotation}}; - } - {%- endif %} - {%- elif method.query.command == 'many' %} - {%- if method.query.output | length == 1 %} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} - {%- endfor -%} - ): Promise> { - const rows = await this.sql.unsafe( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}}{% if not loop.last %}, {% endif %} - {%- endfor %}], { prepare: true } - ) - return rows.map(row => Object.values(row[0])[0] as {{method.output_type.annotation}}); - } - {%- else%} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} - {%- endfor -%} - ): Promise> { - const rows = await this.sql.unsafe( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - 
{{parameter.name}}{% if not loop.last %}, {% endif %} - {%- endfor %}], { prepare: true } - ); - return rows as Array<{{method.output_type.annotation}}>; - } - {%- endif %} - {%- elif method.query.command == 'val' %} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} - {%- endfor -%} - ): Promise<{{method.output_type.annotation}}{{OR_NONE}}> { - const rows = await this.sql.unsafe( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}}{% if not loop.last %}, {% endif %} - {%- endfor %}], { prepare: true } - ) - {{HANDLE_NONE}} - return Object.values(rows[0])[0]; - } - {%- else %} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type.annotation}}{% if not loop.last %}, {% endif %} - {%- endfor -%} - ) { - return await this.sql.unsafe( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}}{% if not loop.last %}, {% endif %} - {%- endfor %}], { prepare: true } - ); - } - {%- endif %} - {%- endfor %} -} diff --git a/codegen/templates/typescript:postgres/types.json b/codegen/templates/typescript:postgres/types.json deleted file mode 100644 index dc51f6c..0000000 --- a/codegen/templates/typescript:postgres/types.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "new_type_case": "{{ name | to_pascal_case }}", - "array": { - "constructor": "Array", - "annotation": "Array<{{type.annotation}}>" - }, - "null": { - "declaration": "{{type.declaration}}", - "constructor": "{{type.constructor}}", - "annotation": "{{type.annotation}} | null" - }, - "composite": { - "declaration": "{{ type_name | to_pascal_case }}", - "constructor": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "annotation": "models.{{ type_schema | to_snake_case }}.{{ type_name | to_pascal_case }}", - "import": [] - }, - "wildcard": { - "name": "any" - }, - "schema": { - "pg_catalog": { - "bool": { "name": "boolean" }, - "bytea": { "name": "bytes" }, - "char": { "name": "string" }, - "int8": { "name": "number" }, - "int2": { "name": "number" }, - "int4": { "name": "number" }, - "text": { "name": "string" }, - "json": { "name": "any" }, - "point": { "name": "string" }, - "box": { "name": "string" }, - "polygon": { "name": "string" }, - "line": { "name": "string" }, - - "float4": { "name": "number" }, - "float8": { "name": "number" }, - "unknown": { "name": "unknown" }, - "circle": { "name": "string" }, - "varchar": { "name": "string" }, - "date": { "name": "Date" }, - "time": { "name": "string" }, - "timestamp": { "name": "Date" }, - "timestamptz": { "name": "Date" }, - "interval": { "name": "string" }, - "timetz": { "name": "string" }, - "numeric": { "name": "string" }, - "record": { "name": "string" }, - "any": { "name": "any" }, - "anyarray": { "name": "any[]" }, - "anyelement": { "name": "any" }, - "anynonarray": { "name": "any" }, - "uuid": { "name": "string" }, - "anyenum": { "name": "string" }, - "anyrange": { "name": "string" }, - "jsonb": { "name": "any" }, - "int4range": { "name": "string" }, - "numrange": { "name": "string" }, - "tsrange": { "name": "string" }, - "tstzrange": { "name": "string" }, - "daterange": { "name": "string" }, - "int8range": { "name": "string" } - } - } -} diff --git a/codegen/tests/request.json b/codegen/tests/request.json new file mode 100644 index 0000000..6e301bb --- /dev/null 
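
Likewise, a small sketch of how the class emitted by the deleted query template above would be called. The root namespace renders as a bare Queries class, the method and sub-namespace names (get_by_id, count, book) come from the test fixture that follows, and the import path, connection string, and uuid arguments are placeholders.

    import postgres from "postgres";
    // Generated root query module (path is an assumption).
    import { Queries } from "./queries/query.ts";

    const sql = postgres("postgres://localhost:5432/library");
    const queries = new Queries(sql);

    // A :one query with a single output column resolves to that column's value, or null.
    const author = await queries.get_by_id("7b6a0c1e-0000-0000-0000-000000000000");
    // A :val query resolves its scalar the same way.
    const total = await queries.count();
    // Queries marked with @namespace land on a nested class instance.
    const book = await queries.book.get_by_id("91af34d2-0000-0000-0000-000000000000");
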
+++ b/codegen/tests/request.json @@ -0,0 +1,341 @@ +{ + "catalog": { + "schemas": [ + { + "name": "public", + "enums": [ + { + "name": "genre", + "values": [ + "comedy", + "drama", + "science fiction", + "fantasy", + "biography" + ] + } + ], + "records": [ + { + "kind": "table", + "name": "author", + "columns": [ + { + "name": "id", + "type": { + "name": "uuid", + "display": "uuid", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": "gen_random_uuid()", + "is_unique": false, + "is_nullable": false, + "is_foreign_key": false, + "is_primary_key": true, + "foreign_table_name": null, + "foreign_table_schema": null + }, + { + "name": "name", + "type": { + "name": "text", + "display": "text", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": null, + "is_unique": false, + "is_nullable": false, + "is_foreign_key": false, + "is_primary_key": false, + "foreign_table_name": null, + "foreign_table_schema": null + }, + { + "name": "birthday", + "type": { + "name": "date", + "display": "date", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": null, + "is_unique": false, + "is_nullable": true, + "is_foreign_key": false, + "is_primary_key": false, + "foreign_table_name": null, + "foreign_table_schema": null + } + ] + }, + { + "kind": "table", + "name": "book", + "columns": [ + { + "name": "id", + "type": { + "name": "uuid", + "display": "uuid", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": "gen_random_uuid()", + "is_unique": false, + "is_nullable": false, + "is_foreign_key": false, + "is_primary_key": true, + "foreign_table_name": null, + "foreign_table_schema": null + }, + { + "name": "title", + "type": { + "name": "text", + "display": "text", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": null, + "is_unique": false, + "is_nullable": false, + "is_foreign_key": false, + "is_primary_key": false, + "foreign_table_name": null, + "foreign_table_schema": null + }, + { + "name": "author_id", + "type": { + "name": "uuid", + "display": "uuid", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": null, + "is_unique": false, + "is_nullable": false, + "is_foreign_key": true, + "is_primary_key": false, + "foreign_table_name": "author", + "foreign_table_schema": "public" + }, + { + "name": "year", + "type": { + "name": "int4", + "display": "integer", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": null, + "is_unique": false, + "is_nullable": false, + "is_foreign_key": false, + "is_primary_key": false, + "foreign_table_name": null, + "foreign_table_schema": null + }, + { + "name": "isbn", + "type": { + "name": "text", + "display": "text", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": null, + "is_unique": true, + "is_nullable": false, + "is_foreign_key": false, + "is_primary_key": false, + "foreign_table_name": null, + "foreign_table_schema": null + }, + { + "name": "is_best_seller", + "type": { + "name": "bool", + "display": "boolean", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": "false", + 
"is_unique": false, + "is_nullable": true, + "is_foreign_key": false, + "is_primary_key": false, + "foreign_table_name": null, + "foreign_table_schema": null + }, + { + "name": "genre", + "type": { + "name": "text", + "display": "text", + "is_array": false, + "schema_name": "pg_catalog", + "is_composite": false, + "array_dimensions": 0 + }, + "default": null, + "is_unique": false, + "is_nullable": false, + "is_foreign_key": true, + "is_primary_key": false, + "foreign_table_name": "genre", + "foreign_table_schema": "public" + } + ] + } + ] + } + ] + }, + "queries": [ + { + "query": "insert into author (\n id, name, birthday\n) values (\n $1, $2, $3\n) on conflict (id)\ndo update set\n id = $1,\n name = $2,\n birthday = $3\nreturning author;", + "name": "upsert", + "command": "one", + "path": "author.sql", + "annotations": { + "name": { "value": "upsert :one", "line": 2 }, + "not_null_result": { "value": "", "line": 3 } + }, + "output": [ + { + "name": "author", + "type": { "schema": "public", "name": "author", "id": 16386 } + } + ], + "parameters": [ + { + "name": "author.id", + "not_null": true, + "type": { "schema": "pg_catalog", "name": "uuid", "id": 2950 } + }, + { + "name": "author.name", + "not_null": true, + "type": { "schema": "pg_catalog", "name": "text", "id": 25 } + }, + { + "name": "author.birthday", + "not_null": true, + "type": { "schema": "pg_catalog", "name": "date", "id": 1082 } + } + ] + }, + { + "query": "select author from author where id = $1::uuid;", + "name": "get_by_id", + "command": "one", + "path": "author.sql", + "annotations": { "name": { "value": "get_by_id :one", "line": 15 } }, + "output": [ + { + "name": "author", + "type": { "schema": "public", "name": "author", "id": 16386 } + } + ], + "parameters": [ + { + "name": "id", + "not_null": true, + "type": { "schema": "pg_catalog", "name": "uuid", "id": 2950 } + } + ] + }, + { + "query": "select author, array_agg(book) as books\nfrom author\njoin book on author.id = book.author_id\ngroup by author.id;", + "name": "get_all_with_books", + "command": "many", + "path": "author.sql", + "annotations": { + "name": { "value": "get_all_with_books :many", "line": 18 } + }, + "output": [ + { + "name": "author", + "type": { "schema": "public", "name": "author", "id": 16386 } + }, + { + "name": "books", + "type": { "schema": "public", "name": "_book", "id": 16400 } + } + ], + "parameters": [] + }, + { + "query": "select count(*) from author;", + "name": "count", + "command": "val", + "path": "author.sql", + "annotations": { "name": { "value": "count :val", "line": 24 } }, + "output": [ + { + "name": "count", + "type": { "schema": "pg_catalog", "name": "int8", "id": 20 } + } + ], + "parameters": [] + }, + { + "query": "select book from book where id = $1;", + "name": "get_by_id", + "command": "one", + "path": "author.sql", + "annotations": { + "name": { "value": "get_by_id :one", "line": 28 }, + "namespace": { "value": "book", "line": 29 } + }, + "output": [ + { + "name": "book", + "type": { "schema": "public", "name": "book", "id": 16401 } + } + ], + "parameters": [ + { + "name": "id", + "not_null": true, + "type": { "schema": "pg_catalog", "name": "uuid", "id": 2950 } + } + ] + } + ], + "config": { + "version": "1", + "queries": ["book.sql", "author.sql", "queries.sql"], + "disable_cache": false, + "database": { "migrations": ["schema.sql"] }, + "codegen": { + "out": "./queries", + "language": "python", + "driver": "psycopg", + "types": { "pg_catalog.uuid": { "annotation": "str" } }, + "options": { "package": "queries" }, + 
"enums": ["genre"] + } + } +} diff --git a/pgc.yaml b/pgc.yaml index 464d49b..ca4b11c 100644 --- a/pgc.yaml +++ b/pgc.yaml @@ -9,7 +9,8 @@ queries: - "author.sql" - "queries.sql" codegen: - target: typescript:postgres + language: python # typescript + driver: psycopg out: ./queries types: diff --git a/src/build/build.service.ts b/src/build/build.service.ts index 9c04a4d..3c76926 100644 --- a/src/build/build.service.ts +++ b/src/build/build.service.ts @@ -52,7 +52,7 @@ export class BuildService { queries: await this.getQueries(), config: this.configService.config, }; - + console.log(payload); await this.codegenService.generate(payload); } diff --git a/src/codegen/codegen.service.ts b/src/codegen/codegen.service.ts index 4097565..25ae35f 100644 --- a/src/codegen/codegen.service.ts +++ b/src/codegen/codegen.service.ts @@ -49,6 +49,7 @@ export class CodegenService { } async runWasmCodegenModule(payload: object) { + Deno.writeTextFile("catalog.json", JSON.stringify(payload)); const utf8JsonPayload = await this.serializePayload(payload); const { instance } = await this.loadPlugin(); const exports = instance.exports as any; diff --git a/src/config/config.types.ts b/src/config/config.types.ts index 319e698..26fd100 100644 --- a/src/config/config.types.ts +++ b/src/config/config.types.ts @@ -34,7 +34,8 @@ const EnumOptions = z.string().or(z.record(z.string(), z.string().array())); const CodegenConfig = z.object({ out: z.string(), - target: z.string(), + language: z.string(), + driver: z.string(), plugin: PluginConfig.optional().nullable(), types: z.record(z.string(), TypeOverride).optional().nullable(), exclude_tables: z.string().array().optional().nullable(), diff --git a/src/main.ts b/src/main.ts index 87d7ea6..825a1ad 100644 --- a/src/main.ts +++ b/src/main.ts @@ -18,6 +18,7 @@ program.command("build").description( buildService = await BuildService.fromConfig(configService); await buildService.build(); } catch (e) { + console.log(e); console.log("error:", (e as Error).message); } finally { await buildService?.close(); diff --git a/src/schema_service/enum.service.ts b/src/schema_service/enum.service.ts index c887191..976b47e 100644 --- a/src/schema_service/enum.service.ts +++ b/src/schema_service/enum.service.ts @@ -47,7 +47,7 @@ export class EnumService { if (!schema) return; schema.enums.push({ name, values }); - schema.models = schema.models.filter((table) => table.name != name); + schema.records = schema.records.filter((table) => table.name != name); } } diff --git a/src/schema_service/excluder.service.ts b/src/schema_service/excluder.service.ts index 965a73a..735aa25 100644 --- a/src/schema_service/excluder.service.ts +++ b/src/schema_service/excluder.service.ts @@ -14,7 +14,7 @@ export class ExcluderService { ); if (!schema) return; - schema.models = schema.models.filter((table) => table.name != name); + schema.records = schema.records.filter((table) => table.name != name); } } } diff --git a/src/schema_service/schema.service.ts b/src/schema_service/schema.service.ts index 3ef8be0..3d33ee8 100644 --- a/src/schema_service/schema.service.ts +++ b/src/schema_service/schema.service.ts @@ -130,7 +130,7 @@ const LOAD_SCHEMA_QUERY = ` FROM enums WHERE enums.enum_schema = schemas.schema_name ), - 'models', ( + 'records', ( SELECT jsonb_agg( jsonb_build_object( 'name', table_name, diff --git a/src/schema_service/schema.types.ts b/src/schema_service/schema.types.ts index aefeecc..111492e 100644 --- a/src/schema_service/schema.types.ts +++ b/src/schema_service/schema.types.ts @@ -5,7 +5,7 @@ export 
interface Catalog { export interface Schema { name: string; enums: Enum[]; - models: Table[]; + records: Table[]; } export interface Enum { From 31d7a95d8e04fb8664c127dbe252983a60beecca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Wed, 27 Aug 2025 18:03:19 -0400 Subject: [PATCH 07/10] add typescript language --- codegen/src/ir/method_service.rs | 18 +- codegen/src/ir/query_namespace/method/mod.rs | 6 +- codegen/src/ir/type.rs | 20 +- codegen/src/ir/type_service.rs | 44 ++- codegen/src/presentation/environment.rs | 10 +- .../presentation/file_generation_config.rs | 9 +- codegen/src/presentation/mod.rs | 20 +- codegen/src/presentation/python/mod.rs | 6 +- .../src/presentation/templating_service.rs | 25 +- codegen/src/presentation/typescript/driver.rs | 5 - codegen/src/presentation/typescript/mod.rs | 51 ++- .../typescript/templates/parsers.ts | 268 ++++++++++++++ .../typescript/templates/postgres/model.j2 | 29 ++ .../templates/postgres/model_init.j2 | 5 + .../typescript/templates/postgres/query.j2 | 176 +++++++++ .../typescript/type_map_service.rs | 119 ++++++ .../typescript/typescript_type.rs | 6 - codegen/tests/request.json | 342 +----------------- pgc.yaml | 4 +- schema.sql | 2 +- src/query_collector/query_parser.service.ts | 4 +- 21 files changed, 756 insertions(+), 413 deletions(-) delete mode 100644 codegen/src/presentation/typescript/driver.rs create mode 100644 codegen/src/presentation/typescript/templates/parsers.ts create mode 100644 codegen/src/presentation/typescript/templates/postgres/model.j2 create mode 100644 codegen/src/presentation/typescript/templates/postgres/model_init.j2 create mode 100644 codegen/src/presentation/typescript/templates/postgres/query.j2 create mode 100644 codegen/src/presentation/typescript/type_map_service.rs delete mode 100644 codegen/src/presentation/typescript/typescript_type.rs diff --git a/codegen/src/ir/method_service.rs b/codegen/src/ir/method_service.rs index a826373..2553251 100644 --- a/codegen/src/ir/method_service.rs +++ b/codegen/src/ir/method_service.rs @@ -34,6 +34,7 @@ impl MethodService { input_models: take(&mut self.input_models), output_type: self.output_type(query), output_model: self.output_model(query), + output_columns: self.output_columns(query), } } @@ -100,18 +101,21 @@ impl MethodService { if query.output.len() < 2 { return None; } - let columns = query + + Some(MethodModel { + r#type: self.output_type(query)?, + fields: self.output_columns(query), + }) + } + + fn output_columns(&self, query: &Query) -> IndexMap, Type> { + query .output .iter() .map(|column| { let type_ = self.type_service.resolve_from_output(&column.type_); (column.name.clone(), type_) }) - .collect(); - - Some(MethodModel { - r#type: self.output_type(query)?, - fields: columns, - }) + .collect() } } diff --git a/codegen/src/ir/query_namespace/method/mod.rs b/codegen/src/ir/query_namespace/method/mod.rs index 7cac8f5..daf672b 100644 --- a/codegen/src/ir/query_namespace/method/mod.rs +++ b/codegen/src/ir/query_namespace/method/mod.rs @@ -1,7 +1,4 @@ -use std::{ - collections::BTreeMap, - sync::Arc, -}; +use std::{collections::BTreeMap, sync::Arc}; use indexmap::IndexMap; use serde::{Deserialize, Serialize}; @@ -15,6 +12,7 @@ pub struct Method { pub input_models: BTreeMap, MethodModel>, pub output_type: Option, pub output_model: Option, + pub output_columns: IndexMap, Type>, } #[derive(Deserialize, Serialize, Clone, Debug)] diff --git a/codegen/src/ir/type.rs b/codegen/src/ir/type.rs index c313561..2145992 100644 --- 
a/codegen/src/ir/type.rs +++ b/codegen/src/ir/type.rs @@ -4,7 +4,7 @@ use minijinja::value::{Enumerator, Object, ObjectRepr}; use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Debug, Serialize, Deserialize)] -#[serde(tag = "t", content = "c")] +#[serde(tag = "variant", content = "c")] pub enum Type { // A type not matching any of these Other { @@ -200,17 +200,21 @@ impl Type { ]; } -impl Object for Type { - fn repr(self: &Arc) -> minijinja::value::ObjectRepr { - ObjectRepr::Plain +impl Type { + pub fn from_jinja(value: minijinja::Value) -> Self { + let deserializer = serde::de::value::MapDeserializer::new( + value.as_object().unwrap().try_iter_pairs().unwrap(), + ); + Type::deserialize(deserializer).unwrap() } - - // fn enumerate(self: &Arc) -> minijinja::value::Enumerator { - // Enumerator::Str(&[]) - // } } #[test] fn array_is_sorted() { assert!(Type::NAMES.is_sorted()) } +#[test] +fn type_from_jinja() { + let value = minijinja::Value::from_serialize(Type::Polygon); + assert_eq!(Type::from_jinja(value), Type::Polygon) +} diff --git a/codegen/src/ir/type_service.rs b/codegen/src/ir/type_service.rs index e37ec87..731cb7f 100644 --- a/codegen/src/ir/type_service.rs +++ b/codegen/src/ir/type_service.rs @@ -49,11 +49,33 @@ impl TypeService { } fn resolve_from_catalog(&self, schema_name: &Arc, name: &Arc) -> Type { + if let Some(ty) = self.resolve_from_catalog_non_array(schema_name, name) { + return ty; + } + + let Some(name) = name.strip_prefix('_') else { + return Type::Any; + }; + + let r#type = self + .resolve_from_catalog_non_array(schema_name, &name.into()) + .unwrap_or(Type::Any); + + Type::Array { + r#type: Arc::new(r#type), + dim: 1, + } + } + + fn resolve_from_catalog_non_array( + &self, + schema_name: &Arc, + name: &Arc, + ) -> Option { if &**schema_name == "pg_catalog" { return self.from_pg_catalog(&name); } self.from_user_defined_catalog(schema_name, name) - .unwrap_or(Type::Any) } fn from_user_defined_catalog(&self, schema_name: &Arc, name: &Arc) -> Option { @@ -81,20 +103,19 @@ impl TypeService { } } + fn from_pg_catalog(&self, type_name: &str) -> Option { + let index = Type::NAMES + .binary_search_by(|(name, _, _)| name.cmp(&type_name)) + .ok()?; + Some(Type::NAMES[index].2.clone()) + } + fn get_schema(&self, schema_name: &str) -> Option<&Schema> { self.catalog .schemas .iter() .find(|schema| &*schema.name == schema_name) } - - fn from_pg_catalog(&self, type_name: &str) -> Type { - Type::NAMES - .iter() - .find(|(name, _, _)| *name == type_name) - .map(|(_, _, ty)| ty.clone()) - .unwrap_or(Type::Any) - } } #[cfg(test)] @@ -133,6 +154,9 @@ mod test { #[test] fn type_service_from_pg_catalog() { let type_service = type_service(); - assert_eq!(type_service.from_pg_catalog("int4range"), Type::Int4Range) + assert_eq!( + type_service.from_pg_catalog("int4range"), + Some(Type::Int4Range) + ) } } diff --git a/codegen/src/presentation/environment.rs b/codegen/src/presentation/environment.rs index 78b9bcb..86173d0 100644 --- a/codegen/src/presentation/environment.rs +++ b/codegen/src/presentation/environment.rs @@ -19,9 +19,13 @@ use crate::{ }, }; -pub fn env(ir: Ir, config: TemplateGenConfig) -> Result, Error> { +pub fn env(ir: Ir, config: &TemplateGenConfig) -> Result, Error> { let mut env = minijinja::Environment::new(); + if let Some(custom_filters) = config.register_filters { + custom_filters(&mut env)?; + } + add_templates(&mut env, config)?; add_string_filters(&mut env); add_type_filters(&mut env, ir, config); @@ -29,7 +33,7 @@ pub fn env(ir: Ir, 
config: TemplateGenConfig) -> Result, Er Ok(env) } -pub fn add_type_filters(env: &mut Environment<'static>, ir: Ir, config: TemplateGenConfig) { +pub fn add_type_filters(env: &mut Environment<'static>, ir: Ir, config: &TemplateGenConfig) { let service = Arc::new(OverriddenTypeMapService::new(ir, config.type_map_service)); let service_ = service.clone(); @@ -64,7 +68,7 @@ pub fn add_type_filters(env: &mut Environment<'static>, ir: Ir, config: Template pub fn add_templates( env: &mut Environment<'static>, - config: TemplateGenConfig, + config: &TemplateGenConfig, ) -> Result<(), Error> { env.add_template("query", config.query_template)?; env.add_template("model", config.model_template)?; diff --git a/codegen/src/presentation/file_generation_config.rs b/codegen/src/presentation/file_generation_config.rs index 1772600..057ed2c 100644 --- a/codegen/src/presentation/file_generation_config.rs +++ b/codegen/src/presentation/file_generation_config.rs @@ -1,6 +1,8 @@ -use crate::{presentation::type_mapping_service::TypeMapService, response::File}; +use minijinja::Environment; -#[derive(Clone, Copy)] +use crate::{error::Error, presentation::type_mapping_service::TypeMapService, response::File}; + +#[derive(Clone)] pub struct TemplateGenConfig { pub query_directory_entrypoint: &'static str, pub model_directory_entrypoint: &'static str, @@ -9,5 +11,6 @@ pub struct TemplateGenConfig { pub model_template: &'static str, pub model_init_template: &'static str, pub type_map_service: &'static dyn TypeMapService, - pub static_files: &'static [File], + pub other_templates: Vec, + pub register_filters: Option Result<(), Error>>, } diff --git a/codegen/src/presentation/mod.rs b/codegen/src/presentation/mod.rs index aff4022..bc1b631 100644 --- a/codegen/src/presentation/mod.rs +++ b/codegen/src/presentation/mod.rs @@ -20,7 +20,7 @@ pub struct PresentationService { } trait FileGeneratorService { - fn generate(&self) -> Result, Error>; + fn generate(&mut self) -> Result, Error>; } impl PresentationService { @@ -36,23 +36,13 @@ impl PresentationService { let config = match (&*language, &*driver) { ("python", "asyncpg") => python::asyncpg(&self.ir)?, ("python", "psycopg") => python::psycopg(&self.ir)?, - ("python", _) => return Err(Error::UnsupportedLanguage(language)), + ("typescript", "postgres") => typescript::postgres(), + ("python" | "typescript", _) => { + return Err(Error::UnsupportedDriver { language, driver }); + } _ => return Err(Error::UnsupportedLanguage(language)), }; TemplatingService::new(self.ir.clone(), config) } - - pub fn type_map_service(&self) -> Result<&'static dyn TypeMapService, Error> { - let Codegen { - language, driver, .. 
- } = self.ir.request.config.codegen.clone(); - - match (&*language, &*driver) { - ("python", "asyncpg") => Ok(&python::AsyncpgTypeMapService), - ("python", "psycopg") => Ok(&python::PsycopgTypeMapService), - ("python", _) => return Err(Error::UnsupportedLanguage(language)), - _ => return Err(Error::UnsupportedLanguage(language)), - } - } } diff --git a/codegen/src/presentation/python/mod.rs b/codegen/src/presentation/python/mod.rs index 038eeb7..24d1156 100644 --- a/codegen/src/presentation/python/mod.rs +++ b/codegen/src/presentation/python/mod.rs @@ -15,7 +15,8 @@ pub fn asyncpg(ir: &Ir) -> Result { model_template: include_str!("./templates/asyncpg/model.j2"), model_init_template: include_str!("./templates/asyncpg/model_init.j2"), type_map_service: &AsyncpgTypeMapService, - static_files: &[], + other_templates: vec![], + register_filters: None, }) } @@ -29,7 +30,8 @@ pub fn psycopg(ir: &Ir) -> Result { model_template: include_str!("./templates/psycopg/model.j2"), model_init_template: include_str!("./templates/psycopg/model_init.j2"), type_map_service: &PsycopgTypeMapService, - static_files: &[], + other_templates: vec![], + register_filters: None, }) } diff --git a/codegen/src/presentation/templating_service.rs b/codegen/src/presentation/templating_service.rs index 295c4fc..3c374b8 100644 --- a/codegen/src/presentation/templating_service.rs +++ b/codegen/src/presentation/templating_service.rs @@ -1,4 +1,4 @@ -use std::sync::Arc; +use std::{mem::take, sync::Arc}; use minijinja::{Environment, context}; @@ -18,9 +18,10 @@ pub struct TemplatingService { } impl FileGeneratorService for TemplatingService { - fn generate(&self) -> Result, Error> { + fn generate(&mut self) -> Result, Error> { let mut files = self.model_module_files()?; self.add_query_files(&mut files)?; + self.include_other_templates(&mut files)?; files.push(self.add_model_entrypoint()?); return Ok(files); } @@ -28,7 +29,7 @@ impl FileGeneratorService for TemplatingService { impl TemplatingService { pub fn new(ir: Ir, config: TemplateGenConfig) -> Result { - let environment = env(ir.clone(), config)?; + let environment = env(ir.clone(), &config)?; Ok(TemplatingService { ir, @@ -37,6 +38,24 @@ impl TemplatingService { }) } + fn include_other_templates(&self, files: &mut Vec) -> Result<(), Error> { + for file in &self.config.other_templates { + let content = self.environment.render_named_str( + &file.path, + &file.content, + context! 
{ + ir => self.ir + }, + )?; + + files.push(File { + path: file.path.clone(), + content: content, + }); + } + Ok(()) + } + fn model_module_files(&self) -> Result, Error> { let mut files = vec![]; for module in self.ir.model_modules.model_modules.values() { diff --git a/codegen/src/presentation/typescript/driver.rs b/codegen/src/presentation/typescript/driver.rs deleted file mode 100644 index 6caa197..0000000 --- a/codegen/src/presentation/typescript/driver.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub enum TypescriptDriver { - PGlite, - Postgres, - Pg, -} diff --git a/codegen/src/presentation/typescript/mod.rs b/codegen/src/presentation/typescript/mod.rs index 6723df8..2864797 100644 --- a/codegen/src/presentation/typescript/mod.rs +++ b/codegen/src/presentation/typescript/mod.rs @@ -1 +1,50 @@ -pub(super) mod driver; +use minijinja::{Environment, Value}; + +use crate::{ + error::Error, + ir::Type, + presentation::{ + file_generation_config::TemplateGenConfig, + typescript::type_map_service::TypescriptTypeMapService, + }, + response::File, +}; + +mod type_map_service; + +pub fn postgres() -> TemplateGenConfig { + TemplateGenConfig { + query_directory_entrypoint: "queries.ts", + model_directory_entrypoint: "models.ts", + file_extension: "ts", + query_template: include_str!("./templates/postgres/query.j2"), + model_template: include_str!("./templates/postgres/model.j2"), + model_init_template: include_str!("./templates/postgres/model_init.j2"), + type_map_service: &TypescriptTypeMapService, + other_templates: vec![File { + path: "parsers.ts".into(), + content: include_str!("./templates/parsers.ts").into(), + }], + register_filters: Some(register_filters), + } +} + +fn register_filters(env: &mut Environment) -> Result<(), Error> { + env.add_filter("is_nullable", move |ty: Value| -> bool { + matches!(Type::from_jinja(ty), Type::Nullable(_)) + }); + + env.add_filter("type_parser", |value: Value| { + TypescriptTypeMapService.type_parser(Type::from_jinja(value)) + }); + + env.add_filter("is_user_defined", |value: Value| -> bool { + matches!(Type::from_jinja(value), Type::UserDefined { .. 
}) + }); + + env.add_filter("requires_parsing", |value: Value| -> bool { + TypescriptTypeMapService.column_requires_parser(Type::from_jinja(value)) + }); + + Ok(()) +} diff --git a/codegen/src/presentation/typescript/templates/parsers.ts b/codegen/src/presentation/typescript/templates/parsers.ts new file mode 100644 index 0000000..6deeffc --- /dev/null +++ b/codegen/src/presentation/typescript/templates/parsers.ts @@ -0,0 +1,268 @@ +// This file was automatically generated by pgc +// run `pgc build` to regenerate it +// {%- set options = ir.request.config.codegen.options %} +// {%- if options and options == true %} +import { Buffer } from "node:buffer"; +// {%- endif %} +import * as models from "./models/models.ts"; +type Step = (cell: string) => any; + +function trimOuter(str: string, open: string, close: string) { + const s = str.trim(); + if (s.startsWith(open) && s.endsWith(close)) return s.slice(1, -1); + return s; +} + +function unquote(s: string): string { + const t = s.trim(); + if (t.length >= 2 && t.startsWith('"') && t.endsWith('"')) { + // Remove surrounding quotes and unescape \" and \\ (good enough for most PG cases) + return t + .slice(1, -1) + .replace(/\\(["\\])/g, "$1"); + } + return t; +} + +function splitTopLevel( + s: string, + separator: string, + { respectQuotes = true, parens = true, braces = true }: { + respectQuotes?: boolean; + parens?: boolean; + braces?: boolean; + } = {}, +): string[] { + const out: string[] = []; + let buf = ""; + let inQuotes = false; + let parenDepth = 0; + let braceDepth = 0; + + const flush = () => { + out.push(buf); + buf = ""; + }; + + for (let i = 0; i < s.length; i++) { + const ch = s[i]; + + if (respectQuotes && ch === '"' && s[i - 1] !== "\\") { + inQuotes = !inQuotes; + buf += ch; + continue; + } + if (!inQuotes) { + if (parens && (ch === "(" || ch === ")")) { + if (ch === "(") parenDepth++; + else parenDepth--; + buf += ch; + continue; + } + if (braces && (ch === "{" || ch === "}")) { + if (ch === "{") braceDepth++; + else braceDepth--; + buf += ch; + continue; + } + if (parenDepth === 0 && braceDepth === 0 && ch === separator) { + flush(); + continue; + } + } + buf += ch; + } + flush(); + return out.map((t) => t.trim()); +} + +function parsePgRowToCells(row: string): string[] { + const inner = trimOuter(row.trim(), "(", ")"); + if (inner === "") return []; + // Note: allow parentheses/braces in cells; split only at top-level commas + return splitTopLevel(inner, ","); +} + +function parsePgArrayToElements(arr: string): string[] { + const inner = trimOuter(arr.trim(), "{", "}"); + if (inner === "") return []; + // In arrays, elements can be quoted (including quoted rows "(...)") + return splitTopLevel(inner, ",", { + respectQuotes: true, + parens: true, + braces: true, + }); +} + +export interface Parser { + parse(value: string): T; +} + +export class NullParser { + constructor(readonly subparser: Parser) {} + + parse(cell: string): T | null { + const t = cell.trim(); + if (t == "") return null; + return this.subparser.parse(cell); + } +} + +export class NumberParser { + parse(cell: string): number { + const t = cell.trim(); + const q = unquote(t); + const v = Number(q); + if (Number.isNaN(v)) throw new Error(`Invalid number: ${cell}`); + return v; + } +} + +export class BigIntParser { + parse(cell: string): bigint { + if (typeof cell != "string") return cell; + const t = cell.trim(); + const q = unquote(t); + const v = BigInt(q); + return v; + } +} + +export class StringParser { + parse(cell: string): string { + const t = 
cell.trim(); + return unquote(t).replaceAll(/""/g, '"'); + } +} + +export class DateParser { + parse(cell: string): Date { + const t = unquote(cell.trim()); + const d = new Date(t); + if (Number.isNaN(d.getTime())) throw new Error(`Invalid date: ${cell}`); + return d; + } +} + +export class BooleanParser { + parse(cell: string): boolean { + const t = unquote(cell.trim()); + if (!["t", "f"].includes(t)) { + throw new Error(`Invalid boolean: ${cell}. Expected "t" or "f".`); + } + return t == "t"; + } +} + +export class EnumParser { + parse(cell: string): T { + return new StringParser().parse(cell) as T; + } +} + +export class JsonParser { + parse(cell: string): any { + const t = unquote(cell); + return JSON.parse(t); + } +} + +export class BufferParser { + parse(cell: string): Buffer { + const t = unquote(cell); + return Buffer.from(t.replace(/^\\x/, ""), "hex"); + } +} + +export class ArrayParser { + constructor(readonly elementParser: Parser) {} + parse(array: string | Array): Array { + let stringArray; + + if (typeof array == "string") { + const unquoted = unquote(array.trim()); + stringArray = parsePgArrayToElements(unquoted); + } else { + stringArray = array; + } + + return stringArray.map((element) => this.elementParser.parse(element)); + } + + arrayOfThis() { + return new ArrayParser({ parse: (e) => this.parse(e) }); + } +} + +export class RowParser { + private steps: Parser[]; + private mapFun: (_: T) => V; + + constructor(steps: Parser[] = [], map?: (_: T) => V) { + this.steps = steps; + this.mapFun = map ?? ((row: T) => row as unknown as V); + } + + addColumnParser(parser: Parser): RowParser<[...T, U]> { + return new RowParser<[...T, U]>([...this.steps, parser]); + } + + parse(input: string): V { + input = unquote(input); + const trimmed = input.trim(); + // Accept either full row "(a,b,...)" or a bare CSV (we’ll try row first) + const cells = trimmed.startsWith("(") + ? 
parsePgRowToCells(trimmed) + : splitTopLevel(trimmed, ","); + if (cells.length !== this.steps.length) { + throw new Error( + `Arity mismatch: expected ${this.steps.length} fields, got ${cells.length} (${ + JSON.stringify(cells) + })`, + ); + } + const out = this.steps.map((parser, i) => parser.parse(cells[i])) as T; + return this.mapFun(out); + } + + map(fun: (_: V) => U): RowParser { + const newMap = (row: T) => fun(this.mapFun(row)); + return new RowParser(this.steps, newMap); + } +} + +/* parsers for custom types {{"*" + "/"}} +export const parser = { +{%- for module_name, module in ir.model_modules.model_modules | items %} +{%- if module_name in reserved -%} +{%- set module_name = "_" + module_name %} +{%- endif %} + {{module_name | to_camel_case }}: { +{%- for model in module.models %} + {{model.name | to_camel_case }}: () => { + return new RowParser() + {% for field in model.fields -%} + .addColumnParser({{ field.type | type_parser }}) + {% endfor -%} + .map(([ + {%- for field in model.fields -%} + {{field.name | to_camel_case }} {%- if not loop.last %}, {% endif %} + {%- endfor -%} + ]) => ({ {{""}} + {%- for field in model.fields -%} + {{field.name | to_camel_case }} {%- if not loop.last %}, {% endif %} + {%- endfor -%} + {{""}} })) + }, + {%- endfor %} + {%- for enum in module.enums %} + {{enum.name | to_camel_case }}: () => { + return new EnumParser() + }, + {%- endfor %} +} +{% endfor %} + +}; + +/**/ diff --git a/codegen/src/presentation/typescript/templates/postgres/model.j2 b/codegen/src/presentation/typescript/templates/postgres/model.j2 new file mode 100644 index 0000000..7300542 --- /dev/null +++ b/codegen/src/presentation/typescript/templates/postgres/model.j2 @@ -0,0 +1,29 @@ +{%- if model_module.enums %} +{%- endif %} +{%- for type in used_types %} +{%- for import in (type | imports) %} +{{import}} +{%- endfor %} +{%- endfor %} + +{%- for enum in model_module.enums %} + +export enum {{enum.name | to_pascal_case }} { + {%- for value in enum.values %} + {{ value | to_screaming_snake_case }} = {{ value | to_c_string }}, + {%- endfor %} +{%- endfor %} +} + +{%- for model in model_module.models %} + +export interface {{ model.name | to_pascal_case }} { + {%- for field in model.fields %} + {%- if field.type | is_nullable %} + {{field.name | to_camel_case }}?: {{ field.type | annotation }}; + {%- else %} + {{field.name | to_camel_case }}: {{ field.type | annotation }}; + {%- endif %} + {%- endfor %} +} +{%- endfor %} diff --git a/codegen/src/presentation/typescript/templates/postgres/model_init.j2 b/codegen/src/presentation/typescript/templates/postgres/model_init.j2 new file mode 100644 index 0000000..3525f16 --- /dev/null +++ b/codegen/src/presentation/typescript/templates/postgres/model_init.j2 @@ -0,0 +1,5 @@ +import * as parsers from "../parsers.ts"; +{%- for module in ir.model_modules.model_modules -%} +export type * as "{{module}}" from "./{{module}}.ts"; +{%- endfor %} +export * from "./public.ts"; diff --git a/codegen/src/presentation/typescript/templates/postgres/query.j2 b/codegen/src/presentation/typescript/templates/postgres/query.j2 new file mode 100644 index 0000000..6f2677f --- /dev/null +++ b/codegen/src/presentation/typescript/templates/postgres/query.j2 @@ -0,0 +1,176 @@ +// This file was automatically generated by pgc +// run `pgc build` to regenerate it +import postgres from "postgres" +{%- for type in used_types %} +{% for import in (type | imports) %} +{%- if import != ""%} +{{import}} +{%- endif %} +{%- endfor %} +{%- endfor %} +import { parser, 
ArrayParser, BigIntParser } from "./parsers.ts"; +import * as models from "./models/models.ts"; +{%- for subnamespace in query_namespace.subnamespaces %} +import * as {{subnamespace}} from "./{{subnamespace}}.ts" +{%- endfor %} + +{%- for method in query_namespace.methods %} + +const {{ method.query.name | to_screaming_snake_case }} = ` +{{ method.query.query }} +`; +{%- endfor %} +{{"\n"}} +{%- for method in query_namespace.methods %} +{%- if method.output_model != None %} +export interface {{method.output_model.type | name | to_pascal_case }} { + {%- for field, type in method.output_model.fields | items %} + {{field}}: {{ type | annotation }}; // {{ type }} + {%- endfor %} +} +{% endif %} +{%- for _, input_model in method.input_models | items %} +export interface {{ input_model.type | name }} { + {%- for field, type in input_model.fields | items %} + {{field}}: {{type | annotation}}; + {%- endfor %} +} +{% endfor %} +{%- endfor %} + + +export class {{ query_namespace.name | to_pascal_case }}Queries { + constructor(readonly connection: postgres.Sql) { + this.connection = connection + {%- for subnamespace in query_namespace.subnamespaces %} + this.{{subnamespace}} = new {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) + {%- endfor %} + } + + {%- for method in query_namespace.methods%} + {%- if method.query.annotations.not_null_result -%} + {% set HANDLE_NONE -%} + if (!rows[0]) { + throw new Error("The query {{method.query.name}} is marked with @not_null_result, but it has returned null."); + } + {% endset %} + {%- set OR_NONE = '' %} + {% else %} + {%- set HANDLE_NONE = 'if (!rows[0]) return null' %} + {%- set OR_NONE = ' | null' %} + {%- endif %} + {%- if method.query.command == 'one' %} + {%- if method.query.output | length == 1 %} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type | annotation}} {%- if not loop.last %}, {% endif %} + {%- endfor -%} + ) { + const rows = await this.connection.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}, + {%- endfor -%} + ], { prepare: true } + ) + {{HANDLE_NONE}} + {%- if method.output_type | requires_parsing %} + return {{method.output_type | type_parser }}.parse(rows[0]["{{method.query.output[0].name}}"]) as {{method.output_type | annotation}}{{OR_NONE}}; + {%- else %} + return rows[0]["{{method.query.output[0].name}}"] as {{method.output_type | annotation}}{{OR_NONE}}; + {%- endif %} + } + {%- else %} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type | annotation}}{%- if not loop.last %}, {% endif %} + {%- endfor -%} + ) { + const rows = await this.connection.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}} {%- if not loop.last %}, {% endif %} + {%- endfor -%} + ], { prepare: true } + ) + {{HANDLE_NONE}} + + + return { + {%- for column_name, column_type in method.output_columns | items %} + {%- if column_type | requires_parsing %} + ["{{column_name | to_camel_case }}"]: {{ column_type | type_parser }}.parse(rows[0]["{{column_name}}"]), + {%- else %} + ["{{column_name | to_camel_case }}"]: rows[0]["{{column_name}}"], + {%- endif %} + {%- endfor %} + } as {{method.output_type | annotation}}{{OR_NONE}}; + + } + {%- endif %} + {%- elif method.query.command == 'many' %} + {%- if method.query.output | length == 1 %} + async 
{{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type | annotation}} {% if not loop.last %}, {% endif %} + {%- endfor -%} + ) { + const rows = await this.connection.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}{%- if not loop.last %}, {% endif %} + {%- endfor -%} + ], { prepare: true } + ) + return rows.map(row => { + {%- for column_name, column_type in method.output_columns | items %} + {%- if column_type | requires_parsing %} + return {{ column_type | type_parser }}.parse(row["{{ column_name }}"]) as {{ method.output_type | annotation}}; + {%- else %} + return row["{{method.query.output[0].name}}"] as {{method.output_type | annotation}}; + {%- endif %} + {%- endfor %} + }) + } + {%- else%} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type | annotation}} {% if not loop.last %}, {% endif %} + {%- endfor -%} + ) { + const rows = await this.connection.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}} {%- if not loop.last %}, {% endif %} + {%- endfor -%} + ], { prepare: true } + ) + return rows.map((row) => ({ + {%- for column_name, column_type in method.output_columns | items %} + {%- if column_type | requires_parsing %} + ["{{column_name | to_camel_case }}"]: {{ column_type | type_parser}}.parse(row["{{column_name}}"]), + {%- else %} + ["{{column_name | to_camel_case }}"]: row["{{column_name}}"], + {%- endif %} + + {%- endfor %} + })); + } + {%- endif %} + {%- elif method.query.command == 'exec' %} + async {{method.query.name}}( + {%- for argument, type in method.arguments | items -%} + {{argument}}: {{type | annotation}}{%- if not loop.last %}, {% endif %} + {%- endfor -%} + ): {{method.output_type | annotation}}{{OR_NONE}} { + const rows = await this.connection.unsafe( + {{method.query.name | to_screaming_snake_case }}, [ + {%- for parameter in method.query.parameters -%} + {{parameter.name}}, {%- if not loop.last %}, {% endif %} + {%- endfor -%} + ], { prepare: true } + ) + } + {%- endif %} + {%- endfor %} +} diff --git a/codegen/src/presentation/typescript/type_map_service.rs b/codegen/src/presentation/typescript/type_map_service.rs new file mode 100644 index 0000000..2afa5e9 --- /dev/null +++ b/codegen/src/presentation/typescript/type_map_service.rs @@ -0,0 +1,119 @@ +use std::sync::Arc; + +use crate::{ + ir::Type, + presentation::type_mapping_service::{LanguageType, TypeMapService}, + utils::{to_camel_case, to_pascal_case}, +}; + +#[derive(Clone, Copy)] +pub struct TypescriptTypeMapService; + +impl TypescriptTypeMapService { + pub fn column_requires_parser(&self, r#type: Type) -> bool { + return matches!( + r#type, + Type::Int8 | Type::Serial8 | Type::UserDefined { .. } | Type::Array { .. 
} + ); + } + + pub fn type_parser(&self, r#type: Type) -> String { + match r#type { + Type::Nullable(r#type) => format!( + "new NullParser({})", + self.type_parser(Type::clone(&*r#type)) + ), + Type::UserDefined { module_path, name } => { + format!("parser.{}.{}()", module_path[1], to_camel_case(&name)) + } + + Type::Array { r#type, dim } if dim != 1 => { + format!( + "{}.arrayOfThis()", + self.type_parser(Type::Array { + r#type, + dim: dim - 1 + }) + ) + } + Type::Array { r#type, dim: 1 } => { + format!( + "new ArrayParser({})", + self.type_parser(Type::clone(&*r#type)) + ) + } + Type::Bool => "new BooleanParser()".into(), + Type::Date | Type::DateTz | Type::Timestamp | Type::TimestampTz => { + "new DateParser()".into() + } + Type::Int8 => "new BigIntParser()".into(), + Type::Float4 | Type::Float8 | Type::Int2 | Type::Int4 => "new NumberParser()".into(), + Type::Bytea => "new BufferParser()".into(), + Type::Json => "new JsonParser()".into(), + _ => "new StringParser()".into(), + } + } +} + +impl TypeMapService for TypescriptTypeMapService { + fn get(&self, current_module: Vec, r#type: &Type) -> LanguageType { + match r#type { + Type::Any | Type::AnyCompatibleNonArray | Type::AnyCompatible => { + LanguageType::annotation("any") + } + Type::Int8 => LanguageType::annotation("bigint"), + Type::AnyArray | Type::AnyCompatibleArray => LanguageType::annotation("Array"), + Type::Json => LanguageType::annotation("any"), + Type::UserDefined { module_path, name } => { + let name: Arc = to_pascal_case(&name).into(); + let module: Arc<_> = module_path.join(".").into(); + let mut annotation = format!("{module}.{name}").into(); + let same_module = current_module + .iter() + .map(|s| &**s) + .eq(module_path.iter().map(|s| &**s)); + if same_module { + annotation = name.clone(); + } + + LanguageType { + name: Some(name.clone()), + annotation, + import: vec![], + module: Some(module), + } + } + Type::Nullable(r#type) => { + let r#type = self.get(current_module, r#type); + LanguageType { + name: r#type.name, + annotation: format!("{} | null", r#type.annotation).into(), + import: r#type.import, + module: r#type.module, + } + } + Type::Array { r#type, dim } => { + let r#type = self.get(current_module, r#type); + let mut annotation = r#type.annotation; + for _ in 0..*dim { + annotation = format!("Array<{}>", annotation).into(); + } + LanguageType { + name: None, + annotation, + import: r#type.import, + module: r#type.module, + } + } + Type::Bool => LanguageType::annotation("boolean").name("boolean"), + Type::Bytea => LanguageType::annotation("Buffer").name("Buffer"), + Type::Int2 | Type::Int4 | Type::Float4 | Type::Float8 => { + LanguageType::annotation("number").name("Date") + } + Type::Date | Type::DateTz | Type::Timestamp | Type::TimestampTz => { + LanguageType::annotation("Date").name("Date") + } + _ => LanguageType::annotation("string").name("string"), + } + } +} diff --git a/codegen/src/presentation/typescript/typescript_type.rs b/codegen/src/presentation/typescript/typescript_type.rs deleted file mode 100644 index f5d19f4..0000000 --- a/codegen/src/presentation/typescript/typescript_type.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub struct TypescriptType { - module: Arc, - import: Arc, - annotation: Arc, - name: Arc, -} diff --git a/codegen/tests/request.json b/codegen/tests/request.json index 6e301bb..024905a 100644 --- a/codegen/tests/request.json +++ b/codegen/tests/request.json @@ -1,341 +1 @@ -{ - "catalog": { - "schemas": [ - { - "name": "public", - "enums": [ - { - "name": "genre", - "values": [ - 
"comedy", - "drama", - "science fiction", - "fantasy", - "biography" - ] - } - ], - "records": [ - { - "kind": "table", - "name": "author", - "columns": [ - { - "name": "id", - "type": { - "name": "uuid", - "display": "uuid", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": "gen_random_uuid()", - "is_unique": false, - "is_nullable": false, - "is_foreign_key": false, - "is_primary_key": true, - "foreign_table_name": null, - "foreign_table_schema": null - }, - { - "name": "name", - "type": { - "name": "text", - "display": "text", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": null, - "is_unique": false, - "is_nullable": false, - "is_foreign_key": false, - "is_primary_key": false, - "foreign_table_name": null, - "foreign_table_schema": null - }, - { - "name": "birthday", - "type": { - "name": "date", - "display": "date", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": null, - "is_unique": false, - "is_nullable": true, - "is_foreign_key": false, - "is_primary_key": false, - "foreign_table_name": null, - "foreign_table_schema": null - } - ] - }, - { - "kind": "table", - "name": "book", - "columns": [ - { - "name": "id", - "type": { - "name": "uuid", - "display": "uuid", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": "gen_random_uuid()", - "is_unique": false, - "is_nullable": false, - "is_foreign_key": false, - "is_primary_key": true, - "foreign_table_name": null, - "foreign_table_schema": null - }, - { - "name": "title", - "type": { - "name": "text", - "display": "text", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": null, - "is_unique": false, - "is_nullable": false, - "is_foreign_key": false, - "is_primary_key": false, - "foreign_table_name": null, - "foreign_table_schema": null - }, - { - "name": "author_id", - "type": { - "name": "uuid", - "display": "uuid", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": null, - "is_unique": false, - "is_nullable": false, - "is_foreign_key": true, - "is_primary_key": false, - "foreign_table_name": "author", - "foreign_table_schema": "public" - }, - { - "name": "year", - "type": { - "name": "int4", - "display": "integer", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": null, - "is_unique": false, - "is_nullable": false, - "is_foreign_key": false, - "is_primary_key": false, - "foreign_table_name": null, - "foreign_table_schema": null - }, - { - "name": "isbn", - "type": { - "name": "text", - "display": "text", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": null, - "is_unique": true, - "is_nullable": false, - "is_foreign_key": false, - "is_primary_key": false, - "foreign_table_name": null, - "foreign_table_schema": null - }, - { - "name": "is_best_seller", - "type": { - "name": "bool", - "display": "boolean", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": "false", - "is_unique": false, - "is_nullable": true, - "is_foreign_key": false, - "is_primary_key": false, - "foreign_table_name": null, - "foreign_table_schema": null - }, - 
{ - "name": "genre", - "type": { - "name": "text", - "display": "text", - "is_array": false, - "schema_name": "pg_catalog", - "is_composite": false, - "array_dimensions": 0 - }, - "default": null, - "is_unique": false, - "is_nullable": false, - "is_foreign_key": true, - "is_primary_key": false, - "foreign_table_name": "genre", - "foreign_table_schema": "public" - } - ] - } - ] - } - ] - }, - "queries": [ - { - "query": "insert into author (\n id, name, birthday\n) values (\n $1, $2, $3\n) on conflict (id)\ndo update set\n id = $1,\n name = $2,\n birthday = $3\nreturning author;", - "name": "upsert", - "command": "one", - "path": "author.sql", - "annotations": { - "name": { "value": "upsert :one", "line": 2 }, - "not_null_result": { "value": "", "line": 3 } - }, - "output": [ - { - "name": "author", - "type": { "schema": "public", "name": "author", "id": 16386 } - } - ], - "parameters": [ - { - "name": "author.id", - "not_null": true, - "type": { "schema": "pg_catalog", "name": "uuid", "id": 2950 } - }, - { - "name": "author.name", - "not_null": true, - "type": { "schema": "pg_catalog", "name": "text", "id": 25 } - }, - { - "name": "author.birthday", - "not_null": true, - "type": { "schema": "pg_catalog", "name": "date", "id": 1082 } - } - ] - }, - { - "query": "select author from author where id = $1::uuid;", - "name": "get_by_id", - "command": "one", - "path": "author.sql", - "annotations": { "name": { "value": "get_by_id :one", "line": 15 } }, - "output": [ - { - "name": "author", - "type": { "schema": "public", "name": "author", "id": 16386 } - } - ], - "parameters": [ - { - "name": "id", - "not_null": true, - "type": { "schema": "pg_catalog", "name": "uuid", "id": 2950 } - } - ] - }, - { - "query": "select author, array_agg(book) as books\nfrom author\njoin book on author.id = book.author_id\ngroup by author.id;", - "name": "get_all_with_books", - "command": "many", - "path": "author.sql", - "annotations": { - "name": { "value": "get_all_with_books :many", "line": 18 } - }, - "output": [ - { - "name": "author", - "type": { "schema": "public", "name": "author", "id": 16386 } - }, - { - "name": "books", - "type": { "schema": "public", "name": "_book", "id": 16400 } - } - ], - "parameters": [] - }, - { - "query": "select count(*) from author;", - "name": "count", - "command": "val", - "path": "author.sql", - "annotations": { "name": { "value": "count :val", "line": 24 } }, - "output": [ - { - "name": "count", - "type": { "schema": "pg_catalog", "name": "int8", "id": 20 } - } - ], - "parameters": [] - }, - { - "query": "select book from book where id = $1;", - "name": "get_by_id", - "command": "one", - "path": "author.sql", - "annotations": { - "name": { "value": "get_by_id :one", "line": 28 }, - "namespace": { "value": "book", "line": 29 } - }, - "output": [ - { - "name": "book", - "type": { "schema": "public", "name": "book", "id": 16401 } - } - ], - "parameters": [ - { - "name": "id", - "not_null": true, - "type": { "schema": "pg_catalog", "name": "uuid", "id": 2950 } - } - ] - } - ], - "config": { - "version": "1", - "queries": ["book.sql", "author.sql", "queries.sql"], - "disable_cache": false, - "database": { "migrations": ["schema.sql"] }, - "codegen": { - "out": "./queries", - "language": "python", - "driver": "psycopg", - "types": { "pg_catalog.uuid": { "annotation": "str" } }, - "options": { "package": "queries" }, - "enums": ["genre"] - } - } -} +{"catalog":{"schemas":[{"name":"public","enums":[{"name":"genre","values":["comedy","drama","science 
fiction","fantasy","biography"]}],"records":[{"kind":"table","name":"author","columns":[{"name":"id","type":{"name":"uuid","display":"uuid","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":"gen_random_uuid()","is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":true,"foreign_table_name":null,"foreign_table_schema":null},{"name":"full_name","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"birthday","type":{"name":"date","display":"date","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":true,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null}]},{"kind":"table","name":"book","columns":[{"name":"id","type":{"name":"uuid","display":"uuid","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":"gen_random_uuid()","is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":true,"foreign_table_name":null,"foreign_table_schema":null},{"name":"title","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"author_id","type":{"name":"uuid","display":"uuid","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":true,"is_primary_key":false,"foreign_table_name":"author","foreign_table_schema":"public"},{"name":"year","type":{"name":"int4","display":"integer","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"isbn","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":true,"is_nullable":false,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"is_best_seller","type":{"name":"bool","display":"boolean","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":"false","is_unique":false,"is_nullable":true,"is_foreign_key":false,"is_primary_key":false,"foreign_table_name":null,"foreign_table_schema":null},{"name":"genre","type":{"name":"text","display":"text","is_array":false,"schema_name":"pg_catalog","is_composite":false,"array_dimensions":0},"default":null,"is_unique":false,"is_nullable":false,"is_foreign_key":true,"is_primary_key":false,"foreign_table_name":"genre","foreign_table_schema":"public"}]}]}]},"queries":[{"query":"select author from author;","name":"one_column_requires_parsing","command":"many","path":"author.sql","annotations":{"name":{"value":"one_column_requires_parsing :many","line":2}},"output":[{"name":"author","type":{"schema":"public","name":"author","id":16386}}],"parameters":[]},{"query":"select 
1;","name":"one_column_no_parsing","command":"many","path":"author.sql","annotations":{"name":{"value":"one_column_no_parsing :many","line":5}},"output":[{"name":"?column?","type":{"schema":"pg_catalog","name":"int4","id":23}}],"parameters":[]},{"query":"select 1 as one, 2;","name":"multiple_column_no_parsing","command":"many","path":"author.sql","annotations":{"name":{"value":"multiple_column_no_parsing :many","line":8}},"output":[{"name":"one","type":{"schema":"pg_catalog","name":"int4","id":23}},{"name":"?column?","type":{"schema":"pg_catalog","name":"int4","id":23}}],"parameters":[]},{"query":"select author, 1 as one from author;","name":"multiple_column_mixed","command":"many","path":"author.sql","annotations":{"name":{"value":"multiple_column_mixed :many","line":11}},"output":[{"name":"author","type":{"schema":"public","name":"author","id":16386}},{"name":"one","type":{"schema":"pg_catalog","name":"int4","id":23}}],"parameters":[]}],"config":{"version":"1","queries":["book.sql","author.sql","queries.sql"],"disable_cache":false,"database":{"migrations":["schema.sql"]},"codegen":{"out":"./queries","language":"typescript","driver":"postgres","types":{"pg_catalog.uuid":{"annotation":"str"}},"options":{"package":"queries"},"enums":["genre"]}}} \ No newline at end of file diff --git a/pgc.yaml b/pgc.yaml index ca4b11c..34634da 100644 --- a/pgc.yaml +++ b/pgc.yaml @@ -9,8 +9,8 @@ queries: - "author.sql" - "queries.sql" codegen: - language: python # typescript - driver: psycopg + language: typescript + driver: postgres out: ./queries types: diff --git a/schema.sql b/schema.sql index b5a93c0..e4b5980 100644 --- a/schema.sql +++ b/schema.sql @@ -1,6 +1,6 @@ create table author ( id uuid primary key default gen_random_uuid(), - name text not null, + full_name text not null, birthday date ); diff --git a/src/query_collector/query_parser.service.ts b/src/query_collector/query_parser.service.ts index a223f88..9d3679c 100644 --- a/src/query_collector/query_parser.service.ts +++ b/src/query_collector/query_parser.service.ts @@ -55,11 +55,11 @@ export class QueryParserService { parseName(query: RawQuery, annotations: Record) { const name = annotations["name"]; const match = name.value.match( - /(\S+)\s+:(val|exec|one|many)/, + /(\S+)\s+:(one|many|exec)/, ); if (!match) { throw Error( - `"${query.file.path}:${name.line}" invalid query return specifier (expected one of: :val, :one, :many, :exec)`, + `"${query.file.path}:${name.line}" invalid query return specifier (expected one of: :one, :many, :exec)`, ); } From 2a08f892cedc4d70447f4a0980abeae69b6d42d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Fri, 12 Sep 2025 12:54:02 -0300 Subject: [PATCH 08/10] add tests for python --- codegen/src/faker.rs | 45 +++ codegen/src/ir/mod.rs | 6 +- codegen/src/ir/model_modules/model.rs | 5 +- codegen/src/ir/model_modules/model_module.rs | 2 +- codegen/src/ir/type.rs | 2 +- codegen/src/main.rs | 5 +- codegen/src/mock.rs | 35 +- codegen/src/presentation/environment.rs | 64 ++-- .../presentation/file_generation_config.rs | 2 +- codegen/src/presentation/python/mod.rs | 20 +- .../python/templates/asyncpg-query.j2 | 193 ++++++++++ .../python/templates/asyncpg/query.j2 | 173 --------- .../python/templates/{asyncpg => }/model.j2 | 0 .../templates/{asyncpg => }/model_init.j2 | 0 .../python/templates/psycopg-query.j2 | 255 +++++++++++++ .../python/templates/psycopg/model.j2 | 28 -- .../python/templates/psycopg/model_init.j2 | 12 - .../python/templates/psycopg/query.j2 | 179 --------- 
.../presentation/python/type_map_service.rs | 112 ++++-- .../src/presentation/templating_service.rs | 31 +- codegen/src/presentation/type_map.rs | 1 - .../src/presentation/type_mapping_service.rs | 11 +- codegen/src/presentation/typescript/mod.rs | 5 +- .../typescript/templates/parsers.ts | 13 +- .../typescript/templates/postgres/query.j2 | 7 +- .../typescript/type_map_service.rs | 18 +- codegen/src/request.rs | 56 ++- codegen/src/utils.rs | 61 +-- deno.json | 3 +- pgc.yaml | 4 +- reference.md | 3 +- src/build/build.service.ts | 2 +- src/codegen/codegen.service.ts | 1 - src/init.ts | 3 +- src/main.ts | 1 - src/schema_service/schema.service.ts | 3 +- tests/.gitignore | 3 + tests/author.sql | 31 ++ tests/book.sql | 30 ++ tests/pgc-asyncpg.yaml | 14 + tests/pgc-psycopg.yaml | 14 + tests/pgc-typscript-postgres.yaml | 14 + tests/python/.python-version | 1 + tests/python/README.md | 0 tests/python/conftest.py | 63 +++ tests/python/pyproject.toml | 15 + tests/python/test_asyncpg.py | 91 +++++ tests/python/test_psycopg.py | 85 +++++ tests/python/uv.lock | 358 ++++++++++++++++++ tests/schema.sql | 27 ++ 50 files changed, 1524 insertions(+), 583 deletions(-) create mode 100644 codegen/src/faker.rs create mode 100644 codegen/src/presentation/python/templates/asyncpg-query.j2 delete mode 100644 codegen/src/presentation/python/templates/asyncpg/query.j2 rename codegen/src/presentation/python/templates/{asyncpg => }/model.j2 (100%) rename codegen/src/presentation/python/templates/{asyncpg => }/model_init.j2 (100%) create mode 100644 codegen/src/presentation/python/templates/psycopg-query.j2 delete mode 100644 codegen/src/presentation/python/templates/psycopg/model.j2 delete mode 100644 codegen/src/presentation/python/templates/psycopg/model_init.j2 delete mode 100644 codegen/src/presentation/python/templates/psycopg/query.j2 delete mode 100644 codegen/src/presentation/type_map.rs create mode 100644 tests/.gitignore create mode 100644 tests/author.sql create mode 100644 tests/book.sql create mode 100644 tests/pgc-asyncpg.yaml create mode 100644 tests/pgc-psycopg.yaml create mode 100644 tests/pgc-typscript-postgres.yaml create mode 100644 tests/python/.python-version create mode 100644 tests/python/README.md create mode 100644 tests/python/conftest.py create mode 100644 tests/python/pyproject.toml create mode 100644 tests/python/test_asyncpg.py create mode 100644 tests/python/test_psycopg.py create mode 100644 tests/python/uv.lock create mode 100644 tests/schema.sql diff --git a/codegen/src/faker.rs b/codegen/src/faker.rs new file mode 100644 index 0000000..9e7347e --- /dev/null +++ b/codegen/src/faker.rs @@ -0,0 +1,45 @@ +use std::{collections::BTreeMap, sync::Arc}; + +use fake::{Dummy, Fake, Faker}; + +pub struct ArcFaker; +pub struct ArcStrFaker; +pub struct ValueFaker; + +impl Dummy for Arc { + fn dummy_with_rng(_: &ArcFaker, rng: &mut R) -> Self { + let str: String = Faker.fake_with_rng(rng); + Arc::from(str) + } +} + +impl Dummy for Arc<[T]> +where + Vec: Dummy, +{ + fn dummy_with_rng(_: &ArcFaker, rng: &mut R) -> Self { + let str: Vec = Faker.fake_with_rng(rng); + Arc::from(str) + } +} + +impl Dummy for Arc<[Arc]> { + fn dummy_with_rng(_: &ArcStrFaker, rng: &mut R) -> Self { + let str: Vec = Faker.fake_with_rng(rng); + str.iter().map(|x| -> Arc { (&**x).into() }).collect() + } +} + +impl Dummy for serde_json::Value { + fn dummy_with_rng(_: &ValueFaker, rng: &mut R) -> Self { + serde_json::Value::Null + } +} + +impl> Dummy for Arc, V>> { + fn dummy_with_rng(_: &ArcFaker, rng: &mut R) -> Self { + let 
map: BTreeMap = Faker.fake_with_rng(rng); + let map = map.into_iter().map(|(k, v)| (Arc::from(&*k), v)).collect(); + Arc::new(map) + } +} diff --git a/codegen/src/ir/mod.rs b/codegen/src/ir/mod.rs index 7bbacd5..ab055a0 100644 --- a/codegen/src/ir/mod.rs +++ b/codegen/src/ir/mod.rs @@ -24,6 +24,7 @@ pub struct Ir { } pub struct IrService { + request: Request, query_namespace_service: QueryNamespaceService, model_service: ModelService, } @@ -39,18 +40,19 @@ impl IrService { catalog: request.catalog.clone(), }; Ok(IrService { + request, query_namespace_service, model_service, }) } - pub fn build(&mut self, request: Request) -> Ir { + pub fn build(&mut self) -> Ir { let model_modules = self.model_service.create_model_modules(); let query_namespace = self.query_namespace_service.build(); Ir { model_modules, query_namespace, - request, + request: self.request.clone(), } } } diff --git a/codegen/src/ir/model_modules/model.rs b/codegen/src/ir/model_modules/model.rs index d8afa04..c028c6c 100644 --- a/codegen/src/ir/model_modules/model.rs +++ b/codegen/src/ir/model_modules/model.rs @@ -2,10 +2,7 @@ use std::sync::Arc; use serde::{Deserialize, Serialize}; -use crate::{ - ir::r#type::Type, - request::Record, -}; +use crate::{ir::r#type::Type, request::Record}; #[derive(Clone, Serialize, Deserialize)] pub struct Model { diff --git a/codegen/src/ir/model_modules/model_module.rs b/codegen/src/ir/model_modules/model_module.rs index 407da56..89af036 100644 --- a/codegen/src/ir/model_modules/model_module.rs +++ b/codegen/src/ir/model_modules/model_module.rs @@ -3,7 +3,7 @@ use std::{collections::BTreeSet, sync::Arc}; use serde::Serialize; use crate::{ - ir::{model_modules::Model, Type}, + ir::{Type, model_modules::Model}, request::Enum, }; diff --git a/codegen/src/ir/type.rs b/codegen/src/ir/type.rs index 2145992..8e60e40 100644 --- a/codegen/src/ir/type.rs +++ b/codegen/src/ir/type.rs @@ -4,7 +4,7 @@ use minijinja::value::{Enumerator, Object, ObjectRepr}; use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Debug, Serialize, Deserialize)] -#[serde(tag = "variant", content = "c")] +#[serde(tag = "variant", content = "content")] pub enum Type { // A type not matching any of these Other { diff --git a/codegen/src/main.rs b/codegen/src/main.rs index 79deb47..6c8f1cc 100644 --- a/codegen/src/main.rs +++ b/codegen/src/main.rs @@ -9,12 +9,13 @@ use std::sync::atomic::Ordering::Relaxed; use std::{slice, sync::atomic::AtomicU64}; pub mod error; +#[cfg(test)] +pub mod faker; pub mod ir; pub mod mock; pub mod presentation; pub mod request; pub mod response; - mod utils; #[unsafe(no_mangle)] @@ -36,7 +37,7 @@ pub extern "C" fn build(ptr: *mut u8, size: usize) -> *const u8 { fn try_build(ptr: *mut u8, size: usize) -> Result { let request = load_request(ptr, size)?; - let ir = IrService::new(request.clone())?.build(request); + let ir = IrService::new(request.clone())?.build(); let presentation_service = PresentationService { ir }; diff --git a/codegen/src/mock.rs b/codegen/src/mock.rs index 09e3561..c2b6e5e 100644 --- a/codegen/src/mock.rs +++ b/codegen/src/mock.rs @@ -1,4 +1,5 @@ -use crate::request::{Catalog, Request}; +use crate::ir::{Ir, IrService}; +use crate::request::{Catalog, Column, Config, Query, Request}; use crate::{ ir::TypeService, @@ -6,6 +7,26 @@ use crate::{ }; use std::sync::Arc; +#[derive(Default)] +struct RequestBuilder { + enums: Vec, + records: Vec, + queries: Vec, +} + +impl RequestBuilder { + pub fn add_enum(&mut self, name: &str, values: &[&str]) { + 
self.enums.push(Enum { + name: name.into(), + values: values + .into_iter() + .map(|&str| str.into()) + .collect::>>() + .into(), + }); + } +} + pub fn enums() -> [Enum; 1] { [Enum { name: "myenum".into(), @@ -34,3 +55,15 @@ pub fn catalog() -> Catalog { schemas: [schema()].into(), } } + +pub fn request() -> Request { + Request { + catalog: catalog(), + queries: Default::default(), + config: Config::default(), + } +} + +pub fn ir() -> Ir { + IrService::new(request()).unwrap().build() +} diff --git a/codegen/src/presentation/environment.rs b/codegen/src/presentation/environment.rs index 86173d0..068e659 100644 --- a/codegen/src/presentation/environment.rs +++ b/codegen/src/presentation/environment.rs @@ -6,7 +6,10 @@ use std::{ use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase}; use indexmap::map::serde_seq::deserialize; -use minijinja::{Environment, State, Value, context, value::Object}; +use minijinja::{ + Environment, State, Value, context, + value::{FunctionResult, Object}, +}; use regex::bytes::Regex; use serde::{Deserialize, Deserializer, Serialize, de::IntoDeserializer}; @@ -15,7 +18,7 @@ use crate::{ ir::{Ir, Type}, presentation::{ file_generation_config::TemplateGenConfig, - type_mapping_service::{OverriddenTypeMapService, TypeMapService}, + type_mapping_service::{LanguageType, OverriddenTypeMapService, TypeMapService}, }, }; @@ -35,35 +38,41 @@ pub fn env(ir: Ir, config: &TemplateGenConfig) -> Result, E pub fn add_type_filters(env: &mut Environment<'static>, ir: Ir, config: &TemplateGenConfig) { let service = Arc::new(OverriddenTypeMapService::new(ir, config.type_map_service)); + add_language_type_filter(env, &service, "annotation", |ty| ty.annotation); + add_language_type_filter(env, &service, "name", |ty| ty.name); + add_language_type_filter(env, &service, "imports", |ty| ty.import); + add_language_type_filter(env, &service, "constructor", |ty| ty.constructor); + add_type_filter(env, "is_array", |ty| matches!(ty, Type::Array { .. })); + add_type_filter(env, "is_user_defined", |ty| { + matches!(ty, Type::UserDefined { .. }) + }); + add_type_filter(env, "array_dim", |ty| { + let Type::Array { dim, .. 
} = ty else { + return 0; + }; + dim + }); +} +fn add_language_type_filter( + env: &mut Environment, + service: &Arc, + name: &'static str, + f: impl Fn(LanguageType) -> T + Sync + Send + 'static, +) { let service_ = service.clone(); - env.add_filter("annotation", move |state: &State, ty: Value| -> Arc { - service_.get(module_path(state), &as_type(ty)).annotation + env.add_filter(name, move |state: &State, ty: Value| -> T { + f(service_.get(module_path(state), &as_type(ty))) }); +} - let service_ = service.clone(); - - env.add_filter( - "name", - move |state: &State, ty: Value| -> Option> { - service_.get(module_path(state), &as_type(ty)).name - }, - ); - let service_ = service.clone(); - env.add_filter( - "imports", - move |state: &State, ty: Value| -> Vec> { - service_.get(module_path(state), &as_type(ty)).import - }, - ); - let service_ = service.clone(); - env.add_filter( - "type_module", - move |state: &State, ty: Value| -> Option> { - service_.get(module_path(state), &as_type(ty)).module - }, - ); +fn add_type_filter( + env: &mut Environment, + name: &'static str, + f: impl Fn(Type) -> T + Sync + Send + 'static, +) { + env.add_filter(name, move |ty: Value| -> T { f(as_type(ty)) }); } pub fn add_templates( @@ -73,6 +82,9 @@ pub fn add_templates( env.add_template("query", config.query_template)?; env.add_template("model", config.model_template)?; env.add_template("model_init", config.model_init_template)?; + for (name, content) in config.other_templates { + env.add_template(name, content)?; + } Ok(()) } diff --git a/codegen/src/presentation/file_generation_config.rs b/codegen/src/presentation/file_generation_config.rs index 057ed2c..2bd0d4e 100644 --- a/codegen/src/presentation/file_generation_config.rs +++ b/codegen/src/presentation/file_generation_config.rs @@ -11,6 +11,6 @@ pub struct TemplateGenConfig { pub model_template: &'static str, pub model_init_template: &'static str, pub type_map_service: &'static dyn TypeMapService, - pub other_templates: Vec, + pub other_templates: &'static [(&'static str, &'static str)], pub register_filters: Option Result<(), Error>>, } diff --git a/codegen/src/presentation/python/mod.rs b/codegen/src/presentation/python/mod.rs index 24d1156..2c0b22c 100644 --- a/codegen/src/presentation/python/mod.rs +++ b/codegen/src/presentation/python/mod.rs @@ -1,6 +1,8 @@ pub use type_map_service::{AsyncpgTypeMapService, PsycopgTypeMapService}; -use crate::{error::Error, ir::Ir, presentation::file_generation_config::TemplateGenConfig}; +use crate::{ + error::Error, ir::Ir, presentation::file_generation_config::TemplateGenConfig, response::File, +}; pub(super) mod driver; pub mod type_map_service; @@ -11,11 +13,11 @@ pub fn asyncpg(ir: &Ir) -> Result { query_directory_entrypoint: "__init__.py", model_directory_entrypoint: "__init__.py", file_extension: "py", - query_template: include_str!("./templates/asyncpg/query.j2"), - model_template: include_str!("./templates/asyncpg/model.j2"), - model_init_template: include_str!("./templates/asyncpg/model_init.j2"), + query_template: include_str!("./templates/asyncpg-query.j2"), + model_template: include_str!("./templates/model.j2"), + model_init_template: include_str!("./templates/model_init.j2"), type_map_service: &AsyncpgTypeMapService, - other_templates: vec![], + other_templates: &[], register_filters: None, }) } @@ -26,11 +28,11 @@ pub fn psycopg(ir: &Ir) -> Result { query_directory_entrypoint: "__init__.py", model_directory_entrypoint: "__init__.py", file_extension: "py", - query_template: 
include_str!("./templates/psycopg/query.j2"), - model_template: include_str!("./templates/psycopg/model.j2"), - model_init_template: include_str!("./templates/psycopg/model_init.j2"), + query_template: include_str!("./templates/psycopg-query.j2"), + model_template: include_str!("./templates/model.j2"), + model_init_template: include_str!("./templates/model_init.j2"), type_map_service: &PsycopgTypeMapService, - other_templates: vec![], + other_templates: &[], register_filters: None, }) } diff --git a/codegen/src/presentation/python/templates/asyncpg-query.j2 b/codegen/src/presentation/python/templates/asyncpg-query.j2 new file mode 100644 index 0000000..9b5f3d2 --- /dev/null +++ b/codegen/src/presentation/python/templates/asyncpg-query.j2 @@ -0,0 +1,193 @@ +# This file was automatically generated by pgc + +{%- for type in used_types %} +{% for import in (type | imports) %} +{%- set imported_asyncpg = import == "import asyncpg" %} +{%- if import != ""%} +{{import}} +{%- endif %} +{%- endfor %} +{%- endfor %} +{%- if not imported_asyncpg %} +import asyncpg +import typing +{%- endif %} +import dataclasses +from {{ir.request.config.codegen.options.package}} import models +{%- for subnamespace in query_namespace.subnamespaces %} +from . import {{subnamespace}} +{%- endfor %} + +{%- for method in query_namespace.methods %} + +{{ method.query.name | to_screaming_snake_case }} = """ +{{ method.query.query }} +""" +{%- endfor %} +{{"\n"}} +{%- for method in query_namespace.methods %} +{%- if method.output_model != None %} +@dataclasses.dataclass +class {{method.output_model.type | name | to_pascal_case }}: + {%- for field, type in method.output_model.fields | items %} + {{field}}: {{ type | annotation }} + {%- endfor %} + +{% endif %} +{%- for _, input_model in method.input_models | items %} +{%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} +@dataclasses.dataclass +class {{ input_model.type | name | to_pascal_case }}: + {%- for field, type in input_model.fields | items %} + {{field}}: {{type | annotation}} + {%- endfor %} + +{%- else %} +class {{ input_model.type | name }}(typing.Protocol): + {%- for field, type in input_model.fields | items %} + @property + def {{field}}(self) -> {{type | annotation}}: ... + {%- endfor %} + +{%- endif %} +{% endfor %} +{%- endfor %} + +@dataclasses.dataclass +class {{ query_namespace.name | to_pascal_case }}Queries: + def __init__(self, connection: asyncpg.Connection): + self.connection = connection + {%- for subnamespace in query_namespace.subnamespaces %} + self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) + {%- endfor %} + + + + + + + +{%- macro HANDLE_NONE_CASE(method) %} + {%- if method.query.annotations.not_null_result %} + assert row is not None, "the query \"{{method.query.name}}\" has returned null, but it is marked with @not_null_result." 
+ {%- else %} + if row is None: + return None + {%- endif %} +{%- endmacro %} + + +{%- macro OR_NONE_TYPE_ANNOTATION(method) -%} +{%- if not method.query.annotations.not_null_result %} | None{%- endif %} +{%- endmacro %} + +{%- macro METHOD_OUTPUT_ANNOTATION(method) -%} + {%- if method.query.command == 'one' -%} + -> {{method.output_type | annotation}}{{(OR_NONE_TYPE_ANNOTATION(method))}}: + {%- elif method.query.command == 'many' -%} + -> list[{{method.output_type | annotation}}]: + {%- else -%} + : + {%- endif -%} +{%- endmacro %} + +{%- macro METHOD_DECLARATION(method) -%} +async def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type | annotation}} + {%- endfor -%} + ){{METHOD_OUTPUT_ANNOTATION(method)}} +{%- endmacro -%} + + +{%- macro QUERY_ARGUMENTS(method) -%} + {{method.query.name | to_screaming_snake_case }} + {%- for parameter in method.query.parameters -%} + , {{parameter.name}} + {%- endfor %} +{%- endmacro %} + + +{%- macro TYPE_CONSTRUCTOR(type, variable, dim=None) %} + {%- if (type | is_array) and dim == 0 -%} + {{TYPE_CONSTRUCTOR(type.content.type, variable)}} + {%- elif (type | is_array) and dim == None -%} + {{TYPE_CONSTRUCTOR(type, variable, type.content.dim)}} + {%- elif (type | is_array) and (type.content.type | constructor) != None -%} + {%- set next_var -%} element{{dim}} {%- endset -%} + [{{TYPE_CONSTRUCTOR(type, next_var, dim - 1)}} for {{next_var}} in {{variable}}] + {%- elif (type | constructor) != None -%} + {{ type | constructor }}(**{{variable}}) + {%- else -%} + {{variable}} + {%- endif -%} +{%- endmacro %} + + +{%- macro ONE_ROW_ONE_COLUMN(method) -%} + row = await self.connection.fetchrow({{QUERY_ARGUMENTS(method)}}) + {{HANDLE_NONE_CASE(method)}} + {%- if (method.output_type | constructor) != None %} + return {{TYPE_CONSTRUCTOR(method.output_type, "row[0]")}} + {%- else %} + return row[0] + {%- endif %} +{%- endmacro -%} + +{%- macro ONE_ROW_MANY_COLUMNS(method) %} + row = await self.connection.fetchrow({{QUERY_ARGUMENTS(method)}}) + {{HANDLE_NONE_CASE(method)}} + return {{method.output_type | name}}({%- for name, type in method.output_model.fields | items %} + {%- set variable -%}row["{{name}}"]{%- endset %} + {{name}}={{TYPE_CONSTRUCTOR(type, variable)}}, + {%- endfor %} + ) +{%- endmacro -%} + + + + +{%- macro MANY_ROWS_ONE_COLUMN(method) -%} + rows = await self.connection.fetch({{QUERY_ARGUMENTS(method)}}) + {%- if (method.output_type | constructor) != None %} + return [{{TYPE_CONSTRUCTOR(method.output_type, "row[0]")}} for row in rows] + {%- else %} + return row[0] + {%- endif %} +{%- endmacro -%} + + +{%- macro MANY_ROWS_MANY_COLUMNS(method) -%} + rows = await self.connection.fetch({{QUERY_ARGUMENTS(method)}}) + + return [ + {{method.output_type | name}}({%- for name, type in method.output_model.fields | items %} + {%- set variable -%}row["{{name}}"]{%- endset %} + {{name}}={{TYPE_CONSTRUCTOR(type, variable)}}, + {%- endfor %} + ) + for row in rows + ] +{%- endmacro -%} + + + +{% macro QUERY_METHOD(method) %} + {{METHOD_DECLARATION(method)}} +{%- if method.query.command == "one" and (method.query.output | length) == 1 %} + {{ONE_ROW_ONE_COLUMN(method)}} +{%- elif method.query.command == "one" %} + {{ONE_ROW_MANY_COLUMNS(method)}} +{%- elif method.query.command == "many" and (method.query.output | length) == 1 %} + {{MANY_ROWS_ONE_COLUMN(method)}} +{%- elif method.query.command == "many" %} + {{MANY_ROWS_MANY_COLUMNS(method)}} +{% endif %} + +{% endmacro %} + + + + {%- for method in 
query_namespace.methods %} + {{QUERY_METHOD(method)}} + {% endfor %} diff --git a/codegen/src/presentation/python/templates/asyncpg/query.j2 b/codegen/src/presentation/python/templates/asyncpg/query.j2 deleted file mode 100644 index 86a52cb..0000000 --- a/codegen/src/presentation/python/templates/asyncpg/query.j2 +++ /dev/null @@ -1,173 +0,0 @@ -# This file was automatically generated by pgc -{%- for type in used_types %} -{% for import in (type | imports) %} -{%- if import != ""%} -{{import}} -{%- endif %} -{%- endfor %} -{%- endfor %} -import dataclasses -from {{ir.request.config.codegen.options.package}} import models -from asyncpg import Connection -{%- for subnamespace in query_namespace.subnamespaces %} -from . import {{subnamespace}} -{%- endfor %} - -{%- for method in query_namespace.methods %} - -{{ method.query.name | to_screaming_snake_case }} = """ -{{ method.query.query }} -""" -{%- endfor %} -{{"\n"}} -{%- for method in query_namespace.methods %} -{%- if method.output_model != None %} -@dataclasses.dataclass -class {{method.output_model.type | name | to_pascal_case }}: - {%- for field, type in method.output_model.fields | items %} - {{field}}: {{ type | annotation }} - {%- endfor %} - -{% endif %} -{%- for _, input_model in method.input_models | items %} -{%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} -@dataclasses.dataclass -class {{ input_model.type | name | to_pascal_case }}: - {%- for field, type in input_model.fields | items %} - {{field}}: {{type | annotation}} - {%- endfor %} - -{%- else %} -class {{ input_model.type | name }}(typing.Protocol): - {%- for field, type in input_model.fields | items %} - @property - def {{field}}(self) -> {{type | annotation}}: ... 
- {%- endfor %} - -{%- endif %} -{% endfor %} -{%- endfor %} - -@dataclasses.dataclass -class {{ query_namespace.name | to_pascal_case }}Queries: - def __init__(self, connection: Connection): - self.connection = connection - {%- for subnamespace in query_namespace.subnamespaces %} - self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) - {%- endfor %} - - {% for method in query_namespace.methods%} - {%- if method.query.annotations.not_null_result -%} - {%- set HANDLE_NONE = 'assert row is not None, "a query marked with @not_null_result has returned null."' %} - {%- set OR_NONE = '' %} - {% else %} - {%- set HANDLE_NONE = 'if row is None: return None' %} - {%- set OR_NONE = ' | None' %} - {%- endif %} - - {%- if method.query.command == 'one' %} - - {%- if method.query.output | length == 1 %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> {{method.output_type | annotation}}{{OR_NONE}}: - row = await self.connection.fetchrow( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - {{HANDLE_NONE}} - return row[0] - {%- else %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> {{method.output_type | annotation}}{{OR_NONE}}: - row = await self.connection.fetchrow( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - {{HANDLE_NONE}} - return {{method.output_type | annotation}}(**row) - {%- endif %} - {%- elif method.query.command == 'many' %} - {%- if method.query.output | length == 1 %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> list[{{method.output_type | annotation}}]: - rows = await self.connection.fetch( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - return [row[0] for row in rows] - {%- else%} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> list[{{method.output_type | annotation}}]: - rows = await self.connection.fetch( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - return [{{method.output_type | annotation}}(**row) for row in rows] - {%- endif %} - {%- elif method.query.command == 'val' %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> {{method.output_type | annotation}}{{OR_NONE}}: - row = await self.connection.fetchval( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , {{parameter.name}} - {%- endfor %} - ) - {{HANDLE_NONE}} - return row - {%- else %} - async def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ): - return await self.connection.execute( - {{method.query.name | to_screaming_snake_case }} - {%- for parameter in method.query.parameters -%} - , 
{{parameter.name}} - {%- endfor %} - ) - {%- endif %} - - {% endfor %} - - - -{%- if this_module == [] %} -async def init_connection(conn: Connection): - {%- for _, model_module in ir.model_modules.model_modules | items %} - {%- for model in model_module.classes %} - - await conn.set_type_codec( - {{model.type.pgtype_name | to_c_string }}, - encoder=lambda model: ({% for name, _ in model.fields %}model.{{name}}{% if not loop.last %}, {% endif %}{%endfor%}), - decoder=lambda row: {{model.type.constructor}}(*row), - schema={{model.type.pgtype_schema | to_c_string }}, - format="tuple", - ) - {%- endfor %} - {% endfor %} -{% endif -%} diff --git a/codegen/src/presentation/python/templates/asyncpg/model.j2 b/codegen/src/presentation/python/templates/model.j2 similarity index 100% rename from codegen/src/presentation/python/templates/asyncpg/model.j2 rename to codegen/src/presentation/python/templates/model.j2 diff --git a/codegen/src/presentation/python/templates/asyncpg/model_init.j2 b/codegen/src/presentation/python/templates/model_init.j2 similarity index 100% rename from codegen/src/presentation/python/templates/asyncpg/model_init.j2 rename to codegen/src/presentation/python/templates/model_init.j2 diff --git a/codegen/src/presentation/python/templates/psycopg-query.j2 b/codegen/src/presentation/python/templates/psycopg-query.j2 new file mode 100644 index 0000000..13c639b --- /dev/null +++ b/codegen/src/presentation/python/templates/psycopg-query.j2 @@ -0,0 +1,255 @@ +# This file was automatically generated by pgc + +{%- for type in used_types %} +{% for import in (type | imports) %} +{%- set imported_psycopg = import == "import psycopg" %} +{%- if import != ""%} +{{import}} +{%- endif %} +{%- endfor %} +{%- endfor %} +from psycopg.rows import namedtuple_row +{%- if this_module == [] %} +import json +from psycopg.types.composite import register_composite, CompositeInfo +{%- endif %} +{%- if not imported_psycopg %} +import psycopg +import typing +{%- endif %} +import dataclasses +from {{ir.request.config.codegen.options.package}} import models +{%- for subnamespace in query_namespace.subnamespaces %} +from . import {{subnamespace}} +{%- endfor %} + +{%- for method in query_namespace.methods %} + +{{ method.query.name | to_screaming_snake_case }} = """ +{{ method.query.query | regex_replace('\\$(\\d+)', '%(p$1)s') }} +""" +{%- endfor %} +{{"\n"}} +{%- for method in query_namespace.methods %} +{%- if method.output_model != None %} +@dataclasses.dataclass +class {{method.output_model.type | name | to_pascal_case }}: + {%- for field, type in method.output_model.fields | items %} + {{field}}: {{ type | annotation }} + {%- endfor %} + +{% endif %} +{%- for _, input_model in method.input_models | items %} +{%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} +@dataclasses.dataclass +class {{ input_model.type | name | to_pascal_case }}: + {%- for field, type in input_model.fields | items %} + {{field}}: {{type | annotation}} + {%- endfor %} + +{%- else %} +class {{ input_model.type | name }}(typing.Protocol): + {%- for field, type in input_model.fields | items %} + @property + def {{field}}(self) -> {{type | annotation}}: ... 
+ {%- endfor %} + +{%- endif %} +{% endfor %} +{%- endfor %} + +@dataclasses.dataclass +class {{ query_namespace.name | to_pascal_case }}Queries: + def __init__(self, connection: psycopg.Connection): + self.connection = connection + {%- for subnamespace in query_namespace.subnamespaces %} + self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) + {%- endfor %} + +{%- macro HANDLE_NONE_CASE(method) %} + {%- if method.query.annotations.not_null_result %} + assert row is not None, "the query \"{{method.query.name}}\" has returned null, but it is marked with @not_null_result." + {%- else %} + if row is None: + return None + {%- endif %} +{%- endmacro %} + + +{%- macro OR_NONE_TYPE_ANNOTATION(method) -%} +{%- if not method.query.annotations.not_null_result %} | None{%- endif %} +{%- endmacro %} + +{%- macro METHOD_OUTPUT_ANNOTATION(method) -%} + {%- if method.query.command == 'one' -%} + -> {{method.output_type | annotation}}{{(OR_NONE_TYPE_ANNOTATION(method))}}: + {%- elif method.query.command == 'many' -%} + -> list[{{method.output_type | annotation}}]: + {%- else -%} + : + {%- endif -%} +{%- endmacro %} + +{%- macro METHOD_DECLARATION(method) -%} +def {{method.query.name}}(self + {%- for argument, type in method.arguments | items -%} + , {{argument}}: {{type | annotation}} + {%- endfor -%} + ){{METHOD_OUTPUT_ANNOTATION(method)}} +{%- endmacro -%} + + +{%- macro QUERY_ARGUMENTS(method) -%} + {{method.query.name | to_screaming_snake_case }}, { + {%- for parameter in method.query.parameters -%} + "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} + {%- endfor -%} + } +{%- endmacro %} + + +{%- macro ONE_ROW_ONE_COLUMN(method) -%} + row = self.connection.cursor(row_factory=namedtuple_row).execute({{QUERY_ARGUMENTS(method)}}).fetchone() + {{HANDLE_NONE_CASE(method)}} + return row[0] +{%- endmacro -%} + +{%- macro ONE_ROW_MANY_COLUMNS(method) %} + row = self.connection.cursor(row_factory=namedtuple_row).execute({{QUERY_ARGUMENTS(method)}}).fetchone() + {{HANDLE_NONE_CASE(method)}} + return {{method.output_type | name}}({%- for name, type in method.output_model.fields | items %} + {{name}}=row.{{name}}, + {%- endfor %} + ) +{%- endmacro -%} + + + +{%- macro MANY_ROWS_ONE_COLUMN(method) -%} + rows = self.connection.cursor(row_factory=namedtuple_row).execute({{QUERY_ARGUMENTS(method)}}).fetchall() + {%- if (method.output_type | constructor) != None %} + return [row[0] for row in rows] + {%- else %} + return row[0] + {%- endif %} +{%- endmacro -%} + + +{%- macro MANY_ROWS_MANY_COLUMNS(method) -%} + rows = self.connection.cursor(row_factory=namedtuple_row).execute({{QUERY_ARGUMENTS(method)}}).fetchall() + + return [ + {{method.output_type | name}}({%- for name, type in method.output_model.fields | items %} + {{name}}=row.{{name}}, + {%- endfor %} + ) + for row in rows + ] +{%- endmacro -%} + + + +{% macro QUERY_METHOD(method) %} + {{METHOD_DECLARATION(method)}} +{%- if method.query.command == "one" and (method.query.output | length) == 1 %} + {{ONE_ROW_ONE_COLUMN(method)}} +{%- elif method.query.command == "one" %} + {{ONE_ROW_MANY_COLUMNS(method)}} +{%- elif method.query.command == "many" and (method.query.output | length) == 1 %} + {{MANY_ROWS_ONE_COLUMN(method)}} +{%- elif method.query.command == "many" %} + {{MANY_ROWS_MANY_COLUMNS(method)}} +{% endif %} + +{% endmacro %} + + + + {%- for method in query_namespace.methods %} + {{QUERY_METHOD(method)}} + {% endfor %} + + + +{%- if this_module == [] %} + +GET_COMPOSITE_TYPES = """ +with 
input as ( + select + (e.elem->>0) as schemaname, + (e.elem->>1) as tablename, + e.ord + from jsonb_array_elements(%(data)s::jsonb) with ordinality as e(elem, ord) +) + +select + i.schemaname, + t.typname as name, + t.oid as oid, + t.typarray as array_oid, + t.oid::regtype::text as regtype, + coalesce(a.fnames, '{}') as field_names, + coalesce(a.ftypes, '{}') as field_types +from input i +left join pg_namespace n + on n.nspname = i.schemaname +left join pg_class c + on c.relnamespace = n.oid + and c.relname = i.tablename + and c.relkind in ('r','p') +left join pg_type t + on t.oid = c.reltype +left join lateral ( + select + attrelid, + array_agg(attname) as fnames, + array_agg(atttypid) as ftypes + from ( + select a.attrelid, a.attname, a.atttypid + from pg_attribute a + join pg_type t_ ON t_.typrelid = a.attrelid + where t_.oid = t.oid + and a.attnum > 0 + and not a.attisdropped + order by a.attnum + ) x + group by attrelid +) a on a.attrelid = t.typrelid +""" + + + +def init_connection(conn: psycopg.Connection): + + + type_info_rows = conn.cursor(row_factory=namedtuple_row).execute(GET_COMPOSITE_TYPES, { + "data": json.dumps([ + {%- for module_name, model_module in ir.model_modules.model_modules | items %} + {%- for model in model_module.models %} + ["{{module_name}}", "{{model.name}}"], + {%- endfor %} + {%- endfor %} + ])}).fetchall() + + type_info = { + f'{row.schemaname}.{row.name}': CompositeInfo( + name=row.name, + oid=row.oid, + array_oid=row.array_oid, + field_names=row.field_names, + field_types=row.field_types + ) + for row in type_info_rows + } + + + + {%- for module_name, model_module in ir.model_modules.model_modules | items %} + {%- for model in model_module.models %} + register_composite( + type_info["{{module_name}}.{{ model.name }}"], conn, models.{{module_name}}.{{ model.name | to_pascal_case }} + ) + {%- endfor %} + {%- endfor %} + +{% endif -%} diff --git a/codegen/src/presentation/python/templates/psycopg/model.j2 b/codegen/src/presentation/python/templates/psycopg/model.j2 deleted file mode 100644 index 6e5279f..0000000 --- a/codegen/src/presentation/python/templates/psycopg/model.j2 +++ /dev/null @@ -1,28 +0,0 @@ -import dataclasses -{%- if model_module.enums %} -import enum -{%- endif %} -{%- for type in used_types %} -{%- for import in (type | imports) %} -{{import}} -{%- endfor %} -{%- endfor %} -from {{ir.request.config.codegen.options.package}} import models - -{%- for enum in model_module.enums %} - -class {{enum.name | to_pascal_case }}(enum.StrEnum): - {%- for value in enum.values %} - {{ value | to_screaming_snake_case }} = {{ value | to_c_string }} - {%- endfor %} -{% endfor %} - -{%- for model in model_module.models %} - - -@dataclasses.dataclass -class {{ model.name | to_pascal_case }}: - {%- for field in model.fields %} - {{field.name}}: {{ field.type | annotation }} - {%- endfor %} -{%- endfor %} diff --git a/codegen/src/presentation/python/templates/psycopg/model_init.j2 b/codegen/src/presentation/python/templates/psycopg/model_init.j2 deleted file mode 100644 index 6fc9c54..0000000 --- a/codegen/src/presentation/python/templates/psycopg/model_init.j2 +++ /dev/null @@ -1,12 +0,0 @@ -{%- for module in ir.model_modules.model_modules -%} -from . 
import {{module}} -{% endfor -%} - - -{%- if ir.model_modules.model_modules["public"] -%} -from .public import ( -{%- for model in ir.model_modules.model_modules["public"].models %} - {{ model.name | to_pascal_case }}, -{%- endfor %} -) -{% endif %} diff --git a/codegen/src/presentation/python/templates/psycopg/query.j2 b/codegen/src/presentation/python/templates/psycopg/query.j2 deleted file mode 100644 index 433add9..0000000 --- a/codegen/src/presentation/python/templates/psycopg/query.j2 +++ /dev/null @@ -1,179 +0,0 @@ -# This file was automatically generated by pgc -{%- for type in used_types %} -{% for import in (type | imports) %} -{%- if import != ""%} -{{import}} -{%- endif %} -{%- endfor %} -{%- endfor %} -import psycopg -import typing -import dataclasses -from psycopg.rows import dict_row -{%- if query_namespace.name == "" %} -from psycopg.types.composite import CompositeInfo, register_composite -{%- endif %} -from {{ir.request.config.codegen.options.package}} import models -{%- for subnamespace in query_namespace.subnamespaces %} -from . import {{subnamespace}} -{%- endfor %} - -{%- for method in query_namespace.methods %} - -{{ method.query.name | to_screaming_snake_case }} = """ -{{ method.query.query | regex_replace('\\$(\\d+)', '%(p$1)s') }} -""" -{%- endfor %} -{{"\n"}} -{%- for method in query_namespace.methods %} -{%- if method.output_model != None %} -@dataclasses.dataclass -class {{method.output_model.type | name }}: - {%- for field, type in method.output_model.fields | items %} - {{field}}: {{type | annotation}} - {%- endfor %} - -{% endif %} -{%- for _, input_model in method.input_models | items %} -{%- if method.query.annotations.group_arguments and method.query.annotations.group_arguments.value == "dataclass" %} -@dataclasses.dataclass -class {{ input_model.type | name }}: - {%- for field, type in input_model.fields | items %} - {{field}}: {{type | annotation}} - {%- endfor %} - -{%- else %} -class {{ input_model.type | name }}(typing.Protocol): - {%- for field, type in input_model.fields | items %} - @property - def {{field}}(self) -> {{type | annotation}}: ... 
- {%- endfor %} - -{%- endif %} -{% endfor %} -{%- endfor %} - -@dataclasses.dataclass -class {{ query_namespace.name | to_pascal_case }}Queries: - def __init__(self, connection: psycopg.Connection): - self.connection = connection - {%- for subnamespace in query_namespace.subnamespaces %} - self.{{subnamespace}} = {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) - {%- endfor %} - - {% for method in query_namespace.methods%} - {%- if method.query.annotations.not_null_result -%} - {%- set HANDLE_NONE = 'assert row is not None, "a query marked with @not_null_result has returned null."' %} - {%- set OR_NONE = '' %} - {% else %} - {%- set HANDLE_NONE = 'if row is None: return None' %} - {%- set OR_NONE = ' | None' %} - {%- endif %} - - {%- if method.query.command == 'one' %} - - {%- if method.query.output | length == 1 %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> {{method.output_type | annotation}}{{OR_NONE}}: - row = self.connection.execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchone() - {{HANDLE_NONE}} - return row[0] - {%- else %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> {{method.output_type | annotation}}{{OR_NONE}}: - row = self.connection.cursor(row_factory=dict_row).execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchone() - {{HANDLE_NONE}} - return {{method.output_type | annotation}}(**row) - - {%- endif %} - {%- elif method.query.command == 'many' %} - {%- if method.query.output | length == 1 %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> list[{{method.output_type | annotation}}]: - rows = self.connection.execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchall() - return [row[0] for row in rows] - {%- else %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> list[{{method.output_type | annotation}}]: - rows = self.connection.cursor(row_factory=dict_row).execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ).fetchall() - return [{{method.output_type | annotation}}(**row) for row in rows] - - {%- endif %} - {%- elif method.query.command == 'val' %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ) -> {{method.output_type | annotation}}{{OR_NONE}}: - row = self.connection.execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - 
).fetchone() - {{HANDLE_NONE}} - return row[0] - {%- else %} - def {{method.query.name}}(self - {%- for argument, type in method.arguments | items -%} - , {{argument}}: {{type | annotation}} - {%- endfor -%} - ): - return self.connection.execute( - {{method.query.name | to_screaming_snake_case }}, { - {%- for parameter in method.query.parameters -%} - "p{{loop.index}}": {{parameter.name}} {%- if not loop.last -%}, {% endif -%} - {%- endfor %}} - ) - {%- endif %} - - {% endfor %} - - - -{%- if this_module == [] %} -def init_connection(conn: psycopg.Connection): - {%- for _, model_module in ir.model_modules.model_modules | items %} - {%- for model in model_module.models %} - - info = CompositeInfo.fetch(conn, "\"{{model_module.name}}\".\"{{model.name}}\"") - assert info is not None, "The table \"{{model_module.name}}\".\"{{model.name}}\" was not found." - register_composite( - - info, conn, models.{{model_module.name}}.{{model.name | to_pascal_case }} - ) - {%- endfor %} - {% endfor %} -{% endif -%} diff --git a/codegen/src/presentation/python/type_map_service.rs b/codegen/src/presentation/python/type_map_service.rs index aae5e28..1b76562 100644 --- a/codegen/src/presentation/python/type_map_service.rs +++ b/codegen/src/presentation/python/type_map_service.rs @@ -44,43 +44,14 @@ impl TypeMapService for AsyncpgTypeMapService { #[rustfmt::skip] fn get(&self, current_module: Vec, r#type: &crate::ir::Type) -> LanguageType { match r#type { - Type::UserDefined { module_path, name } => { - let name: Arc = to_pascal_case(&name).into(); - let module: Arc<_> = module_path.join(".").into(); - let mut annotation = format!("{module}.{name}").into(); - let same_module = current_module.iter().map(|s|&**s).eq(module_path.iter().map(|s|&**s)); - if same_module { - annotation = name.clone(); - } - - LanguageType { - name: Some(name.clone()), - annotation, - import: vec![], - module: Some(module) - } - }, - Type::Nullable(r#type) => { - let r#type = self.get(current_module, r#type); - LanguageType { - name: r#type.name, - annotation: format!("{} | None", r#type.annotation).into(), - import: r#type.import, - module: r#type.module - } - } - Type::Array { r#type, dim } => { - let r#type = self.get(current_module, r#type); - let mut annotation = r#type.annotation; - for _ in 0..*dim { - annotation = format!("list[{}]", annotation).into(); - } - LanguageType { - name: None, - annotation, - import: r#type.import, - module: r#type.module - } + Type::UserDefined { .. } => + return self.get_user_defined(current_module, r#type) + , + Type::Nullable(..) => + self.get_nullable(current_module, r#type), + + Type::Array { .. 
} => { + self.get_array(current_module, r#type) } Type::AnyArray | Type::AnyCompatibleArray => LanguageType::annotation("list"), Type::Void => LanguageType::annotation("None"), @@ -150,3 +121,70 @@ impl TypeMapService for AsyncpgTypeMapService { } } } + +impl AsyncpgTypeMapService { + fn get_user_defined( + &self, + current_module: Vec, + r#type: &crate::ir::Type, + ) -> LanguageType { + let Type::UserDefined { module_path, name } = r#type else { + unreachable!(); + }; + let name: Arc = to_pascal_case(&name).into(); + let module: Arc = module_path.join(".").into(); + let mut annotation = format!("{module}.{name}").into(); + + let same_module = current_module + .iter() + .map(|s| &**s) + .eq(module_path.iter().map(|s| &**s)); + + if same_module { + annotation = name.clone(); + } + + LanguageType { + name: Some(name.clone()), + constructor: Some(annotation.clone()), + annotation, + import: vec![], + } + } + + fn get_nullable(&self, current_module: Vec, r#type: &crate::ir::Type) -> LanguageType { + let Type::Nullable(r#type) = r#type else { + unreachable!(); + }; + let r#type = self.get(current_module, r#type); + LanguageType { + name: r#type.name, + annotation: format!("{} | None", r#type.annotation).into(), + import: r#type.import, + constructor: r#type.constructor.clone(), + } + } + fn get_array(&self, current_module: Vec, r#type: &crate::ir::Type) -> LanguageType { + let Type::Array { r#type, dim } = r#type else { + unreachable!(); + }; + let r#type = self.get(current_module, r#type); + let mut annotation = r#type.annotation; + let mut constructor = r#type.constructor.clone(); + + for i in 0..*dim { + annotation = format!("list[{}]", annotation).into(); + let Some(prev_constructor) = constructor else { + continue; + }; + constructor = + Some(format!("(lambda arr{i}: [*map({prev_constructor}, arr{i})])").into()) + } + LanguageType { + name: None, + annotation, + import: r#type.import, + constructor, + } + } +} diff --git a/codegen/src/presentation/templating_service.rs b/codegen/src/presentation/templating_service.rs index 3c374b8..b69faf1 100644 --- a/codegen/src/presentation/templating_service.rs +++ b/codegen/src/presentation/templating_service.rs @@ -39,17 +39,13 @@ impl TemplatingService { } fn include_other_templates(&self, files: &mut Vec) -> Result<(), Error> { - for file in &self.config.other_templates { - let content = self.environment.render_named_str( - &file.path, - &file.content, - context! { - ir => self.ir - }, - )?; + for (name, _) in self.config.other_templates { + let content = self.environment.get_template(&name)?.render(context! 
{ + ir => self.ir + })?; files.push(File { - path: file.path.clone(), + path: (*name).into(), content: content, }); } @@ -139,7 +135,7 @@ impl TemplatingService { let path; - if namespace.subnamespaces.len() == 0 { + if namespace.subnamespaces.len() == 0 && !module_segments.is_empty() { path = format!( "{}.{}", module_segments.join("/"), @@ -157,3 +153,18 @@ impl TemplatingService { Ok(()) } } + +#[cfg(test)] +mod test { + use std::sync::Arc; + + use fake::{Fake, Faker}; + + use crate::{ir::IrService, request::Request}; + #[test] + fn file_has_name() { + let mut request: Request = Faker.fake(); + request.queries = Arc::default(); + let ir = IrService::new(request).unwrap().build(); + } +} diff --git a/codegen/src/presentation/type_map.rs b/codegen/src/presentation/type_map.rs deleted file mode 100644 index a40e630..0000000 --- a/codegen/src/presentation/type_map.rs +++ /dev/null @@ -1 +0,0 @@ -pub struct TypeMap {} diff --git a/codegen/src/presentation/type_mapping_service.rs b/codegen/src/presentation/type_mapping_service.rs index a4599fc..d7a16db 100644 --- a/codegen/src/presentation/type_mapping_service.rs +++ b/codegen/src/presentation/type_mapping_service.rs @@ -12,7 +12,7 @@ pub struct LanguageType { pub name: Option>, pub annotation: Arc, pub import: Vec>, - pub module: Option>, + pub constructor: Option>, } pub trait TypeMapService: Send + Sync + 'static { @@ -59,7 +59,7 @@ impl LanguageType { annotation: annotation.into(), name: None, import: vec![], - module: None, + constructor: None, } } @@ -74,11 +74,4 @@ impl LanguageType { let import: Vec> = import.into_iter().map(Into::into).collect(); Self { import, ..self } } - - pub fn module(self, module: &str) -> Self { - Self { - module: Some(module.into()), - ..self - } - } } diff --git a/codegen/src/presentation/typescript/mod.rs b/codegen/src/presentation/typescript/mod.rs index 2864797..9648131 100644 --- a/codegen/src/presentation/typescript/mod.rs +++ b/codegen/src/presentation/typescript/mod.rs @@ -21,10 +21,7 @@ pub fn postgres() -> TemplateGenConfig { model_template: include_str!("./templates/postgres/model.j2"), model_init_template: include_str!("./templates/postgres/model_init.j2"), type_map_service: &TypescriptTypeMapService, - other_templates: vec![File { - path: "parsers.ts".into(), - content: include_str!("./templates/parsers.ts").into(), - }], + other_templates: &[("parsers.ts", include_str!("./templates/parsers.ts"))], register_filters: Some(register_filters), } } diff --git a/codegen/src/presentation/typescript/templates/parsers.ts b/codegen/src/presentation/typescript/templates/parsers.ts index 6deeffc..8c3a598 100644 --- a/codegen/src/presentation/typescript/templates/parsers.ts +++ b/codegen/src/presentation/typescript/templates/parsers.ts @@ -1,7 +1,9 @@ -// This file was automatically generated by pgc -// run `pgc build` to regenerate it -// {%- set options = ir.request.config.codegen.options %} -// {%- if options and options == true %} +/** + * This file was automatically generated by pgc + * run `pgc build` to regenerate it + * go to "https://github.com/tvallotton/pgc" for more information + * {%- set options = ir.request.config.codegen.options -%} {%- if options and options == true %} + */ import { Buffer } from "node:buffer"; // {%- endif %} import * as models from "./models/models.ts"; @@ -233,9 +235,8 @@ export class RowParser { /* parsers for custom types {{"*" + "/"}} export const parser = { + {%- for module_name, module in ir.model_modules.model_modules | items %} -{%- if module_name in reserved -%} 
-{%- set module_name = "_" + module_name %} {%- endif %} {{module_name | to_camel_case }}: { {%- for model in module.models %} diff --git a/codegen/src/presentation/typescript/templates/postgres/query.j2 b/codegen/src/presentation/typescript/templates/postgres/query.j2 index 6f2677f..482f655 100644 --- a/codegen/src/presentation/typescript/templates/postgres/query.j2 +++ b/codegen/src/presentation/typescript/templates/postgres/query.j2 @@ -11,7 +11,12 @@ import postgres from "postgres" import { parser, ArrayParser, BigIntParser } from "./parsers.ts"; import * as models from "./models/models.ts"; {%- for subnamespace in query_namespace.subnamespaces %} +{% set is_directory = (query_namespace.subnamespaces[subnamespace].subnamespaces | length) != 0 %} +{% if is_directory %} +import * as {{subnamespace}} from "./{{subnamespace}}/queries.ts" +{% else %} import * as {{subnamespace}} from "./{{subnamespace}}.ts" +{% endif %} {%- endfor %} {%- for method in query_namespace.methods %} @@ -51,7 +56,7 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { {%- if method.query.annotations.not_null_result -%} {% set HANDLE_NONE -%} if (!rows[0]) { - throw new Error("The query {{method.query.name}} is marked with @not_null_result, but it has returned null."); + throw new Error("The query \"{{method.query.name}}\" is marked with @not_null_result, but it has returned null."); } {% endset %} {%- set OR_NONE = '' %} diff --git a/codegen/src/presentation/typescript/type_map_service.rs b/codegen/src/presentation/typescript/type_map_service.rs index 2afa5e9..eba4997 100644 --- a/codegen/src/presentation/typescript/type_map_service.rs +++ b/codegen/src/presentation/typescript/type_map_service.rs @@ -66,7 +66,7 @@ impl TypeMapService for TypescriptTypeMapService { Type::Json => LanguageType::annotation("any"), Type::UserDefined { module_path, name } => { let name: Arc = to_pascal_case(&name).into(); - let module: Arc<_> = module_path.join(".").into(); + let module: Arc = module_path.join(".").into(); let mut annotation = format!("{module}.{name}").into(); let same_module = current_module .iter() @@ -78,9 +78,9 @@ impl TypeMapService for TypescriptTypeMapService { LanguageType { name: Some(name.clone()), + constructor: Some(annotation.clone()), annotation, import: vec![], - module: Some(module), } } Type::Nullable(r#type) => { @@ -89,20 +89,28 @@ impl TypeMapService for TypescriptTypeMapService { name: r#type.name, annotation: format!("{} | null", r#type.annotation).into(), import: r#type.import, - module: r#type.module, + constructor: r#type.constructor.clone(), } } Type::Array { r#type, dim } => { let r#type = self.get(current_module, r#type); let mut annotation = r#type.annotation; - for _ in 0..*dim { + let mut constructor = r#type.constructor.clone(); + for i in 0..*dim { annotation = format!("Array<{}>", annotation).into(); + + let Some(prev_constructor) = constructor else { + continue; + }; + + constructor = + Some(format!("((arr{i}) => arr{i}.map({prev_constructor}))").into()) } LanguageType { name: None, annotation, import: r#type.import, - module: r#type.module, + constructor, } } Type::Bool => LanguageType::annotation("boolean").name("boolean"), diff --git a/codegen/src/request.rs b/codegen/src/request.rs index 734e01f..e205246 100644 --- a/codegen/src/request.rs +++ b/codegen/src/request.rs @@ -5,121 +5,173 @@ use serde::Deserialize; use serde::Serialize; use serde_json::Value; +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub 
struct Request { pub catalog: Catalog, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub queries: Arc<[Query]>, pub config: Config, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Catalog { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub schemas: Arc<[Schema]>, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Schema { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub enums: Arc<[Enum]>, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub records: Arc<[Record]>, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Enum { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcStrFaker"))] pub values: Arc<[Arc]>, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Record { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub kind: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub columns: Arc<[Column]>, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Column { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, #[serde(rename = "type")] pub type_field: ColumnType, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub default: Option>, pub is_unique: bool, pub is_nullable: bool, pub is_foreign_key: bool, pub is_primary_key: bool, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub foreign_table_name: Option>, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub foreign_table_schema: Option>, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct ColumnType { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub display: Arc, pub is_array: bool, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub schema_name: Arc, pub is_composite: bool, pub array_dimensions: i64, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Query { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub query: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub command: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub path: Arc, - pub annotations: Arc>, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] + pub annotations: Arc, Annotation>>, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub output: Arc<[OutputColumn]>, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub parameters: Arc<[Parameter]>, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Annotation { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub value: Option>, pub line: i64, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, 
Serialize, Deserialize)] pub struct OutputColumn { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, #[serde(rename = "type")] pub type_: OutputType, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct OutputType { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub schema: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, pub id: i64, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Parameter { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, #[serde(rename = "type")] pub type_: OutputType, pub not_null: bool, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Config { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub version: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcStrFaker"))] pub queries: Arc<[Arc]>, pub codegen: Codegen, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Codegen { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub out: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub language: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub driver: Arc, #[serde(default)] + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub types: Arc, TypeConfig>>, pub options: Value, } +#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct TypeConfig { + #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub annotation: Arc, + #[cfg_attr(test, dummy(faker = "crate::faker::ArcStrFaker"))] #[serde(default)] - pub import: Vec>, + pub import: Arc<[Arc]>, } diff --git a/codegen/src/utils.rs b/codegen/src/utils.rs index ccba0b3..aced155 100644 --- a/codegen/src/utils.rs +++ b/codegen/src/utils.rs @@ -1,57 +1,10 @@ -use std::{ - collections::BTreeMap, - sync::Mutex, -}; +use std::{collections::BTreeMap, sync::Mutex}; use heck::{ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase}; use minijinja::Environment; use regex::bytes::Regex; use serde::Serialize; -pub fn env() -> Environment<'static> { - let mut env = minijinja::Environment::new(); - - env.add_filter("to_camel_case", to_camel_case); - env.add_filter("to_pascal_case", to_pascal_case); - env.add_filter("to_snake_case", to_snake_case); - env.add_filter("to_kebab_case", to_kebab_case); - env.add_filter("to_screaming_snake_case", to_screaming_snake_case); - env.add_filter("to_c_string", to_c_string); - env.add_filter("starts_with", starts_with); - env.add_filter("strip_prefix", strip_prefix); - env.add_filter("regex_replace", regex_replace); - env -} - -pub fn render(template: &str, context: T) -> String { - env().render_named_str("root", template, context).unwrap() -} - -pub fn regex_replace(text: &str, pattern: &str, replacement: &str) -> String { - static REGEXES: Mutex> = Mutex::new(BTreeMap::new()); - let mut guard = REGEXES.lock().unwrap(); - let entry = guard.entry(pattern.into()); - let regex = entry.or_insert_with(|| Regex::new(pattern).unwrap()); - String::from_utf8( - regex - .replace_all(text.as_bytes(), replacement.as_bytes()) - .into(), - ) - .unwrap() -} - -pub fn to_c_string(s: &str) -> String { - format!("{:?}", s) -} - -pub fn strip_prefix<'a>(text: &'a str, pattern: 
&str) -> String {
-    text.strip_prefix(pattern).unwrap_or(text).to_string()
-}
-
-pub fn starts_with(text: &str, pattern: &str) -> bool {
-    text.starts_with(pattern)
-}
-
 pub fn to_camel_case(s: &str) -> String {
     s.to_lower_camel_case()
 }
@@ -59,15 +12,3 @@ pub fn to_camel_case(s: &str) -> String {
 pub fn to_pascal_case(s: &str) -> String {
     s.to_upper_camel_case()
 }
-
-pub fn to_snake_case(s: &str) -> String {
-    s.to_snake_case()
-}
-
-pub fn to_screaming_snake_case(s: &str) -> String {
-    s.to_shouty_snake_case()
-}
-
-pub fn to_kebab_case(s: &str) -> String {
-    s.to_kebab_case()
-}
diff --git a/deno.json b/deno.json
index a90057f..9d9bc61 100644
--- a/deno.json
+++ b/deno.json
@@ -3,7 +3,8 @@
     "build:wasm": "deno run --allow-sys --allow-run --allow-net --allow-env --allow-write --allow-read build.ts",
     "dev": "deno run --allow-sys --allow-net --allow-run --allow-env --allow-write --allow-read src/main.ts",
     "build": "deno run build:wasm && deno compile --allow-sys --allow-net --allow-run --allow-env --allow-write --allow-read --output pgc src/main.ts",
-    "install": "deno run build && mv pgc ~/.local/bin"
+    "test:python": "rm -rf tests/python/dist_asyncpg && rm -rf tests/python/dist_psycopg && deno run dev build -f tests/pgc-asyncpg.yaml && deno run dev build -f tests/pgc-psycopg.yaml && uv run --project tests/python pytest",
+    "install": "deno run build && mv pgc ~/.local/bin"
   },
   "imports": {
     "@eemeli/yaml": "jsr:@eemeli/yaml@^2.8.0",
diff --git a/pgc.yaml b/pgc.yaml
index 34634da..9b93128 100644
--- a/pgc.yaml
+++ b/pgc.yaml
@@ -9,8 +9,8 @@ queries:
   - "author.sql"
   - "queries.sql"
 codegen:
-  language: typescript
-  driver: postgres
+  language: python
+  driver: psycopg
   out: ./queries
 
 types:
diff --git a/reference.md b/reference.md
index f7d2f14..7183fdb 100644
--- a/reference.md
+++ b/reference.md
@@ -62,7 +62,8 @@ The codegen section has the following arguments:
 
 ```yaml
 codegen:
-  target: python:asyncpg
+  language: python
+  driver: asyncpg
   out: ./app/queries
   enums:
     - genre
diff --git a/src/build/build.service.ts b/src/build/build.service.ts
index 3c76926..9c04a4d 100644
--- a/src/build/build.service.ts
+++ b/src/build/build.service.ts
@@ -52,7 +52,7 @@ export class BuildService {
       queries: await this.getQueries(),
       config: this.configService.config,
     };
-    console.log(payload);
+    await this.codegenService.generate(payload);
   }
diff --git a/src/codegen/codegen.service.ts b/src/codegen/codegen.service.ts
index 25ae35f..4097565 100644
--- a/src/codegen/codegen.service.ts
+++ b/src/codegen/codegen.service.ts
@@ -49,7 +49,6 @@ export class CodegenService {
   }
 
   async runWasmCodegenModule(payload: object) {
-    Deno.writeTextFile("catalog.json", JSON.stringify(payload));
     const utf8JsonPayload = await this.serializePayload(payload);
     const { instance } = await this.loadPlugin();
     const exports = instance.exports as any;
diff --git a/src/init.ts b/src/init.ts
index 9e4f854..f24da01 100644
--- a/src/init.ts
+++ b/src/init.ts
@@ -7,7 +7,8 @@ queries:
   - "queries/*.sql"
 
 codegen:
-  target: python:asyncpg
+  language: python
+  driver: asyncpg
   out: ./package/queries # change package to your package name
   options:
     package: package.queries # change package to your package name
diff --git a/src/main.ts b/src/main.ts
index 825a1ad..87d7ea6 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -18,7 +18,6 @@ program.command("build").description(
     buildService = await BuildService.fromConfig(configService);
     await buildService.build();
   } catch (e) {
-    console.log(e);
     console.log("error:", (e as Error).message);
   } finally {
    await buildService?.close();
diff --git a/src/schema_service/schema.service.ts b/src/schema_service/schema.service.ts
index 3d33ee8..07f477b 100644
--- a/src/schema_service/schema.service.ts
+++ b/src/schema_service/schema.service.ts
@@ -14,7 +14,8 @@ export class SchemaService {
   async loadCatalog() {
     await this.loadMigrations();
-    return this.querySchemas();
+    const { schemas } = await this.querySchemas();
+    return { schemas: schemas ?? [] };
   }
 
   private async loadMigrations() {
diff --git a/tests/.gitignore b/tests/.gitignore
new file mode 100644
index 0000000..fa680b9
--- /dev/null
+++ b/tests/.gitignore
@@ -0,0 +1,3 @@
+!schema.sql
+!author.sql
+.venv
diff --git a/tests/author.sql b/tests/author.sql
new file mode 100644
index 0000000..edadd84
--- /dev/null
+++ b/tests/author.sql
@@ -0,0 +1,31 @@
+
+-- @name: one_row_one_column :one
+select author from author;
+
+-- @name: one_row_many_coulmns :one
+select author, book, 1 as one from author
+join book on book.author_id = author.id;
+
+-- @name: many_rows_one_column :many
+select author from author;
+
+-- @name: many_rows_many_coulmns :many
+select 1 as one, author, book from author
+join book on book.author_id = author.id;
+
+
+-- @name: insert :one
+insert into author (id, full_name, birthday)
+values (
+    $(author.id),
+    $(author.full_name),
+    $(author.birthday)
+)
+returning author;
+
+
+-- @name: required_parameter :one
+select $val;
+
+-- @name: optional_parameter :one
+select ?val;
diff --git a/tests/book.sql b/tests/book.sql
new file mode 100644
index 0000000..883ebc5
--- /dev/null
+++ b/tests/book.sql
@@ -0,0 +1,30 @@
+
+-- @name: one_row_one_column :one
+select author from author;
+
+-- @name: one_row_many_coulmns :one
+select author, book, 1 as one from author
+join book on book.author_id = author.id;
+
+-- @name: many_rows_one_column :many
+select author from author;
+
+-- @name: many_rows_many_coulmns :many
+select author, book, 1 as one from author
+join book on book.author_id = author.id;
+
+
+-- @name: insert :one
+insert into book (
+    id, title, author_id, year, isbn, is_best_seller, genre
+) values (
+    $(book.id),
+    $(book.title),
+    $(book.author_id),
+    $(book.year),
+    $(book.isbn),
+    $(book.is_best_seller),
+    $(book.genre)
+)
+returning book
+
\ No newline at end of file
diff --git a/tests/pgc-asyncpg.yaml b/tests/pgc-asyncpg.yaml
new file mode 100644
index 0000000..568b6a0
--- /dev/null
+++ b/tests/pgc-asyncpg.yaml
@@ -0,0 +1,14 @@
+version: "1"
+database:
+  migrations:
+    - tests/schema.sql
+
+queries:
+  - tests/author.sql
+  - tests/book.sql
+codegen:
+  language: python
+  driver: asyncpg
+  out: tests/python/dist_asyncpg
+  options:
+    package: dist_asyncpg
diff --git a/tests/pgc-psycopg.yaml b/tests/pgc-psycopg.yaml
new file mode 100644
index 0000000..cd1b3fa
--- /dev/null
+++ b/tests/pgc-psycopg.yaml
@@ -0,0 +1,14 @@
+version: "1"
+database:
+  migrations:
+    - tests/schema.sql
+
+queries:
+  - tests/author.sql
+  - tests/book.sql
+codegen:
+  language: python
+  driver: psycopg
+  out: tests/python/dist_psycopg
+  options:
+    package: dist_psycopg
diff --git a/tests/pgc-typscript-postgres.yaml b/tests/pgc-typscript-postgres.yaml
new file mode 100644
index 0000000..6a1aac2
--- /dev/null
+++ b/tests/pgc-typscript-postgres.yaml
@@ -0,0 +1,14 @@
+version: "1"
+database:
+  migrations:
+    - "./migrations/*.sql"
+
+queries:
+  - "queries/*.sql"
+
+codegen:
+  language: python
+  driver: asyncpg
+  out: ./python/dist_psycopg
+  options:
+    package: package.queries # change package to your package name
diff --git a/tests/python/.python-version b/tests/python/.python-version
new file mode 100644
index 0000000..24ee5b1
--- /dev/null
+++ b/tests/python/.python-version
@@ -0,0 +1 @@
+3.13
diff --git a/tests/python/README.md b/tests/python/README.md
new file mode 100644
index 0000000..e69de29
diff --git a/tests/python/conftest.py b/tests/python/conftest.py
new file mode 100644
index 0000000..f82969f
--- /dev/null
+++ b/tests/python/conftest.py
@@ -0,0 +1,63 @@
+import os
+from typing import AsyncIterator
+
+import asyncpg
+import psycopg
+import pytest
+import pytest_asyncio
+from psycopg.sql import Iterator, LiteralString
+from testcontainers.postgres import PostgresContainer
+
+
+@pytest.fixture(scope="session")
+def schema():
+
+    with open("tests/schema.sql") as f:
+        schema = f.read()
+    return schema
+
+
+@pytest.fixture(scope="module")
+def pg_container() -> Iterator[PostgresContainer]:
+    """
+    Module-scoped PostgreSQL testcontainer.
+    Starts once per test module and tears down at the end.
+    """
+    container = PostgresContainer("postgres:16-alpine")
+    container.start()
+    try:
+        yield container
+    finally:
+        container.stop()
+
+
+@pytest_asyncio.fixture(scope="function")
+async def asyncpg_conn(
+    pg_container: PostgresContainer, schema: str
+) -> AsyncIterator[asyncpg.Connection]:
+    dsn = pg_container.get_connection_url().replace(
+        "postgresql+psycopg2://", "postgresql://"
+    )
+
+    conn = await asyncpg.connect(dsn=dsn)
+    try:
+        await conn.execute(schema)
+        yield conn
+    finally:
+        await conn.close()
+
+
+@pytest.fixture(scope="module")
+def psycopg_conn(
+    pg_container: PostgresContainer, schema: LiteralString
+) -> Iterator[psycopg.Connection]:
+    dsn = pg_container.get_connection_url().replace(
+        "postgresql+psycopg2://", "postgresql://"
+    )
+    conn = psycopg.connect(dsn)
+    try:
+        conn.execute(schema)
+        conn.commit()
+        yield conn
+    finally:
+        conn.close()
diff --git a/tests/python/pyproject.toml b/tests/python/pyproject.toml
new file mode 100644
index 0000000..e1135ae
--- /dev/null
+++ b/tests/python/pyproject.toml
@@ -0,0 +1,15 @@
+[project]
+name = "tests"
+version = "0.1.0"
+description = "Add your description here"
+readme = "README.md"
+requires-python = ">=3.13"
+dependencies = [
+    "asyncpg>=0.30.0",
+    "faker>=37.6.0",
+    "psycopg>=3.2.9",
+    "py-pglite[asyncpg,psycopg]>=0.5.0",
+    "pytest>=8.4.1",
+    "pytest-asyncio>=1.1.0",
+    "testcontainers>=4.12.0",
+]
diff --git a/tests/python/test_asyncpg.py b/tests/python/test_asyncpg.py
new file mode 100644
index 0000000..c97e940
--- /dev/null
+++ b/tests/python/test_asyncpg.py
@@ -0,0 +1,91 @@
+import os
+from datetime import datetime
+from uuid import UUID, uuid4
+
+import faker
+import pytest
+import pytest_asyncio
+from dist_asyncpg import Queries
+from dist_asyncpg.models import Author, Book
+
+fake = faker.Faker()
+
+
+@pytest_asyncio.fixture
+async def populate_db(asyncpg_conn):
+    queries = Queries(asyncpg_conn)
+
+    for _ in range(2):
+        author = await queries.author.insert(
+            Author(
+                id=uuid4(),
+                full_name=fake.name(),
+                birthday=fake.date_of_birth(),
+            )
+        )
+        await queries.book.insert(
+            Book(
+                id=uuid4(),
+                title=fake.text(),
+                author_id=author.id,
+                year=int(fake.year()),
+                isbn=fake.isbn10(),
+                is_best_seller=fake.boolean(),
+                genre="science fiction",
+            )
+        )
+
+
+@pytest.mark.asyncio
+async def test_one_row_one_column(asyncpg_conn, populate_db):
+    queries = Queries(asyncpg_conn)
+    author = await queries.author.one_row_one_column()
+    assert isinstance(author, Author)
+
+
+@pytest.mark.asyncio
+async def test_one_row_many_columns(asyncpg_conn, populate_db):
+    queries = Queries(asyncpg_conn)
+    data = await queries.author.one_row_many_coulmns()
+
+    assert isinstance(data.author, Author)
+    assert isinstance(data.book, Book)
+    assert data.one == 1
+
+
+@pytest.mark.asyncio
+async def test_many_rows_one_column(asyncpg_conn, populate_db):
+    queries = Queries(asyncpg_conn)
+    rows = await queries.author.many_rows_one_column()
+
+    assert isinstance(rows[0], Author)
+
+
+@pytest.mark.asyncio
+async def test_many_rows_many_columns(asyncpg_conn, populate_db):
+    queries = Queries(asyncpg_conn)
+
+    rows = await queries.author.many_rows_many_coulmns()
+
+    assert isinstance(rows[0].author, Author)
+    assert isinstance(rows[0].book, Book)
+    assert rows[0].one == 1
+
+
+@pytest.mark.asyncio
+async def test_required_parameter(asyncpg_conn):
+    queries = Queries(asyncpg_conn)
+
+    assert "foo" == await queries.author.required_parameter("foo")
+
+
+@pytest.mark.asyncio
+async def test_optional_parameter(asyncpg_conn):
+    queries = Queries(asyncpg_conn)
+
+    assert (await queries.author.optional_parameter(None)) is None
+    assert (await queries.author.optional_parameter("foo")) == "foo"
+
+
+# def test_optional_parameter(asyncpg_conn):
+#     pass
diff --git a/tests/python/test_psycopg.py b/tests/python/test_psycopg.py
new file mode 100644
index 0000000..7a92cd3
--- /dev/null
+++ b/tests/python/test_psycopg.py
@@ -0,0 +1,85 @@
+import os
+from datetime import datetime
+from uuid import UUID, uuid4
+
+import faker
+import pytest
+from dist_psycopg import Queries, init_connection
+from dist_psycopg.models import Author, Book
+
+fake = faker.Faker()
+
+
+@pytest.fixture(scope="module")
+def populate_db(psycopg_conn):
+    init_connection(psycopg_conn)
+    queries = Queries(psycopg_conn)
+
+    for _ in range(2):
+        author = queries.author.insert(
+            Author(
+                id=uuid4(),
+                full_name=fake.name(),
+                birthday=fake.date_of_birth(),
+            )
+        )
+        queries.book.insert(
+            Book(
+                id=uuid4(),
+                title=fake.text(),
+                author_id=author.id,
+                year=fake.year(),
+                isbn=fake.isbn10(),
+                is_best_seller=fake.boolean(),
+                genre="science fiction",
+            )
+        )
+
+
+def test_one_row_one_column(psycopg_conn, populate_db):
+    queries = Queries(psycopg_conn)
+    author = queries.author.one_row_one_column()
+    assert isinstance(author, Author)
+
+
+def test_one_row_many_columns(psycopg_conn, populate_db):
+    queries = Queries(psycopg_conn)
+    data = queries.author.one_row_many_coulmns()
+
+    assert isinstance(data.author, Author)
+    assert isinstance(data.book, Book)
+    assert data.one == 1
+
+
+def test_many_rows_one_column(psycopg_conn, populate_db):
+    queries = Queries(psycopg_conn)
+    rows = queries.author.many_rows_one_column()
+
+    assert isinstance(rows[0], Author)
+
+
+def test_many_rows_many_columns(psycopg_conn, populate_db):
+    queries = Queries(psycopg_conn)
+
+    rows = queries.author.many_rows_many_coulmns()
+
+    assert isinstance(rows[0].author, Author)
+    assert isinstance(rows[0].book, Book)
+    assert rows[0].one == 1
+
+
+def test_required_parameter(psycopg_conn):
+    queries = Queries(psycopg_conn)
+
+    assert "foo" == queries.author.required_parameter("foo")
+
+
+def test_optional_parameter(psycopg_conn):
+    queries = Queries(psycopg_conn)
+
+    assert queries.author.optional_parameter(None) is None
+    assert queries.author.optional_parameter("foo") == "foo"
+
+
+# def test_optional_parameter(psycopg_conn):
+#     pass
diff --git a/tests/python/uv.lock b/tests/python/uv.lock
new file mode 100644
index 0000000..d5e9305
--- /dev/null
+++ b/tests/python/uv.lock
@@ -0,0 +1,358 @@
+version = 1
+revision = 1
+requires-python = ">=3.13"
+
+[[package]] +name = "asyncpg" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373 }, + { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745 }, + { url = "https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103 }, + { url = "https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471 }, + { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253 }, + { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720 }, + { url = "https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404 }, + { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623 }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371 } +wheels 
= [ + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326 }, + { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008 }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196 }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819 }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350 }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644 }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468 }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187 }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699 }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580 }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366 }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342 }, + { url = 
"https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995 }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640 }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636 }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939 }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580 }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870 }, + { url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797 }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224 }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086 }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400 }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", 
size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "docker" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "requests" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/9b/4a2ea29aeba62471211598dac5d96825bb49348fa07e906ea930394a83ce/docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c", size = 117834 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/26/57c6fb270950d476074c087527a558ccb6f4436657314bfb6cdf484114c4/docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0", size = 147774 }, +] + +[[package]] +name = "faker" +version = "37.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/cd/f7679c20f07d9e2013123b7f7e13809a3450a18d938d58e86081a486ea15/faker-37.6.0.tar.gz", hash = "sha256:0f8cc34f30095184adf87c3c24c45b38b33ad81c35ef6eb0a3118f301143012c", size = 1907960 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/7d/8b50e4ac772719777be33661f4bde320793400a706f5eb214e4de46f093c/faker-37.6.0-py3-none-any.whl", hash = "sha256:3c5209b23d7049d596a51db5d76403a0ccfea6fc294ffa2ecfef6a8843b1e6a7", size = 1949837 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, +] + +[[package]] +name = "pluggy" 
+version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, +] + +[[package]] +name = "psutil" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", size = 497003 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", size = 238051 }, + { url = "https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", size = 239535 }, + { url = "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", size = 275004 }, + { url = "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", size = 277986 }, + { url = "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", size = 279544 }, + { url = "https://files.pythonhosted.org/packages/50/e6/eecf58810b9d12e6427369784efe814a1eec0f492084ce8eb8f4d89d6d61/psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99", size = 241053 }, + { url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885 }, +] + +[[package]] +name = "psycopg" +version = "3.2.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/4a/93a6ab570a8d1a4ad171a1f4256e205ce48d828781312c0bbaff36380ecb/psycopg-3.2.9.tar.gz", hash = "sha256:2fbb46fcd17bc81f993f28c47f1ebea38d66ae97cc2dbc3cad73b37cefbff700", size = 158122 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/b0/a73c195a56eb6b92e937a5ca58521a5c3346fb233345adc80fd3e2f542e2/psycopg-3.2.9-py3-none-any.whl", hash = 
"sha256:01a8dadccdaac2123c916208c96e06631641c0566b22005493f09663c7a8d3b6", size = 202705 }, +] + +[[package]] +name = "py-pglite" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "psutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/2f/a0f40dfc80859028120162f124b0f679550f503ba58cd0d58f11da392168/py_pglite-0.5.0.tar.gz", hash = "sha256:7a767c391b0d028559e5e29a54652b49cde82e2b1ec93f9400de6ac0234b4b44", size = 31829 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/c9/4e4d9bb993849311c31ed76a1b96768bb291ebc0e3c493b9f5c32e281300/py_pglite-0.5.0-py3-none-any.whl", hash = "sha256:287697109d27d3a3996598b618f1a7d26d40c3538685a54e13004896d17f836a", size = 41329 }, +] + +[package.optional-dependencies] +asyncpg = [ + { name = "asyncpg" }, +] +psycopg = [ + { name = "psycopg" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, +] + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474 }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157 }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = 
"sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556 }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700 }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700 }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318 }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714 }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800 }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540 }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, +] + +[[package]] +name = "testcontainers" +version = "4.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docker" }, + { name = "python-dotenv" }, + { name = "typing-extensions" }, + { name = "urllib3" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d3/62/01d9f648e9b943175e0dcddf749cf31c769665d8ba08df1e989427163f33/testcontainers-4.12.0.tar.gz", hash = "sha256:13ee89cae995e643f225665aad8b200b25c4f219944a6f9c0b03249ec3f31b8d", size = 66631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/e8/9e2c392e5d671afda47b917597cac8fde6a452f5776c4c9ceb93fbd2889f/testcontainers-4.12.0-py3-none-any.whl", hash = "sha256:26caef57e642d5e8c5fcc593881cf7df3ab0f0dc9170fad22765b184e226ab15", size = 111791 }, +] + +[[package]] +name = "tests" +version = "0.1.0" +source = { virtual = "." 
} +dependencies = [ + { name = "asyncpg" }, + { name = "faker" }, + { name = "psycopg" }, + { name = "py-pglite", extra = ["asyncpg", "psycopg"] }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "testcontainers" }, +] + +[package.metadata] +requires-dist = [ + { name = "asyncpg", specifier = ">=0.30.0" }, + { name = "faker", specifier = ">=37.6.0" }, + { name = "psycopg", specifier = ">=3.2.9" }, + { name = "py-pglite", extras = ["asyncpg", "psycopg"], specifier = ">=0.5.0" }, + { name = "pytest", specifier = ">=8.4.1" }, + { name = "pytest-asyncio", specifier = ">=1.1.0" }, + { name = "testcontainers", specifier = ">=4.12.0" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 }, +] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795 }, +] + +[[package]] +name = "wrapt" +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003 }, + { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025 }, + { url = 
"https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108 }, + { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072 }, + { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214 }, + { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105 }, + { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766 }, + { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711 }, + { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885 }, + { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896 }, + { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132 }, + { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091 }, + { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172 }, + { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163 }, + { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963 }, + { url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945 }, + { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857 }, + { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178 }, + { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310 }, + { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266 }, + { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544 }, + { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283 }, + { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366 }, + { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571 }, + { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094 }, + { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659 }, + { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946 }, + { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = 
"sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717 }, + { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334 }, + { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471 }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591 }, +] diff --git a/tests/schema.sql b/tests/schema.sql new file mode 100644 index 0000000..c36ca3d --- /dev/null +++ b/tests/schema.sql @@ -0,0 +1,27 @@ +create table if not exists author ( + id uuid primary key default gen_random_uuid(), + full_name text not null, + birthday date +); + +create table if not exists genre ( + id text primary key +); + +create table if not exists book ( + id uuid primary key default gen_random_uuid(), + title text not null, + author_id uuid not null references author(id), + year int not null, + isbn text not null unique, + is_best_seller bool default false, + genre text not null references genre(id) +); + +insert into genre values + ('comedy'), + ('drama'), + ('science fiction'), + ('fantasy'), + ('biography') +on conflict do nothing; From 5edcc8e5c3cf5b174dfd8497bf2ad0adac16bd04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Sat, 13 Sep 2025 13:54:45 -0300 Subject: [PATCH 09/10] add typescript:pg target --- .gitignore | 5 +- .../python/templates/psycopg-query.j2 | 2 +- codegen/src/presentation/typescript/mod.rs | 16 +- .../typescript/templates/parsers.ts | 269 --- .../templates/{postgres => pg}/model.j2 | 2 +- .../templates/{postgres => pg}/model_init.j2 | 1 - .../templates/{postgres => pg}/query.j2 | 231 +- .../typescript/type_map_service.rs | 46 - deno.json | 4 +- deno.lock | 204 +- package-lock.json | 1870 +++++++++++++++++ package.json | 10 + schema.sql | 26 - src/config/config.service.test.ts | 12 +- tests/author.sql | 3 + tests/pgc-typscript-postgres.yaml | 14 - tests/{ => python}/pgc-asyncpg.yaml | 4 +- tests/{ => python}/pgc-psycopg.yaml | 4 +- tests/python/test_asyncpg.py | 38 +- tests/python/test_psycopg.py | 38 +- tests/schema.sql | 7 + tests/typescript/pg.test.ts | 105 + tests/typescript/pgc-pg.yaml | 18 + 23 files changed, 2272 insertions(+), 657 deletions(-) delete mode 100644 codegen/src/presentation/typescript/templates/parsers.ts rename codegen/src/presentation/typescript/templates/{postgres => pg}/model.j2 (100%) rename codegen/src/presentation/typescript/templates/{postgres => pg}/model_init.j2 (78%) rename codegen/src/presentation/typescript/templates/{postgres => pg}/query.j2 (50%) create mode 100644 package-lock.json create mode 100644 package.json delete mode 100644 schema.sql delete mode 100644 tests/pgc-typscript-postgres.yaml rename tests/{ => python}/pgc-asyncpg.yaml (73%) rename tests/{ => python}/pgc-psycopg.yaml (73%) create mode 100644 tests/typescript/pg.test.ts create mode 100644 tests/typescript/pgc-pg.yaml diff --git a/.gitignore b/.gitignore index ba204e8..76098a4 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,7 @@ pgc.yaml author.sql schema.sql queries/* 
-**/.DS_Store \ No newline at end of file +**/.DS_Store +tests/*/out_* +**/__pycache__ +node_modules diff --git a/codegen/src/presentation/python/templates/psycopg-query.j2 b/codegen/src/presentation/python/templates/psycopg-query.j2 index 13c639b..aa828cf 100644 --- a/codegen/src/presentation/python/templates/psycopg-query.j2 +++ b/codegen/src/presentation/python/templates/psycopg-query.j2 @@ -196,7 +196,7 @@ left join pg_namespace n left join pg_class c on c.relnamespace = n.oid and c.relname = i.tablename - and c.relkind in ('r','p') + and c.relkind in ('r', 'c', 'p', 'v', 'm') left join pg_type t on t.oid = c.reltype left join lateral ( diff --git a/codegen/src/presentation/typescript/mod.rs b/codegen/src/presentation/typescript/mod.rs index 9648131..4cc6765 100644 --- a/codegen/src/presentation/typescript/mod.rs +++ b/codegen/src/presentation/typescript/mod.rs @@ -17,11 +17,11 @@ pub fn postgres() -> TemplateGenConfig { query_directory_entrypoint: "queries.ts", model_directory_entrypoint: "models.ts", file_extension: "ts", - query_template: include_str!("./templates/postgres/query.j2"), - model_template: include_str!("./templates/postgres/model.j2"), - model_init_template: include_str!("./templates/postgres/model_init.j2"), + query_template: include_str!("./templates/pg/query.j2"), + model_template: include_str!("./templates/pg/model.j2"), + model_init_template: include_str!("./templates/pg/model_init.j2"), type_map_service: &TypescriptTypeMapService, - other_templates: &[("parsers.ts", include_str!("./templates/parsers.ts"))], + other_templates: &[], register_filters: Some(register_filters), } } @@ -31,17 +31,9 @@ fn register_filters(env: &mut Environment) -> Result<(), Error> { matches!(Type::from_jinja(ty), Type::Nullable(_)) }); - env.add_filter("type_parser", |value: Value| { - TypescriptTypeMapService.type_parser(Type::from_jinja(value)) - }); - env.add_filter("is_user_defined", |value: Value| -> bool { matches!(Type::from_jinja(value), Type::UserDefined { .. 
}) }); - env.add_filter("requires_parsing", |value: Value| -> bool { - TypescriptTypeMapService.column_requires_parser(Type::from_jinja(value)) - }); - Ok(()) } diff --git a/codegen/src/presentation/typescript/templates/parsers.ts b/codegen/src/presentation/typescript/templates/parsers.ts deleted file mode 100644 index 8c3a598..0000000 --- a/codegen/src/presentation/typescript/templates/parsers.ts +++ /dev/null @@ -1,269 +0,0 @@ -/** - * This file was automatically generated by pgc - * run `pgc build` to regenerate it - * go to "https://github.com/tvallotton/pgc" for more information - * {%- set options = ir.request.config.codegen.options -%} {%- if options and options == true %} - */ -import { Buffer } from "node:buffer"; -// {%- endif %} -import * as models from "./models/models.ts"; -type Step = (cell: string) => any; - -function trimOuter(str: string, open: string, close: string) { - const s = str.trim(); - if (s.startsWith(open) && s.endsWith(close)) return s.slice(1, -1); - return s; -} - -function unquote(s: string): string { - const t = s.trim(); - if (t.length >= 2 && t.startsWith('"') && t.endsWith('"')) { - // Remove surrounding quotes and unescape \" and \\ (good enough for most PG cases) - return t - .slice(1, -1) - .replace(/\\(["\\])/g, "$1"); - } - return t; -} - -function splitTopLevel( - s: string, - separator: string, - { respectQuotes = true, parens = true, braces = true }: { - respectQuotes?: boolean; - parens?: boolean; - braces?: boolean; - } = {}, -): string[] { - const out: string[] = []; - let buf = ""; - let inQuotes = false; - let parenDepth = 0; - let braceDepth = 0; - - const flush = () => { - out.push(buf); - buf = ""; - }; - - for (let i = 0; i < s.length; i++) { - const ch = s[i]; - - if (respectQuotes && ch === '"' && s[i - 1] !== "\\") { - inQuotes = !inQuotes; - buf += ch; - continue; - } - if (!inQuotes) { - if (parens && (ch === "(" || ch === ")")) { - if (ch === "(") parenDepth++; - else parenDepth--; - buf += ch; - continue; - } - if (braces && (ch === "{" || ch === "}")) { - if (ch === "{") braceDepth++; - else braceDepth--; - buf += ch; - continue; - } - if (parenDepth === 0 && braceDepth === 0 && ch === separator) { - flush(); - continue; - } - } - buf += ch; - } - flush(); - return out.map((t) => t.trim()); -} - -function parsePgRowToCells(row: string): string[] { - const inner = trimOuter(row.trim(), "(", ")"); - if (inner === "") return []; - // Note: allow parentheses/braces in cells; split only at top-level commas - return splitTopLevel(inner, ","); -} - -function parsePgArrayToElements(arr: string): string[] { - const inner = trimOuter(arr.trim(), "{", "}"); - if (inner === "") return []; - // In arrays, elements can be quoted (including quoted rows "(...)") - return splitTopLevel(inner, ",", { - respectQuotes: true, - parens: true, - braces: true, - }); -} - -export interface Parser { - parse(value: string): T; -} - -export class NullParser { - constructor(readonly subparser: Parser) {} - - parse(cell: string): T | null { - const t = cell.trim(); - if (t == "") return null; - return this.subparser.parse(cell); - } -} - -export class NumberParser { - parse(cell: string): number { - const t = cell.trim(); - const q = unquote(t); - const v = Number(q); - if (Number.isNaN(v)) throw new Error(`Invalid number: ${cell}`); - return v; - } -} - -export class BigIntParser { - parse(cell: string): bigint { - if (typeof cell != "string") return cell; - const t = cell.trim(); - const q = unquote(t); - const v = BigInt(q); - return v; - } -} - -export 
class StringParser { - parse(cell: string): string { - const t = cell.trim(); - return unquote(t).replaceAll(/""/g, '"'); - } -} - -export class DateParser { - parse(cell: string): Date { - const t = unquote(cell.trim()); - const d = new Date(t); - if (Number.isNaN(d.getTime())) throw new Error(`Invalid date: ${cell}`); - return d; - } -} - -export class BooleanParser { - parse(cell: string): boolean { - const t = unquote(cell.trim()); - if (!["t", "f"].includes(t)) { - throw new Error(`Invalid boolean: ${cell}. Expected "t" or "f".`); - } - return t == "t"; - } -} - -export class EnumParser { - parse(cell: string): T { - return new StringParser().parse(cell) as T; - } -} - -export class JsonParser { - parse(cell: string): any { - const t = unquote(cell); - return JSON.parse(t); - } -} - -export class BufferParser { - parse(cell: string): Buffer { - const t = unquote(cell); - return Buffer.from(t.replace(/^\\x/, ""), "hex"); - } -} - -export class ArrayParser { - constructor(readonly elementParser: Parser) {} - parse(array: string | Array): Array { - let stringArray; - - if (typeof array == "string") { - const unquoted = unquote(array.trim()); - stringArray = parsePgArrayToElements(unquoted); - } else { - stringArray = array; - } - - return stringArray.map((element) => this.elementParser.parse(element)); - } - - arrayOfThis() { - return new ArrayParser({ parse: (e) => this.parse(e) }); - } -} - -export class RowParser { - private steps: Parser[]; - private mapFun: (_: T) => V; - - constructor(steps: Parser[] = [], map?: (_: T) => V) { - this.steps = steps; - this.mapFun = map ?? ((row: T) => row as unknown as V); - } - - addColumnParser(parser: Parser): RowParser<[...T, U]> { - return new RowParser<[...T, U]>([...this.steps, parser]); - } - - parse(input: string): V { - input = unquote(input); - const trimmed = input.trim(); - // Accept either full row "(a,b,...)" or a bare CSV (we’ll try row first) - const cells = trimmed.startsWith("(") - ? 
parsePgRowToCells(trimmed) - : splitTopLevel(trimmed, ","); - if (cells.length !== this.steps.length) { - throw new Error( - `Arity mismatch: expected ${this.steps.length} fields, got ${cells.length} (${ - JSON.stringify(cells) - })`, - ); - } - const out = this.steps.map((parser, i) => parser.parse(cells[i])) as T; - return this.mapFun(out); - } - - map(fun: (_: V) => U): RowParser { - const newMap = (row: T) => fun(this.mapFun(row)); - return new RowParser(this.steps, newMap); - } -} - -/* parsers for custom types {{"*" + "/"}} -export const parser = { - -{%- for module_name, module in ir.model_modules.model_modules | items %} -{%- endif %} - {{module_name | to_camel_case }}: { -{%- for model in module.models %} - {{model.name | to_camel_case }}: () => { - return new RowParser() - {% for field in model.fields -%} - .addColumnParser({{ field.type | type_parser }}) - {% endfor -%} - .map(([ - {%- for field in model.fields -%} - {{field.name | to_camel_case }} {%- if not loop.last %}, {% endif %} - {%- endfor -%} - ]) => ({ {{""}} - {%- for field in model.fields -%} - {{field.name | to_camel_case }} {%- if not loop.last %}, {% endif %} - {%- endfor -%} - {{""}} })) - }, - {%- endfor %} - {%- for enum in module.enums %} - {{enum.name | to_camel_case }}: () => { - return new EnumParser() - }, - {%- endfor %} -} -{% endfor %} - -}; - -/**/ diff --git a/codegen/src/presentation/typescript/templates/postgres/model.j2 b/codegen/src/presentation/typescript/templates/pg/model.j2 similarity index 100% rename from codegen/src/presentation/typescript/templates/postgres/model.j2 rename to codegen/src/presentation/typescript/templates/pg/model.j2 index 7300542..b21ddff 100644 --- a/codegen/src/presentation/typescript/templates/postgres/model.j2 +++ b/codegen/src/presentation/typescript/templates/pg/model.j2 @@ -12,8 +12,8 @@ export enum {{enum.name | to_pascal_case }} { {%- for value in enum.values %} {{ value | to_screaming_snake_case }} = {{ value | to_c_string }}, {%- endfor %} -{%- endfor %} } +{%- endfor %} {%- for model in model_module.models %} diff --git a/codegen/src/presentation/typescript/templates/postgres/model_init.j2 b/codegen/src/presentation/typescript/templates/pg/model_init.j2 similarity index 78% rename from codegen/src/presentation/typescript/templates/postgres/model_init.j2 rename to codegen/src/presentation/typescript/templates/pg/model_init.j2 index 3525f16..ba89c3d 100644 --- a/codegen/src/presentation/typescript/templates/postgres/model_init.j2 +++ b/codegen/src/presentation/typescript/templates/pg/model_init.j2 @@ -1,4 +1,3 @@ -import * as parsers from "../parsers.ts"; {%- for module in ir.model_modules.model_modules -%} export type * as "{{module}}" from "./{{module}}.ts"; {%- endfor %} diff --git a/codegen/src/presentation/typescript/templates/postgres/query.j2 b/codegen/src/presentation/typescript/templates/pg/query.j2 similarity index 50% rename from codegen/src/presentation/typescript/templates/postgres/query.j2 rename to codegen/src/presentation/typescript/templates/pg/query.j2 index 482f655..b49cdbe 100644 --- a/codegen/src/presentation/typescript/templates/postgres/query.j2 +++ b/codegen/src/presentation/typescript/templates/pg/query.j2 @@ -1,6 +1,5 @@ // This file was automatically generated by pgc // run `pgc build` to regenerate it -import postgres from "postgres" {%- for type in used_types %} {% for import in (type | imports) %} {%- if import != ""%} @@ -8,7 +7,12 @@ import postgres from "postgres" {%- endif %} {%- endfor %} {%- endfor %} -import { parser, 
ArrayParser, BigIntParser } from "./parsers.ts"; +{%- set options = ir.request.config.codegen.options %} +{% if options.deno %} +import * as pg from "npm:pg"; +{% else %} +import * as pg from "pg"; +{% endif %} import * as models from "./models/models.ts"; {%- for subnamespace in query_namespace.subnamespaces %} {% set is_directory = (query_namespace.subnamespaces[subnamespace].subnamespaces | length) != 0 %} @@ -30,7 +34,7 @@ const {{ method.query.name | to_screaming_snake_case }} = ` {%- if method.output_model != None %} export interface {{method.output_model.type | name | to_pascal_case }} { {%- for field, type in method.output_model.fields | items %} - {{field}}: {{ type | annotation }}; // {{ type }} + {{field}}: {{ type | annotation }}; {%- endfor %} } {% endif %} @@ -45,10 +49,13 @@ export interface {{ input_model.type | name }} { export class {{ query_namespace.name | to_pascal_case }}Queries { - constructor(readonly connection: postgres.Sql) { - this.connection = connection + {%- for subnamespace in query_namespace.subnamespaces %} + {{subnamespace}} : {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries; + {%- endfor %} + constructor(readonly client: pg.Client) { + this.client = client {%- for subnamespace in query_namespace.subnamespaces %} - this.{{subnamespace}} = new {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(connection) + this.{{subnamespace}} = new {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(client) {%- endfor %} } @@ -71,19 +78,15 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { {{argument}}: {{type | annotation}} {%- if not loop.last %}, {% endif %} {%- endfor -%} ) { - const rows = await this.connection.unsafe( + const { rows } = await this.client.query( {{method.query.name | to_screaming_snake_case }}, [ {%- for parameter in method.query.parameters -%} {{parameter.name}}, {%- endfor -%} - ], { prepare: true } + ] ) {{HANDLE_NONE}} - {%- if method.output_type | requires_parsing %} - return {{method.output_type | type_parser }}.parse(rows[0]["{{method.query.output[0].name}}"]) as {{method.output_type | annotation}}{{OR_NONE}}; - {%- else %} return rows[0]["{{method.query.output[0].name}}"] as {{method.output_type | annotation}}{{OR_NONE}}; - {%- endif %} } {%- else %} async {{method.query.name}}( @@ -91,23 +94,19 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { {{argument}}: {{type | annotation}}{%- if not loop.last %}, {% endif %} {%- endfor -%} ) { - const rows = await this.connection.unsafe( + const { rows } = await this.client.query( {{method.query.name | to_screaming_snake_case }}, [ {%- for parameter in method.query.parameters -%} {{parameter.name}} {%- if not loop.last %}, {% endif %} {%- endfor -%} - ], { prepare: true } + ] ) {{HANDLE_NONE}} return { {%- for column_name, column_type in method.output_columns | items %} - {%- if column_type | requires_parsing %} - ["{{column_name | to_camel_case }}"]: {{ column_type | type_parser }}.parse(rows[0]["{{column_name}}"]), - {%- else %} ["{{column_name | to_camel_case }}"]: rows[0]["{{column_name}}"], - {%- endif %} {%- endfor %} } as {{method.output_type | annotation}}{{OR_NONE}}; @@ -120,22 +119,14 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { {{argument}}: {{type | annotation}} {% if not loop.last %}, {% endif %} {%- endfor -%} ) { - const rows = await this.connection.unsafe( + const { rows } = await this.client.query( {{method.query.name | to_screaming_snake_case }}, [ {%- for parameter in method.query.parameters -%} 
{{parameter.name}}{%- if not loop.last %}, {% endif %} {%- endfor -%} - ], { prepare: true } + ] ) - return rows.map(row => { - {%- for column_name, column_type in method.output_columns | items %} - {%- if column_type | requires_parsing %} - return {{ column_type | type_parser }}.parse(row["{{ column_name }}"]) as {{ method.output_type | annotation}}; - {%- else %} - return row["{{method.query.output[0].name}}"] as {{method.output_type | annotation}}; - {%- endif %} - {%- endfor %} - }) + return rows as Array<{{method.output_type | annotation }}>; } {%- else%} async {{method.query.name}}( @@ -143,23 +134,14 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { {{argument}}: {{type | annotation}} {% if not loop.last %}, {% endif %} {%- endfor -%} ) { - const rows = await this.connection.unsafe( + const { rows } = await this.client.query( {{method.query.name | to_screaming_snake_case }}, [ {%- for parameter in method.query.parameters -%} {{parameter.name}} {%- if not loop.last %}, {% endif %} {%- endfor -%} - ], { prepare: true } + ] ) - return rows.map((row) => ({ - {%- for column_name, column_type in method.output_columns | items %} - {%- if column_type | requires_parsing %} - ["{{column_name | to_camel_case }}"]: {{ column_type | type_parser}}.parse(row["{{column_name}}"]), - {%- else %} - ["{{column_name | to_camel_case }}"]: row["{{column_name}}"], - {%- endif %} - - {%- endfor %} - })); + return rows as Array<{{method.output_type | annotation}}>; } {%- endif %} {%- elif method.query.command == 'exec' %} @@ -168,14 +150,177 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { {{argument}}: {{type | annotation}}{%- if not loop.last %}, {% endif %} {%- endfor -%} ): {{method.output_type | annotation}}{{OR_NONE}} { - const rows = await this.connection.unsafe( + await this.client.query( {{method.query.name | to_screaming_snake_case }}, [ {%- for parameter in method.query.parameters -%} {{parameter.name}}, {%- if not loop.last %}, {% endif %} {%- endfor -%} - ], { prepare: true } + ] ) } {%- endif %} {%- endfor %} } + + +{% if this_module == [] %} + +const GET_TYPE_INFO = ` +with input as ( + select + (e.elem->>0) as schemaname, + (e.elem->>1) as tablename, + e.ord + from jsonb_array_elements($1::jsonb) with ordinality as e(elem, ord) +) + +select + i.schemaname, + t.typname as name, + t.oid as oid, + t.typarray as array_oid, + t.oid::regtype::text as regtype, + coalesce(a.fnames, '{}')::text[] as "fieldNames", + coalesce(a.ftypes, '{}') as "fieldTypes" +from input i +left join pg_namespace n + on n.nspname = i.schemaname +left join pg_class c + on c.relnamespace = n.oid + and c.relname = i.tablename + and c.relkind in ('r', 'c', 'p', 'v', 'm') +left join pg_type t + on t.oid = c.reltype +left join lateral ( + select + attrelid, + array_agg(attname) as fnames, + array_agg(atttypid) as ftypes + from ( + select a.attrelid, a.attname, a.atttypid + from pg_attribute a + join pg_type t_ ON t_.typrelid = a.attrelid + where t_.oid = t.oid + and a.attnum > 0 + and not a.attisdropped + order by a.attnum + ) x + group by attrelid +) a on a.attrelid = t.typrelid +`; + + + +function trimOuter(str: string, open: string, close: string) { + const s = str.trim(); + if (s.startsWith(open) && s.endsWith(close)) return s.slice(1, -1); + return s; +} + +function splitTopLevel( + s: string, + separator: string, + { respectQuotes = true, parens = true, braces = true }: { + respectQuotes?: boolean; + parens?: boolean; + braces?: boolean; + } = {}, +): string[] { + const out: string[] 
= [];
+  let buf = "";
+  let inQuotes = false;
+  let parenDepth = 0;
+  let braceDepth = 0;
+
+  const flush = () => {
+    out.push(buf);
+    buf = "";
+  };
+
+  for (let i = 0; i < s.length; i++) {
+    const ch = s[i];
+
+    if (respectQuotes && ch === '"' && s[i - 1] !== "\\") {
+      inQuotes = !inQuotes;
+      buf += ch;
+      continue;
+    }
+    if (!inQuotes) {
+      if (parens && (ch === "(" || ch === ")")) {
+        if (ch === "(") parenDepth++;
+        else parenDepth--;
+        buf += ch;
+        continue;
+      }
+      if (braces && (ch === "{" || ch === "}")) {
+        if (ch === "{") braceDepth++;
+        else braceDepth--;
+        buf += ch;
+        continue;
+      }
+      if (parenDepth === 0 && braceDepth === 0 && ch === separator) {
+        flush();
+        continue;
+      }
+    }
+    buf += ch;
+  }
+  flush();
+  return out.map((t) => t.trim());
+}
+
+function parsePgRowToCells(row: string): string[] {
+  const inner = trimOuter(row.trim(), "(", ")");
+  if (inner === "") return [];
+  // Note: allow parentheses/braces in cells; split only at top-level commas
+  return splitTopLevel(inner, ",");
+}
+
+interface TypeInfo {
+  oid: number;
+  fieldNames: string[];
+  fieldTypes: number[];
+}
+
+{%- if not options.preserve_casing %}
+function toCamelCase(identifier: string) {
+  return identifier.replace(/(?!^)_([a-z])/g, (_, letter) => letter.toUpperCase());
+}
+{%- endif %}
+
+
+function parser(client: pg.Client, typeInfo: TypeInfo) {
+  return (record: string) => {
+    const output: Record<string, unknown> = {}
+    parsePgRowToCells(record).forEach((value, i) => {
+      const fieldName = typeInfo.fieldNames[i];
+      const typeOid = typeInfo.fieldTypes[i];
+      const parser = client.getTypeParser(typeOid);
+      {%- if not options.preserve_casing %}
+      output[toCamelCase(fieldName)] = parser(value);
+      {%- else %}
+      output[fieldName] = parser(value);
+      {%- endif %}
+    });
+    return output;
+  }
+}
+
+export async function initClient(client: pg.Client) {
+  const { rows } = await client.query(GET_TYPE_INFO, [
+    JSON.stringify([
+      {%- for module_name, model_module in ir.model_modules.model_modules | items %}
+      {%- for model in model_module.models %}
+      ["{{module_name}}", "{{model.name}}"],
+      {%- endfor %}
+      {%- endfor %}
+    ])
+  ])
+
+  for (const typeInfo of rows) {
+    client.setTypeParser(typeInfo.oid, parser(client, typeInfo));
+  }
+}
+
+
+{% endif %}
diff --git a/codegen/src/presentation/typescript/type_map_service.rs b/codegen/src/presentation/typescript/type_map_service.rs
index eba4997..8c5a816 100644
--- a/codegen/src/presentation/typescript/type_map_service.rs
+++ b/codegen/src/presentation/typescript/type_map_service.rs
@@ -9,52 +9,6 @@ use crate::{
 #[derive(Clone, Copy)]
 pub struct TypescriptTypeMapService;
 
-impl TypescriptTypeMapService {
-    pub fn column_requires_parser(&self, r#type: Type) -> bool {
-        return matches!(
-            r#type,
-            Type::Int8 | Type::Serial8 | Type::UserDefined { .. } | Type::Array { .. }
-        );
-    }
-
-    pub fn type_parser(&self, r#type: Type) -> String {
-        match r#type {
-            Type::Nullable(r#type) => format!(
-                "new NullParser({})",
-                self.type_parser(Type::clone(&*r#type))
-            ),
-            Type::UserDefined { module_path, name } => {
-                format!("parser.{}.{}()", module_path[1], to_camel_case(&name))
-            }
-
-            Type::Array { r#type, dim } if dim != 1 => {
-                format!(
-                    "{}.arrayOfThis()",
-                    self.type_parser(Type::Array {
-                        r#type,
-                        dim: dim - 1
-                    })
-                )
-            }
-            Type::Array { r#type, dim: 1 } => {
-                format!(
-                    "new ArrayParser({})",
-                    self.type_parser(Type::clone(&*r#type))
-                )
-            }
-            Type::Bool => "new BooleanParser()".into(),
-            Type::Date | Type::DateTz | Type::Timestamp | Type::TimestampTz => {
-                "new DateParser()".into()
-            }
-            Type::Int8 => "new BigIntParser()".into(),
-            Type::Float4 | Type::Float8 | Type::Int2 | Type::Int4 => "new NumberParser()".into(),
-            Type::Bytea => "new BufferParser()".into(),
-            Type::Json => "new JsonParser()".into(),
-            _ => "new StringParser()".into(),
-        }
-    }
-}
-
 impl TypeMapService for TypescriptTypeMapService {
     fn get(&self, current_module: Vec, r#type: &Type) -> LanguageType {
         match r#type {
diff --git a/deno.json b/deno.json
index 9d9bc61..dd45f8e 100644
--- a/deno.json
+++ b/deno.json
@@ -3,7 +3,8 @@
     "build:wasm": "deno run --allow-sys --allow-run --allow-net --allow-env --allow-write --allow-read build.ts",
     "dev": "deno run --allow-sys --allow-net --allow-run --allow-env --allow-write --allow-read src/main.ts",
     "build": "deno run build:wasm && deno compile --allow-sys --allow-net --allow-run --allow-env --allow-write --allow-read --output pgc src/main.ts",
-    "test:python": "rm -rf tests/python/dist_asyncpg && rm -rf tests/python/dist_psycopg && deno run dev build -f tests/pgc-asyncpg.yaml && deno run dev build -f tests/pgc-psycopg.yaml && uv run --project tests/python pytest",
+    "test:python": "rm -rf tests/python/out_asyncpg tests/python/out_psycopg && deno run dev build -f tests/python/pgc-asyncpg.yaml && deno run dev build -f tests/python/pgc-psycopg.yaml && uv run --project tests/python pytest",
+    "test:typescript": "rm -rf tests/typescript/out_postgres && deno run dev build -f tests/typescript/pgc-pg.yaml",
     "install": "deno run build && mv pgc ~/.local/bin"
   },
   "imports": {
@@ -16,7 +17,6 @@
     "@types/node": "npm:@types/node@^24.0.7",
     "commander": "npm:commander@^14.0.0",
     "glob": "npm:glob@^11.0.3",
-    "postgres": "npm:postgres@^3.4.7",
     "zod": "npm:zod@^3.25.67"
   }
 }
diff --git a/deno.lock b/deno.lock
index ba62ae1..7a62172 100644
--- a/deno.lock
+++ b/deno.lock
@@ -2,6 +2,7 @@
   "version": "5",
   "specifiers": {
     "jsr:@eemeli/yaml@^2.8.0": "2.8.0",
+    "jsr:@std/assert@*": "1.0.13",
     "jsr:@std/assert@1": "1.0.13",
     "jsr:@std/fs@*": "1.0.19",
     "jsr:@std/fs@^1.0.19": "1.0.19",
@@ -12,9 +13,8 @@
     "jsr:@std/yaml@*": "1.0.8",
     "jsr:@std/yaml@^1.0.8": "1.0.8",
     "npm:@electric-sql/pglite@~0.3.4": "0.3.4",
-    "npm:@types/node@^24.0.7": "24.0.7",
+    "npm:@faker-js/faker@10": "10.0.0",
     "npm:commander@14": "14.0.0",
-    "npm:glob@^11.0.3": "11.0.3",
     "npm:postgres@^3.4.7": "3.4.7",
     "npm:zod@^3.25.67": "3.25.67"
   },
@@ -55,201 +55,15 @@
     "@electric-sql/pglite@0.3.4": {
       "integrity": "sha512-h5hoL2GuxcWN8Q3+jtesIRem14iIvAZVEsTeUF6eO9RiUb6ar73QVIEW9t+Ud58iXAcAE2dFMtWqw3W2Oo4LDw=="
     },
-    "@isaacs/balanced-match@4.0.1": {
-      "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ=="
-    },
-    "@isaacs/brace-expansion@5.0.0": {
-      "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
-
"dependencies": [ - "@isaacs/balanced-match" - ] - }, - "@isaacs/cliui@8.0.2": { - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", - "dependencies": [ - "string-width@5.1.2", - "string-width-cjs@npm:string-width@4.2.3", - "strip-ansi@7.1.0", - "strip-ansi-cjs@npm:strip-ansi@6.0.1", - "wrap-ansi@8.1.0", - "wrap-ansi-cjs@npm:wrap-ansi@7.0.0" - ] - }, - "@types/node@24.0.7": { - "integrity": "sha512-YIEUUr4yf8q8oQoXPpSlnvKNVKDQlPMWrmOcgzoduo7kvA2UF0/BwJ/eMKFTiTtkNL17I0M6Xe2tvwFU7be6iw==", - "dependencies": [ - "undici-types" - ] - }, - "ansi-regex@5.0.1": { - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-regex@6.1.0": { - "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==" - }, - "ansi-styles@4.3.0": { - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dependencies": [ - "color-convert" - ] - }, - "ansi-styles@6.2.1": { - "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==" - }, - "color-convert@2.0.1": { - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dependencies": [ - "color-name" - ] - }, - "color-name@1.1.4": { - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + "@faker-js/faker@10.0.0": { + "integrity": "sha512-UollFEUkVXutsaP+Vndjxar40Gs5JL2HeLcl8xO1QAjJgOdhc3OmBFWyEylS+RddWaaBiAzH+5/17PLQJwDiLw==" }, "commander@14.0.0": { "integrity": "sha512-2uM9rYjPvyq39NwLRqaiLtWHyDC1FvryJDa2ATTVims5YAS4PupsEQsDvP14FqhFr0P49CYDugi59xaxJlTXRA==" }, - "cross-spawn@7.0.6": { - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "dependencies": [ - "path-key", - "shebang-command", - "which" - ] - }, - "eastasianwidth@0.2.0": { - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, - "emoji-regex@8.0.0": { - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "emoji-regex@9.2.2": { - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, - "foreground-child@3.3.1": { - "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", - "dependencies": [ - "cross-spawn", - "signal-exit" - ] - }, - "glob@11.0.3": { - "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==", - "dependencies": [ - "foreground-child", - "jackspeak", - "minimatch", - "minipass", - "package-json-from-dist", - "path-scurry" - ], - "bin": true - }, - "is-fullwidth-code-point@3.0.0": { - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, - "isexe@2.0.0": { - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" - }, - "jackspeak@4.1.1": { - "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==", - "dependencies": [ - "@isaacs/cliui" - ] - }, - "lru-cache@11.1.0": { - "integrity": 
"sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==" - }, - "minimatch@10.0.3": { - "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==", - "dependencies": [ - "@isaacs/brace-expansion" - ] - }, - "minipass@7.1.2": { - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==" - }, - "package-json-from-dist@1.0.1": { - "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==" - }, - "path-key@3.1.1": { - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==" - }, - "path-scurry@2.0.0": { - "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==", - "dependencies": [ - "lru-cache", - "minipass" - ] - }, "postgres@3.4.7": { "integrity": "sha512-Jtc2612XINuBjIl/QTWsV5UvE8UHuNblcO3vVADSrKsrc6RqGX6lOW1cEo3CM2v0XG4Nat8nI+YM7/f26VxXLw==" }, - "shebang-command@2.0.0": { - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dependencies": [ - "shebang-regex" - ] - }, - "shebang-regex@3.0.0": { - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==" - }, - "signal-exit@4.1.0": { - "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==" - }, - "string-width@4.2.3": { - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dependencies": [ - "emoji-regex@8.0.0", - "is-fullwidth-code-point", - "strip-ansi@6.0.1" - ] - }, - "string-width@5.1.2": { - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "dependencies": [ - "eastasianwidth", - "emoji-regex@9.2.2", - "strip-ansi@7.1.0" - ] - }, - "strip-ansi@6.0.1": { - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dependencies": [ - "ansi-regex@5.0.1" - ] - }, - "strip-ansi@7.1.0": { - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dependencies": [ - "ansi-regex@6.1.0" - ] - }, - "undici-types@7.8.0": { - "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==" - }, - "which@2.0.2": { - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dependencies": [ - "isexe" - ], - "bin": true - }, - "wrap-ansi@7.0.0": { - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dependencies": [ - "ansi-styles@4.3.0", - "string-width@4.2.3", - "strip-ansi@6.0.1" - ] - }, - "wrap-ansi@8.1.0": { - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "dependencies": [ - "ansi-styles@6.2.1", - "string-width@5.1.2", - "strip-ansi@7.1.0" - ] - }, "zod@3.25.67": { "integrity": "sha512-idA2YXwpCdqUSKRCACDE6ItZD9TZzy3OZMtpfLoh6oPR47lipysRrJfjzMqFxQ3uJuUPyUeWe1r9vLH33xO/Qw==" } @@ -277,6 +91,14 @@ "npm:glob@^11.0.3", "npm:postgres@^3.4.7", "npm:zod@^3.25.67" - ] + ], + "packageJson": { + "dependencies": [ + "npm:@faker-js/faker@10", + "npm:@testcontainers/postgresql@^11.5.1", + "npm:@types/pg@^8.15.5", + 
"npm:pg@^8.16.3" + ] + } } } diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..e145a02 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,1870 @@ +{ + "name": "pgc", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "@testcontainers/postgresql": "^11.5.1", + "pg": "^8.16.3" + } + }, + "node_modules/@balena/dockerignore": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz", + "integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==" + }, + "node_modules/@grpc/grpc-js": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.13.4.tgz", + "integrity": "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg==", + "dependencies": { + "@grpc/proto-loader": "^0.7.13", + "@js-sdsl/ordered-map": "^4.4.2" + }, + "engines": { + "node": ">=12.10.0" + } + }, + "node_modules/@grpc/proto-loader": { + "version": "0.7.15", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.7.15.tgz", + "integrity": "sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==", + "dependencies": { + "lodash.camelcase": "^4.3.0", + "long": "^5.0.0", + "protobufjs": "^7.2.5", + "yargs": "^17.7.2" + }, + "bin": { + "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@js-sdsl/ordered-map": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@js-sdsl/ordered-map/-/ordered-map-4.4.2.tgz", + "integrity": "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@protobufjs/aspromise": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", + "integrity": "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==" + }, + "node_modules/@protobufjs/base64": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz", + "integrity": "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==" + }, + "node_modules/@protobufjs/codegen": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz", + "integrity": "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==" + }, + 
"node_modules/@protobufjs/eventemitter": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz", + "integrity": "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==" + }, + "node_modules/@protobufjs/fetch": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz", + "integrity": "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==", + "dependencies": { + "@protobufjs/aspromise": "^1.1.1", + "@protobufjs/inquire": "^1.1.0" + } + }, + "node_modules/@protobufjs/float": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz", + "integrity": "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==" + }, + "node_modules/@protobufjs/inquire": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz", + "integrity": "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==" + }, + "node_modules/@protobufjs/path": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz", + "integrity": "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==" + }, + "node_modules/@protobufjs/pool": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz", + "integrity": "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==" + }, + "node_modules/@protobufjs/utf8": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", + "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" + }, + "node_modules/@testcontainers/postgresql": { + "version": "11.5.1", + "resolved": "https://registry.npmjs.org/@testcontainers/postgresql/-/postgresql-11.5.1.tgz", + "integrity": "sha512-6P1QYIKRkktSVwTuwU0Pke5WbXTkvpLleyQcgknJPbZwhaIsCrhnbZlVzj2g/e+Nf9Lmdy1F2OAai+vUrBq0AQ==", + "dependencies": { + "testcontainers": "^11.5.1" + } + }, + "node_modules/@types/docker-modem": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/docker-modem/-/docker-modem-3.0.6.tgz", + "integrity": "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==", + "dependencies": { + "@types/node": "*", + "@types/ssh2": "*" + } + }, + "node_modules/@types/dockerode": { + "version": "3.3.43", + "resolved": "https://registry.npmjs.org/@types/dockerode/-/dockerode-3.3.43.tgz", + "integrity": "sha512-YCi0aKKpKeC9dhKTbuglvsWDnAyuIITd6CCJSTKiAdbDzPH4RWu0P9IK2XkJHdyplH6mzYtDYO+gB06JlzcPxg==", + "dependencies": { + "@types/docker-modem": "*", + "@types/node": "*", + "@types/ssh2": "*" + } + }, + "node_modules/@types/node": { + "version": "24.3.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.3.3.tgz", + "integrity": "sha512-GKBNHjoNw3Kra1Qg5UXttsY5kiWMEfoHq2TmXb+b1rcm6N7B3wTrFYIf/oSZ1xNQ+hVVijgLkiDZh7jRRsh+Gw==", + "dependencies": { + "undici-types": "~7.10.0" + } + }, + "node_modules/@types/ssh2": { + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.5.tgz", + "integrity": "sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==", + "dependencies": { + "@types/node": "^18.11.18" + } + }, 
+ "node_modules/@types/ssh2-streams": { + "version": "0.1.12", + "resolved": "https://registry.npmjs.org/@types/ssh2-streams/-/ssh2-streams-0.1.12.tgz", + "integrity": "sha512-Sy8tpEmCce4Tq0oSOYdfqaBpA3hDM8SoxoFh5vzFsu2oL+znzGz8oVWW7xb4K920yYMUY+PIG31qZnFMfPWNCg==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/ssh2/node_modules/@types/node": { + "version": "18.19.124", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.124.tgz", + "integrity": "sha512-hY4YWZFLs3ku6D2Gqo3RchTd9VRCcrjqp/I0mmohYeUVA5Y8eCXKJEasHxLAJVZRJuQogfd1GiJ9lgogBgKeuQ==", + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/ssh2/node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, + "node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/archiver": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/archiver/-/archiver-7.0.1.tgz", + "integrity": "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==", + "dependencies": { + "archiver-utils": "^5.0.2", + "async": "^3.2.4", + "buffer-crc32": "^1.0.0", + "readable-stream": "^4.0.0", + "readdir-glob": "^1.1.2", + "tar-stream": "^3.0.0", + "zip-stream": "^6.0.1" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/archiver-utils": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/archiver-utils/-/archiver-utils-5.0.2.tgz", + "integrity": "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==", + "dependencies": { + "glob": "^10.0.0", + "graceful-fs": "^4.2.0", + "is-stream": "^2.0.1", + "lazystream": "^1.0.0", + "lodash": "^4.17.15", + "normalize-path": "^3.0.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/asn1": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/async": { + "version": "3.2.6", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.6.tgz", + "integrity": "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==" + }, + 
"node_modules/async-lock": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/async-lock/-/async-lock-1.4.1.tgz", + "integrity": "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==" + }, + "node_modules/b4a": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.7.1.tgz", + "integrity": "sha512-ZovbrBV0g6JxK5cGUF1Suby1vLfKjv4RWi8IxoaO/Mon8BDD9I21RxjHFtgQ+kskJqLAVyQZly3uMBui+vhc8Q==", + "peerDependencies": { + "react-native-b4a": "*" + }, + "peerDependenciesMeta": { + "react-native-b4a": { + "optional": true + } + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/bare-events": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.6.1.tgz", + "integrity": "sha512-AuTJkq9XmE6Vk0FJVNq5QxETrSA/vKHarWVBG5l/JbdCL1prJemiyJqUS0jrlXO0MftuPq4m3YVYhoNc5+aE/g==", + "optional": true + }, + "node_modules/bare-fs": { + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/bare-fs/-/bare-fs-4.4.4.tgz", + "integrity": "sha512-Q8yxM1eLhJfuM7KXVP3zjhBvtMJCYRByoTT+wHXjpdMELv0xICFJX+1w4c7csa+WZEOsq4ItJ4RGwvzid6m/dw==", + "optional": true, + "dependencies": { + "bare-events": "^2.5.4", + "bare-path": "^3.0.0", + "bare-stream": "^2.6.4", + "bare-url": "^2.2.2", + "fast-fifo": "^1.3.2" + }, + "engines": { + "bare": ">=1.16.0" + }, + "peerDependencies": { + "bare-buffer": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + } + } + }, + "node_modules/bare-os": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/bare-os/-/bare-os-3.6.2.tgz", + "integrity": "sha512-T+V1+1srU2qYNBmJCXZkUY5vQ0B4FSlL3QDROnKQYOqeiQR8UbjNHlPa+TIbM4cuidiN9GaTaOZgSEgsvPbh5A==", + "optional": true, + "engines": { + "bare": ">=1.14.0" + } + }, + "node_modules/bare-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bare-path/-/bare-path-3.0.0.tgz", + "integrity": "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==", + "optional": true, + "dependencies": { + "bare-os": "^3.0.1" + } + }, + "node_modules/bare-stream": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/bare-stream/-/bare-stream-2.7.0.tgz", + "integrity": "sha512-oyXQNicV1y8nc2aKffH+BUHFRXmx6VrPzlnaEvMhram0nPBrKcEdcyBg5r08D0i8VxngHFAiVyn1QKXpSG0B8A==", + "optional": true, + "dependencies": { + "streamx": "^2.21.0" + }, + "peerDependencies": { + "bare-buffer": "*", + "bare-events": "*" + }, + "peerDependenciesMeta": { + "bare-buffer": { + "optional": true + }, + "bare-events": { + "optional": true + } + } + }, + "node_modules/bare-url": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/bare-url/-/bare-url-2.2.2.tgz", + "integrity": "sha512-g+ueNGKkrjMazDG3elZO1pNs3HY5+mMmOet1jtKyhOaCnkLzitxf26z7hoAEkDNgdNmnc1KIlt/dw6Po6xZMpA==", + "optional": true, + "dependencies": { + "bare-path": "^3.0.0" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": 
"https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==", + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, + "node_modules/bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bl/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/bl/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/buffer-crc32": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-1.0.0.tgz", + "integrity": "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/buildcheck": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz", + "integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==", + "optional": true, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/byline": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/byline/-/byline-5.0.0.tgz", + "integrity": "sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": 
"https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/cliui/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/compress-commons": { + "version": "6.0.2", + "resolved": 
"https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", + "integrity": "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==", + "dependencies": { + "crc-32": "^1.2.0", + "crc32-stream": "^6.0.0", + "is-stream": "^2.0.1", + "normalize-path": "^3.0.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + }, + "node_modules/cpu-features": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", + "integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==", + "hasInstallScript": true, + "optional": true, + "dependencies": { + "buildcheck": "~0.0.6", + "nan": "^2.19.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/crc32-stream": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/crc32-stream/-/crc32-stream-6.0.0.tgz", + "integrity": "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==", + "dependencies": { + "crc-32": "^1.2.0", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/docker-compose": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/docker-compose/-/docker-compose-1.3.0.tgz", + "integrity": "sha512-7Gevk/5eGD50+eMD+XDnFnOrruFkL0kSd7jEG4cjmqweDSUhB7i0g8is/nBdVpl+Bx338SqIB2GLKm32M+Vs6g==", + "dependencies": { + "yaml": "^2.2.2" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/docker-modem": { + "version": "5.0.6", + "resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.6.tgz", + "integrity": "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ==", + "dependencies": { + "debug": "^4.1.1", + "readable-stream": "^3.5.0", + "split-ca": "^1.0.1", + "ssh2": "^1.15.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "node_modules/docker-modem/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": 
"sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/dockerode": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.8.tgz", + "integrity": "sha512-HdPBprWmwfHMHi12AVIFDhXIqIS+EpiOVkZaAZxgML4xf5McqEZjJZtahTPkLDxWOt84ApfWPAH9EoQwOiaAIQ==", + "dependencies": { + "@balena/dockerignore": "^1.0.2", + "@grpc/grpc-js": "^1.11.1", + "@grpc/proto-loader": "^0.7.13", + "docker-modem": "^5.0.6", + "protobufjs": "^7.3.2", + "tar-fs": "~2.1.3", + "uuid": "^10.0.0" + }, + "engines": { + "node": ">= 8.0" + } + }, + "node_modules/dockerode/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/dockerode/node_modules/tar-fs": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.3.tgz", + "integrity": "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg==", + "dependencies": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "node_modules/dockerode/node_modules/tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dependencies": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + 
"integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/fast-fifo": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz", + "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==" + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", + "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-port": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/get-port/-/get-port-7.1.0.tgz", + "integrity": "sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw==", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==" + }, + "node_modules/jackspeak": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", + "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/lazystream": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", + "integrity": "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==", + "dependencies": { + "readable-stream": "^2.0.5" + }, + "engines": { + "node": ">= 0.6.3" + } + }, + "node_modules/lazystream/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/lazystream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/lazystream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash.camelcase": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", + "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==" + }, + "node_modules/long": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", + "integrity": 
"sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==" + }, + "node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==" + }, + "node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==" + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/nan": { + "version": "2.23.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.23.0.tgz", + "integrity": "sha512-1UxuyYGdoQHcGg87Lkqm3FzefucTa0NAiOcuRsDmysep3c1LVCRK2krrUDafMWtjSG04htvAmvg96+SDknOmgQ==", + "optional": true + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==" + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dependencies": { + "lru-cache": "^10.2.0", + 
"minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/pg": { + "version": "8.16.3", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.16.3.tgz", + "integrity": "sha512-enxc1h0jA/aq5oSDMvqyW3q89ra6XIIDZgCX9vkMrnz5DFTw/Ny3Li2lFQ+pt3L6MCgm/5o2o8HW9hiJji+xvw==", + "dependencies": { + "pg-connection-string": "^2.9.1", + "pg-pool": "^3.10.1", + "pg-protocol": "^1.10.3", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.2.7" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.2.7.tgz", + "integrity": "sha512-YgCtzMH0ptvZJslLM1ffsY4EuGaU0cx4XSdXLRFae8bPP4dS5xL1tNB3k2o/N64cHJpwU7dxKli/nZ2lUa5fLg==", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.9.1", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.9.1.tgz", + "integrity": "sha512-nkc6NpDcvPVpZXxrreI/FOtX3XemeLl8E0qFr6F2Lrm/I8WOnaWNhIPK2Z7OHpw7gh5XJThi6j6ppgNoaT1w4w==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.10.1.tgz", + "integrity": "sha512-Tu8jMlcX+9d8+QVzKIvM/uJtp07PKr82IUOYEphaWcoBhIYkoHpLXN3qO59nAI11ripznDsEzEv8nUxBVWajGg==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.10.3.tgz", + "integrity": "sha512-6DIBgBQaTKDJyxnXaLiLR8wBpQQcGWuAESkRBX/t6OwA8YsqP+iVSiond2EDy6Y/dsGk8rh/jtax3js5NeV7JQ==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + 
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/process": { + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", + "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/proper-lockfile": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/proper-lockfile/-/proper-lockfile-4.1.2.tgz", + "integrity": "sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==", + "dependencies": { + "graceful-fs": "^4.2.4", + "retry": "^0.12.0", + "signal-exit": "^3.0.2" + } + }, + "node_modules/proper-lockfile/node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "node_modules/properties-reader": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/properties-reader/-/properties-reader-2.3.0.tgz", + "integrity": "sha512-z597WicA7nDZxK12kZqHr2TcvwNU1GCfA5UwfDY/HDp3hXPoPlb5rlEx9bwGTiJnc0OqbBTkU975jDToth8Gxw==", + "dependencies": { + "mkdirp": "^1.0.4" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "type": "github", + "url": "https://github.com/steveukx/properties?sponsor=1" + } + }, + "node_modules/protobufjs": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-7.5.4.tgz", + "integrity": "sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==", + "hasInstallScript": true, + "dependencies": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/node": ">=13.7.0", + "long": "^5.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/readable-stream": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", + "integrity": 
"sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "dependencies": { + "abort-controller": "^3.0.0", + "buffer": "^6.0.3", + "events": "^3.3.0", + "process": "^0.11.10", + "string_decoder": "^1.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "node_modules/readdir-glob": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/readdir-glob/-/readdir-glob-1.1.3.tgz", + "integrity": "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==", + "dependencies": { + "minimatch": "^5.1.0" + } + }, + "node_modules/readdir-glob/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/split-ca": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz", + "integrity": 
"sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==" + }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "engines": { + "node": ">= 10.x" + } + }, + "node_modules/ssh-remote-port-forward": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/ssh-remote-port-forward/-/ssh-remote-port-forward-1.0.4.tgz", + "integrity": "sha512-x0LV1eVDwjf1gmG7TTnfqIzf+3VPRz7vrNIjX6oYLbeCrf/PeVY6hkT68Mg+q02qXxQhrLjB0jfgvhevoCRmLQ==", + "dependencies": { + "@types/ssh2": "^0.5.48", + "ssh2": "^1.4.0" + } + }, + "node_modules/ssh-remote-port-forward/node_modules/@types/ssh2": { + "version": "0.5.52", + "resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-0.5.52.tgz", + "integrity": "sha512-lbLLlXxdCZOSJMCInKH2+9V/77ET2J6NPQHpFI0kda61Dd1KglJs+fPQBchizmzYSOJBgdTajhPqBO1xxLywvg==", + "dependencies": { + "@types/node": "*", + "@types/ssh2-streams": "*" + } + }, + "node_modules/ssh2": { + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.17.0.tgz", + "integrity": "sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==", + "hasInstallScript": true, + "dependencies": { + "asn1": "^0.2.6", + "bcrypt-pbkdf": "^1.0.2" + }, + "engines": { + "node": ">=10.16.0" + }, + "optionalDependencies": { + "cpu-features": "~0.0.10", + "nan": "^2.23.0" + } + }, + "node_modules/streamx": { + "version": "2.22.1", + "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.22.1.tgz", + "integrity": "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==", + "dependencies": { + "fast-fifo": "^1.3.2", + "text-decoder": "^1.1.0" + }, + "optionalDependencies": { + "bare-events": "^2.2.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + 
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/tar-fs": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-3.1.0.tgz", + "integrity": "sha512-5Mty5y/sOF1YWj1J6GiBodjlDc05CUR8PKXrsnFAiSG0xA+GHeWLovaZPYUDXkH/1iKRf2+M5+OrRgzC7O9b7w==", + "dependencies": { + "pump": "^3.0.0", + "tar-stream": "^3.1.5" + }, + "optionalDependencies": { + "bare-fs": "^4.0.1", + "bare-path": "^3.0.0" + } + }, + "node_modules/tar-stream": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz", + "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==", + "dependencies": { + "b4a": "^1.6.4", + "fast-fifo": "^1.2.0", + "streamx": "^2.15.0" + } + }, + "node_modules/testcontainers": { + "version": "11.5.1", + "resolved": "https://registry.npmjs.org/testcontainers/-/testcontainers-11.5.1.tgz", + "integrity": "sha512-YSSP4lSJB8498zTeu4HYTZYgSky54ozBmIDdC8PFU5inj+vBo5hPpilhcYTgmsqsYjrXOJGV7jl0MWByS7GwuA==", + "dependencies": { + "@balena/dockerignore": "^1.0.2", + "@types/dockerode": "^3.3.42", + "archiver": "^7.0.1", + "async-lock": "^1.4.1", + "byline": "^5.0.0", + "debug": "^4.4.1", + "docker-compose": "^1.2.0", + "dockerode": "^4.0.7", + "get-port": "^7.1.0", + "proper-lockfile": "^4.1.2", + "properties-reader": "^2.3.0", + "ssh-remote-port-forward": "^1.0.4", + "tar-fs": "^3.1.0", + "tmp": "^0.2.4", + "undici": "^7.13.0" + } + }, + "node_modules/text-decoder": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz", + "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==", + "dependencies": { + "b4a": "^1.6.4" + } + }, + "node_modules/tmp": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": 
"sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", + "engines": { + "node": ">=14.14" + } + }, + "node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" + }, + "node_modules/undici": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.16.0.tgz", + "integrity": "sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g==", + "engines": { + "node": ">=20.18.1" + } + }, + "node_modules/undici-types": { + "version": "7.10.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", + "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" + }, + "node_modules/uuid": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz", + "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" 
+ }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yaml": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", + "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + } + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/zip-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/zip-stream/-/zip-stream-6.0.1.tgz", + "integrity": "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==", + "dependencies": { + "archiver-utils": "^5.0.0", + "compress-commons": "^6.0.2", + "readable-stream": "^4.0.0" + }, + "engines": { + "node": ">= 14" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..3842e99 --- /dev/null +++ b/package.json @@ -0,0 +1,10 @@ +{ + "dependencies": { + "@testcontainers/postgresql": "^11.5.1", + "@types/pg": "^8.15.5", + "pg": "^8.16.3" + }, + "devDependencies": { + "@faker-js/faker": "^10.0.0" + } +} diff --git a/schema.sql b/schema.sql deleted file mode 100644 index e4b5980..0000000 --- a/schema.sql +++ /dev/null @@ -1,26 +0,0 @@ -create table author ( - id uuid primary key default gen_random_uuid(), - full_name text not null, - birthday date -); - -create table genre ( - id text primary key -); - -create table book ( - id uuid primary key default gen_random_uuid(), - title text not null, - author_id uuid not null references author(id), - year int not null, - isbn text not null unique, - is_best_seller bool default false, - genre text not null references genre(id) -); - -insert into genre values - ('comedy'), - ('drama'), - ('science fiction'), - ('fantasy'), - ('biography'); diff --git a/src/config/config.service.test.ts b/src/config/config.service.test.ts index 3026ddb..873119c 100644 --- a/src/config/config.service.test.ts +++ b/src/config/config.service.test.ts @@ -18,9 +18,9 @@ codegen: options: `; -Deno.test(function parseYAML() { - ConfigService.fromFile({ - path: `pgc.yaml`, - content: YAML, - }); -}); +// Deno.test(function parseYAML() { +// ConfigService.fromFile({ +// path: `pgc.yaml`, +// content: YAML, +// }); +// }); diff --git a/tests/author.sql b/tests/author.sql index edadd84..24ded62 100644 --- a/tests/author.sql +++ b/tests/author.sql @@ -29,3 +29,6 @@ select $val; -- @name: optional_parameter :one select ?val; + +-- @name: full_author :many +select * from full_author; \ No newline at end of file diff --git a/tests/pgc-typscript-postgres.yaml b/tests/pgc-typscript-postgres.yaml deleted file mode 100644 index 6a1aac2..0000000 --- a/tests/pgc-typscript-postgres.yaml +++ /dev/null @@ -1,14 +0,0 @@ -version: "1" -database: - migrations: - - "./migrations/*.sql" - -queries: - - "queries/*.sql" - -codegen: - language: python - driver: asyncpg - out: ./python/dist_psycopg - options: - package: package.queries # change package to your package name diff --git a/tests/pgc-asyncpg.yaml 
b/tests/python/pgc-asyncpg.yaml similarity index 73% rename from tests/pgc-asyncpg.yaml rename to tests/python/pgc-asyncpg.yaml index 568b6a0..172065a 100644 --- a/tests/pgc-asyncpg.yaml +++ b/tests/python/pgc-asyncpg.yaml @@ -9,6 +9,6 @@ queries: codegen: language: python driver: asyncpg - out: tests/python/dist_asyncpg + out: tests/python/out_asyncpg options: - package: dist_asyncpg + package: out_asyncpg diff --git a/tests/pgc-psycopg.yaml b/tests/python/pgc-psycopg.yaml similarity index 73% rename from tests/pgc-psycopg.yaml rename to tests/python/pgc-psycopg.yaml index cd1b3fa..d16dec7 100644 --- a/tests/pgc-psycopg.yaml +++ b/tests/python/pgc-psycopg.yaml @@ -9,6 +9,6 @@ queries: codegen: language: python driver: psycopg - out: tests/python/dist_psycopg + out: tests/python/out_psycopg options: - package: dist_psycopg + package: out_psycopg diff --git a/tests/python/test_asyncpg.py b/tests/python/test_asyncpg.py index c97e940..c8349ab 100644 --- a/tests/python/test_asyncpg.py +++ b/tests/python/test_asyncpg.py @@ -5,14 +5,15 @@ import faker import pytest import pytest_asyncio -from dist_asyncpg import Queries -from dist_asyncpg.models import Author, Book +from out_asyncpg import Queries +from out_asyncpg.author import FullAuthorRow +from out_asyncpg.models import Author, Book fake = faker.Faker() @pytest_asyncio.fixture -async def populate_db(asyncpg_conn): +async def queries(asyncpg_conn): queries = Queries(asyncpg_conn) for _ in range(2): @@ -34,18 +35,17 @@ async def populate_db(asyncpg_conn): genre="science fiction", ) ) + return queries @pytest.mark.asyncio -async def test_one_row_one_column(asyncpg_conn, populate_db): - queries = Queries(asyncpg_conn) +async def test_one_row_one_column(queries: Queries): author = await queries.author.one_row_one_column() assert isinstance(author, Author) @pytest.mark.asyncio -async def test_one_row_one_column(asyncpg_conn, populate_db): - queries = Queries(asyncpg_conn) +async def test_one_row_one_column(queries: Queries): data = await queries.author.one_row_many_coulmns() assert isinstance(data.author, Author) @@ -54,17 +54,14 @@ async def test_one_row_one_column(asyncpg_conn, populate_db): @pytest.mark.asyncio -async def test_many_rows_one_column(asyncpg_conn, populate_db): - queries = Queries(asyncpg_conn) +async def test_many_rows_one_column(queries: Queries): rows = await queries.author.many_rows_one_column() assert isinstance(rows[0], Author) @pytest.mark.asyncio -async def test_many_rows_many_columns(asyncpg_conn, populate_db): - queries = Queries(asyncpg_conn) - +async def test_many_rows_many_columns(queries: Queries): rows = await queries.author.many_rows_many_coulmns() assert isinstance(rows[0].author, Author) @@ -73,19 +70,20 @@ async def test_many_rows_many_columns(asyncpg_conn, populate_db): @pytest.mark.asyncio -async def test_required_parameter(asyncpg_conn): - queries = Queries(asyncpg_conn) - +async def test_required_parameter(queries: Queries): assert "foo" == await queries.author.required_parameter("foo") @pytest.mark.asyncio -async def test_optional_parameter(asyncpg_conn): - queries = Queries(asyncpg_conn) - +async def test_optional_parameter(queries: Queries): assert (await queries.author.optional_parameter(None)) is None assert (await queries.author.optional_parameter("foo")) == "foo" -# def test_optional_parameter(asyncpg_conn): -# pass +@pytest.mark.asyncio +async def test_query_view(queries: Queries): + full_author = await queries.author.full_author() + assert isinstance(full_author[0], FullAuthorRow) + assert 
isinstance(full_author[0].author, Author) + assert isinstance(full_author[0].books, list) + assert isinstance(full_author[0].books[0], Book) diff --git a/tests/python/test_psycopg.py b/tests/python/test_psycopg.py index 7a92cd3..645d863 100644 --- a/tests/python/test_psycopg.py +++ b/tests/python/test_psycopg.py @@ -4,14 +4,15 @@ import faker import pytest -from dist_psycopg import Queries, init_connection -from dist_psycopg.models import Author, Book +from out_psycopg import Queries, init_connection +from out_psycopg.author import FullAuthorRow +from out_psycopg.models import Author, Book fake = faker.Faker() @pytest.fixture(scope="module") -def populate_db(psycopg_conn): +def queries(psycopg_conn) -> Queries: init_connection(psycopg_conn) queries = Queries(psycopg_conn) @@ -35,15 +36,15 @@ def populate_db(psycopg_conn): ) ) + return queries -def test_one_row_one_column(psycopg_conn, populate_db): - queries = Queries(psycopg_conn) + +def test_one_row_one_column(queries: Queries): author = queries.author.one_row_one_column() assert isinstance(author, Author) -def test_one_row_one_column(psycopg_conn, populate_db): - queries = Queries(psycopg_conn) +def test_one_row_one_column(queries: Queries): data = queries.author.one_row_many_coulmns() assert isinstance(data.author, Author) @@ -51,16 +52,13 @@ def test_one_row_one_column(psycopg_conn, populate_db): assert data.one == 1 -def test_many_rows_one_column(psycopg_conn, populate_db): - queries = Queries(psycopg_conn) +def test_many_rows_one_column(queries: Queries): rows = queries.author.many_rows_one_column() assert isinstance(rows[0], Author) -def test_many_rows_many_columns(psycopg_conn, populate_db): - queries = Queries(psycopg_conn) - +def test_many_rows_many_columns(queries: Queries): rows = queries.author.many_rows_many_coulmns() assert isinstance(rows[0].author, Author) @@ -68,18 +66,18 @@ def test_many_rows_many_columns(psycopg_conn, populate_db): assert rows[0].one == 1 -def test_required_parameter(psycopg_conn): - queries = Queries(psycopg_conn) - +def test_required_parameter(queries: Queries): assert "foo" == queries.author.required_parameter("foo") -def test_optional_parameter(psycopg_conn): - queries = Queries(psycopg_conn) - +def test_optional_parameter(queries: Queries): assert queries.author.optional_parameter(None) is None assert queries.author.optional_parameter("foo") == "foo" -def test_optional_parameter(psycopg_conn): - pass +def test_query_view(queries: Queries): + full_author = queries.author.full_author() + assert isinstance(full_author[0], FullAuthorRow) + assert isinstance(full_author[0].author, Author) + assert isinstance(full_author[0].books, list) + assert isinstance(full_author[0].books[0], Book) diff --git a/tests/schema.sql b/tests/schema.sql index c36ca3d..e7b3b0d 100644 --- a/tests/schema.sql +++ b/tests/schema.sql @@ -25,3 +25,10 @@ insert into genre values ('fantasy'), ('biography') on conflict do nothing; + +create or replace view full_author as ( + select author, array_agg(book) as books + from author + join book on author.id = book.author_id + group by author.id +); \ No newline at end of file diff --git a/tests/typescript/pg.test.ts b/tests/typescript/pg.test.ts new file mode 100644 index 0000000..2ca689a --- /dev/null +++ b/tests/typescript/pg.test.ts @@ -0,0 +1,105 @@ +import * as pg from "pg"; +import { assert, assertEquals, assertInstanceOf } from "jsr:@std/assert"; +import { initClient, Queries } from "./out_pg/queries.ts"; +import { faker } from "@faker-js/faker"; + +import { + 
PostgreSqlContainer, + StartedPostgreSqlContainer, +} from "@testcontainers/postgresql"; +import { randomUUID } from "node:crypto"; +import { Genre } from "./out_pg/models/models.ts"; +import { json } from "node:stream/consumers"; + +let CONTAINER: StartedPostgreSqlContainer | undefined = undefined; + +Deno.test.beforeAll(async () => { + CONTAINER = await new PostgreSqlContainer("postgres:16-alpine") + .start(); + + const client = new pg.Client({ + connectionString: CONTAINER.getConnectionUri(), + }); + + await client.connect(); + await client.query(await Deno.readTextFile("tests/schema.sql")); +}); + +async function getQueries() { + const client = new pg.Client({ + connectionString: CONTAINER!.getConnectionUri(), + }); + await client.connect(); + await initClient(client); + const queries = new Queries(client); + + const author = await queries.author.insert({ + birthday: faker.date.birthdate(), + full_name: faker.book.author(), + id: randomUUID(), + }); + + await queries.book.insert({ + author_id: author!.id, + year: faker.date.anytime().getFullYear(), + genre: Genre.SCIENCE_FICTION, + is_best_seller: false, + isbn: faker.number.int().toString(), + title: faker.book.title(), + id: randomUUID(), + }); + return queries; +} + +Deno.test("test_one_row_one_column", async () => { + const queries = await getQueries(); + const author = await queries.author.one_row_one_column(); + + assert(typeof (author?.fullName) == "string", JSON.stringify(author)); + assertInstanceOf(author.birthday, Date); + + await queries.client.end(); +}); + +Deno.test.afterAll(async () => { + await CONTAINER?.stop(); +}); + +// @pytest.mark.asyncio +// async def test_one_row_one_column(queries: Queries): +// data = await queries.author.one_row_many_coulmns() + +// assert isinstance(data.author, Author) +// assert isinstance(data.book, Book) +// assert data.one == 1 + +// @pytest.mark.asyncio +// async def test_many_rows_one_column(queries: Queries): +// rows = await queries.author.many_rows_one_column() + +// assert isinstance(rows[0], Author) + +// @pytest.mark.asyncio +// async def test_many_rows_many_columns(queries: Queries): +// rows = await queries.author.many_rows_many_coulmns() + +// assert isinstance(rows[0].author, Author) +// assert isinstance(rows[0].book, Book) +// assert rows[0].one == 1 + +// @pytest.mark.asyncio +// async def test_required_parameter(queries: Queries): +// assert "foo" == await queries.author.required_parameter("foo") + +// @pytest.mark.asyncio +// async def test_optional_parameter(queries: Queries): +// assert (await queries.author.optional_parameter(None)) is None +// assert (await queries.author.optional_parameter("foo")) == "foo" + +// @pytest.mark.asyncio +// async def test_query_view(queries: Queries): +// full_author = await queries.author.full_author() +// assert isinstance(full_author[0], FullAuthorRow) +// assert isinstance(full_author[0].author, Author) +// assert isinstance(full_author[0].books, list) +// assert isinstance(full_author[0].books[0], Book) diff --git a/tests/typescript/pgc-pg.yaml b/tests/typescript/pgc-pg.yaml new file mode 100644 index 0000000..a2abe8d --- /dev/null +++ b/tests/typescript/pgc-pg.yaml @@ -0,0 +1,18 @@ +version: "1" +database: + migrations: + - tests/schema.sql + +queries: + - tests/author.sql + - tests/book.sql + +codegen: + language: typescript + driver: postgres + out: tests/typescript/out_pg + enums: + - public.genre + options: + deno: true + package: out_pg From 9b4942ed6d675ac69c26d064be36247e2bf6ac8d Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Tom=C3=A1s=20Vallotton?= Date: Mon, 15 Sep 2025 16:51:19 -0300 Subject: [PATCH 10/10] add typescript full support with type overrides and record types --- codegen/src/ir/type.rs | 149 ++++++-------- codegen/src/main.rs | 2 - codegen/src/presentation/environment.rs | 5 +- .../python/templates/asyncpg-query.j2 | 3 +- .../python/templates/psycopg-query.j2 | 6 +- .../src/presentation/templating_service.rs | 15 -- .../src/presentation/type_mapping_service.rs | 35 ++-- .../typescript/templates/pg/query.j2 | 190 +++++++++++------- .../typescript/type_map_service.rs | 4 +- codegen/src/request.rs | 53 +---- codegen/src/utils.rs | 4 - tests/author.sql | 6 +- tests/schema.sql | 2 +- tests/typescript/author.sql | 34 ++++ tests/typescript/book.sql | 30 +++ tests/typescript/pg.test.ts | 153 ++++++++------ tests/typescript/pgc-pg.yaml | 5 +- 17 files changed, 377 insertions(+), 319 deletions(-) create mode 100644 tests/typescript/author.sql create mode 100644 tests/typescript/book.sql diff --git a/codegen/src/ir/type.rs b/codegen/src/ir/type.rs index 8e60e40..0ee6410 100644 --- a/codegen/src/ir/type.rs +++ b/codegen/src/ir/type.rs @@ -6,67 +6,85 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Debug, Serialize, Deserialize)] #[serde(tag = "variant", content = "content")] pub enum Type { - // A type not matching any of these - Other { - schema: Arc, - name: Arc, - }, - - // Uncategorized + Any, + AnyArray, + AnyCompatible, + AnyCompatibleArray, + AnyCompatibleMultiRange, + AnyCompatibleNonArray, + AnycompatibleRange, + AnyElement, + AnyEnum, + AnyMultiRange, + AnyNonArray, + AnyRange, + Bit, + BitVarying, Bool, - Uuid, - - // Text - Text, - VarChar, + Box, BpChar, Bytea, - - // Numeric types + Cid, + Cidr, + Circle, + Cstring, + Date, + DateMultiRange, + DateRange, + DateTz, + Decimal, + Float4, + Float8, + Inet, Int2, Int4, + Int4MultiRange, + Int4Range, Int8, + Int8MultiRange, + Int8Range, + Interval, + Json, + Jsonb, + JsonPath, + Line, + LSeg, + MacAddr, + MacAddr8, + Money, + Numeric, + NumMultiRange, + NumRange, + Path, + Point, + Polygon, + Range, + Record, Serial2, Serial4, Serial8, - Decimal, - Numeric, - Money, - Float4, - Float8, - - // Time types - Timestamp, - Date, + Text, Time, + Timestamp, TimestampTz, - DateTz, TimeTz, - Range, - Interval, - - // Range types - Int4Range, - Int8Range, - NumRange, - TsRange, - TsTzRange, - DateRange, - DateMultiRange, - Int4MultiRange, - Int8MultiRange, - NumMultiRange, TsMultiRange, + TsQuery, + TsRange, TsTzMultiRange, + TsTzRange, + TsVector, + Unknown, + Uuid, + VarChar, + Void, + Xml, - // Geometric types - Point, - Line, - LSeg, - Box, - Path, - Polygon, - Circle, + // A type not matching any of these + Other { + schema: Arc, + name: Arc, + }, // Generic types Nullable(Arc), @@ -80,45 +98,6 @@ pub enum Type { module_path: Arc<[Arc]>, name: Arc, }, - - // Networking types - Cid, - Cidr, - Inet, - MacAddr, - MacAddr8, - - // Bit string types - Bit, - BitVarying, - - // Text Seach types - TsVector, - TsQuery, - - // Encoding types - Xml, - Json, - Jsonb, - JsonPath, - - // PseudoTypes - Any, - AnyArray, - AnyElement, - AnyNonArray, - AnyEnum, - AnyRange, - AnyMultiRange, - AnyCompatible, - AnyCompatibleArray, - AnyCompatibleMultiRange, - AnyCompatibleNonArray, - AnycompatibleRange, - Cstring, - Record, - Void, - Unknown, } impl Type { diff --git a/codegen/src/main.rs b/codegen/src/main.rs index 6c8f1cc..888bd65 100644 --- a/codegen/src/main.rs +++ b/codegen/src/main.rs @@ -9,8 +9,6 @@ use 
std::sync::atomic::Ordering::Relaxed; use std::{slice, sync::atomic::AtomicU64}; pub mod error; -#[cfg(test)] -pub mod faker; pub mod ir; pub mod mock; pub mod presentation; diff --git a/codegen/src/presentation/environment.rs b/codegen/src/presentation/environment.rs index 068e659..e0d924d 100644 --- a/codegen/src/presentation/environment.rs +++ b/codegen/src/presentation/environment.rs @@ -146,7 +146,10 @@ pub fn starts_with(text: &str, pattern: &str) -> bool { } pub fn to_camel_case(s: &str) -> String { - s.to_lower_camel_case() + s.split('.') + .map(|s| s.to_lower_camel_case()) + .collect::>() + .join(".") } pub fn to_pascal_case(s: &str) -> String { diff --git a/codegen/src/presentation/python/templates/asyncpg-query.j2 b/codegen/src/presentation/python/templates/asyncpg-query.j2 index 9b5f3d2..f1322b3 100644 --- a/codegen/src/presentation/python/templates/asyncpg-query.j2 +++ b/codegen/src/presentation/python/templates/asyncpg-query.j2 @@ -1,7 +1,7 @@ # This file was automatically generated by pgc {%- for type in used_types %} -{% for import in (type | imports) %} +{%- for import in (type | imports) %} {%- set imported_asyncpg = import == "import asyncpg" %} {%- if import != ""%} {{import}} @@ -17,7 +17,6 @@ from {{ir.request.config.codegen.options.package}} import models {%- for subnamespace in query_namespace.subnamespaces %} from . import {{subnamespace}} {%- endfor %} - {%- for method in query_namespace.methods %} {{ method.query.name | to_screaming_snake_case }} = """ diff --git a/codegen/src/presentation/python/templates/psycopg-query.j2 b/codegen/src/presentation/python/templates/psycopg-query.j2 index aa828cf..8b7323b 100644 --- a/codegen/src/presentation/python/templates/psycopg-query.j2 +++ b/codegen/src/presentation/python/templates/psycopg-query.j2 @@ -1,7 +1,7 @@ # This file was automatically generated by pgc {%- for type in used_types %} -{% for import in (type | imports) %} +{%- for import in (type | imports) %} {%- set imported_psycopg = import == "import psycopg" %} {%- if import != ""%} {{import}} @@ -238,7 +238,7 @@ def init_connection(conn: psycopg.Connection): array_oid=row.array_oid, field_names=row.field_names, field_types=row.field_types - ) + ) for row in type_info_rows } @@ -251,5 +251,5 @@ def init_connection(conn: psycopg.Connection): ) {%- endfor %} {%- endfor %} - + {% endif -%} diff --git a/codegen/src/presentation/templating_service.rs b/codegen/src/presentation/templating_service.rs index b69faf1..95e6808 100644 --- a/codegen/src/presentation/templating_service.rs +++ b/codegen/src/presentation/templating_service.rs @@ -153,18 +153,3 @@ impl TemplatingService { Ok(()) } } - -#[cfg(test)] -mod test { - use std::sync::Arc; - - use fake::{Fake, Faker}; - - use crate::{ir::IrService, request::Request}; - #[test] - fn file_has_name() { - let mut request: Request = Faker.fake(); - request.queries = Arc::default(); - let ir = IrService::new(request).unwrap().build(); - } -} diff --git a/codegen/src/presentation/type_mapping_service.rs b/codegen/src/presentation/type_mapping_service.rs index d7a16db..0eb7537 100644 --- a/codegen/src/presentation/type_mapping_service.rs +++ b/codegen/src/presentation/type_mapping_service.rs @@ -31,25 +31,34 @@ impl OverriddenTypeMapService { overrides: ir.request.config.codegen.types.clone(), } } + + fn get_from_ident(&self, module: Vec, r#type: &Type, ident: &str) -> LanguageType { + let Some(type_config) = self.overrides.get(ident) else { + return self.service.get(module, r#type); + }; + + return LanguageType { + name: None, 
+ annotation: type_config.annotation.clone(), + constructor: None, + import: type_config.import.iter().cloned().collect(), + }; + } } impl TypeMapService for OverriddenTypeMapService { fn get(&self, module: Vec, r#type: &Type) -> LanguageType { - return self.service.get(module, r#type); - // let Ok(ty) = Type::NAMES.binary_search_by(|(_, _, ty)| ty.cmp(r#type)) else { - // }; - // let (_, name, _) = Type::NAMES[ty]; + if let Type::Other { schema, name } = r#type { + return self.get_from_ident(module, r#type, &format!("{}.{}", schema, name)); + } + + let Ok(index) = Type::NAMES.binary_search_by(|(_, _, ty)| ty.cmp(r#type)) else { + return self.service.get(module, r#type); + }; - // let Some(type_config) = self.overrides.get(name) else { - // return self.service.get(module, r#type); - // }; + let (_, name, _) = Type::NAMES[index]; - // return LanguageType { - // name: None, - // annotation: type_config.annotation.clone(), - // import: type_config.import.clone(), - // module: None, - // }; + self.get_from_ident(module, r#type, name) } } diff --git a/codegen/src/presentation/typescript/templates/pg/query.j2 b/codegen/src/presentation/typescript/templates/pg/query.j2 index b49cdbe..91a5462 100644 --- a/codegen/src/presentation/typescript/templates/pg/query.j2 +++ b/codegen/src/presentation/typescript/templates/pg/query.j2 @@ -1,28 +1,27 @@ // This file was automatically generated by pgc // run `pgc build` to regenerate it {%- for type in used_types %} -{% for import in (type | imports) %} +{%- for import in (type | imports) %} {%- if import != ""%} {{import}} {%- endif %} {%- endfor %} {%- endfor %} {%- set options = ir.request.config.codegen.options %} -{% if options.deno %} +{%- if options.deno %} import * as pg from "npm:pg"; -{% else %} +{%- else %} import * as pg from "pg"; -{% endif %} +{%- endif %} import * as models from "./models/models.ts"; {%- for subnamespace in query_namespace.subnamespaces %} -{% set is_directory = (query_namespace.subnamespaces[subnamespace].subnamespaces | length) != 0 %} -{% if is_directory %} +{%- set is_directory = (query_namespace.subnamespaces[subnamespace].subnamespaces | length) != 0 %} +{%- if is_directory %} import * as {{subnamespace}} from "./{{subnamespace}}/queries.ts" -{% else %} +{%- else %} import * as {{subnamespace}} from "./{{subnamespace}}.ts" -{% endif %} +{%- endif %} {%- endfor %} - {%- for method in query_namespace.methods %} const {{ method.query.name | to_screaming_snake_case }} = ` @@ -34,20 +33,54 @@ const {{ method.query.name | to_screaming_snake_case }} = ` {%- if method.output_model != None %} export interface {{method.output_model.type | name | to_pascal_case }} { {%- for field, type in method.output_model.fields | items %} - {{field}}: {{ type | annotation }}; + {{field | to_camel_case }}: {{ type | annotation }}; {%- endfor %} } {% endif %} + {%- for _, input_model in method.input_models | items %} export interface {{ input_model.type | name }} { {%- for field, type in input_model.fields | items %} - {{field}}: {{type | annotation}}; + {%- if type | is_nullable %} + {{field | to_camel_case }}?: {{type | annotation}}; + {%- else %} + {{field | to_camel_case }}: {{type | annotation}}; + {%- endif %} {%- endfor %} } {% endfor %} + +{%- if method.arguments | length > 0 %} +export interface {{ method.query.name | to_pascal_case }}Parameter { + {%- for argument, type in method.arguments | items %} + {%- if type | is_nullable %} + {{argument}}?: {{type | annotation}}; + {%- else %} + {{argument}}: {{type | annotation}}; + {%- endif %} 
+ {%- endfor %} +} +{% endif %} + {%- endfor %} +{%- macro METHOD_ARGUMENTS(method) -%} + {%- if method.arguments | length > 0 -%} + { {% for argument, type in method.arguments | items -%} + {{argument}}{%- if not loop.last %}, {% endif %} + {%- endfor %} }: {{method.query.name | to_pascal_case }}Parameter + {%- endif %} +{%- endmacro -%} + +{%- macro QUERY_PARAMETERS(method) -%} +[ + {%- for parameter in method.query.parameters -%} + {{parameter.name | to_camel_case }} {%- if not loop.last %}, {% endif %} + {%- endfor -%} +] +{%- endmacro %} + export class {{ query_namespace.name | to_pascal_case }}Queries { {%- for subnamespace in query_namespace.subnamespaces %} {{subnamespace}} : {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries; @@ -58,8 +91,8 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { this.{{subnamespace}} = new {{subnamespace}}.{{subnamespace | to_pascal_case }}Queries(client) {%- endfor %} } - {%- for method in query_namespace.methods%} + {{""}} {%- if method.query.annotations.not_null_result -%} {% set HANDLE_NONE -%} if (!rows[0]) { @@ -73,37 +106,19 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { {%- endif %} {%- if method.query.command == 'one' %} {%- if method.query.output | length == 1 %} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type | annotation}} {%- if not loop.last %}, {% endif %} - {%- endfor -%} - ) { + async {{ method.query.name }}({{METHOD_ARGUMENTS(method)}}) { const { rows } = await this.client.query( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}}, - {%- endfor -%} - ] - ) + {{method.query.name | to_screaming_snake_case }}, {{QUERY_PARAMETERS(method)}} + ); {{HANDLE_NONE}} return rows[0]["{{method.query.output[0].name}}"] as {{method.output_type | annotation}}{{OR_NONE}}; } {%- else %} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type | annotation}}{%- if not loop.last %}, {% endif %} - {%- endfor -%} - ) { + async {{ method.query.name }}({{METHOD_ARGUMENTS(method)}}) { const { rows } = await this.client.query( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}} {%- if not loop.last %}, {% endif %} - {%- endfor -%} - ] - ) + {{method.query.name | to_screaming_snake_case }}, {{QUERY_PARAMETERS(method)}} + ); {{HANDLE_NONE}} - - return { {%- for column_name, column_type in method.output_columns | items %} ["{{column_name | to_camel_case }}"]: rows[0]["{{column_name}}"], @@ -114,49 +129,28 @@ export class {{ query_namespace.name | to_pascal_case }}Queries { {%- endif %} {%- elif method.query.command == 'many' %} {%- if method.query.output | length == 1 %} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type | annotation}} {% if not loop.last %}, {% endif %} - {%- endfor -%} - ) { + async {{ method.query.name }}({{METHOD_ARGUMENTS(method)}}) { const { rows } = await this.client.query( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}}{%- if not loop.last %}, {% endif %} - {%- endfor -%} - ] - ) - return rows as Array<{{method.output_type | annotation }}>; + {{method.query.name | to_screaming_snake_case }}, {{QUERY_PARAMETERS(method)}} + ); + return rows.map(row => row["{{method.query.output[0].name}}"]) as 
Array<{{method.output_type | annotation }}>; } {%- else%} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type | annotation}} {% if not loop.last %}, {% endif %} - {%- endfor -%} - ) { + async {{ method.query.name }}({{METHOD_ARGUMENTS(method)}}) { const { rows } = await this.client.query( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}} {%- if not loop.last %}, {% endif %} - {%- endfor -%} - ] - ) - return rows as Array<{{method.output_type | annotation}}>; + {{method.query.name | to_screaming_snake_case }}, {{QUERY_PARAMETERS(method)}}); + return rows.map((row) => ({ + {%- for column_name, column_type in method.output_columns | items %} + ["{{column_name | to_camel_case }}"]: row["{{column_name}}"], + {%- endfor %} + })) as Array<{{method.output_type | annotation}}>; } {%- endif %} {%- elif method.query.command == 'exec' %} - async {{method.query.name}}( - {%- for argument, type in method.arguments | items -%} - {{argument}}: {{type | annotation}}{%- if not loop.last %}, {% endif %} - {%- endfor -%} - ): {{method.output_type | annotation}}{{OR_NONE}} { + async {{ method.query.name }}({{METHOD_ARGUMENTS(method)}}): {{method.output_type | annotation}}{{OR_NONE}} { await this.client.query( - {{method.query.name | to_screaming_snake_case }}, [ - {%- for parameter in method.query.parameters -%} - {{parameter.name}}, {%- if not loop.last %}, {% endif %} - {%- endfor -%} - ] - ) + {{method.query.name | to_screaming_snake_case }}, {{QUERY_PARAMETERS(method)}} + ); } {%- endif %} {%- endfor %} @@ -178,7 +172,7 @@ select i.schemaname, t.typname as name, t.oid as oid, - t.typarray as array_oid, + t.typarray as "arrayOid", t.oid::regtype::text as regtype, coalesce(a.fnames, '{}')::text[] as "fieldNames", coalesce(a.ftypes, '{}') as "fieldTypes" @@ -276,8 +270,49 @@ function parsePgRowToCells(row: string): string[] { return splitTopLevel(inner, ","); } +function parsePostgresArray(input: string): string[] { + if (!input.startsWith("{") || !input.endsWith("}")) { + throw new Error("Invalid Postgres array format"); + } + + const result: string[] = []; + let i = 1; // skip opening '{' + let current = ""; + let inQuotes = false; + let escaped = false; + + while (i < input.length - 1) { // skip closing '}' + const char = input[i]; + + if (escaped) { + // Postgres uses backslash as escape inside quoted strings + current += char; + escaped = false; + } else if (char === "\\") { + escaped = true; + } else if (char === '"') { + inQuotes = !inQuotes; + } else if (char === "," && !inQuotes) { + result.push(current); + current = ""; + } else { + current += char; + } + + i++; + } + + // push last element if any + if (current.length > 0 || input[input.length - 2] === ",") { + result.push(current); + } + + return result; +} + interface TypeInfo { oid: number; + arrayOid: number; fieldNames: string[]; fieldTypes: number[]; } @@ -289,7 +324,7 @@ function toCamelCase(identifier: string) { {%- endif %} -function parser(client: pg.Client, typeInfo: TypeInfo) { +function recordParser(client: pg.Client, typeInfo: TypeInfo) { return (record: string) => { const output: Record = {} parsePgRowToCells(record).forEach((value, i) => { @@ -306,6 +341,12 @@ function parser(client: pg.Client, typeInfo: TypeInfo) { } } +function arrayParser(client: pg.Client, typeInfo: TypeInfo) { + return (arrayLiteral: string) => { + return parsePostgresArray(arrayLiteral).map(recordParser(client, typeInfo)) + } +} 
+ export async function initClient(client: pg.Client) { const { rows } = await client.query(GET_TYPE_INFO, [ JSON.stringify([ @@ -318,7 +359,8 @@ export async function initClient(client: pg.Client) { ]) for (const typeInfo of rows) { - client.setTypeParser(typeInfo.oid, parser(client, typeInfo)); + client.setTypeParser(typeInfo.oid, recordParser(client, typeInfo)); + client.setTypeParser(typeInfo.arrayOid, arrayParser(client, typeInfo)); } } diff --git a/codegen/src/presentation/typescript/type_map_service.rs b/codegen/src/presentation/typescript/type_map_service.rs index 8c5a816..f985801 100644 --- a/codegen/src/presentation/typescript/type_map_service.rs +++ b/codegen/src/presentation/typescript/type_map_service.rs @@ -3,7 +3,7 @@ use std::sync::Arc; use crate::{ ir::Type, presentation::type_mapping_service::{LanguageType, TypeMapService}, - utils::{to_camel_case, to_pascal_case}, + utils::to_pascal_case, }; #[derive(Clone, Copy)] @@ -15,7 +15,7 @@ impl TypeMapService for TypescriptTypeMapService { Type::Any | Type::AnyCompatibleNonArray | Type::AnyCompatible => { LanguageType::annotation("any") } - Type::Int8 => LanguageType::annotation("bigint"), + Type::Int8 => LanguageType::annotation("string"), Type::AnyArray | Type::AnyCompatibleArray => LanguageType::annotation("Array"), Type::Json => LanguageType::annotation("any"), Type::UserDefined { module_path, name } => { diff --git a/codegen/src/request.rs b/codegen/src/request.rs index e205246..de46b59 100644 --- a/codegen/src/request.rs +++ b/codegen/src/request.rs @@ -5,173 +5,122 @@ use serde::Deserialize; use serde::Serialize; use serde_json::Value; -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Request { pub catalog: Catalog, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub queries: Arc<[Query]>, pub config: Config, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Catalog { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub schemas: Arc<[Schema]>, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Schema { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub enums: Arc<[Enum]>, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub records: Arc<[Record]>, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Enum { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcStrFaker"))] pub values: Arc<[Arc]>, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Record { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub kind: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub columns: Arc<[Column]>, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Column { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, #[serde(rename = "type")] pub type_field: ColumnType, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub default: Option>, pub is_unique: bool, pub is_nullable: bool, pub 
is_foreign_key: bool, pub is_primary_key: bool, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub foreign_table_name: Option>, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub foreign_table_schema: Option>, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct ColumnType { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub display: Arc, pub is_array: bool, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub schema_name: Arc, pub is_composite: bool, pub array_dimensions: i64, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Query { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub query: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub command: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub path: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub annotations: Arc, Annotation>>, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub output: Arc<[OutputColumn]>, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub parameters: Arc<[Parameter]>, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Annotation { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub value: Option>, pub line: i64, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct OutputColumn { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, #[serde(rename = "type")] pub type_: OutputType, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct OutputType { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub schema: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, pub id: i64, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Parameter { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub name: Arc, #[serde(rename = "type")] pub type_: OutputType, pub not_null: bool, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Config { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub version: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcStrFaker"))] pub queries: Arc<[Arc]>, pub codegen: Codegen, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Codegen { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub out: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub language: Arc, - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub driver: Arc, #[serde(default)] - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub types: Arc, TypeConfig>>, pub options: Value, } -#[cfg_attr(test, derive(fake::Dummy))] #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct TypeConfig { - #[cfg_attr(test, dummy(faker = "crate::faker::ArcFaker"))] pub annotation: Arc, - #[cfg_attr(test, dummy(faker = 
"crate::faker::ArcStrFaker"))] + #[serde(default)] pub import: Arc<[Arc]>, } diff --git a/codegen/src/utils.rs b/codegen/src/utils.rs index aced155..a101a82 100644 --- a/codegen/src/utils.rs +++ b/codegen/src/utils.rs @@ -5,10 +5,6 @@ use minijinja::Environment; use regex::bytes::Regex; use serde::Serialize; -pub fn to_camel_case(s: &str) -> String { - s.to_lower_camel_case() -} - pub fn to_pascal_case(s: &str) -> String { s.to_upper_camel_case() } diff --git a/tests/author.sql b/tests/author.sql index 24ded62..c9e0ef7 100644 --- a/tests/author.sql +++ b/tests/author.sql @@ -31,4 +31,8 @@ select $val; select ?val; -- @name: full_author :many -select * from full_author; \ No newline at end of file +select * from full_author; + + +-- @name: get_json :one +select '{}'::json; diff --git a/tests/schema.sql b/tests/schema.sql index e7b3b0d..6abb0b3 100644 --- a/tests/schema.sql +++ b/tests/schema.sql @@ -31,4 +31,4 @@ create or replace view full_author as ( from author join book on author.id = book.author_id group by author.id -); \ No newline at end of file +); diff --git a/tests/typescript/author.sql b/tests/typescript/author.sql new file mode 100644 index 0000000..f64127c --- /dev/null +++ b/tests/typescript/author.sql @@ -0,0 +1,34 @@ + +-- @name: oneRowOneColumn :one +select author from author; + +-- @name: oneRowManyColumns :one +select author, book, 1 as one from author +join book on book.author_id = author.id; + +-- @name: manyRowsOneColumn :many +select author from author; + +-- @name: manyRowsManyColumns :many +select 1 as one, author, book from author +join book on book.author_id = author.id; + + +-- @name: insert :one +insert into author (id, full_name, birthday) +values ( + $(author.id), + $(author.full_name), + $(author.birthday) +) +returning author; + + +-- @name: requiredParameter :one +select $val; + +-- @name: optionalParameter :one +select ?val; + +-- @name: fullAuthor :many +select * from full_author; diff --git a/tests/typescript/book.sql b/tests/typescript/book.sql new file mode 100644 index 0000000..7cdedc2 --- /dev/null +++ b/tests/typescript/book.sql @@ -0,0 +1,30 @@ + +-- @name: one_row_one_column :one +select author from author; + +-- @name: one_row_many_coulmns :one +select author, book, 1 as one from author +join book on book.author_id = author.id; + +-- @name: many_rows_one_column :many +select author from author; + +-- @name: many_rows_many_coulmns :many +select author, book, 1 as one from author +join book on book.author_id = author.id; + + +-- @name: insert :one +insert into book ( + id, title, author_id, year, isbn, is_best_seller, genre +) values ( + $(book.id), + $(book.title), + $(book.author_id), + $(book.year), + $(book.isbn), + $(book.is_best_seller), + $(book.genre) +) +returning book; + diff --git a/tests/typescript/pg.test.ts b/tests/typescript/pg.test.ts index 2ca689a..058ccca 100644 --- a/tests/typescript/pg.test.ts +++ b/tests/typescript/pg.test.ts @@ -1,7 +1,7 @@ +import { faker } from "@faker-js/faker"; +import { assert, assertEquals, assertExists, assertInstanceOf } from "jsr:@std/assert"; import * as pg from "pg"; -import { assert, assertEquals, assertInstanceOf } from "jsr:@std/assert"; import { initClient, Queries } from "./out_pg/queries.ts"; -import { faker } from "@faker-js/faker"; import { PostgreSqlContainer, @@ -9,10 +9,9 @@ import { } from "@testcontainers/postgresql"; import { randomUUID } from "node:crypto"; import { Genre } from "./out_pg/models/models.ts"; -import { json } from "node:stream/consumers"; let CONTAINER: 
StartedPostgreSqlContainer | undefined = undefined; - +let queries: Queries; Deno.test.beforeAll(async () => { CONTAINER = await new PostgreSqlContainer("postgres:16-alpine") .start(); @@ -23,83 +22,113 @@ Deno.test.beforeAll(async () => { await client.connect(); await client.query(await Deno.readTextFile("tests/schema.sql")); -}); - -async function getQueries() { - const client = new pg.Client({ - connectionString: CONTAINER!.getConnectionUri(), - }); - await client.connect(); await initClient(client); - const queries = new Queries(client); + queries = new Queries(client); const author = await queries.author.insert({ - birthday: faker.date.birthdate(), - full_name: faker.book.author(), - id: randomUUID(), + author: { + id: randomUUID(), + birthday: faker.date.birthdate(), + fullName: faker.book.author(), + } }); - await queries.book.insert({ - author_id: author!.id, - year: faker.date.anytime().getFullYear(), - genre: Genre.SCIENCE_FICTION, - is_best_seller: false, - isbn: faker.number.int().toString(), - title: faker.book.title(), - id: randomUUID(), - }); - return queries; -} + const book = { + authorId: author!.id, + year: faker.date.anytime().getFullYear(), + genre: Genre.SCIENCE_FICTION, + isBestSeller: false, + isbn: faker.number.int().toString(), + title: faker.book.title(), + id: randomUUID(), + } + await queries.book.insert({ book }); + +}); + +// async function getQueries() { +// const client = new pg.Client({ +// connectionString: CONTAINER!.getConnectionUri(), +// }); +// await client.connect(); +// await initClient(client); +// const queries = new Queries(client); + +// await queries.book.insert({ book }); +// return queries; +// } Deno.test("test_one_row_one_column", async () => { - const queries = await getQueries(); - const author = await queries.author.one_row_one_column(); + // const queries = await getQueries(); + const author = await queries.author.oneRowOneColumn(); assert(typeof (author?.fullName) == "string", JSON.stringify(author)); assertInstanceOf(author.birthday, Date); - await queries.client.end(); + // await queries.client.end(); }); -Deno.test.afterAll(async () => { - await CONTAINER?.stop(); +Deno.test("test_one_row_many_columns", async () => { + // const queries = await getQueries(); + const data = await queries.author.oneRowManyColumns(); + assertInstanceOf(data?.author.birthday, Date); + assertEquals(data.one, 1); + // await queries.client.end(); +}) + + +Deno.test("test_many_rows_one_column", async () => { + // const queries = await getQueries(); + const data = await queries.author.manyRowsOneColumn(); + assertInstanceOf(data[0].birthday, Date, `${JSON.stringify(data)}`); + // await queries.client.end(); +}) + + +Deno.test("test_many_rows_many_columns", async () => { + // const queries = await getQueries(); + const data = await queries.author.manyRowsManyColumns(); + assertInstanceOf(data[0].author.birthday, Date); + assert(!!data[0].book.id); + assertEquals(data[0].one, 1); + // await queries.client.end(); +}) + + +Deno.test("test_required_parameter", async () => { + // const queries = await getQueries(); + const foo = await queries.author.requiredParameter({ val: "foo" }); + assertEquals(foo, "foo"); + // await queries.client.end(); +}) + + + +Deno.test("test_optional_parameter", async () => { + // const queries = await getQueries(); + const nullValue = await queries.author.optionalParameter({}); + assertEquals(nullValue, null); + // await queries.client.end(); +}) + + +Deno.test("test_query_view", async () => { + // const queries = await getQueries(); 
+ const fullAuthor = await queries.author.fullAuthor(); + assertExists(fullAuthor[0].author); + assertInstanceOf(fullAuthor[0].books, Array, `${fullAuthor[0].books}`); + assertExists(fullAuthor[0].books[0].id); + // await queries.client.end(); }); -// @pytest.mark.asyncio -// async def test_one_row_one_column(queries: Queries): -// data = await queries.author.one_row_many_coulmns() -// assert isinstance(data.author, Author) -// assert isinstance(data.book, Book) -// assert data.one == 1 -// @pytest.mark.asyncio -// async def test_many_rows_one_column(queries: Queries): -// rows = await queries.author.many_rows_one_column() -// assert isinstance(rows[0], Author) -// @pytest.mark.asyncio -// async def test_many_rows_many_columns(queries: Queries): -// rows = await queries.author.many_rows_many_coulmns() -// assert isinstance(rows[0].author, Author) -// assert isinstance(rows[0].book, Book) -// assert rows[0].one == 1 -// @pytest.mark.asyncio -// async def test_required_parameter(queries: Queries): -// assert "foo" == await queries.author.required_parameter("foo") - -// @pytest.mark.asyncio -// async def test_optional_parameter(queries: Queries): -// assert (await queries.author.optional_parameter(None)) is None -// assert (await queries.author.optional_parameter("foo")) == "foo" +Deno.test.afterAll(async () => { + await queries.client.end(); + await CONTAINER?.stop(); -// @pytest.mark.asyncio -// async def test_query_view(queries: Queries): -// full_author = await queries.author.full_author() -// assert isinstance(full_author[0], FullAuthorRow) -// assert isinstance(full_author[0].author, Author) -// assert isinstance(full_author[0].books, list) -// assert isinstance(full_author[0].books[0], Book) +}); diff --git a/tests/typescript/pgc-pg.yaml b/tests/typescript/pgc-pg.yaml index a2abe8d..b0b5377 100644 --- a/tests/typescript/pgc-pg.yaml +++ b/tests/typescript/pgc-pg.yaml @@ -4,8 +4,8 @@ database: - tests/schema.sql queries: - - tests/author.sql - - tests/book.sql + - tests/typescript/author.sql + - tests/typescript/book.sql codegen: language: typescript @@ -13,6 +13,7 @@ codegen: out: tests/typescript/out_pg enums: - public.genre + options: deno: true package: out_pg
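
Note on the type overrides added in this patch: codegen.types (TypeConfig in codegen/src/request.rs, consumed by OverriddenTypeMapService in codegen/src/presentation/type_mapping_service.rs) maps a type identifier to an annotation plus optional import lines. Built-in types resolve through their canonical name in Type::NAMES, and user-defined types are keyed as "schema.name". The excerpt below is an illustrative sketch only: the "numeric" key, the Big annotation, and the big.js import are hypothetical values, not part of this patch; only the types, annotation, and import field names come from the source.

    # hypothetical pgc.yaml excerpt showing a type override (not shipped in this patch)
    codegen:
      language: typescript
      driver: postgres
      out: tests/typescript/out_pg
      types:
        numeric:                    # assumed canonical type name; user-defined types use "schema.name"
          annotation: Big           # annotation emitted in generated interfaces and signatures
          import:
            - import Big from "npm:big.js"   # import lines are emitted verbatim by query.j2

When an override matches, OverriddenTypeMapService returns the configured annotation and imports instead of the default mapping from TypescriptTypeMapService.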