diff --git a/Cargo.lock b/Cargo.lock index d98afc3..49c1737 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -109,6 +109,12 @@ dependencies = [ "memchr", ] +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + [[package]] name = "amq-protocol" version = "8.3.1" @@ -446,6 +452,15 @@ dependencies = [ "syn", ] +[[package]] +name = "atoi" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28d99ec8bfea296261ca1af174f24225171fea9664ba9003cbebee704810528" +dependencies = [ + "num-traits", +] + [[package]] name = "atomic-waker" version = "1.1.2" @@ -1008,6 +1023,9 @@ name = "bitflags" version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +dependencies = [ + "serde_core", +] [[package]] name = "block-buffer" @@ -1056,6 +1074,12 @@ version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + [[package]] name = "bytes" version = "1.11.0" @@ -1320,6 +1344,21 @@ dependencies = [ "libc", ] +[[package]] +name = "crc" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + [[package]] name = "crc32fast" version = "1.5.0" @@ -1359,6 +1398,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "crossbeam-queue" +version = "0.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.21" @@ -1509,6 +1557,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", + "const-oid", "crypto-common", "subtle", ] @@ -1556,6 +1605,12 @@ dependencies = [ "syn", ] +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + [[package]] name = "dunce" version = "1.0.5" @@ -1601,6 +1656,9 @@ name = "either" version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] [[package]] name = "elliptic-curve" @@ -1638,6 +1696,17 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "etcetera" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" +dependencies = [ + "cfg-if", + "home", + "windows-sys 0.48.0", +] + [[package]] name = "event-listener" version = "5.4.1" @@ -1737,6 +1806,12 @@ version = 
"1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "form_urlencoded" version = "1.2.2" @@ -1813,6 +1888,17 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-intrusive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d930c203dd0b6ff06e0201a4a2fe9149b43c684fd4420555b26d21b1a02956f" +dependencies = [ + "futures-core", + "lock_api", + "parking_lot", +] + [[package]] name = "futures-io" version = "0.3.31" @@ -2005,12 +2091,32 @@ dependencies = [ "tracing", ] +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash", +] + [[package]] name = "hashbrown" version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.5", +] + [[package]] name = "heck" version = "0.5.0" @@ -2029,6 +2135,15 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + [[package]] name = "hmac" version = "0.12.1" @@ -2428,7 +2543,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.16.1", ] [[package]] @@ -2567,6 +2682,9 @@ name = "lazy_static" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin", +] [[package]] name = "libc" @@ -2600,6 +2718,17 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" +[[package]] +name = "libredox" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" +dependencies = [ + "bitflags 2.10.0", + "libc", + "redox_syscall", +] + [[package]] name = "librocksdb-sys" version = "0.17.3+10.4.2" @@ -2615,6 +2744,17 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "cc", + "pkg-config", + "vcpkg", +] + [[package]] name = "libz-sys" version = "1.1.23" @@ -2685,6 +2825,16 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" +[[package]] +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +dependencies = [ + "cfg-if", + "digest", +] + [[package]] name = "memchr" version = "2.7.6" @@ -2851,6 +3001,22 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint-dig" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7" +dependencies = [ + "lazy_static", + "libm", + "num-integer", + "num-iter", + "num-traits", + "rand 0.8.5", + "smallvec", + "zeroize", +] + [[package]] name = "num-conv" version = "0.1.0" @@ -2866,6 +3032,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.19" @@ -2873,6 +3050,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", + "libm", ] [[package]] @@ -3176,6 +3354,17 @@ dependencies = [ "futures-io", ] +[[package]] +name = "pkcs1" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8ffb9f10fa047879315e6625af03c164b16962a5368d724ed16323b68ace47f" +dependencies = [ + "der 0.7.10", + "pkcs8 0.10.2", + "spki 0.7.3", +] + [[package]] name = "pkcs12" version = "0.1.0" @@ -3294,7 +3483,7 @@ version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" dependencies = [ - "toml_edit 0.23.7", + "toml_edit", ] [[package]] @@ -3635,16 +3824,18 @@ dependencies = [ "anyhow", "axum", "base64 0.22.1", + "chrono", "clap", "rohas-codegen", "rohas-dev-server", "rohas-engine", + "rohas-orm", "rohas-parser", "serde", "serde_json", "tokio", "toml", - "toml_edit 0.22.27", + "toml_edit", "tower", "tower-http", "tracing", @@ -3657,6 +3848,7 @@ name = "rohas-codegen" version = "0.1.0" dependencies = [ "anyhow", + "rohas-orm", "rohas-parser", "serde", "serde_json", @@ -3735,6 +3927,38 @@ dependencies = [ "uuid", ] +[[package]] +name = "rohas-orm" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "chrono", + "futures", + "pyo3", + "rohas-orm-macros", + "rohas-parser", + "serde", + "serde_json", + "sqlx", + "tempfile", + "thiserror 2.0.17", + "tokio", + "tokio-test", + "tracing", + "tracing-subscriber", + "uuid", +] + +[[package]] +name = "rohas-orm-macros" +version = "0.1.0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "rohas-parser" version = "0.1.0" @@ -3786,6 +4010,26 @@ dependencies = [ "uuid", ] +[[package]] +name = "rsa" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88" +dependencies = [ + "const-oid", + "digest", + "num-bigint-dig", + "num-integer", + "num-traits", + "pkcs1", + "pkcs8 0.10.2", + "rand_core 0.6.4", + "signature 2.2.0", + "spki 0.7.3", + "subtle", + "zeroize", +] + [[package]] name = "rustc-hash" version = "2.1.1" @@ -4308,6 +4552,9 @@ name = "smallvec" version = 
"1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "serde", +] [[package]] name = "socket2" @@ -4358,6 +4605,204 @@ dependencies = [ "der 0.7.10", ] +[[package]] +name = "sqlx" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fefb893899429669dcdd979aff487bd78f4064e5e7907e4269081e0ef7d97dc" +dependencies = [ + "sqlx-core", + "sqlx-macros", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", +] + +[[package]] +name = "sqlx-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6798b1838b6a0f69c007c133b8df5866302197e404e8b6ee8ed3e3a5e68dc6" +dependencies = [ + "base64 0.22.1", + "bytes", + "chrono", + "crc", + "crossbeam-queue", + "either", + "event-listener", + "futures-core", + "futures-intrusive", + "futures-io", + "futures-util", + "hashbrown 0.15.5", + "hashlink", + "indexmap", + "log", + "memchr", + "once_cell", + "percent-encoding", + "rustls 0.23.35", + "serde", + "serde_json", + "sha2", + "smallvec", + "thiserror 2.0.17", + "tokio", + "tokio-stream", + "tracing", + "url", + "uuid", + "webpki-roots 0.26.11", +] + +[[package]] +name = "sqlx-macros" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2d452988ccaacfbf5e0bdbc348fb91d7c8af5bee192173ac3636b5fb6e6715d" +dependencies = [ + "proc-macro2", + "quote", + "sqlx-core", + "sqlx-macros-core", + "syn", +] + +[[package]] +name = "sqlx-macros-core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19a9c1841124ac5a61741f96e1d9e2ec77424bf323962dd894bdb93f37d5219b" +dependencies = [ + "dotenvy", + "either", + "heck", + "hex", + "once_cell", + "proc-macro2", + "quote", + "serde", + "serde_json", + "sha2", + "sqlx-core", + "sqlx-mysql", + "sqlx-postgres", + "sqlx-sqlite", + "syn", + "tokio", + "url", +] + +[[package]] +name = "sqlx-mysql" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa003f0038df784eb8fecbbac13affe3da23b45194bd57dba231c8f48199c526" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.10.0", + "byteorder", + "bytes", + "chrono", + "crc", + "digest", + "dotenvy", + "either", + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "generic-array", + "hex", + "hkdf", + "hmac", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "percent-encoding", + "rand 0.8.5", + "rsa", + "serde", + "sha1", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-postgres" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db58fcd5a53cf07c184b154801ff91347e4c30d17a3562a635ff028ad5deda46" +dependencies = [ + "atoi", + "base64 0.22.1", + "bitflags 2.10.0", + "byteorder", + "chrono", + "crc", + "dotenvy", + "etcetera", + "futures-channel", + "futures-core", + "futures-util", + "hex", + "hkdf", + "hmac", + "home", + "itoa", + "log", + "md-5", + "memchr", + "once_cell", + "rand 0.8.5", + "serde", + "serde_json", + "sha2", + "smallvec", + "sqlx-core", + "stringprep", + "thiserror 2.0.17", + "tracing", + "uuid", + "whoami", +] + +[[package]] +name = "sqlx-sqlite" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2d12fe70b2c1b4401038055f90f151b78208de1f9f89a7dbfd41587a10c3eea" 
+dependencies = [ + "atoi", + "chrono", + "flume", + "futures-channel", + "futures-core", + "futures-executor", + "futures-intrusive", + "futures-util", + "libsqlite3-sys", + "log", + "percent-encoding", + "serde", + "serde_urlencoded", + "sqlx-core", + "thiserror 2.0.17", + "tracing", + "url", + "uuid", +] + [[package]] name = "stable_deref_trait" version = "1.2.1" @@ -4373,6 +4818,17 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + [[package]] name = "strsim" version = "0.11.1" @@ -4618,6 +5074,21 @@ dependencies = [ "zerovec", ] +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + [[package]] name = "tokio" version = "1.48.0" @@ -4745,18 +5216,12 @@ dependencies = [ "indexmap", "serde_core", "serde_spanned", - "toml_datetime 0.7.3", + "toml_datetime", "toml_parser", "toml_writer", "winnow 0.7.13", ] -[[package]] -name = "toml_datetime" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" - [[package]] name = "toml_datetime" version = "0.7.3" @@ -4768,25 +5233,14 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.27" +version = "0.23.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +checksum = "5d7cbc3b4b49633d57a0509303158ca50de80ae32c265093b24c414705807832" dependencies = [ "indexmap", - "toml_datetime 0.6.11", - "toml_write", - "winnow 0.7.13", -] - -[[package]] -name = "toml_edit" -version = "0.23.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6485ef6d0d9b5d0ec17244ff7eb05310113c3f316f2d14200d4de56b3cb98f8d" -dependencies = [ - "indexmap", - "toml_datetime 0.7.3", + "toml_datetime", "toml_parser", + "toml_writer", "winnow 0.7.13", ] @@ -4799,12 +5253,6 @@ dependencies = [ "winnow 0.7.13", ] -[[package]] -name = "toml_write" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" - [[package]] name = "toml_writer" version = "1.0.4" @@ -4984,12 +5432,33 @@ version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + [[package]] name = "unicode-ident" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +[[package]] +name = "unicode-normalization" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5fd4f6878c9cb28d874b009da9e8d183b5abc80117c40bbd187a1fde336be6e8" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7df058c713841ad818f1dc5d3fd88063241cc61f49f5fbea4b951e8cf5a8d71d" + [[package]] name = "unicode-segmentation" version = "1.12.0" @@ -5140,6 +5609,12 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + [[package]] name = "wasm-bindgen" version = "0.2.105" @@ -5215,6 +5690,16 @@ dependencies = [ "winsafe", ] +[[package]] +name = "whoami" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4a4db5077702ca3015d3d02d74974948aba2ad9e12ab7df718ee64ccd7e97d" +dependencies = [ + "libredox", + "wasite", +] + [[package]] name = "winapi" version = "0.3.9" @@ -5385,6 +5870,15 @@ dependencies = [ "windows-link 0.2.1", ] +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + [[package]] name = "windows-sys" version = "0.52.0" @@ -5421,6 +5915,21 @@ dependencies = [ "windows-link 0.2.1", ] +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + [[package]] name = "windows-targets" version = "0.52.6" @@ -5463,6 +5972,12 @@ dependencies = [ "windows-link 0.1.3", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -5475,6 +5990,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + [[package]] name = "windows_aarch64_msvc" version = "0.52.6" @@ -5487,6 +6008,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -5511,6 +6038,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + [[package]] name = "windows_i686_msvc" version = "0.52.6" @@ -5523,6 +6056,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + [[package]] name = "windows_x86_64_gnu" version = "0.52.6" @@ -5535,6 +6074,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" @@ -5547,6 +6092,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + [[package]] name = "windows_x86_64_msvc" version = "0.52.6" diff --git a/Cargo.toml b/Cargo.toml index fcd9a1c..b7ee7ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,6 +9,8 @@ members = [ "crates/rohas-cli", "crates/rohas-dev-server", "crates/rohas-telemetry", + "crates/rohas-orm", + "crates/rohas-orm-macros", "crates/rohas-adapters/adapter-memory", "crates/rohas-adapters/adapter-nats", "crates/rohas-adapters/adapter-kafka", @@ -95,6 +97,8 @@ rohas-engine = { version = "0.1.0", path = "crates/rohas-engine" } rohas-cron = { version = "0.1.0", path = "crates/rohas-cron" } rohas-cli = { version = "0.1.0", path = "crates/rohas-cli" } rohas-dev-server = { version = "0.1.0", path = "crates/rohas-dev-server" } +rohas-orm = { version = "0.1.0", path = "crates/rohas-orm" } +rohas-orm-macros = { version = "0.1.0", path = "crates/rohas-orm-macros" } adapter-memory = { version = "0.1.0", path = "crates/rohas-adapters/adapter-memory" } adapter-nats = { version = "0.1.0", path = "crates/rohas-adapters/adapter-nats" } adapter-kafka = { version = "0.1.0", path = "crates/rohas-adapters/adapter-kafka" } diff --git a/crates/rohas-cli/Cargo.toml b/crates/rohas-cli/Cargo.toml index 4742a0f..b935390 100644 --- a/crates/rohas-cli/Cargo.toml +++ b/crates/rohas-cli/Cargo.toml @@ -17,6 +17,7 @@ rohas-parser = { workspace = true } rohas-codegen = { workspace = true } rohas-engine = { workspace = true } rohas-dev-server = { workspace = true } +rohas-orm = { workspace = true } clap = { workspace = true } tokio = { workspace = true } @@ -24,7 +25,7 @@ anyhow = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } toml = { workspace = true } -toml_edit = "0.22" +toml_edit = "0.23.9" tracing = { workspace = true } tracing-subscriber = { workspace = true } axum = { workspace = true } @@ -32,4 +33,4 @@ tower = { workspace = true } tower-http = { version = "0.6.6", features = ["fs"] } uuid = { workspace = true } base64 = { workspace = true } - +chrono = { workspace = true } diff --git a/crates/rohas-cli/src/commands/db.rs b/crates/rohas-cli/src/commands/db.rs new file mode 100644 index 
0000000..c202263
--- /dev/null
+++ b/crates/rohas-cli/src/commands/db.rs
@@ -0,0 +1,381 @@
+use anyhow::{Context, Result};
+use rohas_orm::{Database, MigrationManager};
+use rohas_parser::Parser;
+use std::path::PathBuf;
+use tracing::info;
+
+pub async fn init(
+    database_url: String,
+    migrations_dir: Option<PathBuf>,
+    migration_name: Option<String>,
+    schema_path: Option<PathBuf>,
+) -> Result<()> {
+    info!("Initializing database: {}", database_url);
+
+    let db = Database::connect(&database_url)
+        .await
+        .context("Failed to connect to database")?;
+
+    let migrations_dir = migrations_dir.unwrap_or_else(|| PathBuf::from("migrations"));
+
+    let manager = MigrationManager::new(migrations_dir.clone(), db.clone());
+    manager.init().await.context("Failed to initialize migrations")?;
+
+    info!("Database initialized successfully!");
+    info!("Migration tracking table created: _rohas_migrations");
+
+    if let Some(name) = migration_name {
+        let schema_path = schema_path.unwrap_or_else(|| PathBuf::from("schema"));
+
+        info!("Scanning schema at: {}", schema_path.display());
+        info!("Creating migration: {}", name);
+
+        let schema = if schema_path.is_file() {
+            Parser::parse_file(&schema_path)
+                .context("Failed to parse schema file")?
+        } else if schema_path.is_dir() {
+            let mut full_schema = rohas_parser::ast::Schema::new();
+            fn scan_directory(dir: &std::path::Path, schema: &mut rohas_parser::ast::Schema) -> Result<()> {
+                let entries = std::fs::read_dir(dir)
+                    .context(format!("Failed to read directory: {:?}", dir))?;
+
+                for entry in entries {
+                    let entry = entry?;
+                    let path = entry.path();
+
+                    if path.is_dir() {
+                        scan_directory(&path, schema)?;
+                    } else if path.extension().and_then(|s| s.to_str()) == Some("ro") {
+                        let file_schema = Parser::parse_file(&path)
+                            .context(format!("Failed to parse {:?}", path))?;
+                        schema.models.extend(file_schema.models);
+                    }
+                }
+                Ok(())
+            }
+
+            scan_directory(&schema_path, &mut full_schema)
+                .context("Failed to scan schema directory")?;
+            full_schema
+        } else {
+            anyhow::bail!("Schema path not found: {}", schema_path.display());
+        };
+
+        let (up_sql, down_sql) = manager
+            .generate_migration_from_schema(&schema)
+            .await
+            .context("Failed to generate migration SQL")?;
+
+        let up_sql_trimmed = up_sql.trim();
+        if up_sql_trimmed.is_empty() {
+            info!("No schema changes detected. Migration not created.");
+            return Ok(());
+        }
+
+        let mut migration = manager.create_migration(&name)
+            .context("Failed to create migration file")?;
+
+        migration.up_sql = up_sql;
+        migration.down_sql = down_sql;
+
+        let file_path = migrations_dir.join(format!("{}.sql", migration.name));
+        let content = format!(
+            "-- Up Migration\n{}\n\n-- Down Migration\n{}",
+            migration.up_sql, migration.down_sql
+        );
+        std::fs::write(&file_path, content)
+            .context("Failed to write migration file")?;
+
+        info!("Migration file created: {}", file_path.display());
+
+        info!("Applying migration...");
+        manager.apply_migration(&migration).await
+            .context("Failed to apply migration")?;
+
+        info!("Migration applied successfully!");
+    }
+
+    Ok(())
+}
+pub async fn migrate(
+    database_url: String,
+    migrations_dir: Option<PathBuf>,
+) -> Result<()> {
+    info!("Applying pending migrations to database: {}", database_url);
+
+    let db = Database::connect(&database_url)
+        .await
+        .context("Failed to connect to database")?;
+
+    let migrations_dir = migrations_dir.unwrap_or_else(|| PathBuf::from("migrations"));
+    let manager = MigrationManager::new(migrations_dir.clone(), db.clone());
+
+    manager.init().await.ok();
+
+    let applied = manager.get_applied_migrations().await
+        .context("Failed to get applied migrations")?;
+
+    let mut migration_files = Vec::new();
+    if migrations_dir.exists() {
+        let entries = std::fs::read_dir(&migrations_dir)
+            .context("Failed to read migrations directory")?;
+
+        for entry in entries {
+            let entry = entry?;
+            let path = entry.path();
+            if path.extension().and_then(|s| s.to_str()) == Some("sql") {
+                if let Some(file_name) = path.file_stem().and_then(|s| s.to_str()) {
+                    if !applied.contains(&file_name.to_string()) {
+                        migration_files.push(path);
+                    }
+                }
+            }
+        }
+    }
+
+    migration_files.sort();
+
+    if migration_files.is_empty() {
+        info!("No pending migrations found.");
+        return Ok(());
+    }
+
+    info!("Found {} pending migration(s)", migration_files.len());
+
+    for file_path in migration_files {
+        let content = std::fs::read_to_string(&file_path)
+            .context(format!("Failed to read migration file: {:?}", file_path))?;
+
+        // Parse migration file (format: -- Up Migration\n...\n\n-- Down Migration\n...)
+        let parts: Vec<&str> = content.split("-- Down Migration").collect();
+        if parts.len() != 2 {
+            anyhow::bail!("Invalid migration file format: {:?}", file_path);
+        }
+
+        let up_sql = parts[0]
+            .replace("-- Up Migration", "")
+            .trim()
+            .to_string();
+        let down_sql = parts[1].trim().to_string();
+
+        let migration_name = file_path
+            .file_stem()
+            .and_then(|s| s.to_str())
+            .unwrap_or("unknown")
+            .to_string();
+
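+        // Migration files appear to be named "{timestamp}_{name}.sql", so the
+        // segment before the first '_' is parsed as a Unix timestamp (e.g.
+        // "1700000000_create_users" -> 1700000000); if parsing fails, the
+        // current time is used as a fallback.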
+        let timestamp = migration_name
+            .split('_')
+            .next()
+            .and_then(|s| s.parse::<i64>().ok())
+            .unwrap_or_else(|| chrono::Utc::now().timestamp());
+
+        let migration = rohas_orm::Migration {
+            name: migration_name,
+            timestamp,
+            up_sql,
+            down_sql,
+        };
+
+        info!("Applying migration: {}", migration.name);
+        manager.apply_migration(&migration).await
+            .context(format!("Failed to apply migration: {}", migration.name))?;
+    }
+
+    info!("All migrations applied successfully!");
+
+    Ok(())
+}
+
+pub async fn deploy(
+    database_url: String,
+    migrations_dir: Option<PathBuf>,
+) -> Result<()> {
+    info!("Deploying migrations to database: {}", database_url);
+
+    let db = Database::connect(&database_url)
+        .await
+        .context("Failed to connect to database")?;
+
+    let migrations_dir = migrations_dir.unwrap_or_else(|| PathBuf::from("migrations"));
+    let manager = MigrationManager::new(migrations_dir.clone(), db.clone());
+
+    manager.init().await.ok();
+
+    let applied = manager.get_applied_migrations().await
+        .context("Failed to get applied migrations")?;
+
+    let mut migration_files = Vec::new();
+    if migrations_dir.exists() {
+        let entries = std::fs::read_dir(&migrations_dir)
+            .context("Failed to read migrations directory")?;
+
+        for entry in entries {
+            let entry = entry?;
+            let path = entry.path();
+            if path.extension().and_then(|s| s.to_str()) == Some("sql") {
+                if let Some(file_name) = path.file_stem().and_then(|s| s.to_str()) {
+                    if !applied.contains(&file_name.to_string()) {
+                        migration_files.push(path);
+                    }
+                }
+            }
+        }
+    }
+
+    migration_files.sort();
+
+    if migration_files.is_empty() {
+        info!("No pending migrations found.");
+        return Ok(());
+    }
+
+    info!("Found {} pending migration(s)", migration_files.len());
+
+    for file_path in migration_files {
+        let content = std::fs::read_to_string(&file_path)
+            .context(format!("Failed to read migration file: {:?}", file_path))?;
+
+        // Parse migration file (format: -- Up Migration\n...\n\n-- Down Migration\n...)
+        let parts: Vec<&str> = content.split("-- Down Migration").collect();
+        if parts.len() != 2 {
+            anyhow::bail!("Invalid migration file format: {:?}", file_path);
+        }
+
+        let up_sql = parts[0]
+            .replace("-- Up Migration", "")
+            .trim()
+            .to_string();
+        let down_sql = parts[1].trim().to_string();
+
+        let migration_name = file_path
+            .file_stem()
+            .and_then(|s| s.to_str())
+            .unwrap_or("unknown")
+            .to_string();
+
+        let timestamp = migration_name
+            .split('_')
+            .next()
+            .and_then(|s| s.parse::<i64>().ok())
+            .unwrap_or_else(|| chrono::Utc::now().timestamp());
+
+        let migration = rohas_orm::Migration {
+            name: migration_name,
+            timestamp,
+            up_sql,
+            down_sql,
+        };
+
+        info!("Applying migration: {}", migration.name);
+        manager.apply_migration(&migration).await
+            .context(format!("Failed to apply migration: {}", migration.name))?;
+    }
+
+    info!("All migrations deployed successfully!");
+
+    Ok(())
+}
+
+/// Revert migrations (rollback the last N applied migrations)
+pub async fn revert(
+    database_url: String,
+    migrations_dir: Option<PathBuf>,
+    count: u32,
+) -> Result<()> {
+    info!("Reverting {} migration(s) from database: {}", count, database_url);
+
+    let db = Database::connect(&database_url)
+        .await
+        .context("Failed to connect to database")?;
+
+    let migrations_dir = migrations_dir.unwrap_or_else(|| PathBuf::from("migrations"));
+    let manager = MigrationManager::new(migrations_dir.clone(), db.clone());
+
+    manager.init().await.ok();
+
+    let applied = manager.get_applied_migrations().await
+        .context("Failed to get applied migrations")?;
+
+    if applied.is_empty() {
+        info!("No applied migrations found.");
+        return Ok(());
+    }
+
+    let mut applied_migrations = Vec::new();
+    if migrations_dir.exists() {
+        let entries = std::fs::read_dir(&migrations_dir)
+            .context("Failed to read migrations directory")?;
+
+        for entry in entries {
+            let entry = entry?;
+            let path = entry.path();
+            if path.extension().and_then(|s| s.to_str()) == Some("sql") {
+                if let Some(file_name) = path.file_stem().and_then(|s| s.to_str()) {
+                    if applied.contains(&file_name.to_string()) {
+                        applied_migrations.push(path);
+                    }
+                }
+            }
+        }
+    }
+
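+    // Sort ascending by filename, then reverse so the most recently created
+    // migrations are reverted before older ones.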
+    applied_migrations.sort();
+    applied_migrations.reverse();
+
+    let migrations_to_revert = applied_migrations.into_iter().take(count as usize).collect::<Vec<_>>();
+
+    if migrations_to_revert.is_empty() {
+        info!("No migrations to revert.");
+        return Ok(());
+    }
+
+    info!("Reverting {} migration(s)", migrations_to_revert.len());
+
+    for file_path in migrations_to_revert {
+        let content = std::fs::read_to_string(&file_path)
+            .context(format!("Failed to read migration file: {:?}", file_path))?;
+
+        // Parse migration file (format: -- Up Migration\n...\n\n-- Down Migration\n...)
+        let parts: Vec<&str> = content.split("-- Down Migration").collect();
+        if parts.len() != 2 {
+            anyhow::bail!("Invalid migration file format: {:?}", file_path);
+        }
+
+        let up_sql = parts[0]
+            .replace("-- Up Migration", "")
+            .trim()
+            .to_string();
+        let down_sql = parts[1].trim().to_string();
+
+        let migration_name = file_path
+            .file_stem()
+            .and_then(|s| s.to_str())
+            .unwrap_or("unknown")
+            .to_string();
+
+        let timestamp = migration_name
+            .split('_')
+            .next()
+            .and_then(|s| s.parse::<i64>().ok())
+            .unwrap_or_else(|| chrono::Utc::now().timestamp());
+
+        let migration = rohas_orm::Migration {
+            name: migration_name,
+            timestamp,
+            up_sql,
+            down_sql,
+        };
+
+        info!("Reverting migration: {}", migration.name);
+        manager.rollback_migration(&migration).await
+            .context(format!("Failed to revert migration: {}", migration.name))?;
+    }
+
+    info!("Migration(s) reverted successfully!");
+
+    Ok(())
+}
diff --git a/crates/rohas-cli/src/commands/mod.rs b/crates/rohas-cli/src/commands/mod.rs
index 94a78b9..ead39a1 100644
--- a/crates/rohas-cli/src/commands/mod.rs
+++ b/crates/rohas-cli/src/commands/mod.rs
@@ -1,4 +1,5 @@
 pub mod codegen;
+pub mod db;
 pub mod dev;
 pub mod init;
 pub mod list;
diff --git a/crates/rohas-cli/src/main.rs b/crates/rohas-cli/src/main.rs
index 6a1d5fa..ca9973b 100644
--- a/crates/rohas-cli/src/main.rs
+++ b/crates/rohas-cli/src/main.rs
@@ -88,9 +88,59 @@ enum Commands {
         schema: PathBuf,
     },
 
+    Db {
+        #[command(subcommand)]
+        command: DbCommands,
+    },
+
     Version,
 }
 
+#[derive(Subcommand)]
+enum DbCommands {
+    Init {
+        #[arg(short, long)]
+        url: String,
+
+        #[arg(short, long, default_value = "migrations")]
+        migrations: PathBuf,
+
+        #[arg(short, long)]
+        name: Option<String>,
+
+        #[arg(short, long, default_value = "schema")]
+        schema: Option<PathBuf>,
+    },
+
+    Migrate {
+        #[arg(short, long)]
+        url: String,
+
+        #[arg(short, long, default_value = "migrations")]
+        migrations: PathBuf,
+    },
+
+    Deploy {
+        #[arg(short, long)]
+        url: String,
+
+        #[arg(short, long, default_value = "migrations")]
+        migrations: PathBuf,
+    },
+
+    Revert {
+        #[arg(short, long)]
+        url: String,
+
+        #[arg(short, long, default_value = "migrations")]
+        migrations: PathBuf,
+
+        #[arg(short, long, default_value = "1")]
+        count: u32,
+    },
+}
+
 use std::sync::Arc;
 use tracing_subscriber::reload::Handle;
@@ -158,6 +208,22 @@ async fn main() -> anyhow::Result<()> {
         Commands::ListEvents { schema } => {
             commands::list::list_events(schema).await?;
         }
+        Commands::Db { command } => {
+            match command {
+                DbCommands::Init { url, migrations, name, schema } => {
+                    commands::db::init(url, Some(migrations), name, schema).await?;
+                }
+                DbCommands::Migrate { url, migrations } => {
+                    commands::db::migrate(url, Some(migrations)).await?;
+                }
+                DbCommands::Deploy { url, migrations } => {
+                    commands::db::deploy(url, Some(migrations)).await?;
+                }
+                DbCommands::Revert { url, migrations, count } => {
+                    commands::db::revert(url, Some(migrations), count).await?;
+                }
+            }
+        }
         Commands::Version => {
             println!("rohas {}", env!("CARGO_PKG_VERSION"));
         }
diff --git a/crates/rohas-codegen/Cargo.toml b/crates/rohas-codegen/Cargo.toml
index c9f2c30..df7bf52 100644
--- a/crates/rohas-codegen/Cargo.toml
+++ b/crates/rohas-codegen/Cargo.toml
@@ -9,6 +9,7 @@ repository = { workspace = true }
 
 [dependencies]
 rohas-parser = { workspace = true }
+rohas-orm = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
 thiserror = { workspace = true }
diff --git a/crates/rohas-codegen/src/config.rs b/crates/rohas-codegen/src/config.rs
index bb30853..0bb6809 100644
--- a/crates/rohas-codegen/src/config.rs
+++ b/crates/rohas-codegen/src/config.rs
@@ -114,6 +114,7 @@ requires-python = ">=3.9"
 dependencies = [
     "pydantic>=2.0.0",
     "typing-extensions>=4.0.0",
+    "rohas-orm>=0.1.0",
 ]
 
 [project.optional-dependencies]
diff --git a/crates/rohas-codegen/src/error.rs b/crates/rohas-codegen/src/error.rs
index e524628..377de60 100644
--- a/crates/rohas-codegen/src/error.rs
+++ b/crates/rohas-codegen/src/error.rs
@@ -25,3 +25,9 @@ impl From for CodegenError {
         CodegenError::Template(err.to_string())
     }
 }
+
+impl From<rohas_orm::Error> for CodegenError {
+    fn from(err: rohas_orm::Error) -> Self {
+        CodegenError::GenerationFailed(format!("ORM error: {}", err))
+    }
+}
diff --git a/crates/rohas-codegen/src/generator.rs b/crates/rohas-codegen/src/generator.rs
index 677fc5d..03c707d 100644
--- a/crates/rohas-codegen/src/generator.rs
+++ b/crates/rohas-codegen/src/generator.rs
@@ -133,7 +133,7 @@ impl Generator {
     fn generate_python(&self, schema: &Schema, output_dir: &Path) -> Result<()> {
         python::generate_state(output_dir)?;
-        python::generate_models(schema, output_dir)?;
+        python::generate_models_with_orm(schema, output_dir)?;
         python::generate_dtos(schema, output_dir)?;
         python::generate_apis(schema, output_dir)?;
         python::generate_events(schema, output_dir)?;
@@ -155,8 +155,8 @@ impl Generator {
         info!("Generating Rust code...");
         info!("Generating state...");
         rust::generate_state(output_dir)?;
-        info!("Generating models...");
-        rust::generate_models(schema, output_dir)?;
+        info!("Generating models with ORM support...");
+        rust::generate_models_with_orm(schema, output_dir)?;
         info!("Generating DTOs...");
         rust::generate_dtos(schema, output_dir)?;
         info!("Generating APIs...");
diff --git a/crates/rohas-codegen/src/python.rs b/crates/rohas-codegen/src/python.rs
index f0fc686..7fbaa0f 100644
--- a/crates/rohas-codegen/src/python.rs
+++ b/crates/rohas-codegen/src/python.rs
@@ -16,6 +16,26 @@ pub fn generate_models(schema: &Schema, output_dir: &Path) -> Result<()> {
     Ok(())
 }
 
+pub fn generate_models_with_orm(schema: &Schema, output_dir: &Path) -> Result<()> {
+    use rohas_orm::codegen::Codegen;
+    use std::collections::HashSet;
+    use tracing::info;
+
+    let models_dir = output_dir.join("generated/models");
+
+    info!("Generating ORM models from schema...");
+
+    let mut codegen = Codegen::new(models_dir.clone());
+
+    codegen.models = schema.models.clone();
+    codegen.model_names = schema.models.iter().map(|m| m.name.clone()).collect::<HashSet<_>>();
+
+    codegen.generate_python_models()?;
+
+    info!("ORM models generated successfully");
+    Ok(())
+}
+
 fn generate_model_content(model: &Model) -> String {
     let mut content = String::new();
diff --git a/crates/rohas-codegen/src/rust.rs b/crates/rohas-codegen/src/rust.rs
index 33970cc..d49f4de 100644
--- a/crates/rohas-codegen/src/rust.rs
+++ b/crates/rohas-codegen/src/rust.rs
@@ -1,8 +1,9 @@
 use crate::error::Result;
-use crate::templates;
+use crate::{CodegenError, templates};
 use rohas_parser::{Api, Event, FieldType, Model, Schema, WebSocket};
 use std::fs;
 use std::path::Path;
+use tracing::info;
 
 /// Rust reserved keywords that need to be escaped with r#
 const RUST_RESERVED_KEYWORDS: &[&str] = &[
@@ -41,6 +42,25 @@ pub fn generate_models(schema: &Schema, output_dir: &Path) -> Result<()> {
     Ok(())
 }
 
+pub fn generate_models_with_orm(schema: &Schema, output_dir: &Path) -> Result<()> {
+    use rohas_orm::codegen::Codegen;
+    use std::collections::HashSet;
+
+    let models_dir = output_dir.join("generated/models");
+
+    info!("Generating ORM models from schema...");
+
+    let mut codegen = Codegen::new(models_dir.clone());
+
+    codegen.models = schema.models.clone();
+    codegen.model_names = schema.models.iter().map(|m| m.name.clone()).collect::<HashSet<_>>();
+
+    codegen.generate_rust_models()?;
+
+    info!("ORM models generated successfully");
+    Ok(())
+}
+
 fn generate_model_content(model: &Model) -> String {
     let mut content = String::new();
diff --git a/crates/rohas-orm-macros/Cargo.toml b/crates/rohas-orm-macros/Cargo.toml
new file mode 100644
index 0000000..aa42438
--- /dev/null
+++ b/crates/rohas-orm-macros/Cargo.toml
@@ -0,0 +1,17 @@
+[package]
+name = "rohas-orm-macros"
+version = { workspace = true }
+edition = { workspace = true }
+authors = { workspace = true }
+license = { workspace = true }
+description = "Proc macros for rohas-orm"
+repository = { workspace = true }
+
+[lib]
+proc-macro = true
+
+[dependencies]
+syn = { version = "2.0", features = ["full", "extra-traits"] }
+quote = "1.0"
+proc-macro2 = "1.0"
diff --git a/crates/rohas-orm-macros/src/lib.rs b/crates/rohas-orm-macros/src/lib.rs
new file mode 100644
index 0000000..d16ac21
--- /dev/null
+++ b/crates/rohas-orm-macros/src/lib.rs
@@ -0,0 +1,182 @@
+//! Proc macros for rohas-orm
+//!
+//! Provides derive macros for models, queries, and relationships
+
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::{parse_macro_input, DeriveInput, Data, DataStruct, Fields, LitStr};
+
+/// Derive macro for the Model trait
+///
+/// # Example
+///
+/// ```rust,ignore
+/// #[derive(Model)]
+/// #[table_name = "users"]
+/// struct User {
+///     #[primary_key]
+///     id: i64,
+///     name: String,
+///     email: String,
+/// }
+/// ```
+#[proc_macro_derive(Model, attributes(table_name, primary_key))]
+pub fn derive_model(input: TokenStream) -> TokenStream {
+    let input = parse_macro_input!(input as DeriveInput);
+    let name = &input.ident;
+
+    // Extract table name and primary key field from attributes
+    let table_name = extract_table_name(&input);
+    let primary_key = extract_primary_key(&input);
+
+    let primary_key_str = LitStr::new(&primary_key.to_string(), primary_key.span());
+    let expanded = quote! {
+        impl rohas_orm::Model for #name {
+            fn table_name() -> &'static str {
+                #table_name
+            }
+
+            fn primary_key() -> &'static str {
+                #primary_key_str
+            }
+
+            fn primary_key_value(&self) -> rohas_orm::Result<Box<dyn std::any::Any>> {
+                Ok(Box::new(self.#primary_key.clone()))
+            }
+
+            async fn find_by_id(db: &rohas_orm::Database, id: i64) -> rohas_orm::Result<Option<Self>> {
+                use rohas_orm::Query;
+                let query = rohas_orm::QueryBuilder::select_all()
+                    .from(Self::table_name())
+                    .where_eq_num(Self::primary_key(), id)
+                    .limit(1);
+
+                let results = query.execute(db).await?;
+                if results.is_empty() {
+                    return Ok(None);
+                }
+
+                let model: Self = serde_json::from_value(results[0].clone())
+                    .map_err(|e| rohas_orm::Error::Serialization(e))?;
+                Ok(Some(model))
+            }
+
+            async fn find_all(db: &rohas_orm::Database) -> rohas_orm::Result<Vec<Self>> {
+                use rohas_orm::Query;
+                let query = rohas_orm::QueryBuilder::select_all()
+                    .from(Self::table_name());
+
+                let results = query.execute(db).await?;
+                let models: Vec<Self> = results.into_iter()
+                    .map(|v| serde_json::from_value(v).map_err(|e| rohas_orm::Error::Serialization(e)))
+                    .collect::<rohas_orm::Result<Vec<Self>>>()?;
+                Ok(models)
+            }
+
+            async fn save(&self, db: &rohas_orm::Database) -> rohas_orm::Result<()> {
+                use rohas_orm::Query;
+                let pk_value = self.primary_key_value()?;
+                let pk_num = pk_value.downcast_ref::<i64>()
+                    .ok_or_else(|| rohas_orm::Error::Validation("Primary key must be i64".to_string()))?;
+
+                // Check if exists
+                if Self::find_by_id(db, *pk_num).await?.is_some() {
+                    // Update
+                    let json = serde_json::to_value(self)
+                        .map_err(|e| rohas_orm::Error::Serialization(e))?;
+                    let mut update = rohas_orm::QueryBuilder::update(Self::table_name());
+
+                    if let serde_json::Value::Object(map) = json {
+                        for (key, value) in map {
+                            if key != Self::primary_key() {
+                                let val_str = match value {
+                                    serde_json::Value::String(s) => s,
+                                    serde_json::Value::Number(n) => n.to_string(),
+                                    serde_json::Value::Bool(b) => b.to_string(),
+                                    _ => value.to_string(),
+                                };
+                                update = update.set(&key, &val_str);
+                            }
+                        }
+                    }
+
+                    update = update.where_eq_num(Self::primary_key(), *pk_num);
+                    update.execute_affected(db).await?;
+                } else {
+                    // Insert
+                    let json = serde_json::to_value(self)
+                        .map_err(|e| rohas_orm::Error::Serialization(e))?;
+
+                    if let serde_json::Value::Object(map) = json {
+                        let columns: Vec<String> = map.keys().cloned().collect();
+                        let value_strings: Vec<String> = map.values().map(|v| {
+                            match v {
+                                serde_json::Value::String(s) => format!("'{}'", s.replace("'", "''")),
+                                serde_json::Value::Number(n) => n.to_string(),
+                                serde_json::Value::Bool(b) => b.to_string(),
+                                _ => format!("'{}'", v.to_string().replace("'", "''")),
+                            }
+                        }).collect();
+                        let values: Vec<&str> = value_strings.iter().map(|s| s.as_str()).collect();
+
+                        let insert = rohas_orm::QueryBuilder::insert(Self::table_name())
+                            .values(values);
+                        insert.execute_affected(db).await?;
+                    }
+                }
+
+                Ok(())
+            }
+
+            async fn delete(&self, db: &rohas_orm::Database) -> rohas_orm::Result<()> {
+                use rohas_orm::Query;
+                let pk_value = self.primary_key_value()?;
+                let pk_num = pk_value.downcast_ref::<i64>()
+                    .ok_or_else(|| rohas_orm::Error::Validation("Primary key must be i64".to_string()))?;
+
+                let delete = rohas_orm::QueryBuilder::delete(Self::table_name())
+                    .where_eq_num(Self::primary_key(), *pk_num);
+                delete.execute_affected(db).await?;
+                Ok(())
+            }
+
+            async fn create(db: &rohas_orm::Database, data: Self) -> rohas_orm::Result<Self> {
+                data.save(db).await?;
+                Ok(data)
+            }
+
+            async fn update(db: &rohas_orm::Database, id: i64, data: Self) -> rohas_orm::Result<Self> {
+                data.save(db).await?;
+                Ok(data)
+            }
+        }
+    };
+
+    TokenStream::from(expanded)
+}
+
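+// Fallback naming: with no `#[table_name = "..."]` attribute, the table name
+// defaults to the lowercased struct name plus an "s" suffix (e.g. `User` ->
+// "users"); with no `#[primary_key]` field, the key defaults to `id`.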
+fn extract_table_name(input: &DeriveInput) -> String {
+    for attr in &input.attrs {
+        if attr.path().is_ident("table_name") {
+            if let Ok(meta) = attr.parse_args::<LitStr>() {
+                return meta.value();
+            }
+        }
+    }
+    let name = input.ident.to_string().to_lowercase();
+    format!("{}s", name)
+}
+
+fn extract_primary_key(input: &DeriveInput) -> syn::Ident {
+    if let Data::Struct(DataStruct { fields: Fields::Named(ref fields), .. }) = input.data {
+        for field in &fields.named {
+            for attr in &field.attrs {
+                if attr.path().is_ident("primary_key") {
+                    return field.ident.clone().unwrap();
+                }
+            }
+        }
+    }
+    syn::parse_str("id").unwrap()
+}
diff --git a/crates/rohas-orm/.github/workflows/publish-pypi.yml b/crates/rohas-orm/.github/workflows/publish-pypi.yml
new file mode 100644
index 0000000..97a1ad1
--- /dev/null
+++ b/crates/rohas-orm/.github/workflows/publish-pypi.yml
@@ -0,0 +1,35 @@
+name: Publish to PyPI
+
+on:
+  release:
+    types: [created]
+  workflow_dispatch:
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Install Rust
+        uses: dtolnay/rust-toolchain@stable
+
+      - name: Install maturin
+        uses: PyO3/maturin-action@v1
+        with:
+          manylinux: auto
+          command: build
+          args: --release --out dist
+
+      - name: Build wheels
+        run: |
+          cd crates/rohas-orm
+          maturin build --release --out dist
+
+      - name: Publish to PyPI
+        env:
+          MATURIN_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
+        run: |
+          cd crates/rohas-orm
+          maturin publish --username __token__ --password $MATURIN_PASSWORD
diff --git a/crates/rohas-orm/.gitignore b/crates/rohas-orm/.gitignore
new file mode 100644
index 0000000..5716391
--- /dev/null
+++ b/crates/rohas-orm/.gitignore
@@ -0,0 +1,30 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+*.egg
+*.egg-info/
+dist/
+build/
+.eggs/
+.venv/
+venv/
+ENV/
+env/
+
+# Rust
+target/
+Cargo.lock
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+
+# OS
+.DS_Store
+Thumbs.db
\ No newline at end of file
diff --git a/crates/rohas-orm/Cargo.toml b/crates/rohas-orm/Cargo.toml
new file mode 100644
index 0000000..cdf347f
--- /dev/null
+++ b/crates/rohas-orm/Cargo.toml
@@ -0,0 +1,44 @@
+[package]
+name = "rohas-orm"
+version = { workspace = true }
+edition = { workspace = true }
+authors = { workspace = true }
+license = { workspace = true }
+description = "ORM for Rohas with Rust macros, Python annotations, codegen, and query builder"
+repository = { workspace = true }
+
+[lib]
+name = "rohas_orm"
+crate-type = ["cdylib", "rlib"]
+path = "src/lib.rs"
+
+[dependencies]
+rohas-orm-macros = { version = "0.1.0", path = "../rohas-orm-macros" }
+rohas-parser = { workspace = true }
+
+tokio = { workspace = true }
+futures = { workspace = true }
+
+serde = { workspace = true }
+serde_json = { workspace = true }
+
+anyhow = { workspace = true }
+thiserror = { workspace = true }
+
+tracing = { workspace = true }
+
+sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "mysql", "sqlite", "chrono", "uuid"] }
+async-trait = "0.1"
+
+pyo3 = { version = "0.27.1", features = ["auto-initialize", "abi3-py310"] }
+
+uuid = { workspace = true }
+
+chrono = { workspace = true }
+
+[dev-dependencies]
+tokio-test = "0.4"
+tracing-subscriber = { workspace = true }
+tempfile = "3.8"
diff --git a/crates/rohas-orm/Makefile b/crates/rohas-orm/Makefile
new file mode 100644
index 0000000..454daf5
--- /dev/null
+++ b/crates/rohas-orm/Makefile
@@ -0,0 +1,40 @@
+.PHONY: build develop test publish clean
+
+# Build the Python package
+build:
+	maturin build --release
+
+# Install in development mode (requires virtualenv)
+develop:
+	@if [ -z "$$VIRTUAL_ENV" ] && [ -z "$$CONDA_PREFIX" ] && [ ! -d ".venv" ]; then \
+		echo "Creating virtualenv..."; \
+		python3 -m venv .venv; \
+		echo "Activate with: source .venv/bin/activate"; \
+		exit 1; \
+	fi
+	maturin develop
+
+# Run tests
+test:
+	python -m pytest tests/ || echo "No tests yet"
+
+# Publish to PyPI (requires PYPI_TOKEN)
+publish:
+	maturin publish --username __token__ --password $(PYPI_TOKEN)
+
+# Clean build artifacts
+clean:
+	cargo clean
+	rm -rf target/wheels
+	rm -rf dist
+	rm -rf *.egg-info
+
+# Check if maturin is installed
+check-maturin:
+	@which maturin > /dev/null || (echo "maturin not found. Install with: pip install maturin" && exit 1)
+
+# Full development setup
+setup: check-maturin
+	pip install maturin
+	maturin develop
diff --git a/crates/rohas-orm/README.md b/crates/rohas-orm/README.md
new file mode 100644
index 0000000..f5e603e
--- /dev/null
+++ b/crates/rohas-orm/README.md
@@ -0,0 +1,208 @@
+# rohas-orm (WIP)
+
+A modern ORM for Rohas with Rust macros, Python annotations, code generation, and a fluent query builder.
+
+## Features
+
+- **Rust Macros**: Derive macros for models, queries, and relationships
+- **Python Annotations**: Full Python support via PyO3
+- **Code Generation**: Generate type-safe models from Rohas schemas
+- **Query Builder**: Fluent API for building complex SQL queries
+- **Multi-database**: Support for PostgreSQL, MySQL, and SQLite
+- **Async/Await**: Built on Tokio for async operations
+
+## Installation
+
+Add to your `Cargo.toml`:
+
+```toml
+[dependencies]
+rohas-orm = { path = "../rohas-orm" }
+```
+
+## Usage
+
+### Rust
+
+```rust
+use rohas_orm::prelude::*;
+use serde::{Deserialize, Serialize};
+
+#[derive(Model, Debug, Clone, Serialize, Deserialize)]
+#[table_name = "users"]
+struct User {
+    #[primary_key]
+    id: i64,
+    name: String,
+    email: String,
+    created_at: chrono::DateTime<chrono::Utc>,
+}
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    let db = Database::connect("postgresql://localhost/mydb").await?;
+
+    // Find by ID
+    let user = User::find_by_id(&db, 1).await?;
+    println!("User: {:?}", user);
+
+    // Find all
+    let users = User::find_all(&db).await?;
+
+    // Create new user
+    let new_user = User {
+        id: 0,
+        name: "John Doe".to_string(),
+        email: "john@example.com".to_string(),
+        created_at: chrono::Utc::now(),
+    };
+    new_user.save(&db).await?;
+
+    // Query builder
+    let query = QueryBuilder::select(&["id", "name"])
+        .from("users")
+        .where_eq("email", "john@example.com")
+        .order_by("name", "ASC")
+        .limit(10);
+
+    let results = query.execute(&db).await?;
+
+    Ok(())
+}
+```
+
+### Python
+
+```python
+from rohas_orm import Model, Database, Field
+from datetime import datetime
+
+class User(Model):
+    id: int = Field(primary_key=True)
+    name: str
+    email: str
+    created_at: datetime
+
+async def main():
+    db = Database("postgresql://localhost/mydb")
+
+    # Find by ID
+    user = await User.find_by_id(db, 1)
+    print(f"User: {user}")
+
+    # Query builder
+    query = QueryBuilder.select_all() \
+        .from_("users") \
+        .where_eq("email", "john@example.com") \
+        .order_by("name", "ASC") \
+        .limit(10)
+
+    results = await db.query(query)
+    print(results)
+```
+
+## Code Generation
+
+Generate models from Rohas schema files with full support for relationships and attributes:
+
+### From Schema Directory
+
+```rust
+use rohas_orm::prelude::*;
+
+// Load all .ro files from a directory
+let mut codegen = Codegen::new("src/generated".into());
+codegen.load_schema_dir("schema/models")?;
+
+// Generate Rust models
+codegen.generate_rust_models()?;
+
+// Generate Python models
+codegen.generate_python_models()?;
+```
+
+### From Single Schema File
+
+```rust
+use rohas_orm::prelude::*;
+
+let mut codegen = Codegen::new("src/generated".into());
+codegen.load_schema_file("schema/models/user.ro")?;
+codegen.generate_rust_models()?;
+```
+
+### Schema Features Supported
+
+- **Attributes**: `@id`, `@auto`, `@unique`, `@default(now)`, `@relation`
+- **Relationships**:
+  - One-to-One: `user User?` or `user User`
+  - One-to-Many: `posts Post[]`
+  - Many-to-Many: `tags Tag[]` (with join table)
+  - BelongsTo: `userId Int @relation(User)`
+- **Field Types**: `Int`, `String`, `Boolean`, `Float`, `DateTime`, `Json`, `Custom`
+- **Optional Fields**: `email String?`
+
+### Example Schema
+
+```rohas
+model User {
+    id        Int      @id @auto
+    name      String
+    email     String   @unique
+    createdAt DateTime @default(now)
+    posts     Post[]
+}
+
+model Post {
+    id        Int      @id @auto
+    title     String
+    content   String
+    userId    Int      @relation(User)
+    author    User?    @relation(userId)
+    createdAt DateTime @default(now)
+}
+```
+
+This will generate:
+
+- Model structs with proper types
+- Relationship loading methods (`load_posts()`, `load_author()`)
+- Foreign key handling
+- Attribute support (primary keys, unique constraints, defaults)
+
+## Query Builder
+
+The query builder provides a fluent API for constructing SQL queries:
+
+```rust
+// SELECT
+let query = QueryBuilder::select(&["id", "name", "email"])
+    .from("users")
+    .where_eq("active", "true")
+    .order_by("created_at", "DESC")
+    .limit(10)
+    .offset(0);
+
+// INSERT
+let query = QueryBuilder::insert("users")
+    .values(vec!["1", "John Doe", "john@example.com"]);
+
+// UPDATE
+let query = QueryBuilder::update("users")
+    .set("name", "Jane Doe")
+    .where_eq_num("id", 1);
+
+// DELETE
+let query = QueryBuilder::delete("users")
+    .where_eq_num("id", 1);
+```
+
+## Database Support
+
+- **PostgreSQL**: `postgresql://user:pass@host/dbname`
+- **MySQL**: `mysql://user:pass@host/dbname`
+- **SQLite**: `sqlite://path/to/database.db`
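+
+## Migrations
+
+Migrations are managed through the `rohas` CLI (see `rohas db --help` for the
+full flag list; the invocations below are illustrative):
+
+```bash
+# Create the _rohas_migrations tracking table and generate/apply a first
+# migration from the schema directory
+rohas db init --url postgresql://localhost/mydb --name init_schema --schema schema
+
+# Apply any pending migrations
+rohas db migrate --url postgresql://localhost/mydb
+
+# Roll back the most recently applied migration
+rohas db revert --url postgresql://localhost/mydb --count 1
+```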
"https://github.com/rohas-dev/rohas/issues" diff --git a/crates/rohas-orm/rohas_orm/__init__.py b/crates/rohas-orm/rohas_orm/__init__.py new file mode 100644 index 0000000..96aac37 --- /dev/null +++ b/crates/rohas-orm/rohas_orm/__init__.py @@ -0,0 +1,201 @@ +""" +rohas_orm - A modern ORM with Rust macros, Python annotations, codegen, and query builder. + +This package is a Python extension module built with PyO3. +The Python bindings are defined in src/python.rs and compiled to a native extension. +""" + +__version__ = "0.1.0" + +try: + import sys + import importlib.util + import os + + Database = None + QueryBuilder = None + Field = None + Table = None + Index = None + Unique = None + connect = None + + try: + from rohas_orm import rohas_orm as _extension + Database = getattr(_extension, 'PyDatabase', None) + QueryBuilder = getattr(_extension, 'PyQueryBuilder', None) + Field = getattr(_extension, 'PyField', None) + Table = getattr(_extension, 'PyTable', None) + Index = getattr(_extension, 'PyIndex', None) + Unique = getattr(_extension, 'PyUnique', None) + connect = getattr(_extension, 'connect', None) + + if Database is None or QueryBuilder is None or Field is None or connect is None: + available = [x for x in dir(_extension) if not x.startswith('_')] + raise ImportError( + f"Extension module imported but missing required components.\n" + f"Database: {Database is not None}, QueryBuilder: {QueryBuilder is not None}, " + f"Field: {Field is not None}, connect: {connect is not None}\n" + f"Available in extension: {available}" + ) + except ImportError as e: + try: + import importlib + _extension = importlib.import_module('rohas_orm') + if hasattr(_extension, 'PyDatabase'): + Database = _extension.PyDatabase + QueryBuilder = _extension.PyQueryBuilder + Field = _extension.PyField + Table = getattr(_extension, 'PyTable', None) + Index = getattr(_extension, 'PyIndex', None) + Unique = getattr(_extension, 'PyUnique', None) + connect = _extension.connect + else: + raise e + except Exception: + if 'rohas_orm.rohas_orm' in sys.modules: + try: + _ext_mod = sys.modules['rohas_orm.rohas_orm'] + if hasattr(_ext_mod, 'PyDatabase'): + Database = _ext_mod.PyDatabase + QueryBuilder = _ext_mod.PyQueryBuilder + Field = _ext_mod.PyField + Table = getattr(_ext_mod, 'PyTable', None) + Index = getattr(_ext_mod, 'PyIndex', None) + Unique = getattr(_ext_mod, 'PyUnique', None) + connect = _ext_mod.connect + except Exception: + pass + except Exception as e: + import warnings + warnings.warn(f"Failed to load rohas_orm extension: {e}", ImportWarning) + pass + + if Database is None or QueryBuilder is None or Field is None or connect is None: + current_dir = os.path.dirname(os.path.abspath(__file__)) + so_path = os.path.join(current_dir, "rohas_orm.abi3.so") + + error_details = [] + error_details.append(f"Database: {Database is not None}") + error_details.append(f"QueryBuilder: {QueryBuilder is not None}") + error_details.append(f"Field: {Field is not None}") + error_details.append(f"connect: {connect is not None}") + error_details.append(f"Extension .so exists: {os.path.exists(so_path) if so_path else False}") + + if 'rohas_orm.rohas_orm' in sys.modules: + error_details.append("Extension module found in sys.modules") + else: + error_details.append("Extension module NOT in sys.modules") + + raise ImportError( + f"rohas_orm extension module not available.\n" + f"Details: {', '.join(error_details)}\n\n" + f"The extension module needs to be built and installed.\n\n" + f"For development:\n" + f" cd crates/rohas-orm\n" + f" 
maturin develop\n\n" + f"Or install via pip:\n" + f" pip install rohas-orm" + ) + + if Table is None: + def _raise_table_error(*args, **kwargs): + raise ImportError("Table decorator requires rohas_orm extension. Please rebuild the package.") + + class _TableStub: + def __init__(self, **kwargs): + _raise_table_error() + def __call__(self, cls): + _raise_table_error() + Table = _TableStub + + if Index is None: + def _raise_index_error(*args, **kwargs): + raise ImportError("Index decorator requires rohas_orm extension. Please rebuild the package.") + + class _IndexStub: + def __init__(self, **kwargs): + _raise_index_error() + def __call__(self, cls): + _raise_index_error() + Index = _IndexStub + + if Unique is None: + def _raise_unique_error(*args, **kwargs): + raise ImportError("Unique decorator requires rohas_orm extension. Please rebuild the package.") + + class _UniqueStub: + def __init__(self, **kwargs): + _raise_unique_error() + def __call__(self, cls): + _raise_unique_error() + Unique = _UniqueStub + + from .model import Model + + __all__ = [ + "Database", + "QueryBuilder", + "Field", + "Table", + "Index", + "Unique", + "Model", + "connect", + "__version__", + ] +except (ImportError, AttributeError, OSError, FileNotFoundError) as e: + __all__ = ["__version__"] + + _error_msg = ( + "rohas_orm extension module not found.\n" + "The Python bindings are compiled from Rust code in src/python.rs.\n\n" + "To install:\n" + " pip install rohas-orm\n\n" + "For development:\n" + " cd crates/rohas-orm\n" + " maturin develop" + ) + + def _raise_error(): + raise ImportError(_error_msg) + + class Database: + """Database connection - requires compiled extension""" + def __init__(self, url: str): + _raise_error() + + class QueryBuilder: + """Query builder - requires compiled extension""" + def __init__(self): + _raise_error() + + class Field: + """Field annotation - requires compiled extension""" + def __init__(self, **kwargs): + _raise_error() + + class Table: + """Table decorator - requires compiled extension""" + def __init__(self, **kwargs): + _raise_error() + + class Index: + """Index decorator - requires compiled extension""" + def __init__(self, **kwargs): + _raise_error() + + class Unique: + """Unique constraint decorator - requires compiled extension""" + def __init__(self, **kwargs): + _raise_error() + + class Model: + """Model base class - requires compiled extension""" + def __init__(self, **kwargs): + _raise_error() + + def connect(url: str): + """Connect to database - requires compiled extension""" + _raise_error() + diff --git a/crates/rohas-orm/rohas_orm/model.py b/crates/rohas-orm/rohas_orm/model.py new file mode 100644 index 0000000..fb508e3 --- /dev/null +++ b/crates/rohas-orm/rohas_orm/model.py @@ -0,0 +1,198 @@ +""" +Model base class for rohas-orm. + +This provides the base Model class that generated models inherit from. +""" + +from typing import Optional, Dict, Any, List, TypeVar, Type +from rohas_orm import Database, QueryBuilder, Field +from .utils import dict_from_pydict, sanitize_sql_value + +T = TypeVar('T', bound='Model') + + +class Model: + """ + Base class for ORM models. + + Generated models should inherit from this class. + """ + + def __init__(self, **kwargs): + """ + Initialize model instance with keyword arguments. + + Args: + **kwargs: Field values to set on the instance + """ + for key, value in kwargs.items(): + setattr(self, key, value) + + @classmethod + def table_name(cls) -> str: + """Get the table name for this model. 
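+        E.g. a class named ``User`` maps to table ``users`` by default.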
+ + Checks for __table_name__ attribute set by @Table decorator, + otherwise defaults to lowercase class name + 's'. + """ + # Check if table name was set by @Table decorator + if hasattr(cls, '__table_name__'): + return cls.__table_name__ + return f"{cls.__name__.lower()}s" + + @classmethod + def primary_key(cls) -> str: + """Get the primary key field name (defaults to 'id').""" + # Check for fields with primary_key=True + if hasattr(cls, '__annotations__'): + for field_name, field_value in cls.__annotations__.items(): + if hasattr(cls, field_name): + field = getattr(cls, field_name) + if isinstance(field, Field) and hasattr(field, 'primary_key') and field.primary_key: + return field_name + return "id" + + @classmethod + def find_by_id(cls: Type[T], db: Database, id: int) -> Optional[T]: + """ + Find a model by its primary key. + + Args: + db: Database connection + id: Primary key value + + Returns: + Model instance or None if not found + """ + query = QueryBuilder.select_all() + query.from_(cls.table_name()) + query.where_eq_num(cls.primary_key(), id) + query.limit(1) + + results = db.query(query) + if not results or len(results) == 0: + return None + + # Convert result dict to model instance + # results is a list of PyDict objects + row = results[0] + data = dict_from_pydict(row) + return cls.from_dict(data) + + @classmethod + def find_all(cls: Type[T], db: Database) -> List[T]: + """ + Find all models. + + Args: + db: Database connection + + Returns: + List of model instances + """ + query = QueryBuilder.select_all() + query.from_(cls.table_name()) + results = db.query(query) + instances = [] + for row in results: + data = dict_from_pydict(row) + instances.append(cls.from_dict(data)) + return instances + + @classmethod + def from_dict(cls: Type[T], data: Dict[str, Any]) -> T: + """ + Create a model instance from a dictionary. + + Args: + data: Dictionary with field values + + Returns: + Model instance + """ + instance = cls.__new__(cls) + for key, value in data.items(): + setattr(instance, key, value) + return instance + + def to_dict(self) -> Dict[str, Any]: + """ + Convert model instance to dictionary. + + Returns: + Dictionary with field values + """ + result = {} + if hasattr(self, '__annotations__'): + for field_name in self.__annotations__.keys(): + if hasattr(self, field_name): + result[field_name] = getattr(self, field_name) + return result + + def save(self, db: Database) -> None: + """ + Save the model (insert or update). + + Args: + db: Database connection + """ + pk_field = self.primary_key() + pk_value = getattr(self, pk_field, None) + + data = self.to_dict() + + existing = None + if pk_value is not None: + existing = self.__class__.find_by_id(db, pk_value) + + if existing: + query = QueryBuilder.update(self.table_name()) + for key, value in data.items(): + if key != pk_field: + query.set(key, sanitize_sql_value(value)) + query.where_eq_num(pk_field, pk_value) + db.execute(query.to_sql()) + else: + columns = list(data.keys()) + values = [sanitize_sql_value(data[col]) for col in columns] + + query = QueryBuilder.insert(self.table_name()) + query.values(values) + db.execute(query.to_sql()) + + def delete(self, db: Database) -> None: + """ + Delete the model from the database. 
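+        Issues a single DELETE keyed on the primary key; the in-memory
+        instance is left unchanged.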
+
+        Args:
+            db: Database connection
+        """
+        pk_field = self.primary_key()
+        pk_value = getattr(self, pk_field)
+
+        query = QueryBuilder.delete(self.table_name())
+        query.where_eq_num(pk_field, pk_value)
+
+        db.execute(query.to_sql())
+
+    @classmethod
+    def create(cls: Type[T], db: Database, **kwargs) -> T:
+        """
+        Create a new model instance and save it.
+
+        Args:
+            db: Database connection
+            **kwargs: Field values
+
+        Returns:
+            Created model instance
+        """
+        instance = cls(**kwargs)
+        instance.save(db)
+        return instance
+
+    def __repr__(self) -> str:
+        """String representation of the model."""
+        fields = ", ".join(f"{k}={v!r}" for k, v in self.to_dict().items())
+        return f"{self.__class__.__name__}({fields})"
+
diff --git a/crates/rohas-orm/rohas_orm/utils.py b/crates/rohas-orm/rohas_orm/utils.py
new file mode 100644
index 0000000..2cb1945
--- /dev/null
+++ b/crates/rohas-orm/rohas_orm/utils.py
@@ -0,0 +1,46 @@
+"""
+Utility functions for rohas-orm.
+"""
+
+from typing import Any, Dict
+
+
+def dict_from_pydict(pydict) -> Dict[str, Any]:
+    """
+    Convert a PyDict to a regular Python dict.
+
+    Args:
+        pydict: PyDict object from PyO3
+
+    Returns:
+        Regular Python dictionary
+    """
+    result = {}
+    for key in pydict.keys():
+        result[key] = pydict[key]
+    return result
+
+
+def sanitize_sql_value(value: Any) -> str:
+    """
+    Sanitize a value for use in SQL queries.
+
+    Args:
+        value: Value to sanitize
+
+    Returns:
+        SQL-safe string representation
+    """
+    if value is None:
+        return "NULL"
+    elif isinstance(value, bool):
+        # bool is a subclass of int in Python, so it must be checked
+        # before the numeric branch or booleans render as "True"/"False"
+        return "1" if value else "0"
+    elif isinstance(value, str):
+        escaped = value.replace("'", "''")
+        return f"'{escaped}'"
+    elif isinstance(value, (int, float)):
+        return str(value)
+    else:
+        escaped = str(value).replace("'", "''")
+        return f"'{escaped}'"
+
diff --git a/crates/rohas-orm/src/codegen.rs b/crates/rohas-orm/src/codegen.rs
new file mode 100644
index 0000000..948a898
--- /dev/null
+++ b/crates/rohas-orm/src/codegen.rs
@@ -0,0 +1,606 @@
+use crate::error::{Error, Result};
+use rohas_parser::ast::{FieldType, Model as ParserModel, Schema};
+use rohas_parser::Parser;
+use std::collections::HashSet;
+use std::fs;
+use std::path::{Path, PathBuf};
+
+#[derive(Debug, Clone)]
+pub struct Relationship {
+    pub field_name: String,
+    pub related_model: String,
+    pub relationship_type: RelationshipType,
+    pub foreign_key: Option<String>,
+}
+
+#[derive(Debug, Clone, PartialEq)]
+pub enum RelationshipType {
+    OneToOne,
+    OneToMany,
+    ManyToMany,
+    BelongsTo,
+}
+
+pub struct Codegen {
+    pub output_dir: PathBuf,
+    pub models: Vec<ParserModel>,
+    pub model_names: HashSet<String>,
+}
+
+impl Codegen {
+    pub fn new(output_dir: PathBuf) -> Self {
+        Self {
+            output_dir,
+            models: Vec::new(),
+            model_names: HashSet::new(),
+        }
+    }
+
+    pub fn load_schema_dir<P: AsRef<Path>>(&mut self, schema_dir: P) -> Result<()> {
+        let schema_dir = schema_dir.as_ref();
+
+        if !schema_dir.exists() {
+            return Err(Error::Codegen(format!("Schema directory does not exist: {:?}", schema_dir)));
+        }
+
+        let mut schema = Schema::new();
+
+        self.find_and_parse_ro_files(schema_dir, &mut schema)?;
+
+        schema.validate()
+            .map_err(|e| Error::Codegen(format!("Schema validation failed: {}", e)))?;
+
+        self.models = schema.models;
+        self.model_names = self.models.iter().map(|m| m.name.clone()).collect();
+
+        Ok(())
+    }
+
+    pub fn load_schema_file<P: AsRef<Path>>(&mut self, schema_file: P) -> Result<()> {
+        let schema = Parser::parse_file(schema_file)
+            .map_err(|e| Error::Codegen(format!("Failed to parse schema file: {}", e)))?;
+
+        schema.validate()
+            .map_err(|e| 
Error::Codegen(format!("Schema validation failed: {}", e)))?;
+
+        self.models = schema.models;
+        self.model_names = self.models.iter().map(|m| m.name.clone()).collect();
+
+        Ok(())
+    }
+
+    fn find_and_parse_ro_files(&self, dir: &Path, schema: &mut Schema) -> Result<()> {
+        let entries = fs::read_dir(dir)
+            .map_err(|e| Error::Codegen(format!("Failed to read directory {:?}: {}", dir, e)))?;
+
+        for entry in entries {
+            let entry = entry.map_err(|e| Error::Codegen(format!("Failed to read directory entry: {}", e)))?;
+            let path = entry.path();
+
+            if path.is_dir() {
+                self.find_and_parse_ro_files(&path, schema)?;
+            } else if path.extension().and_then(|s| s.to_str()) == Some("ro") {
+                let file_schema = Parser::parse_file(&path)
+                    .map_err(|e| Error::Codegen(format!("Failed to parse {:?}: {}", path, e)))?;
+
+                schema.models.extend(file_schema.models);
+                schema.apis.extend(file_schema.apis);
+                schema.events.extend(file_schema.events);
+                schema.crons.extend(file_schema.crons);
+                schema.inputs.extend(file_schema.inputs);
+                schema.websockets.extend(file_schema.websockets);
+            }
+        }
+
+        Ok(())
+    }
+
+    pub fn generate_rust_models(&self) -> Result<()> {
+        std::fs::create_dir_all(&self.output_dir)
+            .map_err(|e| Error::Codegen(format!("Failed to create output directory: {}", e)))?;
+
+        let mut mod_rs = String::new();
+        mod_rs.push_str("// Auto-generated models - DO NOT EDIT\n\n");
+
+        for model in &self.models {
+            let rust_code = self.generate_rust_model(model)?;
+            let file_path = self.output_dir.join(format!("{}.rs", model.name.to_lowercase()));
+
+            std::fs::write(&file_path, rust_code)
+                .map_err(|e| Error::Codegen(format!("Failed to write file {:?}: {}", file_path, e)))?;
+
+            mod_rs.push_str(&format!("pub mod {};\n", model.name.to_lowercase()));
+        }
+
+        std::fs::write(self.output_dir.join("mod.rs"), mod_rs)
+            .map_err(|e| Error::Codegen(format!("Failed to write mod.rs: {}", e)))?;
+
+        Ok(())
+    }
+
+    pub fn generate_python_models(&self) -> Result<()> {
+        std::fs::create_dir_all(&self.output_dir)
+            .map_err(|e| Error::Codegen(format!("Failed to create output directory: {}", e)))?;
+
+        // Generate __init__.py
+        let mut init_py = String::new();
+        init_py.push_str("# Auto-generated models - DO NOT EDIT\n\n");
+
+        for model in &self.models {
+            let python_code = self.generate_python_model(model)?;
+            let file_path = self.output_dir.join(format!("{}.py", model.name.to_lowercase()));
+
+            std::fs::write(&file_path, python_code)
+                .map_err(|e| Error::Codegen(format!("Failed to write file {:?}: {}", file_path, e)))?;
+
+            init_py.push_str(&format!("from .{} import {}\n", model.name.to_lowercase(), model.name));
+        }
+
+        init_py.push_str("\n__all__ = [\n");
+        for model in &self.models {
+            init_py.push_str(&format!("    '{}',\n", model.name));
+        }
+        init_py.push_str("]\n");
+
+        std::fs::write(self.output_dir.join("__init__.py"), init_py)
+            .map_err(|e| Error::Codegen(format!("Failed to write __init__.py: {}", e)))?;
+
+        Ok(())
+    }
+
+    /// Detect relationships for a model
+    fn detect_relationships(&self, model: &ParserModel) -> Vec<Relationship> {
+        let mut relationships = Vec::new();
+
+        for field in &model.fields {
+            if let FieldType::Custom(ref model_name) = field.field_type {
+                if self.model_names.contains(model_name) {
+                    // Check for relationship attributes
+                    let relation_attr = field.attributes.iter().find(|a| a.name == "relation");
+                    let one_to_one_attr = field.attributes.iter().find(|a| a.name == "oneToOne");
+                    let one_to_many_attr = field.attributes.iter().find(|a| a.name == "oneToMany");
+                    let many_to_many_attr = 
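+                        // Explicit attributes take precedence below: @manyToMany, then @oneToMany,
+                        // then @oneToOne; optional scalar references fall back to BelongsTo.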
field.attributes.iter().find(|a| a.name == "manyToMany"); + + let foreign_key = relation_attr + .and_then(|a| a.args.first()) + .cloned(); + + // Determine relationship type based on attributes or field type + let rel_type = if many_to_many_attr.is_some() { + RelationshipType::ManyToMany + } else if one_to_many_attr.is_some() { + RelationshipType::OneToMany + } else if one_to_one_attr.is_some() { + RelationshipType::OneToOne + } else if field.optional { + RelationshipType::BelongsTo + } else { + RelationshipType::OneToOne + }; + + relationships.push(Relationship { + field_name: field.name.clone(), + related_model: model_name.clone(), + relationship_type: rel_type, + foreign_key, + }); + } + } else if let FieldType::Array(inner) = &field.field_type { + if let FieldType::Custom(ref model_name) = **inner { + if self.model_names.contains(model_name) { + // Check for explicit relationship type + let many_to_many_attr = field.attributes.iter().find(|a| a.name == "manyToMany"); + let one_to_many_attr = field.attributes.iter().find(|a| a.name == "oneToMany"); + + let rel_type = if many_to_many_attr.is_some() { + RelationshipType::ManyToMany + } else if one_to_many_attr.is_some() { + RelationshipType::OneToMany + } else { + RelationshipType::ManyToMany // Default for arrays + }; + + relationships.push(Relationship { + field_name: field.name.clone(), + related_model: model_name.clone(), + relationship_type: rel_type, + foreign_key: None, + }); + } + } + } + } + + relationships + } + + fn generate_rust_model(&self, model: &ParserModel) -> Result { + let mut code = String::new(); + + code.push_str("// Auto-generated - DO NOT EDIT\n"); + code.push_str("use rohas_orm::prelude::*;\n"); + code.push_str("use serde::{Deserialize, Serialize};\n"); + code.push_str("use chrono::{DateTime, Utc};\n\n"); + + // Generate relationships + let relationships = self.detect_relationships(model); + if !relationships.is_empty() { + for rel in &relationships { + code.push_str(&format!("use super::{};\n", rel.related_model.to_lowercase())); + } + code.push_str("\n"); + } + + code.push_str("#[derive(Model, Debug, Clone, Serialize, Deserialize)]\n"); + code.push_str(&format!("#[table_name = \"{}\"]\n", model.name.to_lowercase())); + code.push_str(&format!("pub struct {} {{\n", model.name)); + + let mut has_primary_key = false; + for field in &model.fields { + // Handle attributes + let is_primary = field.attributes.iter().any(|attr| attr.name == "id"); + let is_unique = field.attributes.iter().any(|attr| attr.name == "unique"); + let has_default = field.attributes.iter().find(|attr| attr.name == "default"); + let is_auto = field.attributes.iter().any(|attr| attr.name == "auto"); + + if is_primary && !has_primary_key { + code.push_str(" #[primary_key]\n"); + has_primary_key = true; + } + + let mut rust_type = if let FieldType::Custom(ref model_name) = field.field_type { + if self.model_names.contains(model_name) { + if field.optional { + format!("Option<{}>", model_name) + } else { + model_name.clone() + } + } else { + self.field_type_to_rust(&field.field_type) + } + } else if let FieldType::Array(inner) = &field.field_type { + if let FieldType::Custom(ref model_name) = **inner { + if self.model_names.contains(model_name) { + format!("Vec<{}>", model_name) + } else { + self.field_type_to_rust(&field.field_type) + } + } else { + self.field_type_to_rust(&field.field_type) + } + } else { + self.field_type_to_rust(&field.field_type) + }; + + if field.optional && !rust_type.starts_with("Option<") && !rust_type.starts_with("Vec<") { 
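+                // Only wrap plain scalars: Option<...> and Vec<...> are already optional/collection-shaped.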
+ rust_type = format!("Option<{}>", rust_type); + } + + if let Some(default) = has_default { + if let Some(default_value) = default.args.first() { + match default_value.as_str() { + "now" => { + code.push_str(" #[serde(default)]\n"); + } + _ => { + // For other defaults, we'll handle them in the struct initialization + } + } + } + } + + code.push_str(&format!(" pub {}: {},\n", field.name, rust_type)); + } + + code.push_str("}\n\n"); + + // Generate relationship methods + for rel in &relationships { + match rel.relationship_type { + RelationshipType::BelongsTo | RelationshipType::OneToOne => { + code.push_str(&format!( + "impl {} {{\n", + model.name + )); + code.push_str(&format!( + " pub async fn load_{}(&self, db: &Database) -> Result> {{\n", + rel.field_name, rel.related_model + )); + if let Some(ref fk) = rel.foreign_key { + code.push_str(&format!( + " if let Some(fk_value) = self.{} {{\n", + fk + )); + code.push_str(&format!( + " {}::find_by_id(db, fk_value).await\n", + rel.related_model + )); + code.push_str(" } else {\n"); + code.push_str(" Ok(None)\n"); + code.push_str(" }\n"); + } else { + code.push_str(&format!( + " if let Some(fk_value) = self.{} {{\n", + rel.field_name + )); + code.push_str(&format!( + " {}::find_by_id(db, fk_value).await\n", + rel.related_model + )); + code.push_str(" } else {\n"); + code.push_str(" Ok(None)\n"); + code.push_str(" }\n"); + } + code.push_str(" }\n"); + code.push_str("}\n\n"); + } + RelationshipType::OneToMany => { + code.push_str(&format!( + "impl {} {{\n", + model.name + )); + code.push_str(&format!( + " pub async fn load_{}(&self, db: &Database) -> Result> {{\n", + rel.field_name, rel.related_model + )); + code.push_str(&format!( + " use rohas_orm::Query;\n", + )); + code.push_str(&format!( + " let query = QueryBuilder::select_all()\n" + )); + code.push_str(&format!( + " .from(\"{}\")\n", + rel.related_model.to_lowercase() + )); + code.push_str(&format!( + " .where_eq_num(\"{}\", self.id);\n", + format!("{}Id", model.name.to_lowercase()) + )); + code.push_str(" let results = query.execute(db).await?;\n"); + code.push_str(&format!( + " results.into_iter()\n" + )); + code.push_str(&format!( + " .map(|v| serde_json::from_value(v).map_err(|e| Error::Serialization(e)))\n" + )); + code.push_str(&format!( + " .collect::>>()\n", + rel.related_model + )); + code.push_str(" }\n"); + code.push_str("}\n\n"); + } + RelationshipType::ManyToMany => { + // Many-to-many relationships need a join table + code.push_str(&format!( + "impl {} {{\n", + model.name + )); + code.push_str(&format!( + " pub async fn load_{}(&self, db: &Database) -> Result> {{\n", + rel.field_name, rel.related_model + )); + code.push_str(" // Many-to-many relationship - implement join table query\n"); + code.push_str(" todo!(\"Many-to-many relationships require join table implementation\")\n"); + code.push_str(" }\n"); + code.push_str("}\n\n"); + } + } + } + + Ok(code) + } + + fn generate_python_model(&self, model: &ParserModel) -> Result { + let mut code = String::new(); + + code.push_str("# Auto-generated - DO NOT EDIT\n"); + code.push_str("from rohas_orm import Model, Field, Database, Table, Index, Unique\n"); + code.push_str("from datetime import datetime\n"); + code.push_str("from typing import Optional, List\n\n"); + + let relationships = self.detect_relationships(model); + if !relationships.is_empty() { + for rel in &relationships { + code.push_str(&format!("from .{} import {}\n", rel.related_model.to_lowercase(), rel.related_model)); + } + code.push_str("\n"); + } + + let mut 
decorators = Vec::new(); + + let table_name = self.get_table_name(model); + decorators.push(format!("@Table(name=\"{}\")", table_name)); + + let unique_fields: Vec = model.fields + .iter() + .filter(|f| f.attributes.iter().any(|attr| attr.name == "unique")) + .map(|f| f.name.clone()) + .collect(); + + if !unique_fields.is_empty() { + let fields_str = unique_fields.iter() + .map(|f| format!("\"{}\"", f)) + .collect::>() + .join(", "); + decorators.push(format!("@Unique(fields=[{}])", fields_str)); + } + + let index_fields: Vec = model.fields + .iter() + .filter(|f| f.attributes.iter().any(|attr| attr.name == "index")) + .map(|f| f.name.clone()) + .collect(); + + if !index_fields.is_empty() { + let fields_str = index_fields.iter() + .map(|f| format!("\"{}\"", f)) + .collect::>() + .join(", "); + decorators.push(format!("@Index(fields=[{}])", fields_str)); + } + + for decorator in &decorators { + code.push_str(&format!("{}\n", decorator)); + } + + code.push_str(&format!("class {}(Model):\n", model.name)); + code.push_str(&format!(" \"\"\"Generated model for {}\"\"\"\n\n", model.name)); + + let mut has_primary_key = false; + for field in &model.fields { + let is_primary = field.attributes.iter().any(|attr| attr.name == "id"); + let is_unique = field.attributes.iter().any(|attr| attr.name == "unique"); + let has_default = field.attributes.iter().find(|attr| attr.name == "default"); + let is_auto = field.attributes.iter().any(|attr| attr.name == "auto"); + + let mut python_type = if let FieldType::Custom(ref model_name) = field.field_type { + if self.model_names.contains(model_name) { + if field.optional { + format!("Optional[{}]", model_name) + } else { + model_name.clone() + } + } else { + self.field_type_to_python(&field.field_type) + } + } else if let FieldType::Array(inner) = &field.field_type { + if let FieldType::Custom(ref model_name) = **inner { + if self.model_names.contains(model_name) { + format!("List[{}]", model_name) + } else { + self.field_type_to_python(&field.field_type) + } + } else { + self.field_type_to_python(&field.field_type) + } + } else { + self.field_type_to_python(&field.field_type) + }; + + if field.optional && !python_type.starts_with("Optional[") && !python_type.starts_with("List[") { + python_type = format!("Optional[{}]", python_type); + } + + let mut field_attrs = Vec::::new(); + if is_primary && !has_primary_key { + field_attrs.push("primary_key=True".to_string()); + has_primary_key = true; + } + if is_unique { + field_attrs.push("unique=True".to_string()); + } + if let Some(default) = has_default { + if let Some(default_value) = default.args.first() { + if default_value == "now" { + field_attrs.push("default=datetime.now".to_string()); + } else { + field_attrs.push(format!("default=\"{}\"", default_value)); + } + } + } + + let field_def = if !field_attrs.is_empty() { + format!("{}: {} = Field({})", field.name, python_type, field_attrs.join(", ")) + } else { + format!("{}: {}", field.name, python_type) + }; + code.push_str(&format!(" {}\n", field_def)); + } + + if !relationships.is_empty() { + code.push_str("\n"); + for rel in &relationships { + match rel.relationship_type { + RelationshipType::BelongsTo | RelationshipType::OneToOne => { + code.push_str(&format!( + " async def load_{}(self, db: Database) -> Optional[{}]:\n", + rel.field_name, rel.related_model + )); + if let Some(ref fk) = rel.foreign_key { + code.push_str(&format!( + " return await {}.find_by_id(db, getattr(self, '{}'))\n", + rel.related_model, fk + )); + } else { + code.push_str(&format!( + " 
return await {}.find_by_id(db, getattr(self, '{}'))\n",
+                        rel.related_model, rel.field_name
+                    ));
+                }
+            }
+            RelationshipType::OneToMany => {
+                code.push_str(&format!(
+                    "    async def load_{}(self, db: Database) -> List[{}]:\n",
+                    rel.field_name, rel.related_model
+                ));
+                code.push_str("        from rohas_orm import QueryBuilder\n");
+                code.push_str(&format!(
+                    "        query = QueryBuilder.select_all()\\\n"
+                ));
+                code.push_str(&format!(
+                    "            .from_(\"{}\")\\\n",
+                    rel.related_model.to_lowercase()
+                ));
+                code.push_str(&format!(
+                    "            .where_eq_num(\"{}\", self.id)\n",
+                    format!("{}Id", model.name.to_lowercase())
+                ));
+                code.push_str("        return await db.query(query)\n");
+            }
+            RelationshipType::ManyToMany => {
+                code.push_str(&format!(
+                    "    async def load_{}(self, db: Database) -> List[{}]:\n",
+                    rel.field_name, rel.related_model
+                ));
+                code.push_str("        # Many-to-many relationship - implement join table query\n");
+                code.push_str("        raise NotImplementedError(\"Many-to-many relationships require join table implementation\")\n");
+            }
+            }
+            }
+        }
+
+        Ok(code)
+    }
+
+    fn field_type_to_rust(&self, field_type: &FieldType) -> String {
+        match field_type {
+            FieldType::String => "String".to_string(),
+            FieldType::Int => "i64".to_string(),
+            FieldType::Float => "f64".to_string(),
+            FieldType::Boolean => "bool".to_string(),
+            FieldType::DateTime => "DateTime<Utc>".to_string(),
+            FieldType::Json => "serde_json::Value".to_string(),
+            FieldType::Array(inner) => {
+                format!("Vec<{}>", self.field_type_to_rust(inner))
+            }
+            FieldType::Custom(name) => name.clone(),
+        }
+    }
+
+    fn field_type_to_python(&self, field_type: &FieldType) -> String {
+        match field_type {
+            FieldType::String => "str".to_string(),
+            FieldType::Int => "int".to_string(),
+            FieldType::Float => "float".to_string(),
+            FieldType::Boolean => "bool".to_string(),
+            FieldType::DateTime => "datetime".to_string(),
+            FieldType::Json => "dict".to_string(),
+            FieldType::Array(inner) => {
+                format!("list[{}]", self.field_type_to_python(inner))
+            }
+            FieldType::Custom(name) => name.clone(),
+        }
+    }
+
+    fn get_table_name(&self, model: &ParserModel) -> String {
+        if let Some(table_attr) = model.attributes.iter().find(|attr| attr.name == "table") {
+            if let Some(table_name) = table_attr.args.first() {
+                return table_name.clone();
+            }
+        }
+
+        format!("{}s", model.name.to_lowercase())
+    }
+}
diff --git a/crates/rohas-orm/src/connection.rs b/crates/rohas-orm/src/connection.rs
new file mode 100644
index 0000000..837b16a
--- /dev/null
+++ b/crates/rohas-orm/src/connection.rs
@@ -0,0 +1,119 @@
+use crate::error::{Error, Result};
+use sqlx::{postgres::PgPoolOptions, sqlite::{SqlitePoolOptions, SqliteConnectOptions}, mysql::MySqlPoolOptions, Pool, Postgres, Sqlite, MySql};
+use std::sync::Arc;
+use std::str::FromStr;
+use tracing::{debug, info};
+
+#[derive(Clone)]
+pub enum DatabasePool {
+    Postgres(Pool<Postgres>),
+    Sqlite(Pool<Sqlite>),
+    MySql(Pool<MySql>),
+}
+
+#[derive(Clone)]
+pub struct Database {
+    pool: Arc<DatabasePool>,
+    database_type: DatabaseType,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum DatabaseType {
+    Postgres,
+    Sqlite,
+    MySql,
+}
+
+impl Database {
+    /// Connect to a database using a connection string
+    ///
+    /// Supports:
+    /// - `postgresql://...` or `postgres://...` for PostgreSQL
+    /// - `sqlite://...` or `sqlite:...` for SQLite
+    /// - `mysql://...` or `mariadb://...` for MySQL/MariaDB
+    pub async fn connect(url: &str) -> Result<Self> {
+        info!("Connecting to database: {}", url);
+
+        let (pool, db_type) = if url.starts_with("postgresql://") || url.starts_with("postgres://") {
+            let pool = PgPoolOptions::new()
+                .max_connections(10)
+                .connect(url)
+                .await
+                .map_err(|e| Error::Connection(format!("Failed to connect to PostgreSQL: {}", e)))?;
+            (DatabasePool::Postgres(pool), DatabaseType::Postgres)
+        } else if url.starts_with("sqlite://") || url.starts_with("sqlite:") {
+            let sqlite_url = url.replace("sqlite://", "").replace("sqlite:", "");
+            let connection_string = format!("sqlite://{}", sqlite_url);
+            let options = SqliteConnectOptions::from_str(&connection_string)
+                .map_err(|e| Error::Connection(format!("Failed to parse SQLite URL: {}", e)))?
+                .create_if_missing(true);
+            let pool = SqlitePoolOptions::new()
+                .max_connections(10)
+                .connect_with(options)
+                .await
+                .map_err(|e| Error::Connection(format!("Failed to connect to SQLite: {}", e)))?;
+            (DatabasePool::Sqlite(pool), DatabaseType::Sqlite)
+        } else if url.starts_with("mysql://") || url.starts_with("mariadb://") {
+            let pool = MySqlPoolOptions::new()
+                .max_connections(10)
+                .connect(url)
+                .await
+                .map_err(|e| Error::Connection(format!("Failed to connect to MySQL: {}", e)))?;
+            (DatabasePool::MySql(pool), DatabaseType::MySql)
+        } else {
+            return Err(Error::Connection(format!("Unsupported database URL: {}", url)));
+        };
+
+        info!("Successfully connected to database");
+
+        Ok(Self {
+            pool: Arc::new(pool),
+            database_type: db_type,
+        })
+    }
+
+    pub fn database_type(&self) -> DatabaseType {
+        self.database_type
+    }
+
+    pub fn postgres_pool(&self) -> Result<&Pool<Postgres>> {
+        match self.pool.as_ref() {
+            DatabasePool::Postgres(pool) => Ok(pool),
+            _ => Err(Error::Connection("Not a PostgreSQL database".to_string())),
+        }
+    }
+
+    pub fn sqlite_pool(&self) -> Result<&Pool<Sqlite>> {
+        match self.pool.as_ref() {
+            DatabasePool::Sqlite(pool) => Ok(pool),
+            _ => Err(Error::Connection("Not a SQLite database".to_string())),
+        }
+    }
+
+    pub fn mysql_pool(&self) -> Result<&Pool<MySql>> {
+        match self.pool.as_ref() {
+            DatabasePool::MySql(pool) => Ok(pool),
+            _ => Err(Error::Connection("Not a MySQL database".to_string())),
+        }
+    }
+
+    pub async fn execute(&self, query: &str) -> Result<u64> {
+        debug!("Executing query: {}", query);
+
+        match &*self.pool {
+            DatabasePool::Postgres(pool) => {
+                let result = sqlx::query(query).execute(pool).await?;
+                Ok(result.rows_affected())
+            }
+            DatabasePool::Sqlite(pool) => {
+                let result = sqlx::query(query).execute(pool).await?;
+                Ok(result.rows_affected())
+            }
+            DatabasePool::MySql(pool) => {
+                let result = sqlx::query(query).execute(pool).await?;
+                Ok(result.rows_affected())
+            }
+        }
+    }
+}
+
diff --git a/crates/rohas-orm/src/error.rs b/crates/rohas-orm/src/error.rs
new file mode 100644
index 0000000..fc211ee
--- /dev/null
+++ b/crates/rohas-orm/src/error.rs
@@ -0,0 +1,40 @@
+use thiserror::Error;
+
+#[derive(Error, Debug)]
+pub enum Error {
+    #[error("Database connection error: {0}")]
+    Connection(String),
+
+    #[error("Query execution error: {0}")]
+    Query(String),
+
+    #[error("Model validation error: {0}")]
+    Validation(String),
+
+    #[error("Migration error: {0}")]
+    Migration(String),
+
+    #[error("Code generation error: {0}")]
+    Codegen(String),
+
+    #[error("Serialization error: {0}")]
+    Serialization(#[from] serde_json::Error),
+
+    #[error("SQLx error: {0}")]
+    Sqlx(String),
+
+    #[error("IO error: {0}")]
+    Io(#[from] std::io::Error),
+
+    #[error("Other error: {0}")]
+    Other(String),
+}
+
+pub type Result<T> = std::result::Result<T, Error>;
+
+impl From<sqlx::Error> for Error {
+    fn from(err: sqlx::Error) -> Self {
+        Error::Sqlx(err.to_string())
+    }
+}
+
diff --git a/crates/rohas-orm/src/lib.rs b/crates/rohas-orm/src/lib.rs
new file mode 100644
index 0000000..f8f2246
--- /dev/null
+++ b/crates/rohas-orm/src/lib.rs
@@ -0,0 +1,88 @@
+//! Rohas ORM - A modern ORM with Rust macros, Python annotations, codegen, and query builder
+//!
+//! # Features
+//!
+//! - **Rust Macros**: Derive macros for models, queries, and relationships
+//! - **Python Annotations**: Full Python support via PyO3
+//! - **Code Generation**: Generate type-safe models from schemas
+//! - **Query Builder**: Fluent API for building complex queries
+//! - **Multi-database**: Support for PostgreSQL, MySQL, and SQLite
+//! - **Async/Await**: Built on Tokio for async operations
+//!
+//! # Example (Rust)
+//!
+//! ```rust,no_run
+//! use rohas_orm::prelude::*;
+//! use serde::{Serialize, Deserialize};
+//!
+//! #[derive(Model, Debug, Clone, Serialize, Deserialize)]
+//! #[table_name = "users"]
+//! struct User {
+//!     #[primary_key]
+//!     id: i64,
+//!     name: String,
+//!     email: String,
+//!     created_at: chrono::DateTime<chrono::Utc>,
+//! }
+//!
+//! #[tokio::main]
+//! async fn main() -> Result<()> {
+//!     let db = Database::connect("postgresql://localhost/mydb").await?;
+//!
+//!     let user = User::find_by_id(&db, 1).await?;
+//!     println!("User: {:?}", user);
+//!
+//!     Ok(())
+//! }
+//! ```
+//!
+//! # Example (Python)
+//!
+//! ```python
+//! from rohas_orm import Model, Database, Field, Table, Index, Unique
+//!
+//! @Table(name="users")
+//! @Index(name="idx_email", fields=["email"])
+//! @Unique(fields=["email"])
+//! class User(Model):
+//!     id: int = Field(primary_key=True)
+//!     name: str
+//!     email: str
+//!     created_at: datetime
+//!
+//! async def main():
+//!     db = await Database.connect("postgresql://localhost/mydb")
+//!     user = await User.find_by_id(db, 1)
+//!     print(f"User: {user}")
+//! ```
+
+pub mod connection;
+pub mod error;
+pub mod model;
+pub mod query;
+pub mod query_builder;
+pub mod codegen;
+pub mod python;
+pub mod migration;
+
+pub use codegen::{Codegen, Relationship, RelationshipType};
+
+pub use python::*;
+
+pub use connection::Database;
+pub use error::{Error, Result};
+pub use model::Model;
+pub use query::Query;
+pub use query_builder::QueryBuilder;
+pub use migration::{Migration, MigrationManager};
+
+pub mod prelude {
+    pub use crate::codegen::{Codegen, Relationship, RelationshipType};
+    pub use crate::connection::Database;
+    pub use crate::error::{Error, Result};
+    pub use crate::model::Model;
+    pub use crate::query::Query;
+    pub use crate::query_builder::QueryBuilder;
+    pub use rohas_orm_macros::*;
+}
+
diff --git a/crates/rohas-orm/src/migration.rs b/crates/rohas-orm/src/migration.rs
new file mode 100644
index 0000000..2fe9f27
--- /dev/null
+++ b/crates/rohas-orm/src/migration.rs
@@ -0,0 +1,790 @@
+use crate::connection::{Database, DatabaseType};
+use crate::error::{Error, Result};
+use rohas_parser::ast::{Field, FieldType, Model, Schema};
+use std::fs;
+use std::path::PathBuf;
+use chrono::Utc;
+use sqlx::Row;
+
+#[derive(Debug, Clone)]
+pub struct Migration {
+    pub name: String,
+    pub timestamp: i64,
+    pub up_sql: String,
+    pub down_sql: String,
+}
+
+pub struct MigrationManager {
+    migrations_dir: PathBuf,
+    pub(crate) database: Database,
+}
+
+impl MigrationManager {
+    pub fn new(migrations_dir: PathBuf, database: Database) -> Self {
+        Self {
+            migrations_dir,
+            database,
+        }
+    }
+
+    pub async fn init(&self) -> Result<()> {
+        let create_table_sql = match self.database.database_type() {
+            DatabaseType::Postgres => r#"
+                CREATE TABLE IF NOT EXISTS _rohas_migrations (
+                    id SERIAL PRIMARY KEY,
+                    name VARCHAR(255) NOT NULL UNIQUE,
timestamp BIGINT NOT NULL, + applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ); + "#, + DatabaseType::Sqlite => r#" + CREATE TABLE IF NOT EXISTS _rohas_migrations ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + name TEXT NOT NULL UNIQUE, + timestamp INTEGER NOT NULL, + applied_at DATETIME DEFAULT CURRENT_TIMESTAMP + ); + "#, + DatabaseType::MySql => r#" + CREATE TABLE IF NOT EXISTS _rohas_migrations ( + id INT AUTO_INCREMENT PRIMARY KEY, + name VARCHAR(255) NOT NULL UNIQUE, + timestamp BIGINT NOT NULL, + applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + ); + "#, + }; + + self.database.execute(create_table_sql.trim()).await?; + Ok(()) + } + + pub async fn generate_migration_from_schema( + &self, + schema: &Schema, + ) -> Result<(String, String)> { + let database_type = self.database.database_type(); + let mut up_sql = String::new(); + let mut down_sql = String::new(); + + let existing_tables = self.get_existing_tables().await?; + + for model in &schema.models { + let table_name = Self::get_table_name(model); + + if existing_tables.contains(&table_name) { + let (alter_up, alter_down) = self.generate_alter_table(model, &table_name, &database_type, Some(schema)).await?; + if !alter_up.is_empty() { + up_sql.push_str(&alter_up); + up_sql.push_str("\n\n"); + + if !alter_down.is_empty() { + down_sql.push_str(&alter_down); + down_sql.push_str("\n\n"); + } + } + } else { + let create_table = Self::generate_create_table_with_schema(model, &database_type, Some(schema))?; + up_sql.push_str(&create_table); + up_sql.push_str("\n\n"); + + down_sql.push_str(&format!("DROP TABLE IF EXISTS {};\n", table_name)); + } + } + + let join_tables = Self::generate_join_tables(schema, &database_type)?; + up_sql.push_str(&join_tables.up); + down_sql.push_str(&join_tables.down); + + Ok((up_sql, down_sql)) + } + + async fn get_existing_tables(&self) -> Result> { + use std::collections::HashMap; + let query = match self.database.database_type() { + DatabaseType::Postgres => "SELECT tablename FROM pg_tables WHERE schemaname = 'public'", + DatabaseType::Sqlite => "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'", + DatabaseType::MySql => "SELECT table_name FROM information_schema.tables WHERE table_schema = DATABASE()", + }; + + use crate::query::Query; + use crate::query_builder::QueryBuilder; + + let mut tables = Vec::new(); + + match self.database.database_type() { + DatabaseType::Sqlite => { + let pool = self.database.sqlite_pool()?; + let query = "SELECT name FROM sqlite_master WHERE type='table'"; + let rows = sqlx::query(query) + .fetch_all(pool) + .await + .map_err(|e| Error::Connection(format!("Failed to get tables: {}", e)))?; + + for row in rows { + if let Ok(name) = row.try_get::("name") { + if name != "_rohas_migrations" && !name.starts_with("sqlite_") { + tables.push(name); + } + } + } + } + DatabaseType::Postgres => { + let results = QueryBuilder::select_all() + .from("pg_tables") + .where_eq("schemaname", "public") + .execute(&self.database) + .await?; + + for row in results { + if let Some(table_name) = row.get("tablename").and_then(|v| v.as_str()) { + tables.push(table_name.to_string()); + } + } + } + DatabaseType::MySql => { + let results = QueryBuilder::select_all() + .from("information_schema.tables") + .execute(&self.database) + .await?; + + for row in results { + if let Some(table_name) = row.get("table_name").and_then(|v| v.as_str()) { + tables.push(table_name.to_string()); + } + } + } + } + + Ok(tables) + } + + async fn generate_alter_table( + &self, + model: &Model, + 
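+        // Diffs live columns against the model: same-type column pairs become RENAMEs,
+        // the rest become ADD COLUMN / DROP COLUMN statements (see body).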
table_name: &str, + db_type: &DatabaseType, + schema: Option<&Schema>, + ) -> Result<(String, String)> { + let existing_columns = self.get_table_columns(table_name).await?; + + let mut expected_columns = std::collections::HashMap::new(); + for field in &model.fields { + if let FieldType::Custom(_) = &field.field_type { + if field.attributes.iter().any(|attr| attr.name == "relation") { + continue; + } + } + + if let FieldType::Array(inner) = &field.field_type { + if let FieldType::Custom(_) = inner.as_ref() { + continue; + } + } + + let column_def = Self::field_to_column(field, db_type)?; + let column_name = field.name.clone(); + expected_columns.insert(column_name.clone(), column_def.column.clone()); + } + + let mut alter_sql = String::new(); + let mut down_sql = String::new(); + + let removed_columns: Vec<(String, String)> = existing_columns.iter() + .filter(|(name, _)| !expected_columns.contains_key(*name)) + .map(|(name, typ)| (name.clone(), typ.clone())) + .collect(); + + let new_columns: Vec<(String, String, String)> = expected_columns.iter() + .filter(|(name, _)| !existing_columns.contains_key(*name)) + .map(|(name, def)| { + let parts: Vec<&str> = def.split_whitespace().collect(); + let col_type = if parts.len() >= 2 { + parts[1].to_uppercase() + } else { + "TEXT".to_string() + }; + (name.clone(), def.clone(), col_type) + }) + .collect(); + + let mut handled_removed = std::collections::HashSet::new(); + let mut handled_new = std::collections::HashSet::new(); + + for (old_name, old_type) in &removed_columns { + for (new_name, new_def, new_type) in &new_columns { + if handled_new.contains(new_name) { + continue; + } + if old_type.to_uppercase() == *new_type { + match db_type { + DatabaseType::Postgres => { + alter_sql.push_str(&format!("ALTER TABLE {} RENAME COLUMN {} TO {};\n", table_name, old_name, new_name)); + down_sql.push_str(&format!("ALTER TABLE {} RENAME COLUMN {} TO {};\n", table_name, new_name, old_name)); + } + DatabaseType::Sqlite => { + // SQLite supports RENAME COLUMN (since version 3.25.0) + alter_sql.push_str(&format!("ALTER TABLE {} RENAME COLUMN {} TO {};\n", table_name, old_name, new_name)); + down_sql.push_str(&format!("ALTER TABLE {} RENAME COLUMN {} TO {};\n", table_name, new_name, old_name)); + } + DatabaseType::MySql => { + let parts: Vec<&str> = new_def.split_whitespace().collect(); + if parts.len() >= 2 { + let type_and_constraints = parts[1..].join(" "); + alter_sql.push_str(&format!("ALTER TABLE {} CHANGE COLUMN {} {} {};\n", table_name, old_name, new_name, type_and_constraints)); + down_sql.push_str(&format!("ALTER TABLE {} CHANGE COLUMN {} {} {};\n", table_name, new_name, old_name, type_and_constraints)); + } + } + } + handled_removed.insert(old_name.clone()); + handled_new.insert(new_name.clone()); + break; + } + } + } + + for (column_name, column_def, _) in &new_columns { + if !handled_new.contains(column_name) { + match db_type { + DatabaseType::Postgres | DatabaseType::MySql => { + alter_sql.push_str(&format!("ALTER TABLE {} ADD COLUMN {};\n", table_name, column_def)); + down_sql.push_str(&format!("ALTER TABLE {} DROP COLUMN {};\n", table_name, column_name)); + } + DatabaseType::Sqlite => { + let parts: Vec<&str> = column_def.split_whitespace().collect(); + if parts.len() >= 2 { + let type_and_constraints = parts[1..].join(" "); + let mut alter_stmt = format!("ALTER TABLE {} ADD COLUMN {} {}", table_name, column_name, type_and_constraints); + alter_sql.push_str(&format!("{};\n", alter_stmt)); + } else { + alter_sql.push_str(&format!("ALTER TABLE {} 
ADD COLUMN {} TEXT;\n", table_name, column_name)); + } + down_sql.push_str(&format!("ALTER TABLE {} DROP COLUMN {};\n", table_name, column_name)); + } + } + } + } + + for (column_name, _) in &removed_columns { + if !handled_removed.contains(column_name) { + match db_type { + DatabaseType::Postgres | DatabaseType::MySql => { + alter_sql.push_str(&format!("ALTER TABLE {} DROP COLUMN {};\n", table_name, column_name)); + down_sql.push_str(&format!("-- Cannot automatically recreate dropped column {}\n", column_name)); + } + DatabaseType::Sqlite => { + alter_sql.push_str(&format!("-- SQLite DROP COLUMN requires recreating the table\n")); + alter_sql.push_str(&format!("-- ALTER TABLE {} DROP COLUMN {};\n", table_name, column_name)); + down_sql.push_str(&format!("-- Cannot automatically recreate dropped column {}\n", column_name)); + } + } + } + } + + Ok((alter_sql, down_sql)) + } + + async fn get_table_columns(&self, table_name: &str) -> Result> { + use std::collections::HashMap; + let mut columns = HashMap::new(); + + match self.database.database_type() { + DatabaseType::Sqlite => { + let pool = self.database.sqlite_pool()?; + let query = format!("PRAGMA table_info({})", table_name); + + let rows = sqlx::query(&query) + .fetch_all(pool) + .await + .map_err(|e| Error::Connection(format!("Failed to get table info: {}", e)))?; + + for row in rows { + if let Ok(name) = row.try_get::("name") { + if let Ok(typ) = row.try_get::("type") { + columns.insert(name, typ.to_uppercase()); + } + } + } + } + DatabaseType::Postgres => { + let query = format!( + "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = '{}'", + table_name + ); + use crate::query::Query; + use crate::query_builder::QueryBuilder; + + let results = QueryBuilder::select_all() + .from("information_schema.columns") + .where_eq("table_name", table_name) + .execute(&self.database) + .await?; + + for row in results { + if let Some(col_name) = row.get("column_name").and_then(|v| v.as_str()) { + if let Some(data_type) = row.get("data_type").and_then(|v| v.as_str()) { + columns.insert(col_name.to_string(), data_type.to_string()); + } + } + } + } + DatabaseType::MySql => { + let query = format!( + "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = '{}' AND table_schema = DATABASE()", + table_name + ); + use crate::query::Query; + use crate::query_builder::QueryBuilder; + + let results = QueryBuilder::select_all() + .from("information_schema.columns") + .where_eq("table_name", table_name) + .execute(&self.database) + .await?; + + for row in results { + if let Some(col_name) = row.get("column_name").and_then(|v| v.as_str()) { + if let Some(data_type) = row.get("data_type").and_then(|v| v.as_str()) { + columns.insert(col_name.to_string(), data_type.to_string()); + } + } + } + } + } + + Ok(columns) + } + + fn generate_create_table_with_schema( + model: &Model, + db_type: &DatabaseType, + schema: Option<&Schema>, + ) -> Result { + let table_name = Self::get_table_name(model); + let mut sql = format!("CREATE TABLE IF NOT EXISTS {} (\n", table_name); + + let mut columns = Vec::new(); + let mut primary_key = None; + let mut unique_constraints = Vec::new(); + let mut indexes = Vec::new(); + + let mut primary_key_in_column = false; + for field in &model.fields { + if let FieldType::Custom(_) = &field.field_type { + if field.attributes.iter().any(|attr| attr.name == "relation") { + continue; + } + } + + if let FieldType::Array(inner) = &field.field_type { + if let FieldType::Custom(_) = 
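+                    // Arrays of model types are relationships handled via join tables, not columns.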
inner.as_ref() { + continue; + } + } + + let column_def = Self::field_to_column(field, db_type)?; + columns.push(format!(" {}", column_def.column)); + + if column_def.is_primary { + if column_def.column.contains("PRIMARY KEY") { + primary_key_in_column = true; + } else { + primary_key = Some(field.name.clone()); + } + } + + if column_def.is_unique { + unique_constraints.push(field.name.clone()); + } + + if column_def.has_index { + indexes.push(field.name.clone()); + } + } + + sql.push_str(&columns.join(",\n")); + + if let Some(pk) = primary_key { + sql.push_str(&format!(",\n PRIMARY KEY ({})", pk)); + } + + for unique_field in unique_constraints { + sql.push_str(&format!(",\n UNIQUE ({})", unique_field)); + } + + if let Some(schema) = schema { + for field in &model.fields { + if let FieldType::Custom(ref model_name) = field.field_type { + if schema.models.iter().any(|m| m.name == *model_name) { + if let Some(rel_attr) = field.attributes.iter().find(|a| a.name == "relation") { + if let Some(fk_field_name) = rel_attr.args.first() { + if let Some(related_model) = schema.models.iter().find(|m| m.name == *model_name) { + let pk_field = related_model + .fields + .iter() + .find(|f| f.attributes.iter().any(|a| a.name == "id")) + .map(|f| f.name.clone()) + .unwrap_or_else(|| "id".to_string()); + + let related_table = Self::get_table_name(related_model); + + sql.push_str(&format!( + ",\n FOREIGN KEY ({}) REFERENCES {}({}) ON DELETE CASCADE", + fk_field_name, related_table, pk_field + )); + } + } + } + } + } + } + } + + sql.push_str("\n);\n"); + + for index_field in indexes { + sql.push_str(&format!( + "CREATE INDEX IF NOT EXISTS idx_{}_{} ON {} ({});\n", + table_name, index_field, table_name, index_field + )); + } + + Ok(sql) + } + + fn field_to_column(field: &Field, db_type: &DatabaseType) -> Result { + let mut column_type = match &field.field_type { + FieldType::Int => match db_type { + DatabaseType::Postgres => "BIGINT".to_string(), + DatabaseType::Sqlite => "INTEGER".to_string(), + DatabaseType::MySql => "BIGINT".to_string(), + }, + FieldType::String => match db_type { + DatabaseType::Postgres => "TEXT".to_string(), + DatabaseType::Sqlite => "TEXT".to_string(), + DatabaseType::MySql => "TEXT".to_string(), + }, + FieldType::Boolean => match db_type { + DatabaseType::Postgres => "BOOLEAN".to_string(), + DatabaseType::Sqlite => "INTEGER".to_string(), + DatabaseType::MySql => "BOOLEAN".to_string(), + }, + FieldType::Float => match db_type { + DatabaseType::Postgres => "DOUBLE PRECISION".to_string(), + DatabaseType::Sqlite => "REAL".to_string(), + DatabaseType::MySql => "DOUBLE".to_string(), + }, + FieldType::DateTime => match db_type { + DatabaseType::Postgres => "TIMESTAMP".to_string(), + DatabaseType::Sqlite => "DATETIME".to_string(), + DatabaseType::MySql => "DATETIME".to_string(), + }, + FieldType::Json => match db_type { + DatabaseType::Postgres => "JSONB".to_string(), + DatabaseType::Sqlite => "TEXT".to_string(), + DatabaseType::MySql => "JSON".to_string(), + }, + FieldType::Custom(_) => { + match db_type { + DatabaseType::Postgres => "BIGINT".to_string(), + DatabaseType::Sqlite => "INTEGER".to_string(), + DatabaseType::MySql => "BIGINT".to_string(), + } + } + FieldType::Array(_) => { + match db_type { + DatabaseType::Postgres => "JSONB".to_string(), + DatabaseType::Sqlite => "TEXT".to_string(), + DatabaseType::MySql => "JSON".to_string(), + } + } + }; + + let is_primary = field.attributes.iter().any(|attr| attr.name == "id"); + let is_auto = field.attributes.iter().any(|attr| attr.name == 
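+        // On SQLite, @id + @auto collapses to INTEGER PRIMARY KEY AUTOINCREMENT below;
+        // other backends get an auto-increment keyword appended instead.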
"auto"); + + if is_primary && is_auto && *db_type == DatabaseType::Sqlite { + column_type = "INTEGER".to_string(); + column_type.push_str(" PRIMARY KEY AUTOINCREMENT"); + } else if is_auto { + column_type = match db_type { + DatabaseType::Postgres => format!("{} SERIAL", column_type), + DatabaseType::Sqlite => format!("{} AUTOINCREMENT", column_type), + DatabaseType::MySql => format!("{} AUTO_INCREMENT", column_type), + }; + } + + if !field.optional { + column_type.push_str(" NOT NULL"); + } + + if let Some(default_attr) = field.attributes.iter().find(|attr| attr.name == "default") { + if let Some(default_value) = default_attr.args.first() { + if default_value == "now" { + match db_type { + DatabaseType::Postgres => column_type.push_str(" DEFAULT CURRENT_TIMESTAMP"), + DatabaseType::Sqlite => column_type.push_str(" DEFAULT CURRENT_TIMESTAMP"), + DatabaseType::MySql => column_type.push_str(" DEFAULT CURRENT_TIMESTAMP"), + } + } else { + column_type.push_str(&format!(" DEFAULT '{}'", default_value)); + } + } + } + + let is_unique = field.attributes.iter().any(|attr| attr.name == "unique"); + let has_index = field.attributes.iter().any(|attr| attr.name == "index"); + + Ok(ColumnDefinition { + column: format!("{} {}", field.name, column_type), + is_primary, + is_unique, + has_index, + }) + } + + fn generate_join_tables( + schema: &Schema, + _db_type: &DatabaseType, + ) -> Result { + let mut up_sql = String::new(); + let mut down_sql = String::new(); + let mut processed_join_tables = std::collections::HashSet::new(); + + for model in &schema.models { + for field in &model.fields { + if let FieldType::Array(inner) = &field.field_type { + if let FieldType::Custom(related_model) = inner.as_ref() { + let is_one_to_many = schema + .models + .iter() + .find(|m| m.name == *related_model) + .map(|related_model| { + related_model.fields.iter().any(|f| { + if let FieldType::Custom(ref rel_model_name) = f.field_type { + if *rel_model_name == model.name { + f.attributes.iter().any(|a| a.name == "relation") + } else { + false + } + } else { + false + } + }) + }) + .unwrap_or(false); + + if is_one_to_many { + continue; + } + + let (model1_name, model2_name) = if model.name < *related_model { + (&model.name, related_model) + } else { + (related_model, &model.name) + }; + let join_table_name = Self::get_join_table_name(model1_name, model2_name); + + if processed_join_tables.contains(&join_table_name) { + continue; + } + processed_join_tables.insert(join_table_name.clone()); + + let model1 = schema.models.iter().find(|m| m.name == *model1_name).unwrap(); + let model2 = schema.models.iter().find(|m| m.name == *model2_name).unwrap(); + + let table1 = Self::get_table_name(model1); + let table2 = Self::get_table_name(model2); + + let pk1 = model1 + .fields + .iter() + .find(|f| f.attributes.iter().any(|a| a.name == "id")) + .map(|f| f.name.clone()) + .unwrap_or_else(|| "id".to_string()); + + let pk2 = model2 + .fields + .iter() + .find(|f| f.attributes.iter().any(|a| a.name == "id")) + .map(|f| f.name.clone()) + .unwrap_or_else(|| "id".to_string()); + + let fk1 = format!("{}_id", model1_name.to_lowercase()); + let fk2 = format!("{}_id", model2_name.to_lowercase()); + + up_sql.push_str(&format!( + "CREATE TABLE IF NOT EXISTS {} (\n", + join_table_name + )); + up_sql.push_str(&format!(" {} BIGINT NOT NULL,\n", fk1)); + up_sql.push_str(&format!(" {} BIGINT NOT NULL,\n", fk2)); + up_sql.push_str(&format!( + " PRIMARY KEY ({}, {}),\n", + fk1, fk2 + )); + up_sql.push_str(&format!( + " FOREIGN KEY ({}) REFERENCES {}({}) 
ON DELETE CASCADE,\n", + fk1, table1, pk1 + )); + up_sql.push_str(&format!( + " FOREIGN KEY ({}) REFERENCES {}({}) ON DELETE CASCADE\n", + fk2, table2, pk2 + )); + up_sql.push_str(");\n\n"); + + down_sql.push_str(&format!("DROP TABLE IF EXISTS {};\n", join_table_name)); + } + } + } + } + + Ok(JoinTablesResult { up: up_sql, down: down_sql }) + } + + fn get_table_name(model: &Model) -> String { + if let Some(table_attr) = model.attributes.iter().find(|attr| attr.name == "table") { + if let Some(table_name) = table_attr.args.first() { + return table_name.clone(); + } + } + format!("{}s", model.name.to_lowercase()) + } + + fn get_table_name_from_model_name(model_name: &str) -> String { + format!("{}s", model_name.to_lowercase()) + } + + fn get_join_table_name(model1: &str, model2: &str) -> String { + let mut names = vec![model1.to_lowercase(), model2.to_lowercase()]; + names.sort(); + format!("{}_{}", names[0], names[1]) + } + + pub fn create_migration(&self, name: &str) -> Result { + fs::create_dir_all(&self.migrations_dir) + .map_err(|e| Error::Codegen(format!("Failed to create migrations directory: {}", e)))?; + + let timestamp = Utc::now().timestamp(); + let migration_name = format!("{}_{}", timestamp, name.replace(" ", "_").to_lowercase()); + let file_path = self.migrations_dir.join(format!("{}.sql", migration_name)); + + let migration = Migration { + name: migration_name.clone(), + timestamp, + up_sql: format!("-- Migration: {}\n-- Up migration\n", name), + down_sql: format!("-- Migration: {}\n-- Down migration\n", name), + }; + + let content = format!( + "-- Up Migration\n{}\n\n-- Down Migration\n{}", + migration.up_sql, migration.down_sql + ); + fs::write(&file_path, content) + .map_err(|e| Error::Codegen(format!("Failed to write migration file: {}", e)))?; + + Ok(migration) + } + + pub async fn apply_migration(&self, migration: &Migration) -> Result<()> { + if self.is_migration_applied(&migration.name).await? { + return Ok(()); + } + + let statements: Vec<&str> = migration.up_sql + .split(';') + .map(|s| s.trim()) + .filter(|s| !s.is_empty()) + .collect(); + + for statement in statements { + if !statement.trim().is_empty() { + self.database.execute(statement).await?; + } + } + + let insert_sql = match self.database.database_type() { + DatabaseType::Postgres | DatabaseType::MySql => format!( + "INSERT INTO _rohas_migrations (name, timestamp) VALUES ('{}', {})", + migration.name, migration.timestamp + ), + DatabaseType::Sqlite => format!( + "INSERT INTO _rohas_migrations (name, timestamp) VALUES ('{}', {})", + migration.name, migration.timestamp + ), + }; + + self.database.execute(&insert_sql).await?; + Ok(()) + } + + pub async fn rollback_migration(&self, migration: &Migration) -> Result<()> { + if !self.is_migration_applied(&migration.name).await? 
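+        // Rolling back a migration that was never applied is a no-op.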
+
+    pub async fn rollback_migration(&self, migration: &Migration) -> Result<()> {
+        if !self.is_migration_applied(&migration.name).await? {
+            return Ok(());
+        }
+
+        let statements: Vec<&str> = migration.down_sql
+            .split(';')
+            .map(|s| s.trim())
+            .filter(|s| !s.is_empty())
+            .collect();
+
+        for statement in statements {
+            self.database.execute(statement).await?;
+        }
+
+        let delete_sql = format!("DELETE FROM _rohas_migrations WHERE name = '{}'", migration.name);
+        self.database.execute(&delete_sql).await?;
+
+        Ok(())
+    }
+
+    async fn is_migration_applied(&self, name: &str) -> Result<bool> {
+        use crate::query::Query;
+        use crate::query_builder::QueryBuilder;
+        let query = QueryBuilder::select_all()
+            .from("_rohas_migrations")
+            .where_eq("name", name)
+            .limit(1);
+
+        let results = query.execute(&self.database).await?;
+        Ok(!results.is_empty())
+    }
+
+    pub async fn get_applied_migrations(&self) -> Result<Vec<String>> {
+        use crate::query::Query;
+        use crate::query_builder::QueryBuilder;
+        let query = QueryBuilder::select(&["name"])
+            .from("_rohas_migrations")
+            .order_by("timestamp", "ASC");
+
+        let results = query.execute(&self.database).await?;
+        let mut migrations = Vec::new();
+
+        for row in results {
+            if let serde_json::Value::Object(map) = row {
+                if let Some(name_value) = map.get("name").or_else(|| map.get("column_0")) {
+                    if let serde_json::Value::String(name) = name_value {
+                        migrations.push(name.clone());
+                    }
+                }
+            }
+        }
+
+        Ok(migrations)
+    }
+}
+
+struct ColumnDefinition {
+    column: String,
+    is_primary: bool,
+    is_unique: bool,
+    has_index: bool,
+}
+
+struct JoinTablesResult {
+    up: String,
+    down: String,
+}
diff --git a/crates/rohas-orm/src/model.rs b/crates/rohas-orm/src/model.rs
new file mode 100644
index 0000000..f3e7034
--- /dev/null
+++ b/crates/rohas-orm/src/model.rs
@@ -0,0 +1,48 @@
+use crate::connection::Database;
+use crate::error::{Error, Result};
+use serde::{Deserialize, Serialize};
+use std::marker::PhantomData;
+
+/// Trait for database models
+///
+/// This trait is automatically derived when using the `#[derive(Model)]` macro.
+/// Note: `async fn` in traits requires Rust 1.75 or later.
+pub trait Model: Serialize + for<'de> Deserialize<'de> + Send + Sync + Clone {
+    fn table_name() -> &'static str;
+
+    fn primary_key() -> &'static str;
+
+    fn primary_key_value(&self) -> Result<Option<i64>>;
+
+    async fn find_by_id(db: &Database, id: i64) -> Result<Option<Self>>;
+
+    async fn find_all(db: &Database) -> Result<Vec<Self>>;
+
+    async fn save(&self, db: &Database) -> Result<()>;
+
+    async fn delete(&self, db: &Database) -> Result<()>;
+
+    async fn create(db: &Database, data: Self) -> Result<Self>;
+
+    async fn update(db: &Database, id: i64, data: Self) -> Result<Self>;
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct BaseModel<T> {
+    #[serde(skip)]
+    _phantom: PhantomData<T>,
+}
+
+impl<T> BaseModel<T> {
+    pub fn new() -> Self {
+        Self {
+            _phantom: PhantomData,
+        }
+    }
+}
+
+impl<T> Default for BaseModel<T> {
+    fn default() -> Self {
+        Self::new()
+    }
+}
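Because the trait relies on `async fn` in traits, it is not dyn-compatible, so callers always work through concrete model types. For illustration, here is the static-metadata half of what a `#[derive(Model)]` expansion might emit; `ModelMeta` and `User` are hypothetical names used only for this sketch, not part of the crate:

```rust
// Hypothetical subset of the trait, enough to show the derive's shape.
trait ModelMeta {
    fn table_name() -> &'static str;
    fn primary_key() -> &'static str;
}

#[derive(Clone)]
struct User {
    id: i64,
    name: String,
}

impl ModelMeta for User {
    fn table_name() -> &'static str { "users" }
    fn primary_key() -> &'static str { "id" }
}

fn main() {
    // Enough metadata to assemble `SELECT * FROM users WHERE id = ?`.
    println!(
        "SELECT * FROM {} WHERE {} = ?",
        User::table_name(),
        User::primary_key()
    );
}
```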
Python bindings for rohas-orm using PyO3
+
+use crate::connection::Database;
+use crate::error::{Error, Result};
+use crate::query::Query;
+use crate::query_builder::QueryBuilder;
+use pyo3::exceptions::{PyConnectionError, PyRuntimeError};
+use pyo3::prelude::*;
+use pyo3::types::{PyDict, PyList, PyModule, PyString, PyFloat, PyInt, PyBool, PyType};
+use std::sync::Arc;
+
+#[pymodule]
+#[pyo3(name = "rohas_orm")]
+fn rohas_orm(m: &Bound<'_, PyModule>) -> PyResult<()> {
+    m.add_class::<PyDatabase>()?;
+    m.add_class::<PyQueryBuilder>()?;
+    m.add_class::<PyField>()?;
+    m.add_class::<PyTable>()?;
+    m.add_class::<PyIndex>()?;
+    m.add_class::<PyUnique>()?;
+    m.add_function(wrap_pyfunction!(connect, m)?)?;
+    Ok(())
+}
+
+/// Python wrapper for Database
+#[pyclass]
+pub struct PyDatabase {
+    db: Arc<Database>,
+}
+
+#[pymethods]
+impl PyDatabase {
+    #[new]
+    fn new(url: &str) -> PyResult<Self> {
+        let rt = tokio::runtime::Runtime::new()
+            .map_err(|e| PyErr::new::<PyRuntimeError, _>(format!("Failed to create runtime: {}", e)))?;
+
+        let db = rt.block_on(Database::connect(url))
+            .map_err(|e| PyErr::new::<PyConnectionError, _>(format!("{}", e)))?;
+
+        Ok(Self {
+            db: Arc::new(db),
+        })
+    }
+
+    #[classmethod]
+    fn connect(_cls: &Bound<'_, PyType>, url: &str) -> PyResult<Self> {
+        Self::new(url)
+    }
+
+    fn execute(&self, query: &str) -> PyResult<u64> {
+        let db = self.db.clone();
+        let query = query.to_string();
+
+        let rt = tokio::runtime::Runtime::new()
+            .map_err(|e| PyErr::new::<PyRuntimeError, _>(format!("Failed to create runtime: {}", e)))?;
+
+        let result = rt.block_on(db.execute(&query))
+            .map_err(|e| PyErr::new::<PyRuntimeError, _>(format!("{}", e)))?;
+
+        Ok(result)
+    }
+
+    fn query(&self, query: &PyQueryBuilder, py: Python<'_>) -> PyResult<Py<PyList>> {
+        let db = self.db.clone();
+        let query_builder = query.builder.clone();
+
+        let rt = tokio::runtime::Runtime::new()
+            .map_err(|e| PyErr::new::<PyRuntimeError, _>(format!("Failed to create runtime: {}", e)))?;
+
+        let results = rt.block_on(async {
+            query_builder.execute(&db).await
+        })
+        .map_err(|e| PyErr::new::<PyRuntimeError, _>(format!("{}", e)))?;
+
+        let list = PyList::empty(py);
+        for result in results {
+            if let serde_json::Value::Object(map) = result {
+                let dict = PyDict::new(py);
+                for (key, value) in map {
+                    let py_value: PyObject = match value {
+                        serde_json::Value::String(s) => PyString::new(py, &s).into(),
+                        serde_json::Value::Number(n) => {
+                            if let Some(i) = n.as_i64() {
+                                PyInt::new(py, i).into()
+                            } else if let Some(f) = n.as_f64() {
+                                PyFloat::new(py, f).into()
+                            } else {
+                                PyString::new(py, &n.to_string()).into()
+                            }
+                        }
+                        serde_json::Value::Bool(b) => {
+                            // Convert bool to int (0 or 1) for compatibility
+                            PyInt::new(py, if b { 1 } else { 0 }).into()
+                        }
+                        serde_json::Value::Null => py.None().into(),
+                        _ => PyString::new(py, &value.to_string()).into(),
+                    };
+                    dict.set_item(key, py_value)?;
+                }
+                list.append(dict)?;
+            }
+        }
+        Ok(list.into())
+    }
+}
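Each `PyDatabase` method builds a fresh `tokio::runtime::Runtime`, which spins up and tears down a thread pool on every call. A common alternative is one process-wide runtime; a sketch assuming only std's `OnceLock` plus the tokio dependency the crate already has (`block_on_shared` is a hypothetical helper name):

```rust
// Share one tokio runtime across all binding calls instead of building a
// new runtime per method invocation.
use std::sync::OnceLock;
use tokio::runtime::Runtime;

static RUNTIME: OnceLock<Runtime> = OnceLock::new();

fn block_on_shared<F: std::future::Future>(fut: F) -> F::Output {
    RUNTIME
        // Built lazily on first use, then reused for the process lifetime.
        .get_or_init(|| Runtime::new().expect("failed to build tokio runtime"))
        .block_on(fut)
}

fn main() {
    let n = block_on_shared(async { 41 + 1 });
    assert_eq!(n, 42);
}
```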
+
+/// Python wrapper for QueryBuilder
+#[pyclass]
+pub struct PyQueryBuilder {
+    builder: QueryBuilder,
+}
+
+#[pymethods]
+impl PyQueryBuilder {
+    #[new]
+    fn new() -> Self {
+        Self {
+            builder: QueryBuilder::select_all(),
+        }
+    }
+
+    #[staticmethod]
+    fn select(columns: Vec<String>) -> Self {
+        let cols: Vec<&str> = columns.iter().map(|s| s.as_str()).collect();
+        Self {
+            builder: QueryBuilder::select(&cols),
+        }
+    }
+
+    #[staticmethod]
+    fn select_all() -> Self {
+        Self {
+            builder: QueryBuilder::select_all(),
+        }
+    }
+
+    fn from_(&mut self, table: &str) -> PyResult<()> {
+        self.builder = self.builder.clone().from(table);
+        Ok(())
+    }
+
+    fn where_eq(&mut self, column: &str, value: &str) -> PyResult<()> {
+        self.builder = self.builder.clone().where_eq(column, value);
+        Ok(())
+    }
+
+    fn where_eq_num(&mut self, column: &str, value: i64) -> PyResult<()> {
+        self.builder = self.builder.clone().where_eq_num(column, value);
+        Ok(())
+    }
+
+    fn order_by(&mut self, column: &str, direction: &str) -> PyResult<()> {
+        self.builder = self.builder.clone().order_by(column, direction);
+        Ok(())
+    }
+
+    fn limit(&mut self, limit: u64) -> PyResult<()> {
+        self.builder = self.builder.clone().limit(limit);
+        Ok(())
+    }
+
+    fn offset(&mut self, offset: u64) -> PyResult<()> {
+        self.builder = self.builder.clone().offset(offset);
+        Ok(())
+    }
+
+    #[staticmethod]
+    fn insert(table: &str) -> Self {
+        Self {
+            builder: QueryBuilder::insert(table),
+        }
+    }
+
+    fn values(&mut self, values: Vec<String>) -> PyResult<()> {
+        let vals: Vec<&str> = values.iter().map(|s| s.as_str()).collect();
+        self.builder = self.builder.clone().values(vals);
+        Ok(())
+    }
+
+    #[staticmethod]
+    fn update(table: &str) -> Self {
+        Self {
+            builder: QueryBuilder::update(table),
+        }
+    }
+
+    fn set(&mut self, column: &str, value: &str) -> PyResult<()> {
+        self.builder = self.builder.clone().set(column, value);
+        Ok(())
+    }
+
+    #[staticmethod]
+    fn delete(table: &str) -> Self {
+        Self {
+            builder: QueryBuilder::delete(table),
+        }
+    }
+
+    fn to_sql(&self) -> String {
+        self.builder.to_sql()
+    }
+}
+
+#[pyclass]
+pub struct PyField {
+    primary_key: bool,
+    nullable: bool,
+    default: Option<Py<PyAny>>,
+    unique: bool,
+}
+
+#[pymethods]
+impl PyField {
+    #[new]
+    #[pyo3(signature = (*, primary_key = false, nullable = false, default = None, unique = false))]
+    fn new(
+        primary_key: bool,
+        nullable: bool,
+        default: Option<Py<PyAny>>,
+        unique: bool,
+    ) -> Self {
+        Self {
+            primary_key,
+            nullable,
+            default,
+            // Previously accepted but dropped; now stored with the other flags.
+            unique,
+        }
+    }
+}
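The wrapper methods clone the whole builder because `QueryBuilder` methods consume `self` while `#[pymethods]` receive `&mut self`. `std::mem::take` can bridge the two without cloning; a self-contained sketch with illustrative types:

```rust
// Adapting a consuming builder API to `&mut self` methods without cloning.
// `MiniBuilder` and `Wrapper` are illustrative names for this sketch.
#[derive(Default)]
struct MiniBuilder {
    parts: Vec<String>,
}

impl MiniBuilder {
    fn from(mut self, table: &str) -> Self {
        self.parts.push(format!("FROM {table}"));
        self
    }
}

struct Wrapper {
    inner: MiniBuilder,
}

impl Wrapper {
    fn from_(&mut self, table: &str) {
        // Move the builder out, run the consuming method, move it back.
        let inner = std::mem::take(&mut self.inner);
        self.inner = inner.from(table);
    }
}

fn main() {
    let mut w = Wrapper { inner: MiniBuilder::default() };
    w.from_("users");
    assert_eq!(w.inner.parts, vec!["FROM users".to_string()]);
}
```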
+/// Table decorator for model classes
+///
+/// This decorator allows users to configure table names and other model settings.
+///
+/// Example:
+/// ```python
+/// from rohas_orm import Table
+///
+/// @Table(name="users")
+/// class User(Model):
+///     id: int = Field(primary_key=True)
+///     name: str
+/// ```
+#[pyclass]
+pub struct PyTable {
+    name: Option<String>,
+    schema: Option<String>,
+}
+
+#[pymethods]
+impl PyTable {
+    #[new]
+    #[pyo3(signature = (*, name = None, schema = None))]
+    fn new(name: Option<String>, schema: Option<String>) -> Self {
+        Self { name, schema }
+    }
+
+    fn __call__(
+        &self,
+        py: Python<'_>,
+        cls: Py<PyType>,
+    ) -> PyResult<Py<PyType>> {
+        let cls_bound = cls.bind(py);
+
+        if let Some(ref table_name) = self.name {
+            cls_bound.setattr("__table_name__", table_name.as_str())?;
+        }
+
+        if let Some(ref schema_name) = self.schema {
+            cls_bound.setattr("__table_schema__", schema_name.as_str())?;
+        }
+
+        Ok(cls.into())
+    }
+
+    fn get_name(&self) -> Option<String> {
+        self.name.clone()
+    }
+
+    fn get_schema(&self) -> Option<String> {
+        self.schema.clone()
+    }
+}
+
+/// Index decorator for model fields
+///
+/// Example:
+/// ```python
+/// from rohas_orm import Index
+///
+/// @Index(fields=["email", "name"])
+/// class User(Model):
+///     email: str
+///     name: str
+/// ```
+#[pyclass]
+pub struct PyIndex {
+    fields: Vec<String>,
+    name: Option<String>,
+    unique: bool,
+}
+
+#[pymethods]
+impl PyIndex {
+    #[new]
+    #[pyo3(signature = (*, fields = None, name = None, unique = false))]
+    fn new(
+        fields: Option<Vec<String>>,
+        name: Option<String>,
+        unique: bool,
+    ) -> Self {
+        Self {
+            fields: fields.unwrap_or_default(),
+            name,
+            unique,
+        }
+    }
+
+    fn __call__(
+        &self,
+        py: Python<'_>,
+        cls: Py<PyType>,
+    ) -> PyResult<Py<PyType>> {
+        let cls_bound = cls.bind(py);
+
+        if !self.fields.is_empty() {
+            let fields_list = PyList::empty(py);
+            for field in &self.fields {
+                fields_list.append(field)?;
+            }
+            cls_bound.setattr("__indexes__", fields_list.as_any())?;
+        }
+
+        if let Some(ref index_name) = self.name {
+            cls_bound.setattr("__index_name__", index_name.as_str())?;
+        }
+
+        if self.unique {
+            cls_bound.setattr("__index_unique__", true)?;
+        }
+
+        Ok(cls.into())
+    }
+
+    fn get_fields(&self) -> Vec<String> {
+        self.fields.clone()
+    }
+
+    fn get_name(&self) -> Option<String> {
+        self.name.clone()
+    }
+
+    fn is_unique(&self) -> bool {
+        self.unique
+    }
+}
+
+/// Unique constraint decorator for model fields
+///
+/// Example:
+/// ```python
+/// from rohas_orm import Unique
+///
+/// @Unique(fields=["email"])
+/// class User(Model):
+///     email: str
+/// ```
+#[pyclass]
+pub struct PyUnique {
+    fields: Vec<String>,
+    name: Option<String>,
+}
+
+#[pymethods]
+impl PyUnique {
+    #[new]
+    #[pyo3(signature = (*, fields = None, name = None))]
+    fn new(
+        fields: Option<Vec<String>>,
+        name: Option<String>,
+    ) -> Self {
+        Self {
+            fields: fields.unwrap_or_default(),
+            name,
+        }
+    }
+
+    fn __call__(
+        &self,
+        py: Python<'_>,
+        cls: Py<PyType>,
+    ) -> PyResult<Py<PyType>> {
+        let cls_bound = cls.bind(py);
+
+        if !self.fields.is_empty() {
+            let fields_list = PyList::empty(py);
+            for field in &self.fields {
+                fields_list.append(field)?;
+            }
+            cls_bound.setattr("__unique_constraints__", fields_list.as_any())?;
+        }
+
+        if let Some(ref constraint_name) = self.name {
+            cls_bound.setattr("__unique_name__", constraint_name.as_str())?;
+        }
+
+        Ok(cls.into())
+    }
+
+    fn get_fields(&self) -> Vec<String> {
+        self.fields.clone()
+    }
+
+    fn get_name(&self) -> Option<String> {
+        self.name.clone()
+    }
+}
+
+#[pyfunction]
+fn connect(url: &str) -> PyResult<PyDatabase> {
+    PyDatabase::new(url)
+}
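The three decorators communicate through dunder attributes on the class, so whatever consumes the models later can recover the metadata with `getattr`. A hedged sketch of that read-back path, assuming the attribute names used above and a pyo3 version with the `Bound` API this file already uses:

```rust
// Reading back the `__table_name__` attribute the decorator sets.
// Error handling is deliberately elided for the sketch.
use pyo3::prelude::*;
use pyo3::types::PyType;

fn table_name_of(cls: &Bound<'_, PyType>) -> Option<String> {
    cls.getattr("__table_name__")
        .ok()
        .and_then(|v| v.extract::<String>().ok())
}

fn main() {
    Python::with_gil(|py| {
        // Any Python type without the attribute yields None.
        let int_ty = py.get_type::<pyo3::types::PyInt>();
        assert_eq!(table_name_of(&int_ty), None);
    });
}
```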
+use crate::query_builder::QueryBuilder;
+use sqlx::Row;
+
+/// Query trait for building and executing queries
+pub trait Query {
+    async fn execute(&self, db: &Database) -> Result<Vec<serde_json::Value>>;
+
+    async fn execute_one(&self, db: &Database) -> Result<Option<serde_json::Value>>;
+
+    async fn execute_affected(&self, db: &Database) -> Result<u64>;
+
+    fn to_sql(&self) -> String;
+}
+
+impl Query for QueryBuilder {
+    async fn execute(&self, db: &Database) -> Result<Vec<serde_json::Value>> {
+        let sql = self.to_sql();
+
+        let results = match db.database_type() {
+            crate::connection::DatabaseType::Postgres => {
+                let pool = db.postgres_pool()?;
+                let rows = sqlx::query(&sql).fetch_all(pool).await?;
+                convert_pg_rows(rows)?
+            }
+            crate::connection::DatabaseType::Sqlite => {
+                let pool = db.sqlite_pool()?;
+                let rows = sqlx::query(&sql).fetch_all(pool).await?;
+                convert_sqlite_rows(rows)?
+            }
+            crate::connection::DatabaseType::MySql => {
+                let pool = db.mysql_pool()?;
+                let rows = sqlx::query(&sql).fetch_all(pool).await?;
+                convert_mysql_rows(rows)?
+            }
+        };
+
+        Ok(results)
+    }
+
+    async fn execute_one(&self, db: &Database) -> Result<Option<serde_json::Value>> {
+        let results = self.execute(db).await?;
+        Ok(results.into_iter().next())
+    }
+
+    async fn execute_affected(&self, db: &Database) -> Result<u64> {
+        let sql = self.to_sql();
+        db.execute(&sql).await
+    }
+
+    fn to_sql(&self) -> String {
+        QueryBuilder::to_sql(self)
+    }
+}
+
+fn convert_pg_rows(rows: Vec<sqlx::postgres::PgRow>) -> Result<Vec<serde_json::Value>> {
+    let mut results = Vec::new();
+    for row in rows {
+        let mut map = serde_json::Map::new();
+        for i in 0..row.len() {
+            let name = format!("column_{}", i);
+            let value = get_pg_value(&row, i)?;
+            map.insert(name, value);
+        }
+        results.push(serde_json::Value::Object(map));
+    }
+    Ok(results)
+}
+
+fn get_pg_value(row: &sqlx::postgres::PgRow, i: usize) -> Result<serde_json::Value> {
+    if let Ok(v) = row.try_get::<i64, _>(i) {
+        return Ok(serde_json::Value::Number(v.into()));
+    }
+    if let Ok(v) = row.try_get::<String, _>(i) {
+        return Ok(serde_json::Value::String(v));
+    }
+    if let Ok(v) = row.try_get::<f64, _>(i) {
+        return Ok(serde_json::Value::Number(
+            serde_json::Number::from_f64(v).unwrap_or(serde_json::Number::from(0))
+        ));
+    }
+    if let Ok(v) = row.try_get::<bool, _>(i) {
+        return Ok(serde_json::Value::Bool(v));
+    }
+    Ok(serde_json::Value::Null)
+}
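The row converters key every value as `column_{i}`, which is why `get_applied_migrations` has to fall back to `column_0`. sqlx already exposes the real names through its `Column` trait; a sketch of the lookup against an in-memory SQLite database (assumes tokio's `macros` feature for the test harness):

```rust
use sqlx::{sqlite::SqlitePoolOptions, Column, Row};

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let pool = SqlitePoolOptions::new().connect("sqlite::memory:").await?;
    let row = sqlx::query("SELECT 1 AS id, 'a' AS name")
        .fetch_one(&pool)
        .await?;
    // Real column names instead of "column_0"/"column_1".
    let names: Vec<&str> = row.columns().iter().map(|c| c.name()).collect();
    assert_eq!(names, ["id", "name"]);
    Ok(())
}
```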
Ok(serde_json::Value::Number(v.into()));
+    }
+    if let Ok(v) = row.try_get::<String, _>(i) {
+        return Ok(serde_json::Value::String(v));
+    }
+    if let Ok(v) = row.try_get::<f64, _>(i) {
+        return Ok(serde_json::Value::Number(
+            serde_json::Number::from_f64(v).unwrap_or(serde_json::Number::from(0))
+        ));
+    }
+    if let Ok(v) = row.try_get::<bool, _>(i) {
+        return Ok(serde_json::Value::Bool(v));
+    }
+    Ok(serde_json::Value::Null)
+}
+
+fn convert_sqlite_rows(rows: Vec<sqlx::sqlite::SqliteRow>) -> Result<Vec<serde_json::Value>> {
+    let mut results = Vec::new();
+    for row in rows {
+        let mut map = serde_json::Map::new();
+        for i in 0..row.len() {
+            let name = format!("column_{}", i);
+            let value = get_sqlite_value(&row, i)?;
+            map.insert(name, value);
+        }
+        results.push(serde_json::Value::Object(map));
+    }
+    Ok(results)
+}
+
+fn get_sqlite_value(row: &sqlx::sqlite::SqliteRow, i: usize) -> Result<serde_json::Value> {
+    if let Ok(v) = row.try_get::<i64, _>(i) {
+        return Ok(serde_json::Value::Number(v.into()));
+    }
+    if let Ok(v) = row.try_get::<String, _>(i) {
+        return Ok(serde_json::Value::String(v));
+    }
+    if let Ok(v) = row.try_get::<f64, _>(i) {
+        return Ok(serde_json::Value::Number(
+            serde_json::Number::from_f64(v).unwrap_or(serde_json::Number::from(0))
+        ));
+    }
+    if let Ok(v) = row.try_get::<bool, _>(i) {
+        return Ok(serde_json::Value::Bool(v));
+    }
+    Ok(serde_json::Value::Null)
+}
+
+fn convert_mysql_rows(rows: Vec<sqlx::mysql::MySqlRow>) -> Result<Vec<serde_json::Value>> {
+    let mut results = Vec::new();
+    for row in rows {
+        let mut map = serde_json::Map::new();
+        for i in 0..row.len() {
+            let name = format!("column_{}", i);
+            let value = get_mysql_value(&row, i)?;
+            map.insert(name, value);
+        }
+        results.push(serde_json::Value::Object(map));
+    }
+    Ok(results)
+}
+
+fn get_mysql_value(row: &sqlx::mysql::MySqlRow, i: usize) -> Result<serde_json::Value> {
+    if let Ok(v) = row.try_get::<i64, _>(i) {
+        return Ok(serde_json::Value::Number(v.into()));
+    }
+    if let Ok(v) = row.try_get::<String, _>(i) {
+        return Ok(serde_json::Value::String(v));
+    }
+    if let Ok(v) = row.try_get::<f64, _>(i) {
+        return Ok(serde_json::Value::Number(
+            serde_json::Number::from_f64(v).unwrap_or(serde_json::Number::from(0))
+        ));
+    }
+    if let Ok(v) = row.try_get::<bool, _>(i) {
+        return Ok(serde_json::Value::Bool(v));
+    }
+    Ok(serde_json::Value::Null)
+}
diff --git a/crates/rohas-orm/src/query_builder.rs b/crates/rohas-orm/src/query_builder.rs
new file mode 100644
index 0000000..19cd325
--- /dev/null
+++ b/crates/rohas-orm/src/query_builder.rs
@@ -0,0 +1,436 @@
+
+/// Fluent query builder for constructing SQL queries
+#[derive(Debug, Clone)]
+pub struct QueryBuilder {
+    select: Vec<String>,
+    from: Option<String>,
+    joins: Vec<Join>,
+    where_clauses: Vec<WhereClause>,
+    order_by: Vec<OrderBy>,
+    group_by: Vec<String>,
+    having: Vec<String>,
+    limit: Option<u64>,
+    offset: Option<u64>,
+    query_type: QueryType,
+    insert_table: Option<String>,
+    insert_values: Vec<Vec<String>>,
+    update_table: Option<String>,
+    update_values: Vec<(String, String)>,
+    delete_table: Option<String>,
+}
+
+#[derive(Debug, Clone)]
+enum QueryType {
+    Select,
+    Insert,
+    Update,
+    Delete,
+}
+
+#[derive(Debug, Clone)]
+struct Join {
+    join_type: JoinType,
+    table: String,
+    condition: String,
+}
+
+#[derive(Debug, Clone)]
+enum JoinType {
+    Inner,
+    Left,
+    Right,
+    Full,
+}
+
+#[derive(Debug, Clone)]
+struct WhereClause {
+    column: String,
+    operator: String,
+    value: String,
+    logical_op: LogicalOp,
+}
+
+#[derive(Debug, Clone)]
+enum LogicalOp {
+    And,
+    Or,
+}
+
+#[derive(Debug, Clone)]
+struct OrderBy {
+    column: String,
+    direction: OrderDirection,
+}
+
+#[derive(Debug, Clone)]
+enum OrderDirection {
+    Asc,
+    Desc,
+}
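One note before the `impl` below: the `where_eq`, `values`, and `set` methods interpolate escaped values directly into the SQL text. That is workable for trusted input, but sqlx's bound placeholders avoid string splicing entirely; a minimal sketch for comparison:

```rust
use sqlx::sqlite::SqlitePoolOptions;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let pool = SqlitePoolOptions::new().connect("sqlite::memory:").await?;
    sqlx::query("CREATE TABLE users (name TEXT)").execute(&pool).await?;
    // `?1` is bound by the driver, never spliced into the SQL string.
    sqlx::query("INSERT INTO users (name) VALUES (?1)")
        .bind("O'Brien; DROP TABLE users")
        .execute(&pool)
        .await?;
    Ok(())
}
```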
+
+impl QueryBuilder {
+    /// Create a new SELECT query builder
+    pub fn select(columns: &[&str]) -> Self {
+        Self {
+            select: columns.iter().map(|s| s.to_string()).collect(),
+            from: None,
+            joins: Vec::new(),
+            where_clauses: Vec::new(),
+            order_by: Vec::new(),
+            group_by: Vec::new(),
+            having: Vec::new(),
+            limit: None,
+            offset: None,
+            query_type: QueryType::Select,
+            insert_table: None,
+            insert_values: Vec::new(),
+            update_table: None,
+            update_values: Vec::new(),
+            delete_table: None,
+        }
+    }
+
+    /// Create a new SELECT * query builder
+    pub fn select_all() -> Self {
+        Self::select(&["*"])
+    }
+
+    /// Set the FROM clause
+    pub fn from(mut self, table: &str) -> Self {
+        self.from = Some(table.to_string());
+        self
+    }
+
+    /// Add an INNER JOIN
+    pub fn inner_join(mut self, table: &str, condition: &str) -> Self {
+        self.joins.push(Join {
+            join_type: JoinType::Inner,
+            table: table.to_string(),
+            condition: condition.to_string(),
+        });
+        self
+    }
+
+    /// Add a LEFT JOIN
+    pub fn left_join(mut self, table: &str, condition: &str) -> Self {
+        self.joins.push(Join {
+            join_type: JoinType::Left,
+            table: table.to_string(),
+            condition: condition.to_string(),
+        });
+        self
+    }
+
+    /// Add a WHERE clause with AND
+    pub fn where_eq(mut self, column: &str, value: &str) -> Self {
+        self.where_clauses.push(WhereClause {
+            column: column.to_string(),
+            operator: "=".to_string(),
+            value: format!("'{}'", value.replace("'", "''")),
+            logical_op: LogicalOp::And,
+        });
+        self
+    }
+
+    /// Add a WHERE clause with AND (numeric)
+    pub fn where_eq_num(mut self, column: &str, value: i64) -> Self {
+        self.where_clauses.push(WhereClause {
+            column: column.to_string(),
+            operator: "=".to_string(),
+            value: value.to_string(),
+            logical_op: LogicalOp::And,
+        });
+        self
+    }
+
+    /// Add a WHERE clause with AND (parameterized)
+    pub fn where_eq_param(mut self, column: &str, param: &str) -> Self {
+        self.where_clauses.push(WhereClause {
+            column: column.to_string(),
+            operator: "=".to_string(),
+            // `param` is emitted verbatim (e.g. "?" or "$1"); no quoting is applied.
+            value: param.to_string(),
+            logical_op: LogicalOp::And,
+        });
+        self
+    }
+
+    /// Add a WHERE clause with OR
+    pub fn or_where_eq(mut self, column: &str, value: &str) -> Self {
+        self.where_clauses.push(WhereClause {
+            column: column.to_string(),
+            operator: "=".to_string(),
+            value: format!("'{}'", value.replace("'", "''")),
+            logical_op: LogicalOp::Or,
+        });
+        self
+    }
+
+    /// Add ORDER BY clause
+    pub fn order_by(mut self, column: &str, direction: &str) -> Self {
+        self.order_by.push(OrderBy {
+            column: column.to_string(),
+            direction: if direction.to_uppercase() == "DESC" {
+                OrderDirection::Desc
+            } else {
+                OrderDirection::Asc
+            },
+        });
+        self
+    }
+
+    /// Add LIMIT clause
+    pub fn limit(mut self, limit: u64) -> Self {
+        self.limit = Some(limit);
+        self
+    }
+
+    /// Add OFFSET clause
+    pub fn offset(mut self, offset: u64) -> Self {
+        self.offset = Some(offset);
+        self
+    }
+
+    /// Create a new INSERT query builder
+    pub fn insert(table: &str) -> Self {
+        Self {
+            select: Vec::new(),
+            from: None,
+            joins: Vec::new(),
+            where_clauses: Vec::new(),
+            order_by: Vec::new(),
+            group_by: Vec::new(),
+            having: Vec::new(),
+            limit: None,
+            offset: None,
+            query_type: QueryType::Insert,
+            insert_table: Some(table.to_string()),
+            insert_values: Vec::new(),
+            update_table: None,
+            update_values: Vec::new(),
+            delete_table: None,
+        }
+    }
+
+    /// Add values to INSERT
+    pub fn values(mut self, values: Vec<&str>) -> Self {
+        self.insert_values.push(values.iter().map(|s| {
+            // Bare numeric literals pass through; everything else is quoted.
+            if s.parse::<i64>().is_ok() || s.parse::<f64>().is_ok() {
+                s.to_string()
+            } else {
+                format!("'{}'", s.replace("'", "''"))
+            }
+        }).collect());
+        self
+    }
+
+    /// Create a new UPDATE query builder
+    pub fn update(table: &str) -> Self {
+        Self {
+            select: Vec::new(),
+            from: None,
+            joins: Vec::new(),
+            where_clauses: Vec::new(),
+            order_by: Vec::new(),
+            group_by: Vec::new(),
+            having: Vec::new(),
+            limit: None,
+            offset: None,
+            query_type: QueryType::Update,
+            insert_table: None,
+            insert_values: Vec::new(),
+            update_table: Some(table.to_string()),
+            update_values: Vec::new(),
+            delete_table: None,
+        }
+    }
+
+    /// Set a column value in UPDATE
+    pub fn set(mut self, column: &str, value: &str) -> Self {
+        let val = if value.parse::<i64>().is_ok() || value.parse::<f64>().is_ok() {
+            value.to_string()
+        } else {
+            format!("'{}'", value.replace("'", "''"))
+        };
+        self.update_values.push((column.to_string(), val));
+        self
+    }
+
+    /// Create a new DELETE query builder
+    pub fn delete(table: &str) -> Self {
+        Self {
+            select: Vec::new(),
+            from: None,
+            joins: Vec::new(),
+            where_clauses: Vec::new(),
+            order_by: Vec::new(),
+            group_by: Vec::new(),
+            having: Vec::new(),
+            limit: None,
+            offset: None,
+            query_type: QueryType::Delete,
+            insert_table: None,
+            insert_values: Vec::new(),
+            update_table: None,
+            update_values: Vec::new(),
+            delete_table: Some(table.to_string()),
+        }
+    }
+
+    /// Convert the query builder to SQL string
+    pub fn to_sql(&self) -> String {
+        match &self.query_type {
+            QueryType::Select => self.build_select(),
+            QueryType::Insert => self.build_insert(),
+            QueryType::Update => self.build_update(),
+            QueryType::Delete => self.build_delete(),
+        }
+    }
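`to_sql` is pure string assembly, so its output can be pinned down in unit tests. A hedged sketch against the builder as written above; note how `where_eq` doubles the single quote in O'Brien:

```rust
// Assumes the QueryBuilder defined in this file.
#[cfg(test)]
mod tests {
    use super::QueryBuilder;

    #[test]
    fn select_renders_expected_sql() {
        let sql = QueryBuilder::select(&["id", "name"])
            .from("users")
            .where_eq("name", "O'Brien")
            .order_by("id", "desc")
            .limit(10)
            .to_sql();
        assert_eq!(
            sql,
            "SELECT id, name FROM users WHERE name = 'O''Brien' ORDER BY id DESC LIMIT 10"
        );
    }
}
```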
+
+    /// Shared WHERE rendering used by SELECT, UPDATE, and DELETE.
+    fn push_where(&self, sql: &mut String) {
+        if self.where_clauses.is_empty() {
+            return;
+        }
+        sql.push_str(" WHERE ");
+        for (i, clause) in self.where_clauses.iter().enumerate() {
+            if i > 0 {
+                match clause.logical_op {
+                    LogicalOp::And => sql.push_str(" AND "),
+                    LogicalOp::Or => sql.push_str(" OR "),
+                }
+            }
+            sql.push_str(&clause.column);
+            sql.push_str(" ");
+            sql.push_str(&clause.operator);
+            sql.push_str(" ");
+            sql.push_str(&clause.value);
+        }
+    }
+
+    fn build_select(&self) -> String {
+        let mut sql = String::new();
+
+        sql.push_str("SELECT ");
+        sql.push_str(&self.select.join(", "));
+
+        if let Some(ref from) = self.from {
+            sql.push_str(" FROM ");
+            sql.push_str(from);
+        }
+
+        for join in &self.joins {
+            match join.join_type {
+                JoinType::Inner => sql.push_str(" INNER JOIN "),
+                JoinType::Left => sql.push_str(" LEFT JOIN "),
+                JoinType::Right => sql.push_str(" RIGHT JOIN "),
+                JoinType::Full => sql.push_str(" FULL JOIN "),
+            }
+            sql.push_str(&join.table);
+            sql.push_str(" ON ");
+            sql.push_str(&join.condition);
+        }
+
+        self.push_where(&mut sql);
+
+        if !self.order_by.is_empty() {
+            sql.push_str(" ORDER BY ");
+            let orders: Vec<String> = self.order_by.iter().map(|ob| {
+                format!(
+                    "{} {}",
+                    ob.column,
+                    match ob.direction {
+                        OrderDirection::Asc => "ASC",
+                        OrderDirection::Desc => "DESC",
+                    }
+                )
+            }).collect();
+            sql.push_str(&orders.join(", "));
+        }
+
+        if let Some(limit) = self.limit {
+            sql.push_str(&format!(" LIMIT {}", limit));
+        }
+
+        if let Some(offset) = self.offset {
+            sql.push_str(&format!(" OFFSET {}", offset));
+        }
+
+        sql
+    }
+
+    fn build_insert(&self) -> String {
+        let mut sql = String::new();
+
+        if let Some(ref table) = self.insert_table {
+            sql.push_str("INSERT INTO ");
+            sql.push_str(table);
+
+            if !self.insert_values.is_empty() {
+                sql.push_str(" VALUES ");
+                let values: Vec<String> = self.insert_values.iter().map(|row| {
+                    format!("({})", row.join(", "))
+                }).collect();
+                sql.push_str(&values.join(", "));
+            }
+        }
+
+        sql
+    }
+
+    fn build_update(&self) -> String {
+        let mut sql = String::new();
+
+        if let Some(ref table) = self.update_table {
+            sql.push_str("UPDATE ");
+            sql.push_str(table);
+            sql.push_str(" SET ");
+
+            let sets: Vec<String> = self.update_values.iter()
+                .map(|(col, val)| format!("{} = {}", col, val))
+                .collect();
+            sql.push_str(&sets.join(", "));
+
+            self.push_where(&mut sql);
+        }
+
+        sql
+    }
+
+    fn build_delete(&self) -> String {
+        let mut sql = String::new();
+
+        if let Some(ref table) = self.delete_table {
+            sql.push_str("DELETE FROM ");
+            sql.push_str(table);
+
+            self.push_where(&mut sql);
+        }
+
+        sql
+    }
+}
diff --git a/crates/rohas-orm/tests/migration_sqlite_test.rs b/crates/rohas-orm/tests/migration_sqlite_test.rs
new file mode 100644
index 0000000..c45ce4e
--- /dev/null
+++ b/crates/rohas-orm/tests/migration_sqlite_test.rs
@@ -0,0 +1,244 @@
+use rohas_orm::{Database, MigrationManager, Query, QueryBuilder};
+use rohas_parser::ast::{Attribute, Field, FieldType, Model, Schema};
+use tempfile::TempDir;
+
+fn create_test_schema() -> Schema {
+    let mut schema = Schema::new();
+
+    let user_model = Model {
+        name: "User".to_string(),
+        fields: vec![
+            Field {
+                name:
"id".to_string(), + field_type: FieldType::Int, + optional: false, + attributes: vec![ + Attribute { name: "id".to_string(), args: vec![] }, + Attribute { name: "auto".to_string(), args: vec![] }, + ], + }, + Field { + name: "name".to_string(), + field_type: FieldType::String, + optional: false, + attributes: vec![], + }, + Field { + name: "email".to_string(), + field_type: FieldType::String, + optional: false, + attributes: vec![Attribute { name: "unique".to_string(), args: vec![] }], + }, + Field { + name: "createdAt".to_string(), + field_type: FieldType::DateTime, + optional: false, + attributes: vec![Attribute { name: "default".to_string(), args: vec!["now".to_string()] }], + }, + ], + attributes: vec![], + }; + + // Post model with relationship + let post_model = Model { + name: "Post".to_string(), + fields: vec![ + Field { + name: "id".to_string(), + field_type: FieldType::Int, + optional: false, + attributes: vec![ + Attribute { name: "id".to_string(), args: vec![] }, + Attribute { name: "auto".to_string(), args: vec![] }, + ], + }, + Field { + name: "title".to_string(), + field_type: FieldType::String, + optional: false, + attributes: vec![], + }, + Field { + name: "content".to_string(), + field_type: FieldType::String, + optional: true, + attributes: vec![], + }, + Field { + name: "authorId".to_string(), + field_type: FieldType::Int, + optional: false, + attributes: vec![], + }, + Field { + name: "author".to_string(), + field_type: FieldType::Custom("User".to_string()), + optional: true, + attributes: vec![ + Attribute { name: "relation".to_string(), args: vec!["authorId".to_string()] }, + ], + }, + ], + attributes: vec![], + }; + + schema.models.push(user_model); + schema.models.push(post_model); + schema +} + +async fn setup_test_db() -> (Database, TempDir) { + let temp_dir = TempDir::new().unwrap(); + let db_path = temp_dir.path().join("test.db"); + let db_path_str = db_path.to_string_lossy(); + let db_url = if db_path_str.starts_with('/') { + format!("sqlite://{}", db_path_str) + } else { + format!("sqlite:///{}", db_path_str) + }; + let db = Database::connect(&db_url).await.unwrap(); + (db, temp_dir) +} + +#[tokio::test] +async fn test_init_migration_table() { + let (db, _temp_dir) = setup_test_db().await; + let migrations_dir = std::path::PathBuf::from("migrations"); + let manager = MigrationManager::new(migrations_dir, db.clone()); + + manager.init().await.unwrap(); + + let query = QueryBuilder::select_all() + .from("_rohas_migrations") + .limit(1); + + let _results = query.execute(&db).await.unwrap(); + assert!(true, "Migration table created successfully"); +} + +#[tokio::test] +async fn test_generate_migration_from_schema() { + let schema = create_test_schema(); + let (db, _temp_dir) = setup_test_db().await; + + let manager = MigrationManager::new(std::path::PathBuf::from("migrations"), db.clone()); + let (up_sql, down_sql) = manager.generate_migration_from_schema( + &schema, + ).await.unwrap(); + + assert!(up_sql.contains("CREATE TABLE"), "Up migration should contain CREATE TABLE"); + assert!(up_sql.contains("users"), "Should create users table"); + assert!(up_sql.contains("posts"), "Should create posts table"); + assert!(up_sql.contains("PRIMARY KEY"), "Should have primary key"); + assert!(up_sql.contains("UNIQUE"), "Should have unique constraint"); + + assert!(down_sql.contains("DROP TABLE"), "Down migration should contain DROP TABLE"); +} + +#[tokio::test] +async fn test_apply_migration() { + let (db, _temp_dir) = setup_test_db().await; + let temp_migrations = 
TempDir::new().unwrap(); + let migrations_dir = temp_migrations.path().to_path_buf(); + + let manager = MigrationManager::new(migrations_dir.clone(), db.clone()); + manager.init().await.unwrap(); + + let schema = create_test_schema(); + let manager = MigrationManager::new(std::path::PathBuf::from("migrations"), db.clone()); + let (up_sql, down_sql) = manager.generate_migration_from_schema( + &schema, + ).await.unwrap(); + + let migration = rohas_orm::Migration { + name: "test_migration".to_string(), + timestamp: chrono::Utc::now().timestamp(), + up_sql, + down_sql, + }; + + manager.apply_migration(&migration).await.unwrap(); + + let query = QueryBuilder::select_all() + .from("users") + .limit(1); + let _results = query.execute(&db).await.unwrap(); + + let query = QueryBuilder::select_all() + .from("posts") + .limit(1); + let _results = query.execute(&db).await.unwrap(); + + let applied = manager.get_applied_migrations().await.unwrap(); + assert!(applied.contains(&"test_migration".to_string()), "Migration should be recorded"); +} + +#[tokio::test] +async fn test_migration_idempotency() { + let (db, _temp_dir) = setup_test_db().await; + let temp_migrations = TempDir::new().unwrap(); + let migrations_dir = temp_migrations.path().to_path_buf(); + + let manager = MigrationManager::new(migrations_dir, db.clone()); + manager.init().await.unwrap(); + + let schema = create_test_schema(); + let manager = MigrationManager::new(std::path::PathBuf::from("migrations"), db.clone()); + let (up_sql, down_sql) = manager.generate_migration_from_schema( + &schema, + ).await.unwrap(); + + let migration = rohas_orm::Migration { + name: "test_migration_2".to_string(), + timestamp: chrono::Utc::now().timestamp(), + up_sql: up_sql.clone(), + down_sql, + }; + + manager.apply_migration(&migration).await.unwrap(); + manager.apply_migration(&migration).await.unwrap(); + + let applied = manager.get_applied_migrations().await.unwrap(); + let count = applied.iter().filter(|&n| n == "test_migration_2").count(); + assert_eq!(count, 1, "Migration should only be recorded once"); +} + +#[tokio::test] +async fn test_foreign_key_constraints() { + let (db, _temp_dir) = setup_test_db().await; + let temp_migrations = TempDir::new().unwrap(); + let migrations_dir = temp_migrations.path().to_path_buf(); + + let manager = MigrationManager::new(migrations_dir, db.clone()); + manager.init().await.unwrap(); + + let schema = create_test_schema(); + let (up_sql, _down_sql) = manager.generate_migration_from_schema( + &schema, + ).await.unwrap(); + + assert!(up_sql.contains("FOREIGN KEY"), "Should contain foreign key constraint"); + assert!(up_sql.contains("authorId"), "Should reference authorId field"); + assert!(up_sql.contains("users"), "Should reference users table"); + + let migration = rohas_orm::Migration { + name: "test_fk_migration".to_string(), + timestamp: chrono::Utc::now().timestamp(), + up_sql, + down_sql: "DROP TABLE IF EXISTS posts; DROP TABLE IF EXISTS users;".to_string(), + }; + + manager.apply_migration(&migration).await.unwrap(); + + let insert_user_sql = "INSERT INTO users (id, name, email, createdAt) VALUES (1, 'Test User', 'test@example.com', CURRENT_TIMESTAMP)"; + db.execute(insert_user_sql).await.unwrap(); + + let insert_post_sql = "INSERT INTO posts (id, title, content, authorId) VALUES (1, 'Test Post', 'Content', 1)"; + db.execute(insert_post_sql).await.unwrap(); + + let query = QueryBuilder::select_all() + .from("posts") + .where_eq_num("id", 1); + let results = query.execute(&db).await.unwrap(); + 
assert!(!results.is_empty(), "Post should be inserted");
+}
diff --git a/crates/rohas-runtime/src/python_runtime.rs b/crates/rohas-runtime/src/python_runtime.rs
index 737a6ba..8821eca 100644
--- a/crates/rohas-runtime/src/python_runtime.rs
+++ b/crates/rohas-runtime/src/python_runtime.rs
@@ -56,7 +56,46 @@ pub struct PythonRuntime {
 impl PythonRuntime {
     pub fn new() -> Result<Self> {
-        Python::with_gil(|_| {
+        Python::with_gil(|py| {
+            match py.import("site") {
+                Ok(_) => {
+                    if let Ok(sys) = py.import("sys") {
+                        if let Ok(sys_path) = sys.getattr("path") {
+                            if let Ok(path_list) = sys_path.extract::<Vec<String>>() {
+                                let site_packages_count = path_list.iter()
+                                    .filter(|p| p.contains("site-packages") || p.contains("dist-packages"))
+                                    .count();
+                                let venv_count = path_list.iter()
+                                    .filter(|p| p.contains("venv") || p.contains("conda") || p.contains(".virtualenv"))
+                                    .count();
+
+                                if site_packages_count > 0 || venv_count > 0 {
+                                    info!(
+                                        "Python site module loaded - found {} site-packages paths and {} virtual environment paths",
+                                        site_packages_count,
+                                        venv_count
+                                    );
+                                    debug!("Python sys.path after site initialization ({} total entries):", path_list.len());
+                                    for (i, path) in path_list.iter().enumerate() {
+                                        if i < 5 || path.contains("site-packages") || path.contains("venv") || path.contains("conda") {
+                                            debug!("  [{}] {}", i, path);
+                                        }
+                                    }
+                                    if path_list.len() > 5 {
+                                        debug!("  ... and {} more entries", path_list.len() - 5);
+                                    }
+                                } else {
+                                    debug!("Python site module loaded, but no virtual environment detected");
+                                }
+                            }
+                        }
+                    }
+                }
+                Err(e) => {
+                    debug!("Failed to import site module (non-fatal): {}", e);
+                }
+            }
+
             info!("Python runtime initialized");
         });
diff --git a/examples/hello-world/database.db b/examples/hello-world/database.db
new file mode 100644
index 0000000..5ae0d2d
Binary files /dev/null and b/examples/hello-world/database.db differ
diff --git a/examples/hello-world/migrations/1765128765_migration_name.sql b/examples/hello-world/migrations/1765128765_migration_name.sql
new file mode 100644
index 0000000..f4acabd
--- /dev/null
+++ b/examples/hello-world/migrations/1765128765_migration_name.sql
@@ -0,0 +1,35 @@
+-- Up Migration
+CREATE TABLE IF NOT EXISTS posts (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    title TEXT NOT NULL,
+    content TEXT,
+    tests TEXT,
+    userId INTEGER NOT NULL,
+    createdAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    updatedAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    FOREIGN KEY (userId) REFERENCES users(id) ON DELETE CASCADE
+);
+
+
+CREATE TABLE IF NOT EXISTS users (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    name TEXT NOT NULL,
+    email TEXT NOT NULL,
+    createdAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    updatedAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    UNIQUE (email)
+);
+
+
+CREATE TABLE IF NOT EXISTS healthresponses (
+    status TEXT NOT NULL,
+    timestamp TEXT NOT NULL
+);
+
+
+
+-- Down Migration
+DROP TABLE IF EXISTS posts;
+DROP TABLE IF EXISTS users;
+DROP TABLE IF EXISTS healthresponses;
diff --git a/examples/hello-world/migrations/1765129659_add_deleted_at_to_user.sql b/examples/hello-world/migrations/1765129659_add_deleted_at_to_user.sql
new file mode 100644
index 0000000..91a8301
--- /dev/null
+++ b/examples/hello-world/migrations/1765129659_add_deleted_at_to_user.sql
@@ -0,0 +1,10 @@
+-- Up Migration
+ALTER TABLE users RENAME COLUMN deletedAt TO deletedAta;
+
+
+
+-- Down Migration
+ALTER TABLE users RENAME COLUMN deletedAta TO deletedAt;
+
diff --git
a/examples/hello-world/migrations/1765129735_add_deletedat_new_to_user.sql b/examples/hello-world/migrations/1765129735_add_deletedat_new_to_user.sql new file mode 100644 index 0000000..0270f96 --- /dev/null +++ b/examples/hello-world/migrations/1765129735_add_deletedat_new_to_user.sql @@ -0,0 +1,10 @@ +-- Up Migration +ALTER TABLE users ADD COLUMN deletedAt DATETIME; + + + + +-- Down Migration +ALTER TABLE users DROP COLUMN deletedAt; + + diff --git a/examples/hello-world/migrations/1765129879_add_all_relationships.sql b/examples/hello-world/migrations/1765129879_add_all_relationships.sql new file mode 100644 index 0000000..f9a4b62 --- /dev/null +++ b/examples/hello-world/migrations/1765129879_add_all_relationships.sql @@ -0,0 +1,54 @@ +-- Up Migration +CREATE TABLE IF NOT EXISTS profiles ( + id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, + bio TEXT, + avatar TEXT, + userId INTEGER NOT NULL, + createdAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + updatedAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + UNIQUE (userId), + FOREIGN KEY (userId) REFERENCES users(id) ON DELETE CASCADE +); + + +ALTER TABLE users ADD COLUMN profile INTEGER; +-- SQLite DROP COLUMN requires recreating the table +-- ALTER TABLE users DROP COLUMN deletedAta; + + +CREATE TABLE IF NOT EXISTS tags ( + id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, + name TEXT NOT NULL, + createdAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + updatedAt DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + UNIQUE (name) +); + + +CREATE TABLE IF NOT EXISTS post_tag ( + post_id BIGINT NOT NULL, + tag_id BIGINT NOT NULL, + PRIMARY KEY (post_id, tag_id), + FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE, + FOREIGN KEY (tag_id) REFERENCES tags(id) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS post_tag ( + tag_id BIGINT NOT NULL, + post_id BIGINT NOT NULL, + PRIMARY KEY (tag_id, post_id), + FOREIGN KEY (tag_id) REFERENCES tags(id) ON DELETE CASCADE, + FOREIGN KEY (post_id) REFERENCES posts(id) ON DELETE CASCADE +); + + + +-- Down Migration +DROP TABLE IF EXISTS profiles; +ALTER TABLE users DROP COLUMN profile; +-- Cannot automatically recreate dropped column deletedAta + + +DROP TABLE IF EXISTS tags; +DROP TABLE IF EXISTS post_tag; +DROP TABLE IF EXISTS post_tag; diff --git a/examples/hello-world/pyproject.toml b/examples/hello-world/pyproject.toml index 273b3f2..0a4f761 100644 --- a/examples/hello-world/pyproject.toml +++ b/examples/hello-world/pyproject.toml @@ -1,11 +1,12 @@ [project] -name = "rohas-app" +name = "hello-world" version = "0.1.0" description = "Rohas event-driven application" requires-python = ">=3.9" dependencies = [ "pydantic>=2.0.0", "typing-extensions>=4.0.0", + "rohas-orm>=0.1.0", ] [project.optional-dependencies] diff --git a/examples/hello-world/schema/models/post.ro b/examples/hello-world/schema/models/post.ro new file mode 100644 index 0000000..67575af --- /dev/null +++ b/examples/hello-world/schema/models/post.ro @@ -0,0 +1,11 @@ +model Post { + id Int @id @auto + title String + content String? + tests String? + userId Int + author User? @relation(userId) // Many-to-One: Post belongs to one User + tags Tag[] // Many-to-Many: Post has many Tags + createdAt DateTime @default(now) + updatedAt DateTime @default(now) +} diff --git a/examples/hello-world/schema/models/profile.ro b/examples/hello-world/schema/models/profile.ro new file mode 100644 index 0000000..f9e4508 --- /dev/null +++ b/examples/hello-world/schema/models/profile.ro @@ -0,0 +1,9 @@ +model Profile { + id Int @id @auto + bio String? 
+ avatar String? + userId Int @unique + user User? @relation(userId) + createdAt DateTime @default(now) + updatedAt DateTime @default(now) +} diff --git a/examples/hello-world/schema/models/tag.ro b/examples/hello-world/schema/models/tag.ro new file mode 100644 index 0000000..21ba8e5 --- /dev/null +++ b/examples/hello-world/schema/models/tag.ro @@ -0,0 +1,7 @@ +model Tag { + id Int @id @auto + name String @unique + posts Post[] + createdAt DateTime @default(now) + updatedAt DateTime @default(now) +} diff --git a/examples/hello-world/schema/models/user.ro b/examples/hello-world/schema/models/user.ro index 7325a83..8933f10 100644 --- a/examples/hello-world/schema/models/user.ro +++ b/examples/hello-world/schema/models/user.ro @@ -2,5 +2,9 @@ model User { id Int @id @auto name String email String @unique + posts Post[] // One-to-Many: User has many Posts + profile Profile? // One-to-One: User has one Profile (virtual field) + deletedAt DateTime? createdAt DateTime @default(now) + updatedAt DateTime @default(now) } diff --git a/examples/hello-world/src/.editorconfig b/examples/hello-world/src/.editorconfig new file mode 100644 index 0000000..22777ef --- /dev/null +++ b/examples/hello-world/src/.editorconfig @@ -0,0 +1,24 @@ +# EditorConfig is awesome: https://EditorConfig.org + +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*.{ts,tsx,js,jsx,json}] +indent_style = space +indent_size = 2 + +[*.{py}] +indent_style = space +indent_size = 4 + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 + +[*.md] +trim_trailing_whitespace = false diff --git a/examples/hello-world/src/.gitignore b/examples/hello-world/src/.gitignore new file mode 100644 index 0000000..bdf3be8 --- /dev/null +++ b/examples/hello-world/src/.gitignore @@ -0,0 +1,52 @@ +# Dependencies +node_modules/ +__pycache__/ +*.pyc +*.pyo +*.pyd +.Python +env/ +venv/ +ENV/ +.venv/ + +# Build outputs +dist/ +build/ +*.egg-info/ +.tsbuildinfo + +# IDE +.vscode/ +.idea/ +*.swp +*.swo +*~ +.DS_Store + +# Logs +*.log +logs/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# Environment variables +.env +.env.local +.env.*.local + +# OS +.DS_Store +Thumbs.db + +# Testing +coverage/ +.coverage +.pytest_cache/ +*.cover +.hypothesis/ + +# Rohas compiled output +.rohas/ +src/generated/ diff --git a/examples/hello-world/src/README.md b/examples/hello-world/src/README.md new file mode 100644 index 0000000..9e3e416 --- /dev/null +++ b/examples/hello-world/src/README.md @@ -0,0 +1,109 @@ +# src + +Rohas event-driven application + +## Project Structure + +``` +├── schema/ # Schema definitions (.ro files) +│ ├── api/ # API endpoint schemas +│ ├── events/ # Event schemas +│ ├── models/ # Data model schemas +│ └── cron/ # Cron job schemas +├── src/ +│ ├── generated/ # Auto-generated types (DO NOT EDIT) +│ └── handlers/ # Your handler implementations +│ ├── api/ # API handlers +│ ├── events/ # Event handlers +│ └── cron/ # Cron job handlers +└── config/ # Configuration files +``` + +## Getting Started + +### Installation + +```bash +# Install dependencies (TypeScript) +npm install + +# Or for Python +pip install -r requirements.txt +``` + +### Development + +```bash +# Generate code from schema +rohas codegen + +# Start development server +rohas dev + +# Validate schema +rohas validate +``` + +## Schema Overview + + +### APIs + +- `GET /health` - Health +- `POST /users` - CreateUser +- `GET /test` - Test +- `GET /timeline/fast` - TimelineTestFast +- `GET /timeline/slow` - 
TimelineTestSlow +- `GET /timeline/very-slow` - TimelineTestVerySlow +- `GET /timeline/multi-step` - TimelineTestMultiStep + +### Events + +- `FastCompleted` - Payload: Json +- `SlowCompleted` - Payload: Json +- `VerySlowCompleted` - Payload: Json +- `BottleneckDetected` - Payload: Json +- `MajorBottleneckDetected` - Payload: Json +- `ValidationComplete` - Payload: Json +- `ProcessingComplete` - Payload: Json +- `ExternalCallComplete` - Payload: Json +- `FinalizationComplete` - Payload: Json +- `CleanupStep1` - Payload: Json +- `CleanupStep2` - Payload: Json +- `BottleneckLogged` - Payload: Json +- `WelcomeEmailSent` - Payload: Json +- `UserCreated` - Payload: User +- `ManualTrigger` - Payload: String + +### Cron Jobs + +- `DailyCleanup` - Schedule: 0 */5 * * * * + + +## Handler Naming Convention + +Handler files must be named exactly as the API/Event/Cron name in the schema: + +- API `Health` → `src/handlers/api/Health.ts` +- Event `UserCreated` → Handler defined in event schema +- Cron `DailyCleanup` → `src/handlers/cron/DailyCleanup.ts` + +## Generated Code + +The `src/generated/` directory contains auto-generated TypeScript types and interfaces. +**DO NOT EDIT** these files manually - they will be regenerated when you run `rohas codegen`. + +## Adding New Features + +1. Define your schema in `schema/` directory +2. Run `rohas codegen` to generate types and handler stubs +3. Implement your handler logic in `src/handlers/` +4. Test with `rohas dev` + +## Configuration + +See `config/rohas.toml` for project configuration. + +## License + +MIT diff --git a/examples/hello-world/src/handlers/api/test.py b/examples/hello-world/src/handlers/api/test.py index decace8..ced0812 100644 --- a/examples/hello-world/src/handlers/api/test.py +++ b/examples/hello-world/src/handlers/api/test.py @@ -1,3 +1,4 @@ +from rohas_orm import connect from datetime import datetime from generated.state import State from generated.models.user import User @@ -5,7 +6,7 @@ async def handle_test(req: TestRequest, state: State) -> TestResponse: user = User(id=1, name='John Doe', email='john.doe@example.com', createdAt=datetime.now()) - + db = connect("sqlite://:memory:") # Explicitly trigger event with custom payload state.trigger_event('UserCreated', { 'id': user.id, @@ -32,4 +33,12 @@ async def handle_test(req: TestRequest, state: State) -> TestResponse: state.logger.debug('Hello, world!') state.logger.trace('Hello, world!') + # user = User(id=1, name='John Doe', email='john.doe@example.com', createdAt=datetime.now()) + # save() is synchronous (blocks internally) + user.save(db) + + users = User.find_all(db) + + print(users) + return TestResponse(data="Hello, world!s") diff --git a/examples/hello-world/src/pyproject.toml b/examples/hello-world/src/pyproject.toml new file mode 100644 index 0000000..fc4327b --- /dev/null +++ b/examples/hello-world/src/pyproject.toml @@ -0,0 +1,32 @@ +[project] +name = "src" +version = "0.1.0" +description = "Rohas event-driven application" +requires-python = ">=3.9" +dependencies = [ + "pydantic>=2.0.0", + "typing-extensions>=4.0.0", + "rohas-orm>=0.1.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=7.0.0", + "black>=23.0.0", + "mypy>=1.0.0", + "ruff>=0.1.0", +] + +[tool.black] +line-length = 100 +target-version = ['py39', 'py310', 'py311'] + +[tool.mypy] +python_version = "3.9" +strict = true +warn_return_any = true +warn_unused_configs = true + +[tool.ruff] +line-length = 100 +target-version = "py39" diff --git a/examples/hello-world/src/requirements.txt 
b/examples/hello-world/src/requirements.txt new file mode 100644 index 0000000..34973ad --- /dev/null +++ b/examples/hello-world/src/requirements.txt @@ -0,0 +1,6 @@ +# Python dependencies for Rohas project +# Add your project-specific dependencies here + +# Common dependencies +pydantic>=2.0.0 +typing-extensions>=4.0.0