From bc62c358e2e10f38d233aadfc473985682cc8bfb Mon Sep 17 00:00:00 2001
From: William Batista
Date: Sun, 29 Oct 2023 22:58:08 -0400
Subject: [PATCH] Using authentication in the file server

RAHHHHHH YEAAAAAA. It takes advantage of expiration times as well as which
usernames are allowed to download. Macaroons are really cool. (A condensed
mint/verify sketch of the new flow follows the patch.)
---
 .gitignore                                    |   3 +-
 ...84bb9682e1e9ed1d72c89c2e4d6a9066f4f9e.json |  38 --
 ...6c361c33aa8a71f128149f79a9df7d90c8f1a.json |  12 -
 ...1ebd283f7d40ccf1b7ac8cf79108aabd66122.json |  20 -
 Cargo.lock                                    | 298 +++++++++++-
 Cargo.toml                                    |   1 +
 auth/Cargo.toml                               |  13 +-
 .../20231025032307_create_users.sql           |   6 +
 auth/rustfmt.toml                             |   1 +
 auth/src/main.rs                              | 114 +++--
 bfsp/Cargo.toml                               |   4 +
 bfsp/rustfmt.toml                             |   1 +
 bfsp/src/lib.rs                               | 117 +++++
 ...e322dc8e513c2477c5c485557abf42132ed90.json |  26 --
 ...f01ff396b75789dd8549d8b034343a5d90110.json |  44 --
 ...898a2feee638f6ac6c40fbdb8cd585d67ac1c.json |  12 -
 ...6ca630eadbfd0aa0905400ac927115863af3c.json |  12 -
 ...0b4065abb13047308395de87dc1255c9e799a.json |  12 -
 ...493187dad72a8cd5da6495070254985507c69.json |  20 -
 ...01d6870f8d10c7eb66590ffc23ca9798742b7.json |  20 -
 ...bbd6f7a50e7b81f0935cd85a9c8f5d939df3d.json |  12 -
 ...abfafb74da77f3f02f81fdda819041e0a4cca.json |  20 -
 ...a862e07dbc11f367c0b6955e911119bdc660d.json |  12 -
 ...e477b8beca44fcb46fb98d1e2751c24ac29c7.json |  32 --
 ...9cb99f5f348e1e84e687ee0018c79d0402867.json |  12 -
 ...5904859a97abe9337cdfcca183ff0ec0f4bfd.json |  26 --
 cli/Cargo.toml                                |   5 +-
 cli/flake.nix                                 |   1 +
 cli/migrations/20231013033720_create_keys.sql |   3 +-
 cli/rustfmt.toml                              |   1 +
 cli/src/main.rs                               | 433 +++++++++++-------
 flake.lock                                    |  30 +-
 flake.nix                                     |   1 +
 migrations/20231018041636_chunks.sql          |   3 +-
 src/db.rs                                     |  69 +--
 src/main.rs                                   | 161 ++++++-
 36 files changed, 991 insertions(+), 604 deletions(-)
 delete mode 100644 .sqlx/query-794284f4d0baa1904aa3e6da10f84bb9682e1e9ed1d72c89c2e4d6a9066f4f9e.json
 delete mode 100644 .sqlx/query-e5c9fe5d80662319b22480aa53e6c361c33aa8a71f128149f79a9df7d90c8f1a.json
 delete mode 100644 .sqlx/query-edbf4dc284e12c929251d0ac0491ebd283f7d40ccf1b7ac8cf79108aabd66122.json
 create mode 100644 auth/migrations/20231025032307_create_users.sql
 create mode 100644 auth/rustfmt.toml
 create mode 100644 bfsp/rustfmt.toml
 delete mode 100644 cli/.sqlx/query-0f8986b6e007bd919a4e0c1695be322dc8e513c2477c5c485557abf42132ed90.json
 delete mode 100644 cli/.sqlx/query-120595aaef9c8b95c552dfecad8f01ff396b75789dd8549d8b034343a5d90110.json
 delete mode 100644 cli/.sqlx/query-1f25ee2d22351e26c2d82e4f67d898a2feee638f6ac6c40fbdb8cd585d67ac1c.json
 delete mode 100644 cli/.sqlx/query-392c5e533ba1c22d4067839bec16ca630eadbfd0aa0905400ac927115863af3c.json
 delete mode 100644 cli/.sqlx/query-3a7d5ada427caa9535e4cd7e7ac0b4065abb13047308395de87dc1255c9e799a.json
 delete mode 100644 cli/.sqlx/query-46d8bba45723576aef83026b2a4493187dad72a8cd5da6495070254985507c69.json
 delete mode 100644 cli/.sqlx/query-508f7a207e1d454d5bff1813f0c01d6870f8d10c7eb66590ffc23ca9798742b7.json
 delete mode 100644 cli/.sqlx/query-74bd58b016f88b5fbafbc9af6fbbbd6f7a50e7b81f0935cd85a9c8f5d939df3d.json
 delete mode 100644 cli/.sqlx/query-9f035ef640f878816dc3a89d0e8abfafb74da77f3f02f81fdda819041e0a4cca.json
 delete mode 100644 cli/.sqlx/query-a92f90e82b86bc57b42411d7f51a862e07dbc11f367c0b6955e911119bdc660d.json
 delete mode 100644 cli/.sqlx/query-e2e28afeea514252646fe23f211e477b8beca44fcb46fb98d1e2751c24ac29c7.json
 delete mode 100644 cli/.sqlx/query-e998823098cca1e20cc8a6bc8c69cb99f5f348e1e84e687ee0018c79d0402867.json
 delete mode 100644
cli/.sqlx/query-f2c7fa3d6eac061360cf7ea89295904859a97abe9337cdfcca183ff0ec0f4bfd.json create mode 100644 cli/rustfmt.toml diff --git a/.gitignore b/.gitignore index 7fb7ffe..9fdc579 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ **/target **/.direnv /chunks -**/data.db +**/data.db* +**/output.log diff --git a/.sqlx/query-794284f4d0baa1904aa3e6da10f84bb9682e1e9ed1d72c89c2e4d6a9066f4f9e.json b/.sqlx/query-794284f4d0baa1904aa3e6da10f84bb9682e1e9ed1d72c89c2e4d6a9066f4f9e.json deleted file mode 100644 index f2fd996..0000000 --- a/.sqlx/query-794284f4d0baa1904aa3e6da10f84bb9682e1e9ed1d72c89c2e4d6a9066f4f9e.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select hash, chunk_size, nonce, indice from chunks where id = ?", - "describe": { - "columns": [ - { - "name": "hash", - "ordinal": 0, - "type_info": "Text" - }, - { - "name": "chunk_size", - "ordinal": 1, - "type_info": "Int64" - }, - { - "name": "nonce", - "ordinal": 2, - "type_info": "Blob" - }, - { - "name": "indice", - "ordinal": 3, - "type_info": "Int64" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false, - false, - false, - false - ] - }, - "hash": "794284f4d0baa1904aa3e6da10f84bb9682e1e9ed1d72c89c2e4d6a9066f4f9e" -} diff --git a/.sqlx/query-e5c9fe5d80662319b22480aa53e6c361c33aa8a71f128149f79a9df7d90c8f1a.json b/.sqlx/query-e5c9fe5d80662319b22480aa53e6c361c33aa8a71f128149f79a9df7d90c8f1a.json deleted file mode 100644 index a089337..0000000 --- a/.sqlx/query-e5c9fe5d80662319b22480aa53e6c361c33aa8a71f128149f79a9df7d90c8f1a.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "insert into chunks (hash, id, chunk_size, indice, nonce) values ( ?, ?, ?, ?, ? )", - "describe": { - "columns": [], - "parameters": { - "Right": 5 - }, - "nullable": [] - }, - "hash": "e5c9fe5d80662319b22480aa53e6c361c33aa8a71f128149f79a9df7d90c8f1a" -} diff --git a/.sqlx/query-edbf4dc284e12c929251d0ac0491ebd283f7d40ccf1b7ac8cf79108aabd66122.json b/.sqlx/query-edbf4dc284e12c929251d0ac0491ebd283f7d40ccf1b7ac8cf79108aabd66122.json deleted file mode 100644 index f9f18ec..0000000 --- a/.sqlx/query-edbf4dc284e12c929251d0ac0491ebd283f7d40ccf1b7ac8cf79108aabd66122.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select id from chunks where id = ?", - "describe": { - "columns": [ - { - "name": "id", - "ordinal": 0, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false - ] - }, - "hash": "edbf4dc284e12c929251d0ac0491ebd283f7d40ccf1b7ac8cf79108aabd66122" -} diff --git a/Cargo.lock b/Cargo.lock index a8d285f..fa1e5f6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -50,6 +50,15 @@ dependencies = [ "version_check", ] +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + [[package]] name = "allocator-api2" version = "0.2.16" @@ -162,11 +171,12 @@ dependencies = [ "argon2", "axum", "axum-macros", + "bfsp", + "fern", + "humantime", + "log", "macaroon", "rand", - "serde", - "serde_derive", - "serde_json", "sqlx", "tokio", ] @@ -278,7 +288,11 @@ dependencies = [ "anyhow", "blake3", "chacha20poly1305", + "macaroon", + "regex", "rkyv", + "serde", + "serde_derive", "sqlx", "tokio", "uuid", @@ -487,6 +501,7 @@ dependencies = [ "futures", "humantime", "log", + "reqwest", "rkyv", "sqlx", "tokio", @@ -523,6 +538,22 @@ version = "0.3.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" +[[package]] +name = "core-foundation" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" + [[package]] name = "cpufeatures" version = "0.2.10" @@ -669,6 +700,15 @@ dependencies = [ "serde", ] +[[package]] +name = "encoding_rs" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +dependencies = [ + "cfg-if", +] + [[package]] name = "encrypted_file_server" version = "0.1.0" @@ -680,6 +720,7 @@ dependencies = [ "futures", "humantime", "log", + "macaroon", "once_cell", "rand", "rkyv", @@ -901,6 +942,25 @@ version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +[[package]] +name = "h2" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap 1.9.3", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "hashbrown" version = "0.12.3" @@ -1027,6 +1087,7 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", + "h2", "http", "http-body", "httparse", @@ -1040,6 +1101,20 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +dependencies = [ + "futures-util", + "http", + "hyper", + "rustls", + "tokio", + "tokio-rustls", +] + [[package]] name = "idna" version = "0.4.0" @@ -1050,6 +1125,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + [[package]] name = "indexmap" version = "2.0.2" @@ -1078,6 +1163,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + [[package]] name = "is-terminal" version = "0.4.9" @@ -1641,6 +1732,35 @@ dependencies = [ "thiserror", ] +[[package]] +name = "regex" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = 
"regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + [[package]] name = "rend" version = "0.4.1" @@ -1650,6 +1770,60 @@ dependencies = [ "bytecheck", ] +[[package]] +name = "reqwest" +version = "0.11.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +dependencies = [ + "base64 0.21.4", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "system-configuration", + "tokio", + "tokio-rustls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", + "winreg", +] + +[[package]] +name = "ring" +version = "0.17.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb0205304757e5d899b9c2e448b867ffd03ae7f988002e47cd24954391394d0b" +dependencies = [ + "cc", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted", + "windows-sys", +] + [[package]] name = "rkyv" version = "0.7.42" @@ -1719,6 +1893,37 @@ dependencies = [ "windows-sys", ] +[[package]] +name = "rustls" +version = "0.21.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "446e14c5cda4f3f30fe71863c34ec70f5ac79d6087097ad0bb433e1be5edf04c" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-pemfile" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +dependencies = [ + "base64 0.21.4", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.14" @@ -1746,6 +1951,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "seahash" version = "4.1.0" @@ -1754,18 +1969,18 @@ checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" [[package]] name = "serde" -version = "1.0.189" +version = "1.0.190" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e422a44e74ad4001bdc8eede9a4570ab52f71190e9c076d14369f38b9200537" +checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.189" +version = "1.0.190" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e48d1f918009ce3145511378cf68d613e3b3d9137d67272562080d68a2b32d5" +checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3" dependencies = [ "proc-macro2", "quote", @@ -1976,7 +2191,7 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap", + "indexmap 2.0.2", 
"log", "memchr", "once_cell", @@ -2186,6 +2401,27 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tap" version = "1.0.1" @@ -2270,6 +2506,16 @@ dependencies = [ "syn 2.0.38", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.14" @@ -2281,6 +2527,20 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + [[package]] name = "tower" version = "0.4.13" @@ -2396,6 +2656,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" version = "2.4.1" @@ -2550,6 +2816,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki-roots" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" + [[package]] name = "whoami" version = "1.4.1" @@ -2653,6 +2925,16 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys", +] + [[package]] name = "wyz" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index 31b94a0..b1b5b5d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,7 @@ rkyv = { version = "0.7", features = ["validation"] } async-trait = { version = "0.1" } once_cell = "1" futures = { version = "0.3", features = ["executor"] } +macaroon = "0.3.0" [workspace] diff --git a/auth/Cargo.toml b/auth/Cargo.toml index 2455238..5a7ed5a 100644 --- a/auth/Cargo.toml +++ b/auth/Cargo.toml @@ -7,11 +7,12 @@ edition = "2021" anyhow = "1" axum = "0.6.20" rand = "0.8" -serde_derive = "1.0.189" -serde_json = "1.0.107" tokio = { version = "1", features = ["net", "io-util", "macros", "rt-multi-thread"] } sqlx = { version = "0.7", features = ["runtime-tokio", "sqlite"] } -serde = "1.0.189" -axum-macros = "0.3.8" -argon2 = "0.5.2" -macaroon = "0.3.0" +axum-macros = "0.3" +argon2 = "0.5" +macaroon = "0.3" +fern = "0.6" +log = "0.4" 
+humantime = "2.1.0" +bfsp = { path = "../bfsp" } diff --git a/auth/migrations/20231025032307_create_users.sql b/auth/migrations/20231025032307_create_users.sql new file mode 100644 index 0000000..053180b --- /dev/null +++ b/auth/migrations/20231025032307_create_users.sql @@ -0,0 +1,6 @@ +create table users ( +username text primary key not null, +email text not null, +password text not null, +salt text not null +) diff --git a/auth/rustfmt.toml b/auth/rustfmt.toml new file mode 100644 index 0000000..3a26366 --- /dev/null +++ b/auth/rustfmt.toml @@ -0,0 +1 @@ +edition = "2021" diff --git a/auth/src/main.rs b/auth/src/main.rs index 78b1f94..432f6c9 100644 --- a/auth/src/main.rs +++ b/auth/src/main.rs @@ -1,22 +1,44 @@ -use std::convert::Infallible; use std::env; use std::net::SocketAddr; use std::sync::Arc; +use std::time::{SystemTime, UNIX_EPOCH}; use anyhow::Result; use argon2::password_hash::SaltString; use argon2::{Argon2, PasswordHash, PasswordHasher, PasswordVerifier}; use axum::extract::State; +use axum::http::StatusCode; use axum::routing::{get, post}; use axum::{Json, Router}; +use axum_macros::debug_handler; +use bfsp::{CreateUserRequest, LoginRequest, UsernameCaveat}; use macaroon::{Macaroon, MacaroonKey}; use rand::rngs::OsRng; -use serde::de::IntoDeserializer; -use serde_derive::{Deserialize, Serialize}; use sqlx::{Row, SqlitePool}; #[tokio::main] async fn main() -> Result<()> { + fern::Dispatch::new() + .format(|out, msg, record| { + out.finish(format_args!( + "[{} {} {}] {}", + humantime::format_rfc3339(std::time::SystemTime::now()), + record.level(), + record.target(), + msg + )) + }) // Add blanket level filter - + .level(log::LevelFilter::Trace) + .level_for("sqlx", log::LevelFilter::Warn) + .level_for("hyper", log::LevelFilter::Warn) + // - and per-module overrides + // Output to stdout, files, and other Dispatch configurations + .chain(std::io::stdout()) + .chain(fern::log_file("output.log").unwrap()) + // Apply globally + .apply() + .unwrap(); + macaroon::initialize().unwrap(); //FIXME: please don't hard code the authentication key let key = Arc::new(MacaroonKey::generate(b"key")); @@ -48,55 +70,47 @@ async fn main() -> Result<()> { Ok(()) } -#[derive(Serialize, Deserialize)] -pub struct CreateUserRequest { - email: String, - username: String, - password: String, -} - -impl CreateUserRequest { - /// Returns true if the email is valid, false otherwise - fn validate_email() -> bool { - //FIXME - true - } -} - async fn create_user( State(pool): State>, Json(req): Json, -) -> String { +) -> (StatusCode, String) { let argon2 = Argon2::default(); let salt: SaltString = SaltString::generate(&mut OsRng); let hashed_password = match argon2.hash_password(req.password.as_bytes(), &salt) { Ok(password) => password, Err(err) => { //TODO: log errors - return "Internal server error".to_string(); + return ( + StatusCode::INTERNAL_SERVER_ERROR, + "Internal server error".to_string(), + ); } }; - sqlx::query("insert into users ( username, email, password, salt ) values ( ?, ?, ?, ? )") + match sqlx::query("insert into users ( username, email, password, salt ) values ( ?, ?, ?, ? 
)") .bind(&req.username) .bind(req.email) .bind(hashed_password.to_string()) .bind(salt.to_string()) - .execute(pool.as_ref()).await.unwrap(); - - "Registered user".to_string() -} - -#[derive(Serialize, Deserialize)] -struct LoginRequest { - username: String, - password: String, + .execute(pool.as_ref()) + .await + { + Ok(_) => (StatusCode::OK, "registered user".to_string()), + Err(err) => match err.into_database_error().unwrap().kind() { + sqlx::error::ErrorKind::UniqueViolation => todo!(), + sqlx::error::ErrorKind::ForeignKeyViolation => todo!(), + sqlx::error::ErrorKind::NotNullViolation => todo!(), + sqlx::error::ErrorKind::CheckViolation => todo!(), + sqlx::error::ErrorKind::Other => todo!(), + _ => todo!(), + }, + } } async fn login( State(pool): State>, Json(login_request): Json, -) -> String { +) -> (StatusCode, String) { println!("Received login reqest"); let password: Option = sqlx::query("select password from users where username = ?") @@ -109,7 +123,10 @@ async fn login( let password_hash = match password { Some(password) => password, None => { - return "Username or password is incorrect".to_string(); + return ( + StatusCode::UNAUTHORIZED, + "Username or password is incorrect".to_string(), + ); } }; @@ -118,20 +135,43 @@ async fn login( .verify_password(login_request.password.as_bytes(), &password_hash) .is_err() { - return "Username or password is incorrect".to_string(); + return ( + StatusCode::UNAUTHORIZED, + "Username or password is incorrect".to_string(), + ); } + let key = MacaroonKey::generate(b"key"); + let mut macaroon = match Macaroon::create( None, - &MacaroonKey::generate(b"key"), - format!("{}-{}", login_request.username, rand::random::()).into(), + &key, + format!("{}-{}", login_request.username, rand::random::()).into(), ) { Ok(macaroon) => macaroon, //FIXME: deal with this Err(error) => panic!("Error creating macaroon: {:?}", error), }; - macaroon.add_first_party_caveat(format!("username = {}", login_request.username).into()); + macaroon.add_first_party_caveat( + UsernameCaveat { + username: login_request.username, + } + .into(), + ); + macaroon.add_first_party_caveat( + format!( + "expires = {}", + SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_secs() + ) + .into(), + ); - macaroon.serialize(macaroon::Format::V2).unwrap() + ( + StatusCode::OK, + macaroon.serialize(macaroon::Format::V2JSON).unwrap(), + ) } diff --git a/bfsp/Cargo.toml b/bfsp/Cargo.toml index 877c693..a8a3416 100644 --- a/bfsp/Cargo.toml +++ b/bfsp/Cargo.toml @@ -14,6 +14,10 @@ sqlx = { version = "0.7", default-features = false } uuid = { version = "1", features = ["v4"] } chacha20poly1305 = { version = "0.10", features = ["std"] } zstd = "0.13.0" +serde = "1" +serde_derive = "1" +regex = "1" +macaroon = "0.3" [dev-dependencies] tokio = { version = "1", features = ["fs", "io-util", "macros", "rt-multi-thread"] } diff --git a/bfsp/rustfmt.toml b/bfsp/rustfmt.toml new file mode 100644 index 0000000..3a26366 --- /dev/null +++ b/bfsp/rustfmt.toml @@ -0,0 +1 @@ +edition = "2021" diff --git a/bfsp/src/lib.rs b/bfsp/src/lib.rs index c5d19c7..2068c95 100644 --- a/bfsp/src/lib.rs +++ b/bfsp/src/lib.rs @@ -9,6 +9,8 @@ pub use crypto::*; use anyhow::{anyhow, Error, Result}; use blake3::Hasher; +use macaroon::ByteString; +use regex::Regex; use sqlx::{sqlite::SqliteRow, Row, Sqlite}; use std::{ collections::{HashMap, HashSet}, @@ -625,3 +627,118 @@ pub fn parallel_hash_chunk(chunk: &[u8]) -> ChunkHash { hasher.update(chunk); hasher.finalize().into() } + +#[derive(Clone)] +pub struct 
UsernameCaveat { + pub username: String, +} + +impl Into for UsernameCaveat { + fn into(self) -> ByteString { + self.to_string().into() + } +} + +impl ToString for UsernameCaveat { + fn to_string(&self) -> String { + format!("username = {}", self.username) + } +} + +impl TryFrom for UsernameCaveat { + type Error = anyhow::Error; + + fn try_from(value: ByteString) -> Result { + let value: String = String::from_utf8(value.0.clone())?; + + let re = Regex::new("username = (?[a-zA-Z0-9_-]*)")?; + let Some(caps) = re.captures(&value) else { + return Err(anyhow!("invalid username caveat")); + }; + let username = &caps["username"]; + + Ok(Self { + username: username.to_string(), + }) + } +} + +#[derive(Clone)] +pub struct ExpirationCaveat { + pub expiration: u64, +} + +impl Into for ExpirationCaveat { + fn into(self) -> ByteString { + self.to_string().into() + } +} + +impl ToString for ExpirationCaveat { + fn to_string(&self) -> String { + format!("expires = {}", self.expiration) + } +} + +impl TryFrom<&ByteString> for ExpirationCaveat { + type Error = anyhow::Error; + + fn try_from(value: &ByteString) -> Result { + let value: String = String::from_utf8(value.0.clone())?; + + let re = Regex::new("expires = (?[a-zA-Z0-9_-]*)")?; + let Some(caps) = re.captures(&value) else { + return Err(anyhow!("invalid expiration caveat")); + }; + let expiration = &caps["expires"].to_string(); + let expiration: u64 = expiration.parse()?; + + Ok(Self { expiration }) + } +} + +#[derive(Debug, Archive, Serialize, Deserialize)] +#[archive(check_bytes)] +pub struct Authentication { + pub macaroon: String, +} + +impl Authentication { + pub fn to_bytes(&self) -> Result { + let bytes = rkyv::to_bytes::<_, 1024>(self)?; + let mut buf: AlignedVec = { + let mut buf = AlignedVec::with_capacity(2); + buf.extend_from_slice(&(bytes.len() as u16).to_be_bytes()); + buf + }; + buf.extend_from_slice(&bytes); + + Ok(buf) + } + + pub fn try_from_bytes(bytes: &[u8]) -> Result<&ArchivedAuthentication> { + rkyv::check_archived_root::(bytes) + .map_err(|_| anyhow!("Error deserializing PartsUploaded")) + } +} + +#[derive(serde_derive::Serialize, serde_derive::Deserialize)] +pub struct CreateUserRequest { + pub email: String, + pub username: String, + pub password: String, +} + +impl CreateUserRequest { + /// Returns true if the email is valid, false otherwise + fn validate_email() -> bool { + //FIXME + true + } +} + +#[derive(serde_derive::Serialize, serde_derive::Deserialize)] +pub struct LoginRequest { + pub username: String, + pub password: String, +} diff --git a/cli/.sqlx/query-0f8986b6e007bd919a4e0c1695be322dc8e513c2477c5c485557abf42132ed90.json b/cli/.sqlx/query-0f8986b6e007bd919a4e0c1695be322dc8e513c2477c5c485557abf42132ed90.json deleted file mode 100644 index ff1a14d..0000000 --- a/cli/.sqlx/query-0f8986b6e007bd919a4e0c1695be322dc8e513c2477c5c485557abf42132ed90.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select hash, file_path from files where instr(file_path, ?) 
> 1", - "describe": { - "columns": [ - { - "name": "hash", - "ordinal": 0, - "type_info": "Text" - }, - { - "name": "file_path", - "ordinal": 1, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false, - false - ] - }, - "hash": "0f8986b6e007bd919a4e0c1695be322dc8e513c2477c5c485557abf42132ed90" -} diff --git a/cli/.sqlx/query-120595aaef9c8b95c552dfecad8f01ff396b75789dd8549d8b034343a5d90110.json b/cli/.sqlx/query-120595aaef9c8b95c552dfecad8f01ff396b75789dd8549d8b034343a5d90110.json deleted file mode 100644 index da9f864..0000000 --- a/cli/.sqlx/query-120595aaef9c8b95c552dfecad8f01ff396b75789dd8549d8b034343a5d90110.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select hash, id, chunk_size, indice, nonce from chunks where file_hash = ?", - "describe": { - "columns": [ - { - "name": "hash", - "ordinal": 0, - "type_info": "Text" - }, - { - "name": "id", - "ordinal": 1, - "type_info": "Text" - }, - { - "name": "chunk_size", - "ordinal": 2, - "type_info": "Int64" - }, - { - "name": "indice", - "ordinal": 3, - "type_info": "Int64" - }, - { - "name": "nonce", - "ordinal": 4, - "type_info": "Blob" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false, - false, - false, - false, - false - ] - }, - "hash": "120595aaef9c8b95c552dfecad8f01ff396b75789dd8549d8b034343a5d90110" -} diff --git a/cli/.sqlx/query-1f25ee2d22351e26c2d82e4f67d898a2feee638f6ac6c40fbdb8cd585d67ac1c.json b/cli/.sqlx/query-1f25ee2d22351e26c2d82e4f67d898a2feee638f6ac6c40fbdb8cd585d67ac1c.json deleted file mode 100644 index 1b85497..0000000 --- a/cli/.sqlx/query-1f25ee2d22351e26c2d82e4f67d898a2feee638f6ac6c40fbdb8cd585d67ac1c.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "delete from chunks where file_hash = ?", - "describe": { - "columns": [], - "parameters": { - "Right": 1 - }, - "nullable": [] - }, - "hash": "1f25ee2d22351e26c2d82e4f67d898a2feee638f6ac6c40fbdb8cd585d67ac1c" -} diff --git a/cli/.sqlx/query-392c5e533ba1c22d4067839bec16ca630eadbfd0aa0905400ac927115863af3c.json b/cli/.sqlx/query-392c5e533ba1c22d4067839bec16ca630eadbfd0aa0905400ac927115863af3c.json deleted file mode 100644 index d59f290..0000000 --- a/cli/.sqlx/query-392c5e533ba1c22d4067839bec16ca630eadbfd0aa0905400ac927115863af3c.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "insert into chunks\n (hash, id, chunk_size, indice, file_hash, nonce )\n values ( ?, ?, ?, ?, ?, ? )\n ", - "describe": { - "columns": [], - "parameters": { - "Right": 6 - }, - "nullable": [] - }, - "hash": "392c5e533ba1c22d4067839bec16ca630eadbfd0aa0905400ac927115863af3c" -} diff --git a/cli/.sqlx/query-3a7d5ada427caa9535e4cd7e7ac0b4065abb13047308395de87dc1255c9e799a.json b/cli/.sqlx/query-3a7d5ada427caa9535e4cd7e7ac0b4065abb13047308395de87dc1255c9e799a.json deleted file mode 100644 index fd90066..0000000 --- a/cli/.sqlx/query-3a7d5ada427caa9535e4cd7e7ac0b4065abb13047308395de87dc1255c9e799a.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "insert into files\n (file_path, hash, chunk_size, chunks)\n values ( ?, ?, ?, ? 
)\n ", - "describe": { - "columns": [], - "parameters": { - "Right": 4 - }, - "nullable": [] - }, - "hash": "3a7d5ada427caa9535e4cd7e7ac0b4065abb13047308395de87dc1255c9e799a" -} diff --git a/cli/.sqlx/query-46d8bba45723576aef83026b2a4493187dad72a8cd5da6495070254985507c69.json b/cli/.sqlx/query-46d8bba45723576aef83026b2a4493187dad72a8cd5da6495070254985507c69.json deleted file mode 100644 index 8af7346..0000000 --- a/cli/.sqlx/query-46d8bba45723576aef83026b2a4493187dad72a8cd5da6495070254985507c69.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select enc_key from keys where username = ?", - "describe": { - "columns": [ - { - "name": "enc_key", - "ordinal": 0, - "type_info": "Blob" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false - ] - }, - "hash": "46d8bba45723576aef83026b2a4493187dad72a8cd5da6495070254985507c69" -} diff --git a/cli/.sqlx/query-508f7a207e1d454d5bff1813f0c01d6870f8d10c7eb66590ffc23ca9798742b7.json b/cli/.sqlx/query-508f7a207e1d454d5bff1813f0c01d6870f8d10c7eb66590ffc23ca9798742b7.json deleted file mode 100644 index 5d79a8b..0000000 --- a/cli/.sqlx/query-508f7a207e1d454d5bff1813f0c01d6870f8d10c7eb66590ffc23ca9798742b7.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select hash from files where file_path = ?", - "describe": { - "columns": [ - { - "name": "hash", - "ordinal": 0, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false - ] - }, - "hash": "508f7a207e1d454d5bff1813f0c01d6870f8d10c7eb66590ffc23ca9798742b7" -} diff --git a/cli/.sqlx/query-74bd58b016f88b5fbafbc9af6fbbbd6f7a50e7b81f0935cd85a9c8f5d939df3d.json b/cli/.sqlx/query-74bd58b016f88b5fbafbc9af6fbbbd6f7a50e7b81f0935cd85a9c8f5d939df3d.json deleted file mode 100644 index 9491db1..0000000 --- a/cli/.sqlx/query-74bd58b016f88b5fbafbc9af6fbbbd6f7a50e7b81f0935cd85a9c8f5d939df3d.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "insert into keys (enc_key, username) values ( ?, ? )", - "describe": { - "columns": [], - "parameters": { - "Right": 2 - }, - "nullable": [] - }, - "hash": "74bd58b016f88b5fbafbc9af6fbbbd6f7a50e7b81f0935cd85a9c8f5d939df3d" -} diff --git a/cli/.sqlx/query-9f035ef640f878816dc3a89d0e8abfafb74da77f3f02f81fdda819041e0a4cca.json b/cli/.sqlx/query-9f035ef640f878816dc3a89d0e8abfafb74da77f3f02f81fdda819041e0a4cca.json deleted file mode 100644 index dd43bb6..0000000 --- a/cli/.sqlx/query-9f035ef640f878816dc3a89d0e8abfafb74da77f3f02f81fdda819041e0a4cca.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select id from chunks where file_hash = ?", - "describe": { - "columns": [ - { - "name": "id", - "ordinal": 0, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false - ] - }, - "hash": "9f035ef640f878816dc3a89d0e8abfafb74da77f3f02f81fdda819041e0a4cca" -} diff --git a/cli/.sqlx/query-a92f90e82b86bc57b42411d7f51a862e07dbc11f367c0b6955e911119bdc660d.json b/cli/.sqlx/query-a92f90e82b86bc57b42411d7f51a862e07dbc11f367c0b6955e911119bdc660d.json deleted file mode 100644 index fd452ea..0000000 --- a/cli/.sqlx/query-a92f90e82b86bc57b42411d7f51a862e07dbc11f367c0b6955e911119bdc660d.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "insert into chunks\n (hash, id, file_hash, chunk_size)\n values ( ?, ?, ?, ? 
)\n ", - "describe": { - "columns": [], - "parameters": { - "Right": 4 - }, - "nullable": [] - }, - "hash": "a92f90e82b86bc57b42411d7f51a862e07dbc11f367c0b6955e911119bdc660d" -} diff --git a/cli/.sqlx/query-e2e28afeea514252646fe23f211e477b8beca44fcb46fb98d1e2751c24ac29c7.json b/cli/.sqlx/query-e2e28afeea514252646fe23f211e477b8beca44fcb46fb98d1e2751c24ac29c7.json deleted file mode 100644 index ae21736..0000000 --- a/cli/.sqlx/query-e2e28afeea514252646fe23f211e477b8beca44fcb46fb98d1e2751c24ac29c7.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select file_path, hash, chunk_size from files where file_path = ?", - "describe": { - "columns": [ - { - "name": "file_path", - "ordinal": 0, - "type_info": "Text" - }, - { - "name": "hash", - "ordinal": 1, - "type_info": "Text" - }, - { - "name": "chunk_size", - "ordinal": 2, - "type_info": "Int64" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false, - false, - false - ] - }, - "hash": "e2e28afeea514252646fe23f211e477b8beca44fcb46fb98d1e2751c24ac29c7" -} diff --git a/cli/.sqlx/query-e998823098cca1e20cc8a6bc8c69cb99f5f348e1e84e687ee0018c79d0402867.json b/cli/.sqlx/query-e998823098cca1e20cc8a6bc8c69cb99f5f348e1e84e687ee0018c79d0402867.json deleted file mode 100644 index 61c1b6a..0000000 --- a/cli/.sqlx/query-e998823098cca1e20cc8a6bc8c69cb99f5f348e1e84e687ee0018c79d0402867.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "update files set hash = ?, chunk_size = ?, chunks = ? where file_path = ?", - "describe": { - "columns": [], - "parameters": { - "Right": 4 - }, - "nullable": [] - }, - "hash": "e998823098cca1e20cc8a6bc8c69cb99f5f348e1e84e687ee0018c79d0402867" -} diff --git a/cli/.sqlx/query-f2c7fa3d6eac061360cf7ea89295904859a97abe9337cdfcca183ff0ec0f4bfd.json b/cli/.sqlx/query-f2c7fa3d6eac061360cf7ea89295904859a97abe9337cdfcca183ff0ec0f4bfd.json deleted file mode 100644 index 318046a..0000000 --- a/cli/.sqlx/query-f2c7fa3d6eac061360cf7ea89295904859a97abe9337cdfcca183ff0ec0f4bfd.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "db_name": "SQLite", - "query": "select indice, id from chunks where file_hash = ?", - "describe": { - "columns": [ - { - "name": "indice", - "ordinal": 0, - "type_info": "Int64" - }, - { - "name": "id", - "ordinal": 1, - "type_info": "Text" - } - ], - "parameters": { - "Right": 1 - }, - "nullable": [ - false, - false - ] - }, - "hash": "f2c7fa3d6eac061360cf7ea89295904859a97abe9337cdfcca183ff0ec0f4bfd" -} diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 3ed001f..976f2d7 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -7,13 +7,14 @@ edition = "2021" [dependencies] anyhow = "1" bfsp = { path = "../bfsp" } -fern = "0.6" humantime = "2" +fern = "0.6" log = "0.4" rkyv = "0.7" sqlx = { version = "0.7", features = ["sqlite", "runtime-tokio"] } clap = { version = "4", features = ["derive"] } -futures = "0.3.28" +futures = "0.3" +reqwest = { version = "0.11", default-features = false, features = ["json", "rustls-tls"] } [target.'cfg(not(target_arch = "wasm32"))'.dependencies] tokio = { version = "1", features = ["full"] } diff --git a/cli/flake.nix b/cli/flake.nix index e511dc8..056fc58 100644 --- a/cli/flake.nix +++ b/cli/flake.nix @@ -46,6 +46,7 @@ cargo-outdated cargo-watch sqlx-cli + rust-analyzer ]; }; }); diff --git a/cli/migrations/20231013033720_create_keys.sql b/cli/migrations/20231013033720_create_keys.sql index eae9ae1..f581aa1 100644 --- a/cli/migrations/20231013033720_create_keys.sql +++ b/cli/migrations/20231013033720_create_keys.sql @@ -1,4 +1,5 @@ create table 
keys ( username text primary key not null, -enc_key blob not null +enc_key blob not null, +macaroon text not null ) diff --git a/cli/rustfmt.toml b/cli/rustfmt.toml new file mode 100644 index 0000000..3a26366 --- /dev/null +++ b/cli/rustfmt.toml @@ -0,0 +1 @@ +edition = "2021" diff --git a/cli/src/main.rs b/cli/src/main.rs index 7684eec..de8eafe 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,19 +1,20 @@ use anyhow::{anyhow, Context, Error}; -use sqlx::SqlitePool; +use sqlx::{Row, SqlitePool}; use std::collections::{HashMap, HashSet}; use std::env; use std::fmt::Display; -use std::path::Path; +use std::path::{Path, PathBuf}; use anyhow::Result; use bfsp::{ compressed_encrypted_chunk_from_file, hash_chunk, hash_file, parallel_hash_chunk, - use_parallel_hasher, Action, ChunkID, ChunkMetadata, ChunksUploaded, ChunksUploadedQuery, - DownloadChunkReq, EncryptionKey, FileHash, FileHeader, + use_parallel_hasher, Action, Authentication, ChunkID, ChunkMetadata, ChunksUploaded, + ChunksUploadedQuery, CreateUserRequest, DownloadChunkReq, EncryptionKey, FileHash, FileHeader, + LoginRequest, }; use log::{debug, trace}; use rkyv::Deserialize; -use tokio::fs::{self, File, OpenOptions}; +use tokio::fs::{self, canonicalize, File, OpenOptions}; use tokio::io::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt}; use tokio::net::TcpStream; @@ -35,6 +36,15 @@ enum Commands { file_path: String, download_to: Option, }, + Signup { + username: String, + email: String, + password: String, + }, + Login { + username: String, + password: String, + }, } #[tokio::main] @@ -51,6 +61,7 @@ async fn main() { }) // Add blanket level filter - .level(log::LevelFilter::Trace) .level_for("sqlx", log::LevelFilter::Warn) + .level_for("hyper", log::LevelFilter::Warn) // - and per-module overrides // Output to stdout, files, and other Dispatch configurations .chain(std::io::stdout()) @@ -59,40 +70,40 @@ async fn main() { .apply() .unwrap(); - let pool_url = |_| format!( - "sqlite:{}/data.db", - env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string()) - ); - let pool = sqlx::SqlitePool::connect(&env::var("DATABASE_URL").unwrap_or_else(pool_url)).await.unwrap(); + let pool_url = |_| { + format!( + "sqlite:{}/data.db", + env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".to_string()) + ) + }; + let pool = sqlx::SqlitePool::connect(&env::var("DATABASE_URL").unwrap_or_else(pool_url)) + .await + .unwrap(); sqlx::migrate!().run(&pool).await.unwrap(); - let key: EncryptionKey = - match sqlx::query!("select enc_key from keys where username = ?", "null") - .fetch_optional(&pool) - .await - .unwrap() - { - Some(key_info) => key_info.enc_key.try_into().unwrap(), - None => { - let key = EncryptionKey::new(); - sqlx::query!( - "insert into keys (enc_key, username) values ( ?, ? 
)", - key, - "null" - ) - .execute(&pool) - .await - .unwrap(); - - key - } - }; + let client = reqwest::ClientBuilder::new().build().unwrap(); let args = Args::parse(); match args.command { Commands::Upload { file_path } => { + let (key, macaroon): (EncryptionKey, String) = + match sqlx::query("select enc_key, macaroon from keys") + .fetch_optional(&pool) + .await + .unwrap() + { + Some(key_info) => ( + key_info.get::, _>("enc_key").try_into().unwrap(), + key_info.get::("macaroon"), + ), + None => { + println!("Please login."); + return; + } + }; + let mut sock = TcpStream::connect("127.0.0.1:9999").await.unwrap(); if let Err(err) = add_file(&file_path, &pool).await { @@ -111,7 +122,7 @@ async fn main() { let file_headers = file_headers_from_path(&file_path, &pool).await.unwrap(); let file_header = file_headers.first().unwrap(); - upload_file(&file_path, file_header, &mut sock, &key) + upload_file(&file_path, file_header, &mut sock, &key, macaroon) .await .unwrap() } @@ -119,6 +130,22 @@ async fn main() { file_path, download_to, } => { + let (key, macaroon): (EncryptionKey, String) = + match sqlx::query("select enc_key, macaroon from keys") + .fetch_optional(&pool) + .await + .unwrap() + { + Some(key_info) => ( + key_info.get::, _>("enc_key").try_into().unwrap(), + key_info.get::("macaroon"), + ), + None => { + println!("Please login."); + return; + } + }; + let mut sock = TcpStream::connect("127.0.0.1:9999").await.unwrap(); let file_headers = file_headers_from_path(&file_path, &pool).await.unwrap(); let file_header = file_headers @@ -131,10 +158,62 @@ async fn main() { download_to.unwrap_or_else(|| "./".to_string()), &mut sock, &key, + macaroon, ) .await .unwrap(); } + Commands::Signup { + username, + email, + password, + } => { + let response = client + .post("http://127.0.0.1:3000/create_user") + .json(&CreateUserRequest { + email, + username, + password, + }) + .send() + .await + .unwrap(); + + let response_status = response.status(); + + if response_status.is_success() { + println!("Successfully registered! Please sign in"); + } else { + let response_text = response.text().await.unwrap(); + println!("Error when trying to register: '{}'", response_text); + } + } + Commands::Login { username, password } => { + let response = client + .post("http://127.0.0.1:3000/login_user") + .json(&LoginRequest { username, password }) + .send() + .await + .unwrap(); + + let response_status = response.status(); + let response_text = response.text().await.unwrap(); + + if response_status.is_success() { + let key = EncryptionKey::new(); + sqlx::query("insert into keys (enc_key, username, macaroon) values ( ?, ?, ? )") + .bind(key) + .bind("billy") + .bind(response_text) + .execute(&pool) + .await + .unwrap(); + + println!("Successfully logged in!!!") + } else { + println!("Got response: '{response_text}' with status: '{response_status}'"); + } + } } } @@ -145,12 +224,13 @@ pub async fn query_chunks_uploaded( ) -> Result { trace!("Querying chunks uploaded"); - let chunk_ids = sqlx::query!("select id from chunks where file_hash = ?", file_hash) + let chunk_ids = sqlx::query("select id from chunks where file_hash = ?") + .bind(file_hash) .fetch_all(pool) .await .with_context(|| format!("Failed to get chunk IDs for file hash {}", file_hash))? 
.into_iter() - .map(|chunk_info| chunk_info.id.try_into()) + .map(|chunk_info| chunk_info.get::("id").try_into()) .collect::>>()?; let chunks_uploaded_query = ChunksUploadedQuery { chunks: chunk_ids }; @@ -198,13 +278,13 @@ pub async fn update_file(file_path: &str, file: &mut File, pool: &SqlitePool) -> let file_path = fs::canonicalize(file_path).await?; let file_path = file_path.to_str().unwrap(); - let original_file_hash: FileHash = - sqlx::query!("select hash from files where file_path = ?", file_path) - .fetch_one(pool) - .await - .with_context(|| "Error getting file id for updating file")? - .hash - .try_into()?; + let original_file_hash: FileHash = sqlx::query("select hash from files where file_path = ?") + .bind(file_path) + .fetch_one(pool) + .await + .with_context(|| "Error getting file id for updating file")? + .get::("hash") + .try_into()?; let file_header = FileHeader::from_file(file).await?; file.rewind().await?; @@ -213,44 +293,43 @@ pub async fn update_file(file_path: &str, file: &mut File, pool: &SqlitePool) -> let num_chunks: i64 = file_header.chunks.len().try_into().unwrap(); - sqlx::query!( - "update files set hash = ?, chunk_size = ?, chunks = ? where file_path = ?", - file_hash, - file_header.chunk_size, - num_chunks, - file_path - ) - .execute(pool) - .await - .map_err(|err| match err { - sqlx::Error::Database(err) => match err.kind() { - sqlx::error::ErrorKind::UniqueViolation => { - Error::new(AddFileRecoverableErr::FileAlreadyAdded) - } + sqlx::query("update files set hash = ?, chunk_size = ?, chunks = ? where file_path = ?") + .bind(file_hash) + .bind(file_header.chunk_size) + .bind(num_chunks) + .bind(file_path) + .execute(pool) + .await + .map_err(|err| match err { + sqlx::Error::Database(err) => match err.kind() { + sqlx::error::ErrorKind::UniqueViolation => { + Error::new(AddFileRecoverableErr::FileAlreadyAdded) + } + _ => anyhow!("Unknown database error: {err:#}"), + }, _ => anyhow!("Unknown database error: {err:#}"), - }, - _ => anyhow!("Unknown database error: {err:#}"), - })?; + })?; trace!("Deleting chunks for file {file_path}"); - sqlx::query!("delete from chunks where file_hash = ?", original_file_hash) + sqlx::query("delete from chunks where file_hash = ?") + .bind(original_file_hash) .execute(pool) .await?; trace!("Inserting chunks for file {file_path}"); for (chunk_id, chunk) in file_header.chunks.iter() { - sqlx::query!( + sqlx::query( r#"insert into chunks (hash, id, file_hash, chunk_size) values ( ?, ?, ?, ? ) "#, - chunk.hash, - chunk_id, - file_header.hash, - chunk.size, ) + .bind(&chunk.hash) + .bind(chunk_id) + .bind(&file_header.hash) + .bind(chunk.size) .execute(pool) .await?; } @@ -276,16 +355,16 @@ pub async fn add_file(file_path: &str, pool: &SqlitePool) -> Result<()> { let num_chunks: i64 = file_header.chunks.len().try_into().unwrap(); - sqlx::query!( + sqlx::query( r#"insert into files (file_path, hash, chunk_size, chunks) values ( ?, ?, ?, ? ) "#, - file_path, - file_hash, - file_header.chunk_size, - num_chunks, ) + .bind(file_path) + .bind(&file_hash) + .bind(file_header.chunk_size) + .bind(num_chunks) .execute(pool) .await .map_err(|err| match err { @@ -306,18 +385,18 @@ pub async fn add_file(file_path: &str, pool: &SqlitePool) -> Result<()> { .try_into() .unwrap(); - sqlx::query!( + sqlx::query( r#"insert into chunks (hash, id, chunk_size, indice, file_hash, nonce ) values ( ?, ?, ?, ?, ?, ? 
) "#, - chunk.hash, - chunk_id, - chunk.size, - indice, - file_hash, - chunk.nonce, ) + .bind(&chunk.hash) + .bind(chunk_id) + .bind(chunk.size) + .bind(indice) + .bind(&file_hash) + .bind(&chunk.nonce) .execute(pool) .await .map_err(|err| match err { @@ -339,6 +418,7 @@ pub async fn upload_file( file_header: &FileHeader, sock: &mut TcpStream, key: &EncryptionKey, + macaroon: String, ) -> Result<()> { trace!("Uploading file"); @@ -361,6 +441,16 @@ pub async fn upload_file( let action = Action::UploadChunk.into(); sock.write_u16(action).await?; + + sock.write_all( + &Authentication { + macaroon: macaroon.clone(), + } + .to_bytes() + .unwrap(), + ) + .await?; + sock.write_all(chunk_meta.to_bytes()?.as_slice()).await?; let chunk = @@ -380,6 +470,7 @@ pub async fn download_file + Display>( path_to_download_to: P, sock: &mut TcpStream, key: &EncryptionKey, + macaroon: String, ) -> Result<()> { trace!("Downloading file {path_to_download_to}"); @@ -407,6 +498,16 @@ pub async fn download_file + Display>( // TODO: can this be optimized by seing which chunks have changed for chunk_id in file_header.chunks.keys() { sock.write_u16(action).await?; + + sock.write_all( + &Authentication { + macaroon: macaroon.clone(), + } + .to_bytes() + .unwrap(), + ) + .await?; + let download_chunk_req = DownloadChunkReq { chunk_id: *chunk_id, }; @@ -474,54 +575,51 @@ pub async fn download_file + Display>( /// Ensures that the file given and file header as exists in the database are the same async fn validate_file(file_path: &str, pool: &SqlitePool) -> Result { - let file_info = sqlx::query!( - "select file_path, hash, chunk_size from files where file_path = ?", - file_path - ) - .fetch_one(pool) - .await?; + let file_info = + sqlx::query("select file_path, hash, chunk_size from files where file_path = ?") + .bind(file_path) + .fetch_one(pool) + .await?; - let chunk_size = file_info.chunk_size; - let file_hash: FileHash = file_info.hash.try_into()?; + let chunk_size: u32 = file_info.get("chunk_size"); + let file_hash: FileHash = file_info.get::("hash").try_into()?; // TODO: return cstuom error for when file isn't added - let chunks = sqlx::query!( - "select hash, id, chunk_size, indice, nonce from chunks where file_hash = ?", - file_hash - ) - .fetch_all(pool) - .await - .with_context(|| "Error querying db for chunks: ")?; + let chunks = + sqlx::query("select hash, id, chunk_size, indice, nonce from chunks where file_hash = ?") + .bind(&file_hash) + .fetch_all(pool) + .await + .with_context(|| "Error querying db for chunks: ")?; let chunks = chunks.into_iter().map(|chunk| ChunkMetadata { - id: chunk.id.try_into().unwrap(), - hash: chunk.hash.try_into().unwrap(), - size: chunk.chunk_size.try_into().unwrap(), - indice: chunk.indice.try_into().unwrap(), - nonce: chunk.nonce.try_into().unwrap(), + id: chunk.get::("id").try_into().unwrap(), + hash: chunk.get::("hash").try_into().unwrap(), + size: chunk.get::("chunk_size"), + indice: chunk.get::("indice").try_into().unwrap(), + nonce: chunk.get::, _>("nonce").try_into().unwrap(), }); - let chunk_indices: HashMap = sqlx::query!( - "select indice, id from chunks where file_hash = ?", - file_hash - ) - .fetch_all(pool) - .await - .with_context(|| "Error querying db for chunk indicecs: ")? 
- .into_iter() - .map(|chunk_info| { - let chunk_id: ChunkID = chunk_info.id.try_into().unwrap(); - let chunk_indice: u64 = chunk_info.indice.try_into().unwrap(); + let chunk_indices: HashMap = + sqlx::query("select indice, id from chunks where file_hash = ?") + .bind(&file_hash) + .fetch_all(pool) + .await + .with_context(|| "Error querying db for chunk indicecs: ")? + .into_iter() + .map(|chunk_info| { + let chunk_id: ChunkID = chunk_info.get::("id").try_into().unwrap(); + let chunk_indice: u64 = chunk_info.get::("indice").try_into().unwrap(); - (chunk_id, chunk_indice) - }) - .collect(); + (chunk_id, chunk_indice) + }) + .collect(); let chunks = chunks.into_iter().map(|chunk| (chunk.id, chunk)).collect(); let file_header_sql = FileHeader { hash: file_hash, - chunk_size: chunk_size.try_into().unwrap(), + chunk_size, chunks, chunk_indices, }; @@ -536,66 +634,67 @@ async fn validate_file(file_path: &str, pool: &SqlitePool) -> Result { } async fn file_headers_from_path(path: &str, pool: &SqlitePool) -> Result> { - futures::future::try_join_all( - sqlx::query!( - "select hash, file_path from files where instr(file_path, ?) > 1", - path - ) - .fetch_all(pool) - .await? - .into_iter() - .map(|file_info| async { - let file_hash: FileHash = file_info.hash.try_into()?; - let file_path = file_info.file_path; - - let chunks = sqlx::query!( - "select hash, id, chunk_size, indice, nonce from chunks where file_hash = ?", - file_hash - ) - .fetch_all(pool) - .await - .with_context(|| "Error querying db for chunks: ")?; + let path = canonicalize(path).await?; + let path = path.to_string_lossy(); - let chunks = chunks.into_iter().map(|chunk| ChunkMetadata { - id: chunk.id.try_into().unwrap(), - hash: chunk.hash.try_into().unwrap(), - size: chunk.chunk_size.try_into().unwrap(), - indice: chunk.indice.try_into().unwrap(), - nonce: chunk.nonce.try_into().unwrap(), - }); - - let chunk_indices: HashMap = sqlx::query!( - "select indice, id from chunks where file_hash = ?", - file_hash - ) + futures::future::try_join_all( + sqlx::query("select hash, file_path from files where file_path = ?") + .bind(path) .fetch_all(pool) - .await - .with_context(|| "Error querying db for chunk indicecs: ")? + .await? 
.into_iter() - .map(|chunk_info| { - let chunk_id: ChunkID = chunk_info.id.try_into().unwrap(); - let chunk_indice: u64 = chunk_info.indice.try_into().unwrap(); + .map(|file_info| async move { + let file_hash: FileHash = file_info.get::("hash").try_into()?; + let file_path = file_info.get::<&str, _>("file_path"); - (chunk_id, chunk_indice) - }) - .collect(); - - let chunks = chunks.into_iter().map(|chunk| (chunk.id, chunk)).collect(); - - let file_info = sqlx::query!( - "select file_path, hash, chunk_size from files where file_path = ?", - file_path - ) - .fetch_one(pool) - .await?; - - Ok(FileHeader { - hash: file_info.hash.try_into()?, - chunk_size: file_info.chunk_size.try_into()?, - chunks, - chunk_indices, - }) - }), + let chunks = sqlx::query( + "select hash, id, chunk_size, indice, nonce from chunks where file_hash = ?", + ) + .bind(&file_hash) + .fetch_all(pool) + .await + .with_context(|| "Error querying db for chunks: ")?; + + let chunks = chunks.into_iter().map(|chunk| ChunkMetadata { + id: chunk.get::("id").try_into().unwrap(), + hash: chunk.get::("hash").try_into().unwrap(), + size: chunk.get::("chunk_size"), + indice: chunk.get::("indice").try_into().unwrap(), + nonce: chunk.get::, _>("nonce").try_into().unwrap(), + }); + + let chunk_indices: HashMap = + sqlx::query("select indice, id from chunks where file_hash = ?") + .bind(&file_hash) + .fetch_all(pool) + .await + .with_context(|| "Error querying db for chunk indicecs: ")? + .into_iter() + .map(|chunk_info| { + let chunk_id: ChunkID = + chunk_info.get::("id").try_into().unwrap(); + let chunk_indice: u64 = + chunk_info.get::("indice").try_into().unwrap(); + + (chunk_id, chunk_indice) + }) + .collect(); + + let chunks = chunks.into_iter().map(|chunk| (chunk.id, chunk)).collect(); + + let file_info = + sqlx::query("select hash, chunk_size from files where file_path = ?") + .bind(file_path) + .fetch_one(pool) + .await?; + + Ok(FileHeader { + hash: file_info.get::("hash").try_into()?, + chunk_size: file_info.get::("chunk_size"), + chunks, + chunk_indices, + }) + }), ) .await } diff --git a/flake.lock b/flake.lock index 4c0e88b..44b930a 100644 --- a/flake.lock +++ b/flake.lock @@ -7,11 +7,11 @@ ] }, "locked": { - "lastModified": 1697677553, - "narHash": "sha256-ozj7HFo/1iQdzZ2U6tHP4QBW59eUbDZ/5HI8lLe9wos=", + "lastModified": 1697840921, + "narHash": "sha256-zXHwu104SQOxogkMgg+w22c3+zI/FvK83TAkfLmeKw0=", "owner": "ipetkov", "repo": "crane", - "rev": "bc5fa8cd53ef32b9b827f24b993c42a8c4dd913b", + "rev": "758ae442227103fa501276e8225609a11c99718e", "type": "github" }, "original": { @@ -43,11 +43,11 @@ "systems": "systems_2" }, "locked": { - "lastModified": 1694529238, - "narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=", + "lastModified": 1681202837, + "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=", "owner": "numtide", "repo": "flake-utils", - "rev": "ff7b65b44d01cf9ba6a71320833626af21126384", + "rev": "cfacdce06f30d2b68473a46042957675eebb3401", "type": "github" }, "original": { @@ -58,11 +58,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1697660476, - "narHash": "sha256-w5/4HKG6/TPOFUki4rYUbME8zC6+suT6hSunyFD4BlI=", + "lastModified": 1697756275, + "narHash": "sha256-KAZ2F9He5oH2NPxhWDLmtGAsiBjPi7yps1OGZu6peMM=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "964a525d67348323be4fa100345d37b361ebd36e", + "rev": "d042a296139c6a111be3e3d5dc9ef6783b5e7c16", "type": "github" }, "original": { @@ -74,11 +74,11 @@ }, "nixpkgs_2": { "locked": { - "lastModified": 1697730408, - "narHash": 
"sha256-Ww//zzukdTrwTrCUkaJA/NsaLEfUfQpWZXBdXBYfhak=", + "lastModified": 1681358109, + "narHash": "sha256-eKyxW4OohHQx9Urxi7TQlFBTDWII+F+x2hklDOQPB50=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "ff0a5a776b56e0ca32d47a4a47695452ec7f7d80", + "rev": "96ba1c52e54e74c3197f4d43026b3f3d92e83ff9", "type": "github" }, "original": { @@ -102,11 +102,11 @@ "nixpkgs": "nixpkgs_2" }, "locked": { - "lastModified": 1697767917, - "narHash": "sha256-9+FjCVE1Y7iUKohBF43yD05KoQB+FPcw/XL2rlKkjqY=", + "lastModified": 1697854201, + "narHash": "sha256-H+0Wb20PQx/8N7X/OfwwAVPeN9TbfjcyG0sXbdgsh50=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "679ea0878edc749f23516ea6d7ffa974c6304bf5", + "rev": "6e8e3332433847cd56186b1f6fc8c47603cf5b46", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index dee5eb7..2e28286 100644 --- a/flake.nix +++ b/flake.nix @@ -47,6 +47,7 @@ cargo-watch sqlx-cli flyctl + rust-analyzer ]; }; }); diff --git a/migrations/20231018041636_chunks.sql b/migrations/20231018041636_chunks.sql index a30e3fa..0c8a542 100644 --- a/migrations/20231018041636_chunks.sql +++ b/migrations/20231018041636_chunks.sql @@ -3,5 +3,6 @@ hash text primary key unique not null, id text unique not null, indice int not null, chunk_size int not null, -nonce blob not null +nonce blob not null, +username text not null ) diff --git a/src/db.rs b/src/db.rs index 44f3b9b..edadeab 100644 --- a/src/db.rs +++ b/src/db.rs @@ -3,14 +3,18 @@ use std::env; use anyhow::Result; use async_trait::async_trait; use bfsp::{ChunkID, ChunkMetadata}; -use sqlx::SqlitePool; +use sqlx::{Row, SqlitePool}; #[async_trait] pub trait ChunkDatabase: Sized { async fn new() -> Result; - async fn contains_chunk(&self, chunk_id: ChunkID) -> Result; - async fn insert_chunk(&self, chunk_meta: ChunkMetadata) -> Result<()>; - async fn get_chunk_meta(&self, chunk_id: ChunkID) -> Result>; + async fn contains_chunk(&self, chunk_id: ChunkID, username: &str) -> Result; + async fn insert_chunk(&self, chunk_meta: ChunkMetadata, username: &str) -> Result<()>; + async fn get_chunk_meta( + &self, + chunk_id: ChunkID, + username: &str, + ) -> Result>; } pub struct SqliteDB { @@ -20,50 +24,63 @@ pub struct SqliteDB { #[async_trait] impl ChunkDatabase for SqliteDB { async fn new() -> Result { - let pool = sqlx::SqlitePool::connect(&env::var("DATABASE_URL").unwrap_or_else(|_| "sqlite:./data.db".to_string())).await?; + let pool = sqlx::SqlitePool::connect( + &env::var("DATABASE_URL").unwrap_or_else(|_| "sqlite:./data.db".to_string()), + ) + .await?; sqlx::migrate!().run(&pool).await?; Ok(SqliteDB { pool }) } - async fn contains_chunk(&self, chunk_id: ChunkID) -> Result { - Ok(sqlx::query!("select id from chunks where id = ?", chunk_id) - .fetch_optional(&self.pool) - .await? - .is_some()) + async fn contains_chunk(&self, chunk_id: ChunkID, username: &str) -> Result { + Ok( + sqlx::query("select id from chunks where id = ? AND username = ?") + .bind(chunk_id) + .bind(username) + .fetch_optional(&self.pool) + .await? + .is_some(), + ) } - async fn insert_chunk(&self, chunk_meta: ChunkMetadata) -> Result<()> { + async fn insert_chunk(&self, chunk_meta: ChunkMetadata, username: &str) -> Result<()> { let indice: i64 = chunk_meta.indice.try_into().unwrap(); - sqlx::query!( - "insert into chunks (hash, id, chunk_size, indice, nonce) values ( ?, ?, ?, ?, ? 
)", - chunk_meta.hash, - chunk_meta.id, - chunk_meta.size, - indice, - chunk_meta.nonce + sqlx::query( + "insert into chunks (hash, id, chunk_size, indice, nonce, username) values ( ?, ?, ?, ?, ?, ? )", ) + .bind(chunk_meta.hash) + .bind(chunk_meta.id) + .bind(chunk_meta.size) + .bind(indice) + .bind(chunk_meta.nonce) + .bind(username) .execute(&self.pool) .await?; Ok(()) } - async fn get_chunk_meta(&self, chunk_id: ChunkID) -> Result> { - Ok(sqlx::query!( - "select hash, chunk_size, nonce, indice from chunks where id = ?", - chunk_id + async fn get_chunk_meta( + &self, + chunk_id: ChunkID, + username: &str, + ) -> Result> { + Ok(sqlx::query( + "select hash, chunk_size, nonce, indice from chunks where id = ? and username = ?", ) + .bind(chunk_id) + .bind(username) .fetch_optional(&self.pool) .await? .map(|chunk_info| ChunkMetadata { id: chunk_id, - hash: chunk_info.hash.try_into().unwrap(), - size: chunk_info.chunk_size.try_into().unwrap(), - indice: chunk_info.indice.try_into().unwrap(), - nonce: chunk_info.nonce.try_into().unwrap(), + hash: chunk_info.get::("hash").try_into().unwrap(), + size: chunk_info.get::("chunk_size"), + indice: chunk_info.get::("indice").try_into().unwrap(), + nonce: chunk_info.get::, _>("nonce").try_into().unwrap(), })) } } diff --git a/src/main.rs b/src/main.rs index cc0a2db..2d8600b 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,11 +1,21 @@ // TODO: StorageBackendTrait mod db; -use std::{collections::HashMap, os::unix::prelude::MetadataExt, sync::Arc}; +use std::{ + collections::HashMap, + ops::Deref, + os::unix::prelude::MetadataExt, + sync::Arc, + time::{SystemTime, UNIX_EPOCH}, +}; use anyhow::{anyhow, Result}; -use bfsp::{Action, ChunkID, ChunkMetadata, ChunksUploaded, ChunksUploadedQuery, DownloadChunkReq}; +use bfsp::{ + Action, Authentication, ChunkID, ChunkMetadata, ChunksUploaded, ChunksUploadedQuery, + DownloadChunkReq, ExpirationCaveat, UsernameCaveat, +}; use log::{debug, info, trace}; +use macaroon::{ByteString, Caveat, Macaroon, MacaroonKey, Verifier}; use rkyv::Deserialize; use tokio::{ fs, @@ -37,6 +47,8 @@ async fn main() -> Result<()> { fs::create_dir_all("./chunks/").await?; + let macaroon_key = MacaroonKey::generate(b"key"); + info!("Starting server!"); debug!("Initializing database"); @@ -61,11 +73,21 @@ async fn main() -> Result<()> { .unwrap(); match action { - Action::UploadChunk => handle_upload_chunk(&mut sock, db.as_ref()).await.unwrap(), + Action::UploadChunk => { + handle_upload_chunk(&mut sock, db.as_ref(), &macaroon_key) + .await + .unwrap() + } Action::QueryChunksUploaded => { - query_chunks_uploaded(&mut sock, db.as_ref()).await.unwrap() + query_chunks_uploaded(&mut sock, db.as_ref(), &macaroon_key) + .await + .unwrap() + } + Action::DownloadChunk => { + handle_download_chunk(&mut sock, db.as_ref(), &macaroon_key) + .await + .unwrap() } - Action::DownloadChunk => handle_download_chunk(&mut sock, db.as_ref()).await.unwrap(), }; } @@ -79,7 +101,40 @@ async fn main() -> Result<()> { pub async fn handle_download_chunk( sock: &mut TcpStream, chunk_db: &D, + macaroon_key: &MacaroonKey, ) -> Result<()> { + let req_len = sock.read_u16().await? 
diff --git a/src/main.rs b/src/main.rs
index cc0a2db..2d8600b 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,11 +1,21 @@
 // TODO: StorageBackendTrait
 mod db;
 
-use std::{collections::HashMap, os::unix::prelude::MetadataExt, sync::Arc};
+use std::{
+    collections::HashMap,
+    ops::Deref,
+    os::unix::prelude::MetadataExt,
+    sync::Arc,
+    time::{SystemTime, UNIX_EPOCH},
+};
 
 use anyhow::{anyhow, Result};
-use bfsp::{Action, ChunkID, ChunkMetadata, ChunksUploaded, ChunksUploadedQuery, DownloadChunkReq};
+use bfsp::{
+    Action, Authentication, ChunkID, ChunkMetadata, ChunksUploaded, ChunksUploadedQuery,
+    DownloadChunkReq, ExpirationCaveat, UsernameCaveat,
+};
 use log::{debug, info, trace};
+use macaroon::{ByteString, Caveat, Macaroon, MacaroonKey, Verifier};
 use rkyv::Deserialize;
 use tokio::{
     fs,
@@ -37,6 +47,8 @@ async fn main() -> Result<()> {
 
     fs::create_dir_all("./chunks/").await?;
 
+    let macaroon_key = MacaroonKey::generate(b"key");
+
     info!("Starting server!");
 
     debug!("Initializing database");
@@ -61,11 +73,21 @@ async fn main() -> Result<()> {
                 .unwrap();
 
             match action {
-                Action::UploadChunk => handle_upload_chunk(&mut sock, db.as_ref()).await.unwrap(),
+                Action::UploadChunk => {
+                    handle_upload_chunk(&mut sock, db.as_ref(), &macaroon_key)
+                        .await
+                        .unwrap()
+                }
                 Action::QueryChunksUploaded => {
-                    query_chunks_uploaded(&mut sock, db.as_ref()).await.unwrap()
+                    query_chunks_uploaded(&mut sock, db.as_ref(), &macaroon_key)
+                        .await
+                        .unwrap()
+                }
+                Action::DownloadChunk => {
+                    handle_download_chunk(&mut sock, db.as_ref(), &macaroon_key)
+                        .await
+                        .unwrap()
                 }
-                Action::DownloadChunk => handle_download_chunk(&mut sock, db.as_ref()).await.unwrap(),
             };
         }
 
@@ -79,7 +101,40 @@ pub async fn handle_download_chunk<D: ChunkDatabase>(
     sock: &mut TcpStream,
     chunk_db: &D,
+    macaroon_key: &MacaroonKey,
 ) -> Result<()> {
+    let req_len = sock.read_u16().await? as usize;
+    let mut buf = vec![0; req_len];
+    sock.read_exact(&mut buf).await?;
+
+    let auth = rkyv::check_archived_root::<Authentication>(&buf)
+        .map_err(|_| anyhow!("could not deserialize authentication"))?;
+
+    let macaroon = Macaroon::deserialize(auth.macaroon.as_str())?;
+    let caveats = macaroon.first_party_caveats();
+
+    let username_caveat = caveats
+        .iter()
+        .find_map(|caveat| {
+            let Caveat::FirstParty(caveat) = caveat else {
+                return None;
+            };
+            let username_caveat: UsernameCaveat = match caveat.predicate().try_into() {
+                Ok(caveat) => caveat,
+                Err(_) => return None,
+            };
+
+            Some(username_caveat)
+        })
+        .unwrap();
+
+    let mut verifier = Verifier::default();
+
+    verifier.satisfy_exact(username_caveat.clone().into());
+    verifier.satisfy_general(check_token_not_expired);
+
+    verifier.verify(&macaroon, &macaroon_key, Vec::new())?;
+
     debug!("Handling chunk request");
 
     trace!("Getting request length");
     let req_len = sock.read_u16().await? as usize;
@@ -95,7 +150,7 @@ pub async fn handle_download_chunk<D: ChunkDatabase>(
     let path = format!("chunks/{}", req.chunk_id);
 
     let chunk_meta = chunk_db
-        .get_chunk_meta(req.chunk_id)
+        .get_chunk_meta(req.chunk_id, username_caveat.username.as_str())
         .await?
         .ok_or_else(|| anyhow!("chunk not found"))?;
     let chunk_meta_bytes = chunk_meta.to_bytes()?;
@@ -119,7 +174,42 @@ pub async fn handle_download_chunk<D: ChunkDatabase>(
 }
 
 // TODO: very ddosable by querying many chunks at once
-async fn query_chunks_uploaded<D: ChunkDatabase>(sock: &mut TcpStream, chunk_db: &D) -> Result<()> {
+async fn query_chunks_uploaded<D: ChunkDatabase>(
+    sock: &mut TcpStream,
+    chunk_db: &D,
+    macaroon_key: &MacaroonKey,
+) -> Result<()> {
+    let req_len = sock.read_u16().await? as usize;
+    let mut buf = vec![0; req_len];
+    sock.read_exact(&mut buf).await?;
+
+    let auth = rkyv::check_archived_root::<Authentication>(&buf)
+        .map_err(|_| anyhow!("could not deserialize authentication"))?;
+
+    let macaroon = Macaroon::deserialize(auth.macaroon.as_str())?;
+    let caveats = macaroon.first_party_caveats();
+
+    let username_caveat = caveats
+        .iter()
+        .find_map(|caveat| {
+            let Caveat::FirstParty(caveat) = caveat else {
+                return None;
+            };
+            let username_caveat: UsernameCaveat = match caveat.predicate().try_into() {
+                Ok(caveat) => caveat,
+                Err(_) => return None,
+            };
+
+            Some(username_caveat)
+        })
+        .unwrap();
+
+    let mut verifier = Verifier::default();
+    verifier.satisfy_exact(username_caveat.clone().into());
+    verifier.satisfy_general(check_token_not_expired);
+
+    verifier.verify(&macaroon, &macaroon_key, Vec::new())?;
+
     let chunks_uploaded_query_len: u16 = sock.read_u16().await?;
 
     let mut chunks_uploaded_query_bin = vec![0; chunks_uploaded_query_len as usize];
@@ -131,13 +221,16 @@ async fn query_chunks_uploaded<D: ChunkDatabase>(sock: &mut TcpStream, chunk_db:
     let chunks_uploaded_query: ChunksUploadedQuery =
         chunks_uploaded_query.deserialize(&mut rkyv::Infallible)?;
 
+    let username = &username_caveat.username;
+
     let chunks_uploaded: HashMap<ChunkID, bool> = futures::future::join_all(
         chunks_uploaded_query
             .chunks
             .iter()
             .map(|chunk_id| async move {
                 let chunk_id: ChunkID = *chunk_id;
-                let contains_chunk: bool = chunk_db.contains_chunk(chunk_id).await.unwrap();
+                let contains_chunk: bool =
+                    chunk_db.contains_chunk(chunk_id, &username).await.unwrap();
 
                 (chunk_id, contains_chunk)
             }),
@@ -155,8 +248,42 @@ async fn query_chunks_uploaded<D: ChunkDatabase>(sock: &mut TcpStream, chunk_db:
     Ok(())
 }
 
-async fn handle_upload_chunk<D: ChunkDatabase>(sock: &mut TcpStream, chunk_db: &D) -> Result<()> {
+async fn handle_upload_chunk<D: ChunkDatabase>(
+    sock: &mut TcpStream,
+    chunk_db: &D,
+    macaroon_key: &MacaroonKey,
+) -> Result<()> {
     trace!("Handling chunk upload");
+    let req_len = sock.read_u16().await? as usize;
+    let mut buf = vec![0; req_len];
+    sock.read_exact(&mut buf).await?;
+
+    let auth = rkyv::check_archived_root::<Authentication>(&buf)
+        .map_err(|_| anyhow!("could not deserialize authentication"))?;
+
+    let macaroon = Macaroon::deserialize(auth.macaroon.as_str())?;
+    let caveats = macaroon.first_party_caveats();
+
+    let username_caveat = caveats
+        .iter()
+        .find_map(|caveat| {
+            let Caveat::FirstParty(caveat) = caveat else {
+                return None;
+            };
+            let username_caveat: UsernameCaveat = match caveat.predicate().try_into() {
+                Ok(caveat) => caveat,
+                Err(_) => return None,
+            };
+
+            Some(username_caveat)
+        })
+        .unwrap();
+
+    let mut verifier = Verifier::default();
+    verifier.satisfy_general(check_token_not_expired);
+    verifier.satisfy_exact(username_caveat.clone().into());
+    verifier.verify(&macaroon, &macaroon_key, Vec::new())?;
+
     let chunk_metadata_len = sock.read_u16().await? as usize;
     let mut chunk_metadata_buf = vec![0; chunk_metadata_len];
     sock.read_exact(&mut chunk_metadata_buf[..chunk_metadata_len])
@@ -197,7 +324,23 @@ async fn handle_upload_chunk<D: ChunkDatabase>(
         }
     }
 
-    chunk_db.insert_chunk(chunk_metadata).await?;
+    chunk_db
+        .insert_chunk(chunk_metadata, &username_caveat.username)
+        .await?;
 
     Ok(())
 }
+
+fn check_token_not_expired(caveat: &ByteString) -> bool {
+    let caveat: ExpirationCaveat = match caveat.try_into() {
+        Ok(caveat) => caveat,
+        Err(_) => return false,
+    };
+    let current_time = SystemTime::now()
+        .duration_since(UNIX_EPOCH)
+        .unwrap()
+        .as_secs();
+
+    // A satisfy_general predicate must return true when the caveat holds,
+    // i.e. while the token has not yet expired.
+    current_time < caveat.expiration
+}
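Note: the handlers above only verify macaroons; minting happens in the auth service. For context, a sketch of what a compatible mint could look like with the macaroon crate. The location, identifier, and predicate strings here are assumptions; the real formats are whatever bfsp's UsernameCaveat and ExpirationCaveat conversions produce:

    use macaroon::{Macaroon, MacaroonKey};

    // Sketch only. MacaroonKey::generate derives the key deterministically
    // from its seed, so a minter using the same b"key" seed as the server
    // above produces tokens that the handle_* functions will verify.
    fn mint_token(key: &MacaroonKey, username: &str, expires_unix: u64) -> anyhow::Result<String> {
        macaroon::initialize()?;
        let mut mac = Macaroon::create(Some("bfsp".to_string()), key, "bfsp-token".into())?;
        // Assumed predicate formats; bfsp's Into<ByteString> impls define the real ones.
        mac.add_first_party_caveat(format!("username = {username}").into());
        mac.add_first_party_caveat(format!("expires = {expires_unix}").into());
        // The handlers deserialize from a string, so serialize to the base64 V2 format.
        Ok(mac.serialize(macaroon::Format::V2)?)
    }

Deriving the key from a hard-coded seed means anyone who reads the source can mint valid tokens, so a real deployment would need to load the seed from a secret instead.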