diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 307cd1c..f784740 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,10 +18,10 @@ jobs: fail-fast: false steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - name: Install `nasm` - run: sudo apt-get install yasm -y + - name: Install `yasm` + run: sudo apt-get update && sudo apt-get install build-essential yasm -y - name: Set up Rust toolchain uses: actions-rs/toolchain@v1 @@ -47,7 +47,7 @@ jobs: path: target key: ${{ runner.os }}-cargo-build-target-${{ matrix.toolchain }}-${{ hashFiles('**/Cargo.lock') }} - - name: Cache cargo bin # bc wow why does it take three minutes to build `cargo-deny` + - name: Cache cargo bin uses: actions/cache@v4 with: path: ~/.cargo/bin @@ -61,17 +61,53 @@ with: tool: cargo-deny,cargo-nextest,cargo-rdme + - name: Run tests + run: cargo nextest run + + - name: Run doctests + run: cargo test --doc + - name: Check for unused dependencies uses: bnjbvr/cargo-machete@main - - name: Make sure README is up-to-date - run: cargo rdme --check + cargo_deny: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 - - name: Run cargo-deny - run: cargo deny check + - name: Set up Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true - - name: Run tests - run: cargo nextest run + - name: Install `cargo` tools to do more testing + uses: taiki-e/install-action@v2 + with: + tool: cargo-deny + + - name: Run cargo-deny + run: cargo deny check - - name: Run doctests - run: cargo test --doc \ No newline at end of file + + readme: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + + - name: Install `yasm` + run: sudo apt-get update && sudo apt-get install build-essential yasm -y + + - name: Set up Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + + - name: Install `cargo` tools to do more testing + uses: taiki-e/install-action@v2 + with: + tool: cargo-rdme + + - name: Make sure README is 
up-to-date + run: cargo rdme --check \ No newline at end of file diff --git a/.sqlx/query-2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b.json b/.sqlx/query-2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b.json new file mode 100644 index 0000000..31a408d --- /dev/null +++ b/.sqlx/query-2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "\n INSERT INTO info \n (id, path, album, filesize, format, creation_date, modification_date, first_seen_date, width_px, height_px, specific_metadata, other_metadata, tags)\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)\n ON CONFLICT(id)\n DO UPDATE SET\n path = excluded.path,\n album = excluded.album,\n filesize = excluded.filesize,\n format = excluded.format,\n creation_date = excluded.creation_date,\n width_px = excluded.width_px,\n height_px = excluded.height_px,\n specific_metadata = excluded.specific_metadata,\n other_metadata = excluded.other_metadata,\n tags = excluded.tags;\n ", + "describe": { + "columns": [], + "parameters": { + "Right": 13 + }, + "nullable": [] + }, + "hash": "2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b" +} diff --git a/.sqlx/query-b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d.json b/.sqlx/query-b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d.json deleted file mode 100644 index 2c6bb22..0000000 --- a/.sqlx/query-b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n INSERT INTO info \n (id, path, filesize, format, creation_date, modification_date, first_seen_date, width_px, height_px, specific_metadata, other_metadata, tags)\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)\n ON CONFLICT(id)\n DO UPDATE SET\n path = excluded.path,\n filesize = excluded.filesize,\n format = excluded.format,\n creation_date = 
excluded.creation_date,\n width_px = excluded.width_px,\n height_px = excluded.height_px,\n specific_metadata = excluded.specific_metadata,\n other_metadata = excluded.other_metadata,\n tags = excluded.tags;\n ", - "describe": { - "columns": [], - "parameters": { - "Right": 12 - }, - "nullable": [] - }, - "hash": "b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d" -} diff --git a/Cargo.toml b/Cargo.toml index 6a09968..ee7cf97 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,10 +12,6 @@ async-watcher = "0.3.0" tracing = "0.1.40" async-walkdir = "2.0.0" rand = "0.8.5" -# flutter_rust_bridge = "2.3.0" # note: this is to specify acceptable inputs for flutter ffi - -# kinda unused rn -jiff = { version = "0.1.13", features = ["serde"] } # async tokio = { version = "1.40", features = ["macros", "rt-multi-thread"] } @@ -51,6 +47,8 @@ chrono = { version = "0.4.39", features = ["serde"] } uuid = { version = "1.12.0", features = ["v4", "serde"] } avif-parse = "1.3.2" blake3 = { version = "1.5.5", features = ["mmap", "rayon"] } +sea-query = { version = "0.32.1", default-features = false, features = ["derive", "backend-sqlite", "with-chrono", "chrono"] } +sea-query-binder = { version = "0.7.0", features = ["chrono", "runtime-tokio", "sqlx-sqlite", "with-chrono"] } # crc32fast = "1.4.2" # sys dependencies @@ -71,3 +69,4 @@ tracing-subscriber = "0.3.18" dirs = "6.0.0" anyhow = "1.0.95" console-subscriber = "0.4.1" +temp-dir = "0.1.14" diff --git a/README.md b/README.md index 5cade51..fd0dc8e 100644 --- a/README.md +++ b/README.md @@ -23,8 +23,8 @@ To build this, there are a few dependencies you need to install. I use Fedora, b Under active development. - [ ] GOAL: Feature-completeness - - [ ] Metadata scanning for `Media` - - [ ] Images + - [x] Metadata scanning for `Media` + - [x] Images - [ ] GIFS - [ ] Video - [ ] General (including Folder. i.e. 
`stat`) diff --git a/migrations/0004_tables.sql new file mode 100644 index 0000000..7ca0b34 --- /dev/null +++ b/migrations/0004_tables.sql @@ -0,0 +1,51 @@ +-- media info: cached metadata about tracked media files +CREATE TABLE IF NOT EXISTS info( + id TEXT NOT NULL PRIMARY KEY, + -- note: this would preferably be unique, but that messes with modern sqlite + -- update-insert syntax... + path TEXT NOT NULL, + filesize INTEGER NOT NULL, + format TEXT NOT NULL, + creation_date DATETIME, + modification_date DATETIME, + first_seen_date DATETIME NOT NULL, + width_px INTEGER NOT NULL, + height_px INTEGER NOT NULL, + specific_metadata TEXT NOT NULL, + other_metadata TEXT, + tags TEXT NOT NULL +); + +ALTER TABLE + info +ADD + COLUMN album TEXT NOT NULL DEFAULT ''; + +-- thumbnails: preview media +CREATE TABLE IF NOT EXISTS thumbnail( + -- path to the thumbnail on disk + path TEXT NOT NULL, + -- thumbnail is for the media file with this uuid + media_id TEXT NOT NULL PRIMARY KEY +); + +-- albums: contain media +CREATE TABLE IF NOT EXISTS albums( + id TEXT NOT NULL PRIMARY KEY, + name TEXT NOT NULL, + path TEXT NOT NULL, + -- + -- uuids (in json) + contained_media TEXT NOT NULL +); + +-- hashes: media file hashes to ensure metadata is up-to-date! +CREATE TABLE IF NOT EXISTS hashes( + media_id TEXT NOT NULL PRIMARY KEY, + hash BLOB NOT NULL +); + +-- hash_blob_index: tell SQLite to make a btree for the hashes, too. +-- +-- (this allows for high-speed lookups, both ways. 
hash <=> id) +CREATE UNIQUE INDEX IF NOT EXISTS hash_blob_index ON hashes(hash); \ No newline at end of file diff --git a/src/database.rs b/src/database.rs index 8a2d6de..537278c 100644 --- a/src/database.rs +++ b/src/database.rs @@ -40,6 +40,8 @@ pub static DATABASE: LazyLock> = LazyLock::new(|| { panic!("No database folder path given."); }; + tracing::debug!("Loading from database at `{raves_db_folder}`..."); + // ensure the path exists match raves_db_folder.try_exists() { Ok(true) => (), diff --git a/src/error.rs b/src/error.rs index 5faabaf..e38bb01 100644 --- a/src/error.rs +++ b/src/error.rs @@ -41,6 +41,9 @@ pub enum RavesError { error: std::io::Error, }, + #[error("Failed to find media file's parent. path: {_0}")] + MediaFilePathNoParent(Utf8PathBuf), + #[error("The media file at `{path}` does not appear to contain MIME (file type) data.")] NoMimeData { path: String }, diff --git a/src/models/media/builder/mod.rs b/src/models/media/builder/mod.rs index 2957217..f090ffe 100644 --- a/src/models/media/builder/mod.rs +++ b/src/models/media/builder/mod.rs @@ -111,6 +111,18 @@ impl MediaBuilder { /// 7. Return it. #[tracing::instrument(skip(self))] async fn build_internal(mut self, path: &Utf8Path) -> Result { + // before anything, let's make sure the media file has an album to use! + let album = path + .parent() + .map(|p| p.to_path_buf().to_string()) + .inspect(|parent| { + tracing::debug!("Found album (parent) for media file! 
path: {parent}") + }) + .ok_or_else(|| { + tracing::warn!("Given a supposed file path, but failed to find its parent!"); + RavesError::MediaFilePathNoParent(path.to_path_buf()) + })?; + // grab format and apply it to self let format = format(path).await?; let mime_type = format.mime_type(); @@ -184,11 +196,14 @@ impl MediaBuilder { Ok(Media { id, + album: album.to_string(), path: path.to_string(), + filesize: self.filesize.ok_or(RavesError::FileMissingMetadata( path.to_string(), "no file size given".into(), ))?, + creation_date: self.creation_date, modification_date: self.modification_date, @@ -342,7 +357,7 @@ pub fn get_video_len(path: &Utf8Path) -> Result { #[cfg(test)] mod tests { - use std::env::temp_dir; + use temp_dir::TempDir; use camino::Utf8PathBuf; use chrono::{DateTime, Utc}; @@ -362,9 +377,10 @@ mod tests { /// The `MediaBuilder` should keep the `id` and `first_seen_date` fields as-is. #[tokio::test] async fn media_builder_keeps_static_fields() { + let temp_dir = TempDir::new().unwrap(); // set up the db database::DB_FOLDER_PATH - .set(Utf8PathBuf::try_from(temp_dir()).unwrap()) + .set(Utf8PathBuf::try_from(temp_dir.path().to_path_buf()).unwrap()) .unwrap(); let path = Utf8PathBuf::from("tests/assets/fear.avif") @@ -375,6 +391,7 @@ mod tests { let old_media = Media { id: Uuid::nil(), path: path.to_string(), + album: "tests/assets".into(), filesize: 0, format: Json(Format::new_from_mime("image/avif").unwrap()), creation_date: None, @@ -425,4 +442,32 @@ mod tests { "post-insert same first seen dates" ); } + + /// Checks that the `MediaBuilder` can correctly find albums. 
+ #[tokio::test] + async fn album() { + let temp_dir = TempDir::new().unwrap(); + let album_path = temp_dir.path().join("farts album"); + let file_path = album_path.join("fear.avif"); + + // make a new folder in the temp_dir called "farts album" + tokio::fs::create_dir_all(temp_dir.path().join("farts album")) + .await + .unwrap(); + + database::DB_FOLDER_PATH + .set(Utf8PathBuf::try_from(temp_dir.path().to_path_buf()).unwrap()) + .unwrap(); + + tokio::fs::copy("tests/assets/fear.avif", &file_path) + .await + .unwrap(); + + let media = MediaBuilder::default() + .build(Utf8PathBuf::try_from(file_path).unwrap()) + .await + .unwrap(); + + assert_eq!(media.album, album_path.to_string_lossy().to_string()); + } } diff --git a/src/models/media/metadata.rs b/src/models/media/metadata.rs index 7d62d40..a30850d 100644 --- a/src/models/media/metadata.rs +++ b/src/models/media/metadata.rs @@ -1,4 +1,6 @@ -use std::collections::HashMap; +use std::{cmp::Ordering, collections::HashMap}; + +use fraction::GenericFraction; /// Metadata "specific" to one type of media. #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize)] @@ -13,15 +15,50 @@ pub enum SpecificMetadata { }, #[non_exhaustive] - Video { length: f64 }, + Video { + /// The video's length in seconds. + length: f64, + // TODO: framerate (see below) + // framerate: Framerate, + }, +} + +impl SpecificMetadata { + pub fn new_image() -> Self { + Self::Image {} + } + + pub fn new_animated_image(frame_count: u32, framerate: Framerate) -> Self { + Self::AnimatedImage { + frame_count, + framerate, + } + } + + pub fn new_video(length: f64) -> Self { + Self::Video { length } + } } #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize)] pub struct OtherMetadataValue { + // note: this is on the value since putting it on the key makes it difficult + // to actually use in the map lol + // + // TODO: maybe just do this on the frontend manually? 
pub user_facing_name: Option, pub value: String, } +impl OtherMetadataValue { + pub fn new(name: impl AsRef, value: impl AsRef) -> Self { + Self { + user_facing_name: Some(name.as_ref().to_string()), + value: value.as_ref().to_string(), + } + } +} + /// A representation for uncommon metadata that can only be read. /// /// Also, it's a `HashMap` newtype to get around the lack of `PartialOrd`. @@ -46,10 +83,6 @@ impl PartialOrd for OtherMetadataMap { } } -use std::cmp::Ordering; - -use fraction::GenericFraction; - /// Resolution, currently capped at 65,535 x 65,535. /// /// Internally uses `u16` values. @@ -87,6 +120,9 @@ impl std::fmt::Display for MediaKind { } /// A representation of a media file's MIME format. +// +// MAINTAINER NOTE: if you change the names of these fields, you also need to +// change the filter/searching modifiers for `Format`! #[derive(Clone, Debug, PartialEq, PartialOrd, Eq, Ord, serde::Serialize, serde::Deserialize)] pub struct Format { /// The "kind" of media used in this format. (image, video, animated image, etc..?) @@ -106,7 +142,7 @@ impl Format { tracing::debug!("creating format from mime type `{mime}`..."); let mut s = mime.split('/'); - let (raw_kind, raw_type) = (s.next()?, s.next()?); + let raw_kind = s.next()?; // TODO: annoying parsing for animated media. // maybe find a library for that... @@ -120,7 +156,7 @@ impl Format { Some(Self { media_kind: kind, - mime_type: raw_type.to_string(), + mime_type: mime.into(), }) } diff --git a/src/models/media/mod.rs b/src/models/media/mod.rs index d67499e..49b715e 100644 --- a/src/models/media/mod.rs +++ b/src/models/media/mod.rs @@ -37,6 +37,9 @@ pub struct Media { /// The last known file path for this media file. pub path: String, + /// Last known path of its containing folder. + pub album: String, + /// How large the file is, in bytes. 
pub filesize: i64, @@ -94,12 +97,13 @@ impl InsertIntoTable for Media { sqlx::query!( r#" INSERT INTO info - (id, path, filesize, format, creation_date, modification_date, first_seen_date, width_px, height_px, specific_metadata, other_metadata, tags) + (id, path, album, filesize, format, creation_date, modification_date, first_seen_date, width_px, height_px, specific_metadata, other_metadata, tags) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) ON CONFLICT(id) DO UPDATE SET path = excluded.path, + album = excluded.album, filesize = excluded.filesize, format = excluded.format, creation_date = excluded.creation_date, @@ -111,6 +115,7 @@ impl InsertIntoTable for Media { "#, self.id, self.path, + self.album, self.filesize, self.format, self.creation_date, diff --git a/src/models/tags.rs b/src/models/tags.rs index cd208bf..96c1042 100644 --- a/src/models/tags.rs +++ b/src/models/tags.rs @@ -2,8 +2,6 @@ use uuid::Uuid; -pub type TagIdent = String; - /// A "section" for tags. When a tag has a section, it is separated from others /// by extreme differences. /// @@ -15,13 +13,15 @@ pub type TagIdent = String; #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Deserialize, serde::Serialize)] pub struct TagSection { name: String, + id: Uuid, } impl Default for TagSection { /// Creates THE default `TagSection`, simply titled "default". fn default() -> Self { Self { - name: String::from("default"), + name: String::from("Default"), + id: Uuid::nil(), } } } @@ -32,16 +32,31 @@ pub struct Tag { /// /// Don't use this to find the tag - EVER. /// The name can change, but a tag's UUID is forever static. - name: String, + pub name: String, /// A unique identifier. /// /// Always use this when referencing the tag externally. - uuid: TagIdent, + pub uuid: Uuid, /// The section this tag belongs to. - tag_section: Option, + pub tag_section: Option, /// The other tags this tag "implies". 
For example, tags "christmas" and /// "halloween" would both imply the "holiday" tag. - implies: Vec, + pub implies: Vec, +} + +impl Tag { + /// Creates a new tag **representation** for testing. + /// + /// It will not be stored in the database or anything like that. + #[doc(hidden)] + pub fn new_testing(name: impl AsRef) -> Self { + Self { + name: name.as_ref().to_string(), + uuid: Uuid::new_v4(), + tag_section: Some(Uuid::nil()), + implies: Vec::new(), + } + } } #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Deserialize)] diff --git a/src/search/details.rs b/src/search/details.rs index fb7f5e0..f35110f 100644 --- a/src/search/details.rs +++ b/src/search/details.rs @@ -4,9 +4,9 @@ use std::path::PathBuf; -use crate::models::media::metadata::Framerate; +use crate::models::media::metadata::{Framerate, MediaKind}; -use jiff::Zoned; +use chrono::{DateTime, Utc}; /// the location of media #[derive(Clone, Debug, PartialEq, PartialOrd)] @@ -16,10 +16,9 @@ pub struct PathDetail(pub PathBuf); #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum DateDetail { // TODO: allow dates, times, or both. for now, assume manual conversion - Created(Zoned), - Modified(Zoned), - Accessed(Zoned), - FirstSeen(Zoned), + Created(DateTime), + Modified(DateTime), + FirstSeen(DateTime), } /// "webm", "avif", etc. @@ -32,10 +31,21 @@ pub enum FormatDetail { /// "video", "image", etc. #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum KindDetail { - Image, + Photo, + AnimatedPhoto, Video, } +impl From for MediaKind { + fn from(value: KindDetail) -> Self { + match value { + KindDetail::Photo => Self::Photo, + KindDetail::AnimatedPhoto => Self::AnimatedPhoto, + KindDetail::Video => Self::Video, + } + } +} + /// fps of a video #[derive(Clone, Debug, PartialEq, PartialOrd)] pub struct FramerateDetail(pub Framerate); @@ -50,6 +60,8 @@ pub enum TagDetail { TagName(String), PersonTagName(String), PersonTagWithMarker(String, String), + + /// The number of tags on a media file. 
Count(u8, Comparison), } @@ -62,6 +74,20 @@ pub enum Comparison { Greater, } +impl core::fmt::Display for Comparison { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let cmp = match self { + Self::Less => "<", + Self::LessOrEqual => "<=", + Self::Equal => "=", + Self::GreaterOrEqual => ">=", + Self::Greater => ">", + }; + + f.write_str(cmp) + } +} + /// "landscape", "portrait", "square" #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum OrientationDetail { diff --git a/src/search/mod.rs b/src/search/mod.rs index 8bbe688..4d3d9c8 100644 --- a/src/search/mod.rs +++ b/src/search/mod.rs @@ -2,7 +2,7 @@ pub mod details; pub mod modifiers; -// pub mod query; +pub mod query; pub mod sort; /// `modifier1 AND modifier2` diff --git a/src/search/modifiers.rs b/src/search/modifiers.rs index fcfc799..946d225 100644 --- a/src/search/modifiers.rs +++ b/src/search/modifiers.rs @@ -1,9 +1,16 @@ -use super::details::{DateDetail, FormatDetail, KindDetail, TagDetail}; +use camino::Utf8PathBuf; +use sea_query::SimpleExpr; + +use super::details::{DateDetail, FormatDetail, KindDetail, OrientationDetail, TagDetail}; #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum DateTimeModifier { Before(DateDetail), - During(DateDetail), + // TODO: this would be kinda cool... + // Between { + // start: DateDetail, + // end: DateDetail + // }, After(DateDetail), } @@ -11,12 +18,16 @@ pub enum DateTimeModifier { #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum CollectionModifier { Tag(TagDetail), - Album(String), + + /// Searches for media within a folder with the given album path. + /// + /// That's just the folder the media is in. + Album(Utf8PathBuf), Literal(String), DateTime(DateTimeModifier), Format(FormatDetail), Kind(KindDetail), - Orientation(String), + Orientation(OrientationDetail), } /// A modifier that applies `OR`/`NOT`` logic to modifier expressions. 
@@ -48,18 +59,12 @@ pub enum Expr { Boolean(BooleanModifier), Other(OtherModifier), } - -pub struct PreExecutionQuery { - pub query: String, - // pub parameters: Vec, // FIXME: no clue what i was cookin here. `surrealql::Value`..? +/// A modifier must become a query to be used. +/// +/// All modifiers must implement this trait! +pub trait ToQuery { + /// Converts the modifier into a query for use in querying the database. + /// + /// This assumes that each modifier can become a query clause. + fn to_query(self) -> SimpleExpr; } - -// /// A modifier must become a query to be used. -// /// -// /// All modifiers must implement this trait! -// pub trait ToQuery { -// /// Converts the modifier into a query for use in querying the database. -// /// -// /// This assumes that each modifier can become a query string. -// fn to_query(&self) -> Result; -// } diff --git a/src/search/query.rs b/src/search/query.rs index c810870..b1afc49 100644 --- a/src/search/query.rs +++ b/src/search/query.rs @@ -1,65 +1,183 @@ -use surrealdb::{engine::local::Db, method::Query, sql::Query as SqlQuery}; +use extension::sqlite::SqliteExpr; +use sea_query::*; + +use crate::{models::media::metadata::MediaKind, search::details::Comparison}; use super::{ - details::TagDetail, - modifiers::{CollectionModifier, ToQuery}, + details::{DateDetail, FormatDetail, OrientationDetail, TagDetail}, + modifiers::{CollectionModifier, DateTimeModifier, ToQuery}, }; -use surrealdb::sql::parse; +/// the media table +#[derive(Iden)] +pub enum Info { + Table, + Id, + Path, + Album, + Filesize, + Format, + CreationDate, + ModificationDate, + FirstSeenDate, + WidthPx, + HeightPx, + SpecificMetadata, + OtherMetadata, + Tags, +} + +/// The default array length function in SQLite. +struct JsonArrayLenFunction; -/// FIXME: this is a really basic impl. it's not a security risk or anything, but it's likely -/// bad to use format strings here. that might allow for injection if someone didn't know. 
+impl Iden for JsonArrayLenFunction { + fn unquoted(&self, s: &mut dyn core::fmt::Write) { + write!(s, "json_array_length").unwrap() + } +} impl ToQuery for CollectionModifier { - fn to_query(&self) -> Result { + #[tracing::instrument] + fn to_query(self) -> SimpleExpr { match self { + // based on the kind, we'll check various attributes of the media's tags. + // + // use `SqliteExpr::col(ColType::Variant).get_json_field("name")` CollectionModifier::Tag(tag_detail) => match tag_detail { - TagDetail::TagName(name) | TagDetail::PersonTagName(name) => { - // NOTE: there are no "person" tags yet, so these are the same - parse(format!("SELECT * FROM info WHERE media.tags CONTAINS '{name}'").as_str()) + TagDetail::TagUuid(uuid) => { + tracing::debug!("Looking for tag with UUID `{uuid}`"); + Expr::col(Info::Id).eq(uuid) + } + TagDetail::PersonTagUuid(uuid) => { + tracing::warn!("LPerson tag with UUID is unimplemented."); // TODO + Expr::col(Info::Id).eq(uuid) } + TagDetail::PersonTagWithMarker(uuid, _marker_uuid) => { + tracing::warn!("Person tag with marker is unimplemented. 
(uuid: {uuid}, marker uuid: {_marker_uuid})"); // TODO + Expr::col(Info::Id).eq(uuid) + } + TagDetail::Count(ct, cmp) => { + tracing::debug!("Looking for media with {cmp} {ct} tags!"); - TagDetail::PersonTagWithMarker(_, _) => unimplemented!(), + let fn_call = SimpleExpr::FunctionCall( + Func::cust(JsonArrayLenFunction).arg(Expr::col(Info::Tags)), + ); - TagDetail::Count(ct, cmp) => { - let cmp = match cmp { - super::details::Comparison::Less => "<", - super::details::Comparison::LessOrEqual => "<=", - super::details::Comparison::Equal => "=", - super::details::Comparison::GreaterOrEqual => ">=", - super::details::Comparison::Greater => ">", - }; - - parse( - format!("SELECT * FROM info WHERE array::len(media.tags) {cmp} {ct}") - .as_str(), - ) + match cmp { + Comparison::Less => fn_call.lt(ct), + Comparison::LessOrEqual => fn_call.lte(ct), + Comparison::Equal => fn_call.eq(ct), + Comparison::GreaterOrEqual => fn_call.gte(ct), + Comparison::Greater => fn_call.gt(ct), + } } }, - // we'll sort by the folder it's contained in - CollectionModifier::Album(name) => { - // FIXME: this isn't correct. placeholder until we parse it manually - // note that fixing it might require us to directly query here..! - parse(format!("SELECT * FROM info WHERE path CONTAINS '{name}'").as_str()) + // based on containing folder! + CollectionModifier::Album(path) => { + tracing::debug!("Checking for media file with album name: `{path}`..."); + Expr::col(Info::Album).like(path) + } + + // ez pz, just add a 'LIKE' clause with `.like()` + CollectionModifier::Literal(lit) => { + tracing::debug!("Checking for literal: `{lit}`"); + Expr::col(Info::Path).like(format!("%{lit}%")) + } + + // yeah that's not bad. might be difficult to express dates in the + // orm-ish syntax, though? 
+ CollectionModifier::DateTime(dt_modifier) => { + let get_col_from_detail = |dd: DateDetail| { + tracing::debug!("Given date detail: {dd:?}"); + match dd { + DateDetail::Created(date_time) => { + (Expr::col(Info::CreationDate), date_time) + } + DateDetail::Modified(date_time) => { + (Expr::col(Info::ModificationDate), date_time) + } + DateDetail::FirstSeen(date_time) => { + (Expr::col(Info::FirstSeenDate), date_time) + } + } + }; + + match dt_modifier { + DateTimeModifier::Before(dd) => { + let (col, time) = get_col_from_detail(dd); + col.lt(Value::ChronoDateTimeUtc(Some(Box::new(time)))) + } + + // TODO: DateTimeModifier::Between ... + // + DateTimeModifier::After(dd) => { + let (col, time) = get_col_from_detail(dd); + col.gt(Value::ChronoDateTimeUtc(Some(Box::new(time)))) + } + } } - CollectionModifier::Literal(s) => { - parse(format!("SELECT * FROM info WHERE media.name = '{s}'").as_str()) - } // FIXME: this should only search the filename + CollectionModifier::Format(format_detail) => { + tracing::debug!("Asked to check for Format."); - CollectionModifier::DateTime(_) => todo!(), + match format_detail { + FormatDetail::MimeType(mime_type) => { + tracing::debug!("Looking at format's MIME type. given: `{mime_type}`"); - CollectionModifier::Format(_) => todo!(), + Expr::col(Info::Format) + .cast_json_field("mime_type") + .like(mime_type) + } - CollectionModifier::Kind(_) => todo!(), + FormatDetail::Extension(file_ext) => { + tracing::debug!("Checking format's extension. given: `{file_ext}`"); - CollectionModifier::Orientation(_) => todo!(), + // ensure correct formatting of extension. note that `LIKE` is + // case-insensitive :) + let file_ext = { + let mut s = String::with_capacity(file_ext.len() + 1); + + // IMPORTANT! 
this does the 'end of string' checking in SQLite + s.push('%'); + + // add the other part to the end, in lowercase + no whitespace + s.push_str(file_ext.trim()); + s + }; + + tracing::debug!("Made formatted extension: `{file_ext}`"); + Expr::col(Info::Path).like(file_ext) + } + } + } + + CollectionModifier::Kind(kind_detail) => { + tracing::debug!("Asked to check by kind: `{kind_detail:?}`"); + + // we'll use json for this + Expr::col(Info::Format) + .cast_json_field("media_kind") + .eq(MediaKind::from(kind_detail.clone()).to_string()) + } + + // hoz: width_px > height_px + // vert: height_px > width_px + // square: width_px = height_px + CollectionModifier::Orientation(orientation_detail) => match orientation_detail { + OrientationDetail::Landscape => { + tracing::debug!("Orientation detail (landscape)..."); + Expr::col(Info::WidthPx).gt(Expr::col(Info::HeightPx)) + } + OrientationDetail::Portrait => { + tracing::debug!("Orientation detail (portrait)..."); + Expr::col(Info::HeightPx).gt(Expr::col(Info::WidthPx)) + } + OrientationDetail::Square => { + tracing::debug!("Orientation detail (square)..."); + Expr::col(Info::WidthPx).eq(Expr::col(Info::HeightPx)) + } + }, } } } - -pub trait ToQuery2 { - /// Takes in an (unexecuted) query and adds additional clauses on it. 
- fn to_query(query: Query<'_, Db>) -> Query<'_, Db>; -} diff --git a/src/search/sort.rs b/src/search/sort.rs index eadf724..f0d8377 100644 --- a/src/search/sort.rs +++ b/src/search/sort.rs @@ -211,7 +211,8 @@ mod tests { fn create_default_media() -> Media { Media { id: Uuid::nil(), - path: "a".into(), + path: "/a/b.jpg".into(), + album: "a".into(), filesize: 1024, format: Json(Format::new_from_mime("image/jpeg").unwrap()), creation_date: None, diff --git a/tests/db.rs b/tests/db.rs index c99708b..c69deb3 100644 --- a/tests/db.rs +++ b/tests/db.rs @@ -10,7 +10,7 @@ mod common; #[cfg(test)] mod tests { - use std::{env::temp_dir, str::FromStr as _}; + use std::str::FromStr as _; use backdrop::{ database::{self, DATABASE, RAVES_DB_FILE}, @@ -18,14 +18,16 @@ mod tests { }; use camino::{Utf8Path, Utf8PathBuf}; use sqlx::{sqlite::SqliteConnectOptions, Sqlite}; + use temp_dir::TempDir; use uuid::Uuid; /// The database can cache metadata for the beach photo. #[tokio::test] async fn beach() { // set up the database + let temp_dir = temp_dir::TempDir::new().unwrap(); { - let db_temp_dir = Utf8PathBuf::try_from(temp_dir()) + let db_temp_dir = Utf8PathBuf::try_from(temp_dir.path().to_path_buf()) .unwrap() .join(Uuid::new_v4().to_string()) .join("_raves_db"); @@ -117,11 +119,11 @@ mod tests { for _ in 0..3 { let mut set = tokio::task::JoinSet::new(); - let tempdir = temp_dir(); - let p = Utf8PathBuf::try_from(tempdir).unwrap(); + let temp_dir = TempDir::new().unwrap(); + let p = Utf8PathBuf::try_from(temp_dir.path().to_path_buf()).unwrap(); set.spawn(make_pool(p)); - set.join_all().await; + set.join_all().await; // FIXME } } } diff --git a/tests/file_watcher.rs b/tests/file_watcher.rs index a27e6f7..38abe7a 100644 --- a/tests/file_watcher.rs +++ b/tests/file_watcher.rs @@ -4,7 +4,7 @@ mod common; #[cfg(test)] mod tests { - use std::{env::temp_dir, time::Duration}; + use std::time::Duration; use backdrop::{ database::{DATABASE, INFO_TABLE}, @@ -13,7 +13,7 @@ mod tests { }; use 
camino::Utf8PathBuf; - use uuid::Uuid; + use temp_dir::TempDir; use crate::common::{self, Setup}; @@ -26,7 +26,7 @@ mod tests { let task = tokio::spawn(Watch::watch()); // sleep for a bit - tokio::time::sleep(Duration::from_secs(5)).await; + tokio::time::sleep(Duration::from_millis(100)).await; // ensure the watcher is still running assert!(!task.is_finished(), "watcher should run indefinitely!"); @@ -41,16 +41,15 @@ mod tests { #[tokio::test] async fn find_file_in_temp_dir() { // generate a temp dir - let temp_dir = Utf8PathBuf::try_from(temp_dir()) - .unwrap() - .join(Uuid::new_v4().to_string()); - println!("temp dir located at: `{temp_dir}`"); + let temp_dir = TempDir::new().unwrap(); + let temp_dir_path = Utf8PathBuf::try_from(temp_dir.path().to_path_buf()).unwrap(); + println!("temp dir located at: `{temp_dir_path}`"); tokio::fs::create_dir_all(&temp_dir).await.unwrap(); // set up the app common::setup(Setup { port: 6670, - watched_folders: [temp_dir.clone()].into(), + watched_folders: [temp_dir_path.clone()].into(), }) .await; let mut conn = DATABASE.acquire().await.unwrap(); @@ -65,13 +64,13 @@ mod tests { .expect("remove all from info table"); // copy a photo to the temp dir - tokio::time::sleep(Duration::from_secs(3)).await; - tokio::fs::copy("tests/assets/fear.avif", temp_dir.join("fear.avif")) + tokio::time::sleep(Duration::from_millis(150)).await; + tokio::fs::copy("tests/assets/fear.avif", temp_dir_path.join("fear.avif")) .await .expect("copy to temp dir should work"); // wait... then check if we got metadata! 
- tokio::time::sleep(Duration::from_secs(5)).await; + tokio::time::sleep(Duration::from_millis(150)).await; let media = sqlx::query_as::<_, Media>(&format!("SELECT * FROM {INFO_TABLE}")) .fetch_one(&mut *conn) .await diff --git a/tests/search.rs b/tests/search.rs new file mode 100644 index 0000000..689317b --- /dev/null +++ b/tests/search.rs @@ -0,0 +1,607 @@ +#[cfg(test)] +mod tests { + + mod query { + use std::{collections::HashMap, env::temp_dir}; + + use chrono::DateTime; + use sea_query::{Asterisk, Cond, SqliteQueryBuilder}; + use sea_query_binder::SqlxBinder as _; + use sqlx::{pool::PoolConnection, Sqlite}; + use uuid::Uuid; + + use backdrop::{ + database::{self, InsertIntoTable, DATABASE}, + models::{ + media::{ + metadata::{Format, OtherMetadataMap, OtherMetadataValue, SpecificMetadata}, + Media, + }, + tags::Tag, + }, + search::{ + details::{ + Comparison, DateDetail, FormatDetail, KindDetail, OrientationDetail, TagDetail, + }, + modifiers::{CollectionModifier, DateTimeModifier, ToQuery as _}, + query::Info, + }, + }; + + #[tokio::test] + async fn collection_mod_orientation() { + let mut conn = setup_db().await; + + // find all the square ones. + { + // there should only be one. 
+ let square_mod = CollectionModifier::Orientation(OrientationDetail::Square); + + // make the actual statement w/ the modifier + let (select, values) = sea_query::Query::select() + .column(Asterisk) // jesus christ + .from(Info::Table) + .cond_where(Cond::all().add(square_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + // check that it's right + assert_eq!( + r#"SELECT * FROM "info" WHERE "width_px" = "height_px""#, select, + "select statements should match" + ); + + // query dat mf + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + // ensure it's just vade lol + assert_eq!(res.len(), 1, "vec length"); + let vade_media = res.first().expect("there is a first option"); + + assert_eq!( + vade_media.id, + Uuid::from_u128(2), + "vade square media uuid match" + ); + assert_eq!( + vade_media.format, + Format::new_from_mime("image/png").unwrap().into(), + "vade square media format match" + ); + } + + // now, query for horizontal orientation. 
there should be two entries + { + let hoz_mod = CollectionModifier::Orientation(OrientationDetail::Landscape); + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(hoz_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + assert_eq!(res.len(), 3); + } + + // finally, there should be one for vertical + { + let vert_mod = CollectionModifier::Orientation(OrientationDetail::Portrait); + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(vert_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + assert_eq!(res.len(), 1); + assert!(res.first().unwrap().path.contains("a.jpg")) + } + } + + /// Tests the tag collection modifiers. + #[tokio::test] + async fn collection_mod_tags() { + let mut conn: PoolConnection = setup_db().await; + + // tag count + { + let tag_ct_mod = CollectionModifier::Tag(TagDetail::Count(3, Comparison::Equal)); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(tag_ct_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE json_array_length("tags") = ?"#, + select, + ); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::TinyUnsigned(Some(3)) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // there should be exactly one entry + assert_eq!(res.len(), 1); + + assert!(res + .first() + .unwrap() + .clone() + .tags + .0 + .into_iter() + .any(|tag| &tag.name == "dittodill")); + } + + // TODO: test other tag detail queries when tables are implemented! 
+ } + + /// Checks the collection modifier that searches for text. + /// + /// Currently, this just checks the path of the file, but it should also + /// look in comment/description/etc. fields of any attached metadata. + #[tokio::test] + async fn collection_mod_literal() { + let mut conn: PoolConnection = setup_db().await; + + // try searching for "A.JPG" (in caps) + let literal_search_mod = CollectionModifier::Literal("a.jpg".into()); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(literal_search_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new("%a.jpg%".into()))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // only one entry + assert_eq!(res.len(), 1); + } + + /// Checks the DateTime CollectionModifiers. 
+ #[tokio::test] + #[expect(clippy::inconsistent_digit_grouping, reason = "unix time fmting")] + async fn collection_mod_datetime() { + let mut conn: PoolConnection = setup_db().await; + + // we'll try before + created here: + { + let before_created_datetime_mod = + CollectionModifier::DateTime(DateTimeModifier::Before(DateDetail::Created( + DateTime::from_timestamp_nanos(1737137976_000_000_000 + 1), + ))); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(before_created_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "creation_date" < ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 3); + assert_eq!(res.first().unwrap().format.0.mime_type(), "image/png"); + } + + // after + modified: + { + let after_modified_datetime_mod = + CollectionModifier::DateTime(DateTimeModifier::After(DateDetail::Modified( + DateTime::from_timestamp_nanos(0), + ))); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(after_modified_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE "modification_date" > ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 4); + } + + // after + first_seen: + { + let after_first_seen_datetime_mod = + CollectionModifier::DateTime(DateTimeModifier::After(DateDetail::FirstSeen( + DateTime::from_timestamp_nanos(1551731451_000_000_000), + ))); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(after_first_seen_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE 
"first_seen_date" > ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 3, "not unix epoch (5) and this exact time (1)"); + } + + // before and after at the same time should reduce the query's domain: + { + let after_seen = CollectionModifier::DateTime(DateTimeModifier::After( + DateDetail::FirstSeen(DateTime::from_timestamp_nanos(1551731451_000_000_000)), + )); + + let before_seen = CollectionModifier::DateTime(DateTimeModifier::Before( + DateDetail::FirstSeen(DateTime::from_timestamp_nanos(1737126002_000_000_000)), + )); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where( + Cond::all() + .add(after_seen.to_query()) + .add(before_seen.to_query()), + ) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE "first_seen_date" > ? AND "first_seen_date" < ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 2, "after 5, before 2, after 1. 
=> [3, 4]"); + + assert!( + res.iter().any(|media| media.id == Uuid::from_u128(3)), + "has 3" + ); + assert!( + res.iter().any(|media| media.id == Uuid::from_u128(4)), + "has 4" + ); + } + } + + #[tokio::test] + async fn collection_mod_format() { + let mut conn: PoolConnection = setup_db().await; + + // there should only be one entry w/ ext "mp4": + { + let mp4_ext_mod = CollectionModifier::Format(FormatDetail::Extension("mp4".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(mp4_ext_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("%mp4")))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "should only be one mp4 ext"); + } + + // again, one with MIME type "video/mp4": + { + let mp4_mime_mod = + CollectionModifier::Format(FormatDetail::MimeType("Video/mp4".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(mp4_mime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE ("format" ->> ?) 
LIKE ?"#, + select + ); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("mime_type")))) + ); // json_array_length + assert_eq!( + values.0 .0.get(1).unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("Video/mp4")))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "should only be one with mp4 mime type"); + } + + // three with `png` extensions: + { + let png_ext_mod = CollectionModifier::Format(FormatDetail::Extension("PnG".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(png_ext_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("%PnG")))) + ); + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // note: we take it from the file extension, not the MIME here! + assert_eq!(res.len(), 2, "two pngs"); + } + } + + #[tokio::test] + async fn collection_mod_kind() { + let mut conn: PoolConnection = setup_db().await; + + // one video: + { + let video_kind_mod = CollectionModifier::Kind(KindDetail::Video); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(video_kind_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE ("format" ->> ?) 
= ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "only one video"); + } + + // four photos: + { + let photo_kind_mod = CollectionModifier::Kind(KindDetail::Photo); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(photo_kind_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE ("format" ->> ?) = ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 4, "four photos"); + } + } + + #[tokio::test] + async fn collection_mod_album() { + let mut conn = setup_db().await; + + // "Downloads" album + { + let album_mod = CollectionModifier::Album("/home/barrett/Downloads".into()); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(album_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "album" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from( + "/home/barrett/Downloads" + )))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 2); + + let mut res_iter = res.into_iter(); + assert!(res_iter.any(|media| media.id == Uuid::from_u128(2))); + assert!(res_iter.any(|media| media.id == Uuid::from_u128(3))); + } + } + + /// creates a database with some entries in it... 
+ #[expect(clippy::inconsistent_digit_grouping, reason = "easier to read")] + async fn setup_db() -> PoolConnection { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::DEBUG) + .init(); + + let db_folder = temp_dir().join(Uuid::new_v4().to_string()); + tokio::fs::create_dir_all(&db_folder).await.unwrap(); + + database::DB_FOLDER_PATH + .set(db_folder.try_into().unwrap()) + .unwrap(); + + let mut conn = DATABASE.acquire().await.unwrap(); + + let media_1 = Media { + id: Uuid::from_u128(1), + path: "/home/barrett/Videos/eceg_ditto_dill.mp4".into(), + album: "/home/barrett/Videos".into(), + filesize: 1024 * 1024 * 512, // 512 MiB + format: Format::new_from_mime("video/mp4") + .expect("format creation") + .into(), + creation_date: Some(DateTime::from_timestamp_nanos(1737308081_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1737308098_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1551731451_000_000_000), + width_px: 1920, + height_px: 1080, + specific_metadata: SpecificMetadata::new_video(147.0).into(), + other_metadata: Some( + OtherMetadataMap(HashMap::from([ + ( + "uploader".into(), + OtherMetadataValue::new("Uploader", "DittoDill"), + ), + ( + "category_id".into(), + OtherMetadataValue::new("Video Category ID", "24"), + ), + ])) + .into(), + ), + tags: vec![ + Tag::new_testing("dittodill"), + Tag::new_testing("music"), + Tag::new_testing("legend"), + ] + .into(), + }; + + let media_2 = Media { + id: Uuid::from_u128(2), + path: "/home/barrett/Downloads/vade.png".into(), + album: "/home/barrett/Downloads".into(), + filesize: (1024 * 34) + (230), // 34.2 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1737126002_000_000_000), + width_px: 174, + height_px: 174, + specific_metadata: 
SpecificMetadata::new_image().into(), + other_metadata: None, + tags: vec![].into(), + }; + + let media_3 = Media { + id: Uuid::from_u128(3), + path: "/home/barrett/Downloads/a.jpg".into(), + album: "/home/barrett/Downloads".into(), + filesize: 1024 * 60, // 60 KiB + format: Format::new_from_mime("image/jpeg").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1730329781_000_000_000), + width_px: 1824, + height_px: 1993, + specific_metadata: SpecificMetadata::new_image().into(), + other_metadata: None, + tags: vec![].into(), + }; + + let media_4 = Media { + id: Uuid::from_u128(4), + path: "/home/barrett/Pictures/2024-02-09 14-53-52.mkv-00:00:08.500.png".into(), + album: "/home/barrett/Pictures".into(), + filesize: 1024 * 765, // 765 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1725286951_000_000_000), + width_px: 1454, + height_px: 750, + specific_metadata: SpecificMetadata::new_image().into(), + other_metadata: None, + tags: vec![].into(), + }; + + // a bunch of null-ish values >:) + let media_5 = Media { + id: Uuid::nil(), + album: "/".into(), + path: "/nil.notpng.farts".into(), + filesize: 1024 * 765, // 765 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: None, + modification_date: None, + first_seen_date: DateTime::UNIX_EPOCH, + width_px: 1, + height_px: 0, + specific_metadata: SpecificMetadata::new_image().into(), + other_metadata: None, + tags: vec![].into(), + }; + + let m = [media_1, media_2, media_3, media_4, media_5]; + + for media in m { + media + .make_insertion_query() + .execute(&mut *conn) + .await + .unwrap(); + 
} + + conn + } + } +}