From b74a7ad1db5cf1814af7d30f303fec5eaa685b52 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:14:48 -0600 Subject: [PATCH 01/27] ci: separate some tasks to speed things up also i hope this fixes it (probably wont, cant even test in `act` either D: --- .github/workflows/ci.yml | 57 +++++++++++++++++++++++++++++++--------- README.md | 4 +-- 2 files changed, 47 insertions(+), 14 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 307cd1c..f784740 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,10 +18,10 @@ jobs: fail-fast: false steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - name: Install `nasm` - run: sudo apt-get install yasm -y + - name: Install `yasm` + run: sudo apt-get update && sudo apt-get install build-essential yasm -y - name: Set up Rust toolchain uses: actions-rs/toolchain@v1 @@ -47,7 +47,7 @@ jobs: path: target key: ${{ runner.os }}-cargo-build-target-${{ matrix.toolchain }}-${{ hashFiles('**/Cargo.lock') }} - - name: Cache cargo bin # bc wow why does it take three minutes to build `cargo-deny` + - name: Cache cargo bin uses: actions/cache@v4 with: path: ~/.cargo/bin @@ -61,17 +61,50 @@ jobs: with: tool: cargo-deny,cargo-nextest,cargo-rdme + - name: Run tests + run: cargo nextest run + + - name: Run doctests + run: cargo test --doc + - name: Check for unused dependencies uses: bnjbvr/cargo-machete@main - - name: Make sure README is up-to-date - run: cargo rdme --check + cargo_deny: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 - - name: Run cargo-deny - run: cargo deny check + - name: Set up Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true - - name: Run tests - run: cargo nextest run + - name: Install `cargo` tools to do more testing + uses: taiki-e/install-action@v2 + with: + tool: cargo-deny - - name: Run doctests - run: cargo test --doc \ No newline at end of file + + readme: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + + - name: Install `yasm` + run: sudo apt-get update && sudo apt-get install build-essential yasm -y + + - name: Set up Rust toolchain + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + + - name: Install `cargo` tools to do more testing + uses: taiki-e/install-action@v2 + with: + tool: cargo-rdme + + - name: Make sure README is up-to-date + run: cargo rdme --check \ No newline at end of file diff --git a/README.md b/README.md index 5cade51..fd0dc8e 100644 --- a/README.md +++ b/README.md @@ -23,8 +23,8 @@ To build this, there are a few dependencies you need to install. I use Fedora, b Under active development. - [ ] GOAL: Feature-completeness - - [ ] Metadata scanning for `Media` - - [ ] Images + - [x] Metadata scanning for `Media` + - [x] Images - [ ] GIFS - [ ] Video - [ ] General (including Folder. i.e. `stat`) From 8e1b5d471dd2b99c2417e8c12dd12a6cc79583af Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:16:19 -0600 Subject: [PATCH 02/27] refactor(search): use `chrono` types over `jiff` `jiff` doesn't yet have ecosystem support, unfortunately! 
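the swap itself is mechanical: `DateDetail` in `src/search/details.rs` just trades
`jiff::Zoned` for chrono's UTC timestamp. a rough sketch of the new shape (the `Utc`
type parameter is an assumption based on the new import; `Accessed` is dropped):

    use chrono::{DateTime, Utc};

    #[derive(Clone, Debug, PartialEq, PartialOrd)]
    pub enum DateDetail {
        Created(DateTime<Utc>),   // was Created(Zoned)
        Modified(DateTime<Utc>),  // was Modified(Zoned)
        FirstSeen(DateTime<Utc>), // was FirstSeen(Zoned)
    }
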
`chrono` is still pretty good, though :) --- Cargo.toml | 4 ---- src/search/details.rs | 9 ++++----- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 6a09968..52f6654 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,10 +12,6 @@ async-watcher = "0.3.0" tracing = "0.1.40" async-walkdir = "2.0.0" rand = "0.8.5" -# flutter_rust_bridge = "2.3.0" # note: this is to specify acceptable inputs for flutter ffi - -# kinda unused rn -jiff = { version = "0.1.13", features = ["serde"] } # async tokio = { version = "1.40", features = ["macros", "rt-multi-thread"] } diff --git a/src/search/details.rs b/src/search/details.rs index fb7f5e0..7d947b7 100644 --- a/src/search/details.rs +++ b/src/search/details.rs @@ -6,7 +6,7 @@ use std::path::PathBuf; use crate::models::media::metadata::Framerate; -use jiff::Zoned; +use chrono::{DateTime, Utc}; /// the location of media #[derive(Clone, Debug, PartialEq, PartialOrd)] @@ -16,10 +16,9 @@ pub struct PathDetail(pub PathBuf); #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum DateDetail { // TODO: allow dates, times, or both. for now, assume manual conversion - Created(Zoned), - Modified(Zoned), - Accessed(Zoned), - FirstSeen(Zoned), + Created(DateTime), + Modified(DateTime), + FirstSeen(DateTime), } /// "webm", "avif", etc. From 574560d9b7e061db32c977f70016474141dc626e Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:17:55 -0600 Subject: [PATCH 03/27] chore(meta): add short todo for video framerate --- src/models/media/metadata.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/models/media/metadata.rs b/src/models/media/metadata.rs index 7d62d40..ad1f6f0 100644 --- a/src/models/media/metadata.rs +++ b/src/models/media/metadata.rs @@ -13,7 +13,12 @@ pub enum SpecificMetadata { }, #[non_exhaustive] - Video { length: f64 }, + Video { + /// The video's length in seconds. + length: f64, + // TODO: framerate (see below) + // framerate: Framerate, + }, } #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize)] From 2f8db06fad68c4f70863f1988dd19b7a0453236d Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:18:43 -0600 Subject: [PATCH 04/27] feat(meta/OtherMeta): add constructor for map vals reduces string stuff in tests and likely elsewhere in the future :D --- src/models/media/metadata.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/models/media/metadata.rs b/src/models/media/metadata.rs index ad1f6f0..df0b894 100644 --- a/src/models/media/metadata.rs +++ b/src/models/media/metadata.rs @@ -23,10 +23,23 @@ pub enum SpecificMetadata { #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize)] pub struct OtherMetadataValue { + // note: this is on the value since putting it on the key makes it difficult + // to actually use in the map lol + // + // TODO: maybe just do this on the frontend manually? pub user_facing_name: Option, pub value: String, } +impl OtherMetadataValue { + pub fn new(name: impl AsRef, value: impl AsRef) -> Self { + Self { + user_facing_name: Some(name.as_ref().to_string()), + value: value.as_ref().to_string(), + } + } +} + /// A representation for uncommon metadata that can only be read. /// /// Also, it's a `HashMap` newtype to get around the lack of `PartialOrd`. 
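(note on the constructor above: a quick usage sketch, mirroring how the later search
tests build their fixture metadata. the `impl AsRef<str>` bound and the plain `String`
map key are assumptions based on the `.as_ref().to_string()` calls inside `new`.)

    use std::collections::HashMap;

    use crate::models::media::metadata::{OtherMetadataMap, OtherMetadataValue};

    fn example_other_metadata() -> OtherMetadataMap {
        OtherMetadataMap(HashMap::from([(
            // raw key for the map (assumed `String`)
            "uploader".to_string(),
            // `new` fills in both the user-facing label and the stored value
            OtherMetadataValue::new("Uploader", "DittoDill"),
        )]))
    }
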
From 6d9d7d1e2692a96cf87d42f4d3271eb269bf25c9 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:19:15 -0600 Subject: [PATCH 05/27] style(meta): move imports to top --- src/models/media/metadata.rs | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/models/media/metadata.rs b/src/models/media/metadata.rs index df0b894..75e16c4 100644 --- a/src/models/media/metadata.rs +++ b/src/models/media/metadata.rs @@ -1,4 +1,6 @@ -use std::collections::HashMap; +use std::{cmp::Ordering, collections::HashMap}; + +use fraction::GenericFraction; /// Metadata "specific" to one type of media. #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize)] @@ -64,10 +66,6 @@ impl PartialOrd for OtherMetadataMap { } } -use std::cmp::Ordering; - -use fraction::GenericFraction; - /// Resolution, currently capped at 65,535 x 65,535. /// /// Internally uses `u16` values. From daf9388cd7bf2a9fe41b211988373561196f335d Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:20:07 -0600 Subject: [PATCH 06/27] refactor(tag): make types more sane see #11 for more info this just makes search a little easier to implement for now :) --- src/models/tags.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/models/tags.rs b/src/models/tags.rs index cd208bf..1d281f3 100644 --- a/src/models/tags.rs +++ b/src/models/tags.rs @@ -2,8 +2,6 @@ use uuid::Uuid; -pub type TagIdent = String; - /// A "section" for tags. When a tag has a section, it is separated from others /// by extreme differences. /// @@ -15,13 +13,15 @@ pub type TagIdent = String; #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Deserialize, serde::Serialize)] pub struct TagSection { name: String, + id: Uuid, } impl Default for TagSection { /// Creates THE default `TagSection`, simply titled "default". fn default() -> Self { Self { - name: String::from("default"), + name: String::from("Default"), + id: Uuid::nil(), } } } @@ -32,16 +32,16 @@ pub struct Tag { /// /// Don't use this to find the tag - EVER. /// The name can change, but a tag's UUID is forever static. - name: String, + pub name: String, /// A unique identifier. /// /// Always use this when referencing the tag externally. - uuid: TagIdent, + pub uuid: Uuid, /// The section this tag belongs to. - tag_section: Option, + pub tag_section: Option, /// The other tags this tag "implies". For example, tags "christmas" and /// "halloween" would both imply the "holiday" tag. - implies: Vec, + pub implies: Vec, } #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Deserialize)] From 089b2760f50e152118f4a1ac1381c9653ecdc5e4 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:20:28 -0600 Subject: [PATCH 07/27] test(feat, tag): add tag ctor for tests --- src/models/tags.rs | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/models/tags.rs b/src/models/tags.rs index 1d281f3..f1b6a85 100644 --- a/src/models/tags.rs +++ b/src/models/tags.rs @@ -44,6 +44,21 @@ pub struct Tag { pub implies: Vec, } +impl Tag { + /// Creates a new tag **representation** for testing. + /// + /// It will not be stored in the database or anything like that. 
+ #[cfg(test)] + pub(crate) fn new_testing(name: impl AsRef) -> Self { + Self { + name: name.as_ref().to_string(), + uuid: Uuid::new_v4(), + tag_section: Some(Uuid::nil()), + implies: Vec::new(), + } + } +} + #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Deserialize)] pub struct TagRecord { pub tag: Tag, From 7ca3a2cde796776925add9fcf4ec758d3d58a0c0 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:21:17 -0600 Subject: [PATCH 08/27] feat(search/detail): mirror MediaKind w KindDetail --- src/search/details.rs | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/src/search/details.rs b/src/search/details.rs index 7d947b7..64ec978 100644 --- a/src/search/details.rs +++ b/src/search/details.rs @@ -4,7 +4,7 @@ use std::path::PathBuf; -use crate::models::media::metadata::Framerate; +use crate::models::media::metadata::{Framerate, MediaKind}; use chrono::{DateTime, Utc}; @@ -31,10 +31,21 @@ pub enum FormatDetail { /// "video", "image", etc. #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum KindDetail { - Image, + Photo, + AnimatedPhoto, Video, } +impl From for MediaKind { + fn from(value: KindDetail) -> Self { + match value { + KindDetail::Photo => Self::Photo, + KindDetail::AnimatedPhoto => Self::AnimatedPhoto, + KindDetail::Video => Self::Video, + } + } +} + /// fps of a video #[derive(Clone, Debug, PartialEq, PartialOrd)] pub struct FramerateDetail(pub Framerate); From 0a39b3516a06166b7995814c528e1b29a6e8013c Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:22:48 -0600 Subject: [PATCH 09/27] refactor(search/detail): use `Uuid` over `String` tags are no longer name-based :) that's because i want to have a table in the db for them! and that helps with caching and knowing 'what' to look for --- src/search/details.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/search/details.rs b/src/search/details.rs index 64ec978..261d4f2 100644 --- a/src/search/details.rs +++ b/src/search/details.rs @@ -57,9 +57,11 @@ pub struct FramerateDetail(pub Framerate); /// - has Person tag with marker tag #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum TagDetail { - TagName(String), - PersonTagName(String), + TagUuid(String), + PersonTagUuid(String), PersonTagWithMarker(String, String), + + /// The number of tags on a media file. Count(u8, Comparison), } From 4fb0f38b2bf49294a7b97f94d572c6af92169838 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:23:12 -0600 Subject: [PATCH 10/27] feat(search/detail): impl Display for Comparison we were doing this manually before --- src/search/details.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/search/details.rs b/src/search/details.rs index 261d4f2..e1473b8 100644 --- a/src/search/details.rs +++ b/src/search/details.rs @@ -74,6 +74,20 @@ pub enum Comparison { Greater, } +impl core::fmt::Display for Comparison { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let cmp = match self { + Self::Less => "<", + Self::LessOrEqual => "<=", + Self::Equal => "=", + Self::GreaterOrEqual => ">=", + Self::Greater => ">", + }; + + f.write_str(cmp) + } +} + /// "landscape", "portrait", "square" #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum OrientationDetail { From dcf8c5442d196e5c75e83445fb55ef4a234dfff4 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:24:11 -0600 Subject: [PATCH 11/27] fix(search/modf): uhh... 
use OrientationDetail --- src/search/modifiers.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/search/modifiers.rs b/src/search/modifiers.rs index fcfc799..f0542e8 100644 --- a/src/search/modifiers.rs +++ b/src/search/modifiers.rs @@ -16,7 +16,7 @@ pub enum CollectionModifier { DateTime(DateTimeModifier), Format(FormatDetail), Kind(KindDetail), - Orientation(String), + Orientation(OrientationDetail), } /// A modifier that applies `OR`/`NOT`` logic to modifier expressions. From 21debe6d565095306fb7ea75535fd7234a7bc547 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:25:08 -0600 Subject: [PATCH 12/27] refactor(search/modf/DateTime): remove "During" it doesn't really make much sense. in the future, i'll come back with better types for a `Between` modifier! --- src/search/modifiers.rs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/search/modifiers.rs b/src/search/modifiers.rs index f0542e8..a2b4c91 100644 --- a/src/search/modifiers.rs +++ b/src/search/modifiers.rs @@ -3,7 +3,11 @@ use super::details::{DateDetail, FormatDetail, KindDetail, TagDetail}; #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum DateTimeModifier { Before(DateDetail), - During(DateDetail), + // TODO: this would be kinda cool... + // Between { + // start: DateDetail, + // end: DateDetail + // }, After(DateDetail), } From 839a2e346b050f1673b4c49e2fe7a602bd7494b1 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:26:27 -0600 Subject: [PATCH 13/27] refactor(search/modf): `sea-query` in `ToQuery` `SimpleExpr` makes queries much easier to construct dynamically! it also prevents mistakes (and spares me from writing a grammar lol) --- Cargo.toml | 2 ++ src/search/modifiers.rs | 26 +++++++++++--------------- 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 52f6654..2d7e955 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -47,6 +47,8 @@ chrono = { version = "0.4.39", features = ["serde"] } uuid = { version = "1.12.0", features = ["v4", "serde"] } avif-parse = "1.3.2" blake3 = { version = "1.5.5", features = ["mmap", "rayon"] } +sea-query = { version = "0.32.1", default-features = false, features = ["derive", "backend-sqlite", "with-chrono", "chrono"] } +sea-query-binder = { version = "0.7.0", features = ["chrono", "runtime-tokio", "sqlx-sqlite", "with-chrono"] } # crc32fast = "1.4.2" # sys dependencies diff --git a/src/search/modifiers.rs b/src/search/modifiers.rs index a2b4c91..f8606ba 100644 --- a/src/search/modifiers.rs +++ b/src/search/modifiers.rs @@ -1,4 +1,6 @@ -use super::details::{DateDetail, FormatDetail, KindDetail, TagDetail}; +use sea_query::SimpleExpr; + +use super::details::{DateDetail, FormatDetail, KindDetail, OrientationDetail, TagDetail}; #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum DateTimeModifier { @@ -52,18 +54,12 @@ pub enum Expr { Boolean(BooleanModifier), Other(OtherModifier), } - -pub struct PreExecutionQuery { - pub query: String, - // pub parameters: Vec, // FIXME: no clue what i was cookin here. `surrealql::Value`..? +/// A modifier must become a query to be used. +/// +/// All modifiers must implement this trait! +pub trait ToQuery { + /// Converts the modifier into a query for use in querying the database. + /// + /// This assumes that each modifier can become a query clause. + fn to_query(self) -> SimpleExpr; } - -// /// A modifier must become a query to be used. -// /// -// /// All modifiers must implement this trait! 
-// pub trait ToQuery { -// /// Converts the modifier into a query for use in querying the database. -// /// -// /// This assumes that each modifier can become a query string. -// fn to_query(&self) -> Result; -// } From 321ad499e89ca06be2f5841721e99022de3f8d1c Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:27:14 -0600 Subject: [PATCH 14/27] refactor(search/query): rewrite with `sea-query` --- src/search/mod.rs | 2 +- src/search/query.rs | 189 +++++++++++++++++++++++++++++++++++--------- 2 files changed, 154 insertions(+), 37 deletions(-) diff --git a/src/search/mod.rs b/src/search/mod.rs index 8bbe688..4d3d9c8 100644 --- a/src/search/mod.rs +++ b/src/search/mod.rs @@ -2,7 +2,7 @@ pub mod details; pub mod modifiers; -// pub mod query; +pub mod query; pub mod sort; /// `modifier1 AND modifier2` diff --git a/src/search/query.rs b/src/search/query.rs index c810870..6814050 100644 --- a/src/search/query.rs +++ b/src/search/query.rs @@ -1,60 +1,177 @@ -use surrealdb::{engine::local::Db, method::Query, sql::Query as SqlQuery}; +use extension::sqlite::SqliteExpr; +use sea_query::*; + +use crate::{models::media::metadata::MediaKind, search::details::Comparison}; use super::{ - details::TagDetail, - modifiers::{CollectionModifier, ToQuery}, + details::{DateDetail, FormatDetail, OrientationDetail, TagDetail}, + modifiers::{CollectionModifier, DateTimeModifier, ToQuery}, }; -use surrealdb::sql::parse; +/// the media table +#[derive(Iden)] +pub enum Info { + Table, + Id, + Path, + Filesize, + Format, + CreationDate, + ModificationDate, + FirstSeenDate, + WidthPx, + HeightPx, + SpecificMetadata, + OtherMetadata, + Tags, +} -/// FIXME: this is a really basic impl. it's not a security risk or anything, but it's likely -/// bad to use format strings here. that might allow for injection if someone didn't know. +/// The default array length function in SQLite. +struct JsonArrayLenFunction; + +impl Iden for JsonArrayLenFunction { + fn unquoted(&self, s: &mut dyn core::fmt::Write) { + write!(s, "json_array_length").unwrap() + } +} impl ToQuery for CollectionModifier { - fn to_query(&self) -> Result { + #[tracing::instrument] + fn to_query(self) -> SimpleExpr { match self { + // based on the kind, we'll check various attributes of the media's tags. + // + // use `SqliteExpr::col(ColType::Variant).get_json_field("name")` CollectionModifier::Tag(tag_detail) => match tag_detail { - TagDetail::TagName(name) | TagDetail::PersonTagName(name) => { - // NOTE: there are no "person" tags yet, so these are the same - parse(format!("SELECT * FROM info WHERE media.tags CONTAINS '{name}'").as_str()) + TagDetail::TagUuid(uuid) => { + tracing::debug!("Looking for tag with UUID `{uuid}`"); + Expr::col(Info::Id).eq(uuid) + } + TagDetail::PersonTagUuid(uuid) => { + tracing::warn!("LPerson tag with UUID is unimplemented."); // TODO + Expr::col(Info::Id).eq(uuid) + } + TagDetail::PersonTagWithMarker(uuid, _marker_uuid) => { + tracing::warn!("Person tag with marker is unimplemented. 
(uuid: {uuid}, marker uuid: {_marker_uuid})"); // TODO + Expr::col(Info::Id).eq(uuid) } + TagDetail::Count(ct, cmp) => { + tracing::debug!("Looking for media with {cmp} {ct} tags!"); - TagDetail::PersonTagWithMarker(_, _) => unimplemented!(), + let fn_call = SimpleExpr::FunctionCall( + Func::cust(JsonArrayLenFunction).arg(Expr::col(Info::Tags)), + ); - TagDetail::Count(ct, cmp) => { - let cmp = match cmp { - super::details::Comparison::Less => "<", - super::details::Comparison::LessOrEqual => "<=", - super::details::Comparison::Equal => "=", - super::details::Comparison::GreaterOrEqual => ">=", - super::details::Comparison::Greater => ">", - }; - - parse( - format!("SELECT * FROM info WHERE array::len(media.tags) {cmp} {ct}") - .as_str(), - ) + match cmp { + Comparison::Less => fn_call.lt(ct), + Comparison::LessOrEqual => fn_call.lte(ct), + Comparison::Equal => fn_call.eq(ct), + Comparison::GreaterOrEqual => fn_call.gte(ct), + Comparison::Greater => fn_call.gt(ct), + } } }, - // we'll sort by the folder it's contained in - CollectionModifier::Album(name) => { - // FIXME: this isn't correct. placeholder until we parse it manually - // note that fixing it might require us to directly query here..! - parse(format!("SELECT * FROM info WHERE path CONTAINS '{name}'").as_str()) + // FIXME: based on album uuid. that part's easy. + // but how do we choose the table? + CollectionModifier::Album(_album_uuid) => todo!(), + + // ez pz, just add a 'LIKE' clause with `.like()` + CollectionModifier::Literal(lit) => Expr::col(Info::Path).like(lit), + + // yeah that's not bad. might be difficult to express dates in the + // orm-ish syntax, though? + CollectionModifier::DateTime(dt_modifier) => { + let get_col_from_detail = |dd: DateDetail| { + tracing::debug!("Given date detail: {dd:?}"); + match dd { + DateDetail::Created(date_time) => { + (Expr::col(Info::CreationDate), date_time) + } + DateDetail::Modified(date_time) => { + (Expr::col(Info::ModificationDate), date_time) + } + DateDetail::FirstSeen(date_time) => { + (Expr::col(Info::FirstSeenDate), date_time) + } + } + }; + + match dt_modifier { + DateTimeModifier::Before(dd) => { + let (col, time) = get_col_from_detail(dd); + col.lt(Value::ChronoDateTimeUtc(Some(Box::new(time)))) + } + + // TODO: DateTimeModifier::Between ... + // + DateTimeModifier::After(dd) => { + let (col, time) = get_col_from_detail(dd); + col.gt(Value::ChronoDateTimeUtc(Some(Box::new(time)))) + } + } } - CollectionModifier::Literal(s) => { - parse(format!("SELECT * FROM info WHERE media.name = '{s}'").as_str()) - } // FIXME: this should only search the filename + CollectionModifier::Format(format_detail) => { + tracing::debug!("Asked to check for Format."); - CollectionModifier::DateTime(_) => todo!(), + match format_detail { + FormatDetail::MimeType(mime_type) => { + tracing::debug!("Looking at format's MIME type. given: `{mime_type}`"); - CollectionModifier::Format(_) => todo!(), + Expr::col(Info::Format) + .get_json_field("mime_type") + .like(mime_type) + } - CollectionModifier::Kind(_) => todo!(), + FormatDetail::Extension(file_ext) => { + tracing::debug!("Checking format's extension. given: `{file_ext}`"); - CollectionModifier::Orientation(_) => todo!(), + // ensure correct formatting of extension. note that `LIKE` is + // case-insensitive :) + let file_ext = { + let mut s = String::with_capacity(file_ext.len() + 1); + + // IMPORTANT! 
this does the 'end of string' checking in SQLite + s.push('%'); + + // add the other part to the end, in lowercase + no whitespace + s.push_str(file_ext.trim()); + s + }; + + tracing::debug!("Made formatted extension: `{file_ext}`"); + Expr::col(Info::Path).like(file_ext) + } + } + } + + CollectionModifier::Kind(kind_detail) => { + tracing::debug!("Asked to check by kind: `{kind_detail:?}`"); + + // we'll use json for this + Expr::col(Info::Format) + .get_json_field("media_kind") + .eq(MediaKind::from(kind_detail.clone()).to_string()) + } + + // hoz: width_px > height_px + // vert: height_px > width_px + // square: width_px = height_px + CollectionModifier::Orientation(orientation_detail) => match orientation_detail { + OrientationDetail::Landscape => { + tracing::debug!("Orientation detail (landscape)..."); + Expr::col(Info::WidthPx).gt(Expr::col(Info::HeightPx)) + } + OrientationDetail::Portrait => { + tracing::debug!("Orientation detail (portrait)..."); + Expr::col(Info::HeightPx).gt(Expr::col(Info::WidthPx)) + } + OrientationDetail::Square => { + tracing::debug!("Orientation detail (square)..."); + Expr::col(Info::WidthPx).eq(Expr::col(Info::HeightPx)) + } + }, } } } From 0daaa73ef7d76084bca1d3b25e16700d1bc706e7 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 15:44:08 -0600 Subject: [PATCH 15/27] fix(search/query/Literal): add `%` for contains --- src/search/query.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/search/query.rs b/src/search/query.rs index 6814050..bbb30fb 100644 --- a/src/search/query.rs +++ b/src/search/query.rs @@ -77,7 +77,10 @@ impl ToQuery for CollectionModifier { CollectionModifier::Album(_album_uuid) => todo!(), // ez pz, just add a 'LIKE' clause with `.like()` - CollectionModifier::Literal(lit) => Expr::col(Info::Path).like(lit), + CollectionModifier::Literal(lit) => { + tracing::debug!("Checking for literal: `{lit}`"); + Expr::col(Info::Path).like(format!("%{lit}%")) + } // yeah that's not bad. might be difficult to express dates in the // orm-ish syntax, though? From 4c0d961ad0fd197aa96ef29ae6abd6923ab66375 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 16:44:28 -0600 Subject: [PATCH 16/27] test(fix): use `temp_dir` crate for dir creation i forgot to make them a lot lol --- Cargo.toml | 1 + src/models/media/builder/mod.rs | 5 +++-- tests/db.rs | 12 +++++++----- tests/file_watcher.rs | 14 +++++++------- 4 files changed, 18 insertions(+), 14 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 2d7e955..ee7cf97 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -69,3 +69,4 @@ tracing-subscriber = "0.3.18" dirs = "6.0.0" anyhow = "1.0.95" console-subscriber = "0.4.1" +temp-dir = "0.1.14" diff --git a/src/models/media/builder/mod.rs b/src/models/media/builder/mod.rs index 2957217..f8d9dd6 100644 --- a/src/models/media/builder/mod.rs +++ b/src/models/media/builder/mod.rs @@ -342,7 +342,7 @@ pub fn get_video_len(path: &Utf8Path) -> Result { #[cfg(test)] mod tests { - use std::env::temp_dir; + use temp_dir::TempDir; use camino::Utf8PathBuf; use chrono::{DateTime, Utc}; @@ -362,9 +362,10 @@ mod tests { /// The `MediaBuilder` should keep the `id` and `first_seen_date` fields as-is. 
#[tokio::test] async fn media_builder_keeps_static_fields() { + let temp_dir = TempDir::new().unwrap(); // set up the db database::DB_FOLDER_PATH - .set(Utf8PathBuf::try_from(temp_dir()).unwrap()) + .set(Utf8PathBuf::try_from(temp_dir.path().to_path_buf()).unwrap()) .unwrap(); let path = Utf8PathBuf::from("tests/assets/fear.avif") diff --git a/tests/db.rs b/tests/db.rs index c99708b..c69deb3 100644 --- a/tests/db.rs +++ b/tests/db.rs @@ -10,7 +10,7 @@ mod common; #[cfg(test)] mod tests { - use std::{env::temp_dir, str::FromStr as _}; + use std::str::FromStr as _; use backdrop::{ database::{self, DATABASE, RAVES_DB_FILE}, @@ -18,14 +18,16 @@ mod tests { }; use camino::{Utf8Path, Utf8PathBuf}; use sqlx::{sqlite::SqliteConnectOptions, Sqlite}; + use temp_dir::TempDir; use uuid::Uuid; /// The database can cache metadata for the beach photo. #[tokio::test] async fn beach() { // set up the database + let temp_dir = temp_dir::TempDir::new().unwrap(); { - let db_temp_dir = Utf8PathBuf::try_from(temp_dir()) + let db_temp_dir = Utf8PathBuf::try_from(temp_dir.path().to_path_buf()) .unwrap() .join(Uuid::new_v4().to_string()) .join("_raves_db"); @@ -117,11 +119,11 @@ mod tests { for _ in 0..3 { let mut set = tokio::task::JoinSet::new(); - let tempdir = temp_dir(); - let p = Utf8PathBuf::try_from(tempdir).unwrap(); + let temp_dir = TempDir::new().unwrap(); + let p = Utf8PathBuf::try_from(temp_dir.path().to_path_buf()).unwrap(); set.spawn(make_pool(p)); - set.join_all().await; + set.join_all().await; // FIXME } } } diff --git a/tests/file_watcher.rs b/tests/file_watcher.rs index a27e6f7..41435a1 100644 --- a/tests/file_watcher.rs +++ b/tests/file_watcher.rs @@ -4,7 +4,7 @@ mod common; #[cfg(test)] mod tests { - use std::{env::temp_dir, time::Duration}; + use std::time::Duration; use backdrop::{ database::{DATABASE, INFO_TABLE}, @@ -13,6 +13,7 @@ mod tests { }; use camino::Utf8PathBuf; + use temp_dir::TempDir; use uuid::Uuid; use crate::common::{self, Setup}; @@ -41,16 +42,15 @@ mod tests { #[tokio::test] async fn find_file_in_temp_dir() { // generate a temp dir - let temp_dir = Utf8PathBuf::try_from(temp_dir()) - .unwrap() - .join(Uuid::new_v4().to_string()); - println!("temp dir located at: `{temp_dir}`"); + let temp_dir = TempDir::new().unwrap(); + let temp_dir_path = Utf8PathBuf::try_from(temp_dir.path().to_path_buf()).unwrap(); + println!("temp dir located at: `{temp_dir_path}`"); tokio::fs::create_dir_all(&temp_dir).await.unwrap(); // set up the app common::setup(Setup { port: 6670, - watched_folders: [temp_dir.clone()].into(), + watched_folders: [temp_dir_path.clone()].into(), }) .await; let mut conn = DATABASE.acquire().await.unwrap(); @@ -66,7 +66,7 @@ mod tests { // copy a photo to the temp dir tokio::time::sleep(Duration::from_secs(3)).await; - tokio::fs::copy("tests/assets/fear.avif", temp_dir.join("fear.avif")) + tokio::fs::copy("tests/assets/fear.avif", temp_dir_path.join("fear.avif")) .await .expect("copy to temp dir should work"); From e1504b26998c2d1283a1d2cd7c2d8508facbc015 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 16:51:12 -0600 Subject: [PATCH 17/27] fix(meta): use the actual MIME type in its field why was i splitting that loool --- src/models/media/metadata.rs | 7 +++++-- tests/file_watcher.rs | 1 - 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/models/media/metadata.rs b/src/models/media/metadata.rs index 75e16c4..9c80240 100644 --- a/src/models/media/metadata.rs +++ b/src/models/media/metadata.rs @@ -103,6 +103,9 @@ impl std::fmt::Display for 
MediaKind { } /// A representation of a media file's MIME format. +// +// MAINTAINER NOTE: if you change the names of these fields, you also need to +// change the filter/searching modifiers for `Format`! #[derive(Clone, Debug, PartialEq, PartialOrd, Eq, Ord, serde::Serialize, serde::Deserialize)] pub struct Format { /// The "kind" of media used in this format. (image, video, animated image, etc..?) @@ -122,7 +125,7 @@ impl Format { tracing::debug!("creating format from mime type `{mime}`..."); let mut s = mime.split('/'); - let (raw_kind, raw_type) = (s.next()?, s.next()?); + let raw_kind = s.next()?; // TODO: annoying parsing for animated media. // maybe find a library for that... @@ -136,7 +139,7 @@ impl Format { Some(Self { media_kind: kind, - mime_type: raw_type.to_string(), + mime_type: mime.into(), }) } diff --git a/tests/file_watcher.rs b/tests/file_watcher.rs index 41435a1..e2e2c09 100644 --- a/tests/file_watcher.rs +++ b/tests/file_watcher.rs @@ -14,7 +14,6 @@ mod tests { use camino::Utf8PathBuf; use temp_dir::TempDir; - use uuid::Uuid; use crate::common::{self, Setup}; From 5882da5f9608668dfa5a107a63cb41e8e74bfe78 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 16:51:54 -0600 Subject: [PATCH 18/27] fix(search/query/json): use `cast_json_field` that makes it work for some reason should probably read the internals at some point but whatever, this does fine for now..! --- src/search/query.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/search/query.rs b/src/search/query.rs index bbb30fb..edd66bb 100644 --- a/src/search/query.rs +++ b/src/search/query.rs @@ -123,7 +123,7 @@ impl ToQuery for CollectionModifier { tracing::debug!("Looking at format's MIME type. given: `{mime_type}`"); Expr::col(Info::Format) - .get_json_field("mime_type") + .cast_json_field("mime_type") .like(mime_type) } @@ -154,7 +154,7 @@ impl ToQuery for CollectionModifier { // we'll use json for this Expr::col(Info::Format) - .get_json_field("media_kind") + .cast_json_field("media_kind") .eq(MediaKind::from(kind_detail.clone()).to_string()) } From 227c8c13a69fb66c6099e451b8b804c274556278 Mon Sep 17 00:00:00 2001 From: barrett Date: Sun, 19 Jan 2025 16:52:16 -0600 Subject: [PATCH 19/27] test(search/query): add a metric ****ton of tests --- src/search/query.rs | 564 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 561 insertions(+), 3 deletions(-) diff --git a/src/search/query.rs b/src/search/query.rs index edd66bb..48c7afb 100644 --- a/src/search/query.rs +++ b/src/search/query.rs @@ -179,7 +179,565 @@ impl ToQuery for CollectionModifier { } } -pub trait ToQuery2 { - /// Takes in an (unexecuted) query and adds additional clauses on it. 
- fn to_query(query: Query<'_, Db>) -> Query<'_, Db>; +#[cfg(test)] +mod tests { + use std::{collections::HashMap, env::temp_dir}; + + use chrono::DateTime; + use sea_query::{Asterisk, Cond, SqliteQueryBuilder}; + use sea_query_binder::SqlxBinder as _; + use sqlx::{pool::PoolConnection, Sqlite}; + use uuid::Uuid; + + use crate::{ + database::{self, InsertIntoTable, DATABASE}, + models::{ + media::{ + metadata::{Format, OtherMetadataMap, OtherMetadataValue, SpecificMetadata}, + Media, + }, + tags::Tag, + }, + search::{ + details::{ + Comparison, DateDetail, FormatDetail, KindDetail, OrientationDetail, TagDetail, + }, + modifiers::{CollectionModifier, DateTimeModifier, ToQuery as _}, + query::Info, + }, + }; + + #[tokio::test] + async fn collection_mod_orientation() { + let mut conn = setup_db().await; + + // find all the square ones. + { + // there should only be one. + let square_mod = CollectionModifier::Orientation(OrientationDetail::Square); + + // make the actual statement w/ the modifier + let (select, values) = sea_query::Query::select() + .column(Asterisk) // jesus christ + .from(Info::Table) + .cond_where(Cond::all().add(square_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + // check that it's right + assert_eq!( + r#"SELECT * FROM "info" WHERE "width_px" = "height_px""#, select, + "select statements should match" + ); + + // query dat mf + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + // ensure it's just vade lol + assert_eq!(res.len(), 1, "vec length"); + let vade_media = res.first().expect("there is a first option"); + + assert_eq!( + vade_media.id, + Uuid::from_u128(2), + "vade square media uuid match" + ); + assert_eq!( + vade_media.format, + Format::new_from_mime("image/png").unwrap().into(), + "vade square media format match" + ); + } + + // now, query for horizontal orientation. there should be two entries + { + let hoz_mod = CollectionModifier::Orientation(OrientationDetail::Landscape); + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(hoz_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + assert_eq!(res.len(), 3); + } + + // finally, there should be one for vertical + { + let vert_mod = CollectionModifier::Orientation(OrientationDetail::Portrait); + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(vert_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + assert_eq!(res.len(), 1); + assert!(res.first().unwrap().path.contains("a.jpg")) + } + } + + /// Tests the tag collection modifiers. 
+ #[tokio::test] + async fn collection_mod_tags() { + let mut conn: PoolConnection = setup_db().await; + + // tag count + { + let tag_ct_mod = CollectionModifier::Tag(TagDetail::Count(3, Comparison::Equal)); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(tag_ct_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE json_array_length("tags") = ?"#, + select, + ); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::TinyUnsigned(Some(3)) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // there should be exactly one entry + assert_eq!(res.len(), 1); + + assert!(res + .first() + .unwrap() + .clone() + .tags + .0 + .into_iter() + .any(|tag| &tag.name == "dittodill")); + } + + // TODO: test other tag detail queries when tables are implemented! + } + + /// Checks the collection modifier that searches for text. + /// + /// Currently, this just checks the path of the file, but it should also + /// look in comment/description/etc. fields of any attached metadata. + #[tokio::test] + async fn collection_mod_literal() { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::DEBUG) + .init(); + let mut conn: PoolConnection = setup_db().await; + + // try searching for "A.JPG" (in caps) + let literal_search_mod = CollectionModifier::Literal("a.jpg".into()); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(literal_search_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new("%a.jpg%".into()))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // only one entry + assert_eq!(res.len(), 1); + } + + /// Checks the DateTime CollectionModifiers. 
+ #[tokio::test] + #[expect(clippy::inconsistent_digit_grouping, reason = "unix time fmting")] + async fn collection_mod_datetime() { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::DEBUG) + .init(); + let mut conn: PoolConnection = setup_db().await; + + // we'll try before + created here: + { + let before_created_datetime_mod = + CollectionModifier::DateTime(DateTimeModifier::Before(DateDetail::Created( + DateTime::from_timestamp_nanos(1737137976_000_000_000 + 1), + ))); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(before_created_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "creation_date" < ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 3); + assert_eq!(res.first().unwrap().format.0.mime_type(), "image/png"); + } + + // after + modified: + { + let after_modified_datetime_mod = CollectionModifier::DateTime( + DateTimeModifier::After(DateDetail::Modified(DateTime::from_timestamp_nanos(0))), + ); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(after_modified_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE "modification_date" > ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 4); + } + + // after + first_seen: + { + let after_first_seen_datetime_mod = + CollectionModifier::DateTime(DateTimeModifier::After(DateDetail::FirstSeen( + DateTime::from_timestamp_nanos(1551731451_000_000_000), + ))); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(after_first_seen_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE "first_seen_date" > ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 3, "not unix epoch (5) and this exact time (1)"); + } + + // before and after at the same time should reduce the query's domain: + { + let after_seen = CollectionModifier::DateTime(DateTimeModifier::After( + DateDetail::FirstSeen(DateTime::from_timestamp_nanos(1551731451_000_000_000)), + )); + + let before_seen = CollectionModifier::DateTime(DateTimeModifier::Before( + DateDetail::FirstSeen(DateTime::from_timestamp_nanos(1737126002_000_000_000)), + )); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where( + Cond::all() + .add(after_seen.to_query()) + .add(before_seen.to_query()), + ) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE "first_seen_date" > ? AND "first_seen_date" < ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 2, "after 5, before 2, after 1. 
=> [3, 4]"); + + assert!( + res.iter().any(|media| media.id == Uuid::from_u128(3)), + "has 3" + ); + assert!( + res.iter().any(|media| media.id == Uuid::from_u128(4)), + "has 4" + ); + } + } + + #[tokio::test] + async fn collection_mod_format() { + let mut conn: PoolConnection = setup_db().await; + + // there should only be one entry w/ ext "mp4": + { + let mp4_ext_mod = CollectionModifier::Format(FormatDetail::Extension("mp4".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(mp4_ext_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("%mp4")))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "should only be one mp4 ext"); + } + + // again, one with MIME type "video/mp4": + { + let mp4_mime_mod = + CollectionModifier::Format(FormatDetail::MimeType("Video/mp4".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(mp4_mime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE ("format" ->> ?) LIKE ?"#, + select + ); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("mime_type")))) + ); // json_array_length + assert_eq!( + values.0 .0.get(1).unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("Video/mp4")))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "should only be one with mp4 mime type"); + } + + // three with `png` extensions: + { + let png_ext_mod = CollectionModifier::Format(FormatDetail::Extension("PnG".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(png_ext_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("%PnG")))) + ); + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // note: we take it from the file extension, not the MIME here! + assert_eq!(res.len(), 2, "two pngs"); + } + } + + #[tokio::test] + async fn collection_mod_kind() { + let mut conn: PoolConnection = setup_db().await; + + // one video: + { + let video_kind_mod = CollectionModifier::Kind(KindDetail::Video); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(video_kind_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE ("format" ->> ?) 
= ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "only one video"); + } + + // four photos: + { + let photo_kind_mod = CollectionModifier::Kind(KindDetail::Photo); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(photo_kind_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE ("format" ->> ?) = ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 4, "four photos"); + } + } + + /// creates a database with some entries in it... + #[expect(clippy::inconsistent_digit_grouping, reason = "easier to read")] + async fn setup_db() -> PoolConnection { + database::DB_FOLDER_PATH + .set(temp_dir().try_into().unwrap()) + .unwrap(); + + let mut conn = DATABASE.acquire().await.unwrap(); + + let media_1 = Media { + id: Uuid::from_u128(1), + path: "/home/barrett/Videos/eceg_ditto_dill.mp4".into(), + filesize: 1024 * 1024 * 512, // 512 MiB + format: Format::new_from_mime("video/mp4") + .expect("format creation") + .into(), + creation_date: Some(DateTime::from_timestamp_nanos(1737308081_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1737308098_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1551731451_000_000_000), + width_px: 1920, + height_px: 1080, + specific_metadata: SpecificMetadata::Video { length: 147.0 }.into(), + other_metadata: Some( + OtherMetadataMap(HashMap::from([ + ( + "uploader".into(), + OtherMetadataValue::new("Uploader", "DittoDill"), + ), + ( + "category_id".into(), + OtherMetadataValue::new("Video Category ID", "24"), + ), + ])) + .into(), + ), + tags: vec![ + Tag::new_testing("dittodill"), + Tag::new_testing("music"), + Tag::new_testing("legend"), + ] + .into(), + }; + + let media_2 = Media { + id: Uuid::from_u128(2), + path: "/home/barrett/Downloads/vade.png".into(), + filesize: (1024 * 34) + (230), // 34.2 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1737126002_000_000_000), + width_px: 174, + height_px: 174, + specific_metadata: SpecificMetadata::Image {}.into(), + other_metadata: None, + tags: vec![].into(), + }; + + let media_3 = Media { + id: Uuid::from_u128(3), + path: "/home/barrett/Downloads/a.jpg".into(), + filesize: 1024 * 60, // 60 KiB + format: Format::new_from_mime("image/jpeg").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1730329781_000_000_000), + width_px: 1824, + height_px: 1993, + specific_metadata: SpecificMetadata::Image {}.into(), + other_metadata: None, + tags: vec![].into(), + }; + + let media_4 = Media { + id: Uuid::from_u128(4), + path: "/home/barrett/Pictures/2024-02-09 14-53-52.mkv-00:00:08.500.png".into(), + filesize: 1024 * 765, // 765 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), + first_seen_date: 
DateTime::from_timestamp_nanos(1725286951_000_000_000), + width_px: 1454, + height_px: 750, + specific_metadata: SpecificMetadata::Image {}.into(), + other_metadata: None, + tags: vec![].into(), + }; + + // a bunch of null-ish values >:) + let media_5 = Media { + id: Uuid::nil(), + path: "".into(), + filesize: 1024 * 765, // 765 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: None, + modification_date: None, + first_seen_date: DateTime::UNIX_EPOCH, + width_px: 1, + height_px: 0, + specific_metadata: SpecificMetadata::Image {}.into(), + other_metadata: None, + tags: vec![].into(), + }; + + let m = [media_1, media_2, media_3, media_4, media_5]; + + for media in m { + media + .make_insertion_query() + .execute(&mut *conn) + .await + .unwrap(); + } + + conn + } } From 23343f3531aa37989af5acd429fa31577f211df8 Mon Sep 17 00:00:00 2001 From: barrett Date: Mon, 20 Jan 2025 15:09:33 -0600 Subject: [PATCH 20/27] refactor(test, query): use integration tests makes the code easier to edit + simplifies test creation :) --- src/models/media/metadata.rs | 17 ++ src/models/tags.rs | 4 +- src/search/modifiers.rs | 7 +- src/search/query.rs | 563 ---------------------------------- tests/search.rs | 566 +++++++++++++++++++++++++++++++++++ 5 files changed, 591 insertions(+), 566 deletions(-) create mode 100644 tests/search.rs diff --git a/src/models/media/metadata.rs b/src/models/media/metadata.rs index 9c80240..a30850d 100644 --- a/src/models/media/metadata.rs +++ b/src/models/media/metadata.rs @@ -23,6 +23,23 @@ pub enum SpecificMetadata { }, } +impl SpecificMetadata { + pub fn new_image() -> Self { + Self::Image {} + } + + pub fn new_animated_image(frame_count: u32, framerate: Framerate) -> Self { + Self::AnimatedImage { + frame_count, + framerate, + } + } + + pub fn new_video(length: f64) -> Self { + Self::Video { length } + } +} + #[derive(Clone, Debug, PartialEq, PartialOrd, serde::Serialize, serde::Deserialize)] pub struct OtherMetadataValue { // note: this is on the value since putting it on the key makes it difficult diff --git a/src/models/tags.rs b/src/models/tags.rs index f1b6a85..96c1042 100644 --- a/src/models/tags.rs +++ b/src/models/tags.rs @@ -48,8 +48,8 @@ impl Tag { /// Creates a new tag **representation** for testing. /// /// It will not be stored in the database or anything like that. - #[cfg(test)] - pub(crate) fn new_testing(name: impl AsRef) -> Self { + #[doc(hidden)] + pub fn new_testing(name: impl AsRef) -> Self { Self { name: name.as_ref().to_string(), uuid: Uuid::new_v4(), diff --git a/src/search/modifiers.rs b/src/search/modifiers.rs index f8606ba..a810fd4 100644 --- a/src/search/modifiers.rs +++ b/src/search/modifiers.rs @@ -1,3 +1,4 @@ +use camino::Utf8PathBuf; use sea_query::SimpleExpr; use super::details::{DateDetail, FormatDetail, KindDetail, OrientationDetail, TagDetail}; @@ -17,7 +18,11 @@ pub enum DateTimeModifier { #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum CollectionModifier { Tag(TagDetail), - Album(String), + + /// Searches for media within a certain folder. + /// + /// This folder is at the given path and must exist! 
+ Album(Utf8PathBuf), Literal(String), DateTime(DateTimeModifier), Format(FormatDetail), diff --git a/src/search/query.rs b/src/search/query.rs index 48c7afb..c28d23c 100644 --- a/src/search/query.rs +++ b/src/search/query.rs @@ -178,566 +178,3 @@ impl ToQuery for CollectionModifier { } } } - -#[cfg(test)] -mod tests { - use std::{collections::HashMap, env::temp_dir}; - - use chrono::DateTime; - use sea_query::{Asterisk, Cond, SqliteQueryBuilder}; - use sea_query_binder::SqlxBinder as _; - use sqlx::{pool::PoolConnection, Sqlite}; - use uuid::Uuid; - - use crate::{ - database::{self, InsertIntoTable, DATABASE}, - models::{ - media::{ - metadata::{Format, OtherMetadataMap, OtherMetadataValue, SpecificMetadata}, - Media, - }, - tags::Tag, - }, - search::{ - details::{ - Comparison, DateDetail, FormatDetail, KindDetail, OrientationDetail, TagDetail, - }, - modifiers::{CollectionModifier, DateTimeModifier, ToQuery as _}, - query::Info, - }, - }; - - #[tokio::test] - async fn collection_mod_orientation() { - let mut conn = setup_db().await; - - // find all the square ones. - { - // there should only be one. - let square_mod = CollectionModifier::Orientation(OrientationDetail::Square); - - // make the actual statement w/ the modifier - let (select, values) = sea_query::Query::select() - .column(Asterisk) // jesus christ - .from(Info::Table) - .cond_where(Cond::all().add(square_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - // check that it's right - assert_eq!( - r#"SELECT * FROM "info" WHERE "width_px" = "height_px""#, select, - "select statements should match" - ); - - // query dat mf - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .expect("select didnt err"); - - // ensure it's just vade lol - assert_eq!(res.len(), 1, "vec length"); - let vade_media = res.first().expect("there is a first option"); - - assert_eq!( - vade_media.id, - Uuid::from_u128(2), - "vade square media uuid match" - ); - assert_eq!( - vade_media.format, - Format::new_from_mime("image/png").unwrap().into(), - "vade square media format match" - ); - } - - // now, query for horizontal orientation. there should be two entries - { - let hoz_mod = CollectionModifier::Orientation(OrientationDetail::Landscape); - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(hoz_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .expect("select didnt err"); - - assert_eq!(res.len(), 3); - } - - // finally, there should be one for vertical - { - let vert_mod = CollectionModifier::Orientation(OrientationDetail::Portrait); - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(vert_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .expect("select didnt err"); - - assert_eq!(res.len(), 1); - assert!(res.first().unwrap().path.contains("a.jpg")) - } - } - - /// Tests the tag collection modifiers. 
- #[tokio::test] - async fn collection_mod_tags() { - let mut conn: PoolConnection = setup_db().await; - - // tag count - { - let tag_ct_mod = CollectionModifier::Tag(TagDetail::Count(3, Comparison::Equal)); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(tag_ct_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!( - r#"SELECT * FROM "info" WHERE json_array_length("tags") = ?"#, - select, - ); - assert_eq!( - values.0 .0.first().unwrap(), - &sea_query::Value::TinyUnsigned(Some(3)) - ); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - // there should be exactly one entry - assert_eq!(res.len(), 1); - - assert!(res - .first() - .unwrap() - .clone() - .tags - .0 - .into_iter() - .any(|tag| &tag.name == "dittodill")); - } - - // TODO: test other tag detail queries when tables are implemented! - } - - /// Checks the collection modifier that searches for text. - /// - /// Currently, this just checks the path of the file, but it should also - /// look in comment/description/etc. fields of any attached metadata. - #[tokio::test] - async fn collection_mod_literal() { - tracing_subscriber::fmt() - .with_max_level(tracing::Level::DEBUG) - .init(); - let mut conn: PoolConnection = setup_db().await; - - // try searching for "A.JPG" (in caps) - let literal_search_mod = CollectionModifier::Literal("a.jpg".into()); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(literal_search_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); - assert_eq!( - values.0 .0.first().unwrap(), - &sea_query::Value::String(Some(Box::new("%a.jpg%".into()))) - ); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - // only one entry - assert_eq!(res.len(), 1); - } - - /// Checks the DateTime CollectionModifiers. 
- #[tokio::test] - #[expect(clippy::inconsistent_digit_grouping, reason = "unix time fmting")] - async fn collection_mod_datetime() { - tracing_subscriber::fmt() - .with_max_level(tracing::Level::DEBUG) - .init(); - let mut conn: PoolConnection = setup_db().await; - - // we'll try before + created here: - { - let before_created_datetime_mod = - CollectionModifier::DateTime(DateTimeModifier::Before(DateDetail::Created( - DateTime::from_timestamp_nanos(1737137976_000_000_000 + 1), - ))); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(before_created_datetime_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!(r#"SELECT * FROM "info" WHERE "creation_date" < ?"#, select); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - assert_eq!(res.len(), 3); - assert_eq!(res.first().unwrap().format.0.mime_type(), "image/png"); - } - - // after + modified: - { - let after_modified_datetime_mod = CollectionModifier::DateTime( - DateTimeModifier::After(DateDetail::Modified(DateTime::from_timestamp_nanos(0))), - ); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(after_modified_datetime_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!( - r#"SELECT * FROM "info" WHERE "modification_date" > ?"#, - select - ); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - assert_eq!(res.len(), 4); - } - - // after + first_seen: - { - let after_first_seen_datetime_mod = - CollectionModifier::DateTime(DateTimeModifier::After(DateDetail::FirstSeen( - DateTime::from_timestamp_nanos(1551731451_000_000_000), - ))); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(after_first_seen_datetime_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!( - r#"SELECT * FROM "info" WHERE "first_seen_date" > ?"#, - select - ); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - assert_eq!(res.len(), 3, "not unix epoch (5) and this exact time (1)"); - } - - // before and after at the same time should reduce the query's domain: - { - let after_seen = CollectionModifier::DateTime(DateTimeModifier::After( - DateDetail::FirstSeen(DateTime::from_timestamp_nanos(1551731451_000_000_000)), - )); - - let before_seen = CollectionModifier::DateTime(DateTimeModifier::Before( - DateDetail::FirstSeen(DateTime::from_timestamp_nanos(1737126002_000_000_000)), - )); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where( - Cond::all() - .add(after_seen.to_query()) - .add(before_seen.to_query()), - ) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!( - r#"SELECT * FROM "info" WHERE "first_seen_date" > ? AND "first_seen_date" < ?"#, - select - ); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - assert_eq!(res.len(), 2, "after 5, before 2, after 1. 
=> [3, 4]"); - - assert!( - res.iter().any(|media| media.id == Uuid::from_u128(3)), - "has 3" - ); - assert!( - res.iter().any(|media| media.id == Uuid::from_u128(4)), - "has 4" - ); - } - } - - #[tokio::test] - async fn collection_mod_format() { - let mut conn: PoolConnection = setup_db().await; - - // there should only be one entry w/ ext "mp4": - { - let mp4_ext_mod = CollectionModifier::Format(FormatDetail::Extension("mp4".into())); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(mp4_ext_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); - assert_eq!( - values.0 .0.first().unwrap(), - &sea_query::Value::String(Some(Box::new(String::from("%mp4")))) - ); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - assert_eq!(res.len(), 1, "should only be one mp4 ext"); - } - - // again, one with MIME type "video/mp4": - { - let mp4_mime_mod = - CollectionModifier::Format(FormatDetail::MimeType("Video/mp4".into())); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(mp4_mime_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!( - r#"SELECT * FROM "info" WHERE ("format" ->> ?) LIKE ?"#, - select - ); - assert_eq!( - values.0 .0.first().unwrap(), - &sea_query::Value::String(Some(Box::new(String::from("mime_type")))) - ); // json_array_length - assert_eq!( - values.0 .0.get(1).unwrap(), - &sea_query::Value::String(Some(Box::new(String::from("Video/mp4")))) - ); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - assert_eq!(res.len(), 1, "should only be one with mp4 mime type"); - } - - // three with `png` extensions: - { - let png_ext_mod = CollectionModifier::Format(FormatDetail::Extension("PnG".into())); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(png_ext_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); - assert_eq!( - values.0 .0.first().unwrap(), - &sea_query::Value::String(Some(Box::new(String::from("%PnG")))) - ); - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - // note: we take it from the file extension, not the MIME here! - assert_eq!(res.len(), 2, "two pngs"); - } - } - - #[tokio::test] - async fn collection_mod_kind() { - let mut conn: PoolConnection = setup_db().await; - - // one video: - { - let video_kind_mod = CollectionModifier::Kind(KindDetail::Video); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(video_kind_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!(r#"SELECT * FROM "info" WHERE ("format" ->> ?) 
= ?"#, select); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - assert_eq!(res.len(), 1, "only one video"); - } - - // four photos: - { - let photo_kind_mod = CollectionModifier::Kind(KindDetail::Photo); - - let (select, values) = sea_query::Query::select() - .column(Asterisk) - .from(Info::Table) - .cond_where(Cond::all().add(photo_kind_mod.to_query())) - .build_sqlx(SqliteQueryBuilder); - - assert_eq!(r#"SELECT * FROM "info" WHERE ("format" ->> ?) = ?"#, select); - - let res = sqlx::query_as_with::<_, Media, _>(&select, values) - .fetch_all(&mut *conn) - .await - .unwrap(); - - assert_eq!(res.len(), 4, "four photos"); - } - } - - /// creates a database with some entries in it... - #[expect(clippy::inconsistent_digit_grouping, reason = "easier to read")] - async fn setup_db() -> PoolConnection { - database::DB_FOLDER_PATH - .set(temp_dir().try_into().unwrap()) - .unwrap(); - - let mut conn = DATABASE.acquire().await.unwrap(); - - let media_1 = Media { - id: Uuid::from_u128(1), - path: "/home/barrett/Videos/eceg_ditto_dill.mp4".into(), - filesize: 1024 * 1024 * 512, // 512 MiB - format: Format::new_from_mime("video/mp4") - .expect("format creation") - .into(), - creation_date: Some(DateTime::from_timestamp_nanos(1737308081_000_000_000)), - modification_date: Some(DateTime::from_timestamp_nanos(1737308098_000_000_000)), - first_seen_date: DateTime::from_timestamp_nanos(1551731451_000_000_000), - width_px: 1920, - height_px: 1080, - specific_metadata: SpecificMetadata::Video { length: 147.0 }.into(), - other_metadata: Some( - OtherMetadataMap(HashMap::from([ - ( - "uploader".into(), - OtherMetadataValue::new("Uploader", "DittoDill"), - ), - ( - "category_id".into(), - OtherMetadataValue::new("Video Category ID", "24"), - ), - ])) - .into(), - ), - tags: vec![ - Tag::new_testing("dittodill"), - Tag::new_testing("music"), - Tag::new_testing("legend"), - ] - .into(), - }; - - let media_2 = Media { - id: Uuid::from_u128(2), - path: "/home/barrett/Downloads/vade.png".into(), - filesize: (1024 * 34) + (230), // 34.2 KiB - format: Format::new_from_mime("image/png").unwrap().into(), - creation_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), - modification_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), - first_seen_date: DateTime::from_timestamp_nanos(1737126002_000_000_000), - width_px: 174, - height_px: 174, - specific_metadata: SpecificMetadata::Image {}.into(), - other_metadata: None, - tags: vec![].into(), - }; - - let media_3 = Media { - id: Uuid::from_u128(3), - path: "/home/barrett/Downloads/a.jpg".into(), - filesize: 1024 * 60, // 60 KiB - format: Format::new_from_mime("image/jpeg").unwrap().into(), - creation_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), - modification_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), - first_seen_date: DateTime::from_timestamp_nanos(1730329781_000_000_000), - width_px: 1824, - height_px: 1993, - specific_metadata: SpecificMetadata::Image {}.into(), - other_metadata: None, - tags: vec![].into(), - }; - - let media_4 = Media { - id: Uuid::from_u128(4), - path: "/home/barrett/Pictures/2024-02-09 14-53-52.mkv-00:00:08.500.png".into(), - filesize: 1024 * 765, // 765 KiB - format: Format::new_from_mime("image/png").unwrap().into(), - creation_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), - modification_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), - first_seen_date: 
DateTime::from_timestamp_nanos(1725286951_000_000_000), - width_px: 1454, - height_px: 750, - specific_metadata: SpecificMetadata::Image {}.into(), - other_metadata: None, - tags: vec![].into(), - }; - - // a bunch of null-ish values >:) - let media_5 = Media { - id: Uuid::nil(), - path: "".into(), - filesize: 1024 * 765, // 765 KiB - format: Format::new_from_mime("image/png").unwrap().into(), - creation_date: None, - modification_date: None, - first_seen_date: DateTime::UNIX_EPOCH, - width_px: 1, - height_px: 0, - specific_metadata: SpecificMetadata::Image {}.into(), - other_metadata: None, - tags: vec![].into(), - }; - - let m = [media_1, media_2, media_3, media_4, media_5]; - - for media in m { - media - .make_insertion_query() - .execute(&mut *conn) - .await - .unwrap(); - } - - conn - } -} diff --git a/tests/search.rs b/tests/search.rs new file mode 100644 index 0000000..a2746e1 --- /dev/null +++ b/tests/search.rs @@ -0,0 +1,566 @@ +#[cfg(test)] +mod tests { + + mod query { + use std::{collections::HashMap, env::temp_dir}; + + use chrono::DateTime; + use sea_query::{Asterisk, Cond, SqliteQueryBuilder}; + use sea_query_binder::SqlxBinder as _; + use sqlx::{pool::PoolConnection, Sqlite}; + use uuid::Uuid; + + use backdrop::{ + database::{self, InsertIntoTable, DATABASE}, + models::{ + media::{ + metadata::{Format, OtherMetadataMap, OtherMetadataValue, SpecificMetadata}, + Media, + }, + tags::Tag, + }, + search::{ + details::{ + Comparison, DateDetail, FormatDetail, KindDetail, OrientationDetail, TagDetail, + }, + modifiers::{CollectionModifier, DateTimeModifier, ToQuery as _}, + query::Info, + }, + }; + + #[tokio::test] + async fn collection_mod_orientation() { + let mut conn = setup_db().await; + + // find all the square ones. + { + // there should only be one. + let square_mod = CollectionModifier::Orientation(OrientationDetail::Square); + + // make the actual statement w/ the modifier + let (select, values) = sea_query::Query::select() + .column(Asterisk) // jesus christ + .from(Info::Table) + .cond_where(Cond::all().add(square_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + // check that it's right + assert_eq!( + r#"SELECT * FROM "info" WHERE "width_px" = "height_px""#, select, + "select statements should match" + ); + + // query dat mf + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + // ensure it's just vade lol + assert_eq!(res.len(), 1, "vec length"); + let vade_media = res.first().expect("there is a first option"); + + assert_eq!( + vade_media.id, + Uuid::from_u128(2), + "vade square media uuid match" + ); + assert_eq!( + vade_media.format, + Format::new_from_mime("image/png").unwrap().into(), + "vade square media format match" + ); + } + + // now, query for horizontal orientation. 
there should be two entries + { + let hoz_mod = CollectionModifier::Orientation(OrientationDetail::Landscape); + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(hoz_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + assert_eq!(res.len(), 3); + } + + // finally, there should be one for vertical + { + let vert_mod = CollectionModifier::Orientation(OrientationDetail::Portrait); + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(vert_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .expect("select didnt err"); + + assert_eq!(res.len(), 1); + assert!(res.first().unwrap().path.contains("a.jpg")) + } + } + + /// Tests the tag collection modifiers. + #[tokio::test] + async fn collection_mod_tags() { + let mut conn: PoolConnection = setup_db().await; + + // tag count + { + let tag_ct_mod = CollectionModifier::Tag(TagDetail::Count(3, Comparison::Equal)); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(tag_ct_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE json_array_length("tags") = ?"#, + select, + ); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::TinyUnsigned(Some(3)) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // there should be exactly one entry + assert_eq!(res.len(), 1); + + assert!(res + .first() + .unwrap() + .clone() + .tags + .0 + .into_iter() + .any(|tag| &tag.name == "dittodill")); + } + + // TODO: test other tag detail queries when tables are implemented! + } + + /// Checks the collection modifier that searches for text. + /// + /// Currently, this just checks the path of the file, but it should also + /// look in comment/description/etc. fields of any attached metadata. + #[tokio::test] + async fn collection_mod_literal() { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::DEBUG) + .init(); + let mut conn: PoolConnection = setup_db().await; + + // try searching for "A.JPG" (in caps) + let literal_search_mod = CollectionModifier::Literal("a.jpg".into()); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(literal_search_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new("%a.jpg%".into()))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // only one entry + assert_eq!(res.len(), 1); + } + + /// Checks the DateTime CollectionModifiers. 
+ #[tokio::test] + #[expect(clippy::inconsistent_digit_grouping, reason = "unix time fmting")] + async fn collection_mod_datetime() { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::DEBUG) + .init(); + let mut conn: PoolConnection = setup_db().await; + + // we'll try before + created here: + { + let before_created_datetime_mod = + CollectionModifier::DateTime(DateTimeModifier::Before(DateDetail::Created( + DateTime::from_timestamp_nanos(1737137976_000_000_000 + 1), + ))); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(before_created_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "creation_date" < ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 3); + assert_eq!(res.first().unwrap().format.0.mime_type(), "image/png"); + } + + // after + modified: + { + let after_modified_datetime_mod = + CollectionModifier::DateTime(DateTimeModifier::After(DateDetail::Modified( + DateTime::from_timestamp_nanos(0), + ))); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(after_modified_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE "modification_date" > ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 4); + } + + // after + first_seen: + { + let after_first_seen_datetime_mod = + CollectionModifier::DateTime(DateTimeModifier::After(DateDetail::FirstSeen( + DateTime::from_timestamp_nanos(1551731451_000_000_000), + ))); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(after_first_seen_datetime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE "first_seen_date" > ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 3, "not unix epoch (5) and this exact time (1)"); + } + + // before and after at the same time should reduce the query's domain: + { + let after_seen = CollectionModifier::DateTime(DateTimeModifier::After( + DateDetail::FirstSeen(DateTime::from_timestamp_nanos(1551731451_000_000_000)), + )); + + let before_seen = CollectionModifier::DateTime(DateTimeModifier::Before( + DateDetail::FirstSeen(DateTime::from_timestamp_nanos(1737126002_000_000_000)), + )); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where( + Cond::all() + .add(after_seen.to_query()) + .add(before_seen.to_query()), + ) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE "first_seen_date" > ? AND "first_seen_date" < ?"#, + select + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 2, "after 5, before 2, after 1. 
=> [3, 4]"); + + assert!( + res.iter().any(|media| media.id == Uuid::from_u128(3)), + "has 3" + ); + assert!( + res.iter().any(|media| media.id == Uuid::from_u128(4)), + "has 4" + ); + } + } + + #[tokio::test] + async fn collection_mod_format() { + let mut conn: PoolConnection = setup_db().await; + + // there should only be one entry w/ ext "mp4": + { + let mp4_ext_mod = CollectionModifier::Format(FormatDetail::Extension("mp4".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(mp4_ext_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("%mp4")))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "should only be one mp4 ext"); + } + + // again, one with MIME type "video/mp4": + { + let mp4_mime_mod = + CollectionModifier::Format(FormatDetail::MimeType("Video/mp4".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(mp4_mime_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!( + r#"SELECT * FROM "info" WHERE ("format" ->> ?) LIKE ?"#, + select + ); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("mime_type")))) + ); // json_array_length + assert_eq!( + values.0 .0.get(1).unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("Video/mp4")))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "should only be one with mp4 mime type"); + } + + // three with `png` extensions: + { + let png_ext_mod = CollectionModifier::Format(FormatDetail::Extension("PnG".into())); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(png_ext_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "path" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from("%PnG")))) + ); + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + // note: we take it from the file extension, not the MIME here! + assert_eq!(res.len(), 2, "two pngs"); + } + } + + #[tokio::test] + async fn collection_mod_kind() { + let mut conn: PoolConnection = setup_db().await; + + // one video: + { + let video_kind_mod = CollectionModifier::Kind(KindDetail::Video); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(video_kind_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE ("format" ->> ?) 
= ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 1, "only one video"); + } + + // four photos: + { + let photo_kind_mod = CollectionModifier::Kind(KindDetail::Photo); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(photo_kind_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE ("format" ->> ?) = ?"#, select); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 4, "four photos"); + } + } + + /// creates a database with some entries in it... + #[expect(clippy::inconsistent_digit_grouping, reason = "easier to read")] + async fn setup_db() -> PoolConnection { + database::DB_FOLDER_PATH + .set(temp_dir().try_into().unwrap()) + .unwrap(); + + let mut conn = DATABASE.acquire().await.unwrap(); + + let media_1 = Media { + id: Uuid::from_u128(1), + path: "/home/barrett/Videos/eceg_ditto_dill.mp4".into(), + filesize: 1024 * 1024 * 512, // 512 MiB + format: Format::new_from_mime("video/mp4") + .expect("format creation") + .into(), + creation_date: Some(DateTime::from_timestamp_nanos(1737308081_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1737308098_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1551731451_000_000_000), + width_px: 1920, + height_px: 1080, + specific_metadata: SpecificMetadata::new_video(147.0).into(), + other_metadata: Some( + OtherMetadataMap(HashMap::from([ + ( + "uploader".into(), + OtherMetadataValue::new("Uploader", "DittoDill"), + ), + ( + "category_id".into(), + OtherMetadataValue::new("Video Category ID", "24"), + ), + ])) + .into(), + ), + tags: vec![ + Tag::new_testing("dittodill"), + Tag::new_testing("music"), + Tag::new_testing("legend"), + ] + .into(), + }; + + let media_2 = Media { + id: Uuid::from_u128(2), + path: "/home/barrett/Downloads/vade.png".into(), + filesize: (1024 * 34) + (230), // 34.2 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1737126002_000_000_000), + width_px: 174, + height_px: 174, + specific_metadata: SpecificMetadata::new_image().into(), + other_metadata: None, + tags: vec![].into(), + }; + + let media_3 = Media { + id: Uuid::from_u128(3), + path: "/home/barrett/Downloads/a.jpg".into(), + filesize: 1024 * 60, // 60 KiB + format: Format::new_from_mime("image/jpeg").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), + first_seen_date: DateTime::from_timestamp_nanos(1730329781_000_000_000), + width_px: 1824, + height_px: 1993, + specific_metadata: SpecificMetadata::new_image().into(), + other_metadata: None, + tags: vec![].into(), + }; + + let media_4 = Media { + id: Uuid::from_u128(4), + path: "/home/barrett/Pictures/2024-02-09 14-53-52.mkv-00:00:08.500.png".into(), + filesize: 1024 * 765, // 765 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), + modification_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), + first_seen_date: 
DateTime::from_timestamp_nanos(1725286951_000_000_000), + width_px: 1454, + height_px: 750, + specific_metadata: SpecificMetadata::new_image().into(), + other_metadata: None, + tags: vec![].into(), + }; + + // a bunch of null-ish values >:) + let media_5 = Media { + id: Uuid::nil(), + path: "".into(), + filesize: 1024 * 765, // 765 KiB + format: Format::new_from_mime("image/png").unwrap().into(), + creation_date: None, + modification_date: None, + first_seen_date: DateTime::UNIX_EPOCH, + width_px: 1, + height_px: 0, + specific_metadata: SpecificMetadata::new_image().into(), + other_metadata: None, + tags: vec![].into(), + }; + + let m = [media_1, media_2, media_3, media_4, media_5]; + + for media in m { + media + .make_insertion_query() + .execute(&mut *conn) + .await + .unwrap(); + } + + conn + } + } +} From 7ec9070d487a38a311cd51bf3a30ddd2a2f639ac Mon Sep 17 00:00:00 2001 From: barrett Date: Mon, 20 Jan 2025 17:22:08 -0600 Subject: [PATCH 21/27] feat(db, Media): add `album` field making that a field will make searching by album muuuuch easier! --- ...f807ec79b1277abe271c113e429f94b3cdb1b.json | 12 +++++ ...174ebac2e8a7063cff012f36708b418b4061d.json | 12 ----- migrations/0004_tables.sql | 53 +++++++++++++++++++ src/database.rs | 2 + src/error.rs | 3 ++ src/models/media/builder/mod.rs | 16 ++++++ src/models/media/mod.rs | 9 +++- src/search/sort.rs | 3 +- tests/search.rs | 13 +++-- 9 files changed, 101 insertions(+), 22 deletions(-) create mode 100644 .sqlx/query-2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b.json delete mode 100644 .sqlx/query-b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d.json create mode 100644 migrations/0004_tables.sql diff --git a/.sqlx/query-2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b.json b/.sqlx/query-2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b.json new file mode 100644 index 0000000..31a408d --- /dev/null +++ b/.sqlx/query-2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "\n INSERT INTO info \n (id, path, album, filesize, format, creation_date, modification_date, first_seen_date, width_px, height_px, specific_metadata, other_metadata, tags)\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)\n ON CONFLICT(id)\n DO UPDATE SET\n path = excluded.path,\n album = excluded.album,\n filesize = excluded.filesize,\n format = excluded.format,\n creation_date = excluded.creation_date,\n width_px = excluded.width_px,\n height_px = excluded.height_px,\n specific_metadata = excluded.specific_metadata,\n other_metadata = excluded.other_metadata,\n tags = excluded.tags;\n ", + "describe": { + "columns": [], + "parameters": { + "Right": 13 + }, + "nullable": [] + }, + "hash": "2e2da80807e94a55a5b6151735df807ec79b1277abe271c113e429f94b3cdb1b" +} diff --git a/.sqlx/query-b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d.json b/.sqlx/query-b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d.json deleted file mode 100644 index 2c6bb22..0000000 --- a/.sqlx/query-b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "\n INSERT INTO info \n (id, path, filesize, format, creation_date, modification_date, first_seen_date, width_px, height_px, specific_metadata, other_metadata, tags)\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)\n ON CONFLICT(id)\n DO UPDATE SET\n path = excluded.path,\n filesize = 
excluded.filesize,\n format = excluded.format,\n creation_date = excluded.creation_date,\n width_px = excluded.width_px,\n height_px = excluded.height_px,\n specific_metadata = excluded.specific_metadata,\n other_metadata = excluded.other_metadata,\n tags = excluded.tags;\n ", - "describe": { - "columns": [], - "parameters": { - "Right": 12 - }, - "nullable": [] - }, - "hash": "b684f6f8d49f6605881a61d83f0174ebac2e8a7063cff012f36708b418b4061d" -} diff --git a/migrations/0004_tables.sql b/migrations/0004_tables.sql new file mode 100644 index 0000000..1b9cc58 --- /dev/null +++ b/migrations/0004_tables.sql @@ -0,0 +1,53 @@ +-- media info: cached metadata about tracked media files +CREATE TABLE IF NOT EXISTS info( + id TEXT NOT NULL PRIMARY KEY, + -- note: this would preferably be unique, but that messes with modern sqlite + -- update-insert syntax... + path TEXT NOT NULL, + filesize INTEGER NOT NULL, + format TEXT NOT NULL, + creation_date DATETIME, + modification_date DATETIME, + first_seen_date DATETIME NOT NULL, + width_px INTEGER NOT NULL, + height_px INTEGER NOT NULL, + specific_metadata TEXT NOT NULL, + other_metadata TEXT, + tags TEXT NOT NULL +); + +ALTER TABLE + info +ADD + COLUMN album TEXT NOT NULL; + +-- thumbnails: preview media +CREATE TABLE IF NOT EXISTS thumbnail( + -- path to the thumbnail on disk + path TEXT NOT NULL, + -- thumbnail is for the media file with this uuid + -- + -- TODO: migrate to `media_id` + media_id TEXT NOT NULL PRIMARY KEY +); + +-- albums: contain media +CREATE TABLE IF NOT EXISTS albums( + id TEXT NOT NULL PRIMARY KEY, + name TEXT NOT NULL, + path TEXT NOT NULL, + -- + -- uuids (in json) + contained_media TEXT NOT NULL +); + +-- hashes: media file hashes to ensure metadata is up-to-date! +CREATE TABLE IF NOT EXISTS hashes( + media_id TEXT NOT NULL PRIMARY KEY, + hash BLOB NOT NULL +); + +-- hash_blob_index: tell SQLite to make a btree for the hashes, too. +-- +-- (this allows for high-speed lookups, both ways. hash <=> id) +CREATE UNIQUE INDEX IF NOT EXISTS hash_blob_index ON hashes(hash); \ No newline at end of file diff --git a/src/database.rs b/src/database.rs index 8a2d6de..537278c 100644 --- a/src/database.rs +++ b/src/database.rs @@ -40,6 +40,8 @@ pub static DATABASE: LazyLock> = LazyLock::new(|| { panic!("No database folder path given."); }; + tracing::debug!("Loading from database at `{raves_db_folder}`..."); + // ensure the path exists match raves_db_folder.try_exists() { Ok(true) => (), diff --git a/src/error.rs b/src/error.rs index 5faabaf..e38bb01 100644 --- a/src/error.rs +++ b/src/error.rs @@ -41,6 +41,9 @@ pub enum RavesError { error: std::io::Error, }, + #[error("Failed to find media file's parent. path: {_0}")] + MediaFilePathNoParent(Utf8PathBuf), + #[error("The media file at `{path}` does not appear to contain MIME (file type) data.")] NoMimeData { path: String }, diff --git a/src/models/media/builder/mod.rs b/src/models/media/builder/mod.rs index f8d9dd6..9f43996 100644 --- a/src/models/media/builder/mod.rs +++ b/src/models/media/builder/mod.rs @@ -111,6 +111,18 @@ impl MediaBuilder { /// 7. Return it. #[tracing::instrument(skip(self))] async fn build_internal(mut self, path: &Utf8Path) -> Result { + // before anything, let's make sure the media file has an album to use! + let album = path + .parent() + .map(|p| p.to_path_buf().to_string()) + .inspect(|parent| { + tracing::debug!("Found album (parent) for media file! 
path: {parent}") + }) + .ok_or_else(|| { + tracing::warn!("Given a supposed file path, but failed to find its parent!"); + RavesError::MediaFilePathNoParent(path.to_path_buf()) + })?; + // grab format and apply it to self let format = format(path).await?; let mime_type = format.mime_type(); @@ -184,11 +196,14 @@ impl MediaBuilder { Ok(Media { id, + album: album.to_string(), path: path.to_string(), + filesize: self.filesize.ok_or(RavesError::FileMissingMetadata( path.to_string(), "no file size given".into(), ))?, + creation_date: self.creation_date, modification_date: self.modification_date, @@ -376,6 +391,7 @@ mod tests { let old_media = Media { id: Uuid::nil(), path: path.to_string(), + album: "tests/assets".into(), filesize: 0, format: Json(Format::new_from_mime("image/avif").unwrap()), creation_date: None, diff --git a/src/models/media/mod.rs b/src/models/media/mod.rs index d67499e..49b715e 100644 --- a/src/models/media/mod.rs +++ b/src/models/media/mod.rs @@ -37,6 +37,9 @@ pub struct Media { /// The last known file path for this media file. pub path: String, + /// Last known path of its containing folder. + pub album: String, + /// How large the file is, in bytes. pub filesize: i64, @@ -94,12 +97,13 @@ impl InsertIntoTable for Media { sqlx::query!( r#" INSERT INTO info - (id, path, filesize, format, creation_date, modification_date, first_seen_date, width_px, height_px, specific_metadata, other_metadata, tags) + (id, path, album, filesize, format, creation_date, modification_date, first_seen_date, width_px, height_px, specific_metadata, other_metadata, tags) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) ON CONFLICT(id) DO UPDATE SET path = excluded.path, + album = excluded.album, filesize = excluded.filesize, format = excluded.format, creation_date = excluded.creation_date, @@ -111,6 +115,7 @@ impl InsertIntoTable for Media { "#, self.id, self.path, + self.album, self.filesize, self.format, self.creation_date, diff --git a/src/search/sort.rs b/src/search/sort.rs index eadf724..f0d8377 100644 --- a/src/search/sort.rs +++ b/src/search/sort.rs @@ -211,7 +211,8 @@ mod tests { fn create_default_media() -> Media { Media { id: Uuid::nil(), - path: "a".into(), + path: "/a/b.jpg".into(), + album: "a".into(), filesize: 1024, format: Json(Format::new_from_mime("image/jpeg").unwrap()), creation_date: None, diff --git a/tests/search.rs b/tests/search.rs index a2746e1..4d33bab 100644 --- a/tests/search.rs +++ b/tests/search.rs @@ -159,9 +159,6 @@ mod tests { /// look in comment/description/etc. fields of any attached metadata. 
#[tokio::test] async fn collection_mod_literal() { - tracing_subscriber::fmt() - .with_max_level(tracing::Level::DEBUG) - .init(); let mut conn: PoolConnection = setup_db().await; // try searching for "A.JPG" (in caps) @@ -192,9 +189,6 @@ mod tests { #[tokio::test] #[expect(clippy::inconsistent_digit_grouping, reason = "unix time fmting")] async fn collection_mod_datetime() { - tracing_subscriber::fmt() - .with_max_level(tracing::Level::DEBUG) - .init(); let mut conn: PoolConnection = setup_db().await; // we'll try before + created here: @@ -458,6 +452,7 @@ mod tests { let media_1 = Media { id: Uuid::from_u128(1), path: "/home/barrett/Videos/eceg_ditto_dill.mp4".into(), + album: "/home/barrett/Videos".into(), filesize: 1024 * 1024 * 512, // 512 MiB format: Format::new_from_mime("video/mp4") .expect("format creation") @@ -492,6 +487,7 @@ mod tests { let media_2 = Media { id: Uuid::from_u128(2), path: "/home/barrett/Downloads/vade.png".into(), + album: "/home/barrett/Downloads".into(), filesize: (1024 * 34) + (230), // 34.2 KiB format: Format::new_from_mime("image/png").unwrap().into(), creation_date: Some(DateTime::from_timestamp_nanos(1737137976_000_000_000)), @@ -507,6 +503,7 @@ mod tests { let media_3 = Media { id: Uuid::from_u128(3), path: "/home/barrett/Downloads/a.jpg".into(), + album: "/home/barrett/Downloads".into(), filesize: 1024 * 60, // 60 KiB format: Format::new_from_mime("image/jpeg").unwrap().into(), creation_date: Some(DateTime::from_timestamp_nanos(1730329781_000_000_000)), @@ -522,6 +519,7 @@ mod tests { let media_4 = Media { id: Uuid::from_u128(4), path: "/home/barrett/Pictures/2024-02-09 14-53-52.mkv-00:00:08.500.png".into(), + album: "/home/barrett/Pictures".into(), filesize: 1024 * 765, // 765 KiB format: Format::new_from_mime("image/png").unwrap().into(), creation_date: Some(DateTime::from_timestamp_nanos(1725306903_000_000_000)), @@ -537,7 +535,8 @@ mod tests { // a bunch of null-ish values >:) let media_5 = Media { id: Uuid::nil(), - path: "".into(), + album: "/".into(), + path: "/nil.notpng.farts".into(), filesize: 1024 * 765, // 765 KiB format: Format::new_from_mime("image/png").unwrap().into(), creation_date: None, From 6af78ddb5a6c686d379a335a8645077c0616ec42 Mon Sep 17 00:00:00 2001 From: barrett Date: Mon, 20 Jan 2025 17:23:00 -0600 Subject: [PATCH 22/27] fix(test, search): use uuid for each db path otherwise, they're all using the same database and messing with each other lol --- tests/search.rs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/search.rs b/tests/search.rs index 4d33bab..01b9cb3 100644 --- a/tests/search.rs +++ b/tests/search.rs @@ -443,8 +443,15 @@ mod tests { /// creates a database with some entries in it... 
#[expect(clippy::inconsistent_digit_grouping, reason = "easier to read")] async fn setup_db() -> PoolConnection { + tracing_subscriber::fmt() + .with_max_level(tracing::Level::DEBUG) + .init(); + + let db_folder = temp_dir().join(Uuid::new_v4().to_string()); + tokio::fs::create_dir_all(&db_folder).await.unwrap(); + database::DB_FOLDER_PATH - .set(temp_dir().try_into().unwrap()) + .set(db_folder.try_into().unwrap()) .unwrap(); let mut conn = DATABASE.acquire().await.unwrap(); From da23b785add76994f41ed8f63691c309aa92e2ef Mon Sep 17 00:00:00 2001 From: barrett Date: Mon, 20 Jan 2025 17:23:24 -0600 Subject: [PATCH 23/27] test(builder): check that it can find albums right --- src/models/media/builder/mod.rs | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/src/models/media/builder/mod.rs b/src/models/media/builder/mod.rs index 9f43996..f090ffe 100644 --- a/src/models/media/builder/mod.rs +++ b/src/models/media/builder/mod.rs @@ -442,4 +442,32 @@ mod tests { "post-insert same first seen dates" ); } + + /// Checks that the `MediaBuilder` can correctly find albums. + #[tokio::test] + async fn album() { + let temp_dir = TempDir::new().unwrap(); + let album_path = temp_dir.path().join("farts album"); + let file_path = album_path.join("fear.avif"); + + // make a new folder in the temp_dir called "farts album" + tokio::fs::create_dir_all(temp_dir.path().join("farts album")) + .await + .unwrap(); + + database::DB_FOLDER_PATH + .set(Utf8PathBuf::try_from(temp_dir.path().to_path_buf()).unwrap()) + .unwrap(); + + tokio::fs::copy("tests/assets/fear.avif", &file_path) + .await + .unwrap(); + + let media = MediaBuilder::default() + .build(Utf8PathBuf::try_from(file_path).unwrap()) + .await + .unwrap(); + + assert_eq!(media.album, album_path.to_string_lossy().to_string()); + } } From 25f46e45fae21ab134352e1069af66d84c5cd6bd Mon Sep 17 00:00:00 2001 From: barrett Date: Mon, 20 Jan 2025 17:23:47 -0600 Subject: [PATCH 24/27] feat(search/query): CollectionMod::Album impl --- src/search/modifiers.rs | 4 ++-- src/search/query.rs | 9 ++++++--- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/src/search/modifiers.rs b/src/search/modifiers.rs index a810fd4..946d225 100644 --- a/src/search/modifiers.rs +++ b/src/search/modifiers.rs @@ -19,9 +19,9 @@ pub enum DateTimeModifier { pub enum CollectionModifier { Tag(TagDetail), - /// Searches for media within a certain folder. + /// Searches for media within a folder with the given album path. /// - /// This folder is at the given path and must exist! + /// That's just the folder the media is in. Album(Utf8PathBuf), Literal(String), DateTime(DateTimeModifier), diff --git a/src/search/query.rs b/src/search/query.rs index c28d23c..b1afc49 100644 --- a/src/search/query.rs +++ b/src/search/query.rs @@ -14,6 +14,7 @@ pub enum Info { Table, Id, Path, + Album, Filesize, Format, CreationDate, @@ -72,9 +73,11 @@ impl ToQuery for CollectionModifier { } }, - // FIXME: based on album uuid. that part's easy. - // but how do we choose the table? - CollectionModifier::Album(_album_uuid) => todo!(), + // based on containing folder! 
+ CollectionModifier::Album(path) => { + tracing::debug!("Checking for media file with album name: `{path}`..."); + Expr::col(Info::Album).like(path) + } // ez pz, just add a 'LIKE' clause with `.like()` CollectionModifier::Literal(lit) => { From ed5bb68629923c96fa03ef7f4787ee47d95a2c11 Mon Sep 17 00:00:00 2001 From: barrett Date: Mon, 20 Jan 2025 17:23:59 -0600 Subject: [PATCH 25/27] test(search): collection modifier album --- tests/search.rs | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/tests/search.rs b/tests/search.rs index 01b9cb3..689317b 100644 --- a/tests/search.rs +++ b/tests/search.rs @@ -440,6 +440,41 @@ mod tests { } } + #[tokio::test] + async fn collection_mod_album() { + let mut conn = setup_db().await; + + // "Downloads" album + { + let album_mod = CollectionModifier::Album("/home/barrett/Downloads".into()); + + let (select, values) = sea_query::Query::select() + .column(Asterisk) + .from(Info::Table) + .cond_where(Cond::all().add(album_mod.to_query())) + .build_sqlx(SqliteQueryBuilder); + + assert_eq!(r#"SELECT * FROM "info" WHERE "album" LIKE ?"#, select); + assert_eq!( + values.0 .0.first().unwrap(), + &sea_query::Value::String(Some(Box::new(String::from( + "/home/barrett/Downloads" + )))) + ); + + let res = sqlx::query_as_with::<_, Media, _>(&select, values) + .fetch_all(&mut *conn) + .await + .unwrap(); + + assert_eq!(res.len(), 2); + + let mut res_iter = res.into_iter(); + assert!(res_iter.any(|media| media.id == Uuid::from_u128(2))); + assert!(res_iter.any(|media| media.id == Uuid::from_u128(3))); + } + } + /// creates a database with some entries in it... #[expect(clippy::inconsistent_digit_grouping, reason = "easier to read")] async fn setup_db() -> PoolConnection { From d3d1a01fbec7d3478fd392c5bb7c376b201eb730 Mon Sep 17 00:00:00 2001 From: barrett Date: Tue, 21 Jan 2025 21:47:25 -0600 Subject: [PATCH 26/27] chore(test/file_watcher): use lower sleep times the high second cts make tests really annoying lol --- tests/file_watcher.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/file_watcher.rs b/tests/file_watcher.rs index e2e2c09..38abe7a 100644 --- a/tests/file_watcher.rs +++ b/tests/file_watcher.rs @@ -26,7 +26,7 @@ mod tests { let task = tokio::spawn(Watch::watch()); // sleep for a bit - tokio::time::sleep(Duration::from_secs(5)).await; + tokio::time::sleep(Duration::from_millis(100)).await; // ensure the watcher is still running assert!(!task.is_finished(), "watcher should run indefinitely!"); @@ -64,13 +64,13 @@ mod tests { .expect("remove all from info table"); // copy a photo to the temp dir - tokio::time::sleep(Duration::from_secs(3)).await; + tokio::time::sleep(Duration::from_millis(150)).await; tokio::fs::copy("tests/assets/fear.avif", temp_dir_path.join("fear.avif")) .await .expect("copy to temp dir should work"); // wait... then check if we got metadata! - tokio::time::sleep(Duration::from_secs(5)).await; + tokio::time::sleep(Duration::from_millis(150)).await; let media = sqlx::query_as::<_, Media>(&format!("SELECT * FROM {INFO_TABLE}")) .fetch_one(&mut *conn) .await From e88c07394d03cf3f23e806d17d7d6eea28951fdb Mon Sep 17 00:00:00 2001 From: barrett Date: Tue, 21 Jan 2025 21:49:58 -0600 Subject: [PATCH 27/27] refactor(search/details): revert to tags by name uuids likely shouldn't be stored within media metadata. if a user wants to modify it from another device, it won't recognize the tags! 
in addition, i think we'll do something like adding metadata fields for different tag sections, which should be easier to parse. --- migrations/0004_tables.sql | 2 -- src/search/details.rs | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/migrations/0004_tables.sql b/migrations/0004_tables.sql index 1b9cc58..7ca0b34 100644 --- a/migrations/0004_tables.sql +++ b/migrations/0004_tables.sql @@ -26,8 +26,6 @@ CREATE TABLE IF NOT EXISTS thumbnail( -- path to the thumbnail on disk path TEXT NOT NULL, -- thumbnail is for the media file with this uuid - -- - -- TODO: migrate to `media_id` media_id TEXT NOT NULL PRIMARY KEY ); diff --git a/src/search/details.rs b/src/search/details.rs index e1473b8..f35110f 100644 --- a/src/search/details.rs +++ b/src/search/details.rs @@ -57,8 +57,8 @@ pub struct FramerateDetail(pub Framerate); /// - has Person tag with marker tag #[derive(Clone, Debug, PartialEq, PartialOrd)] pub enum TagDetail { - TagUuid(String), - PersonTagUuid(String), + TagName(String), + PersonTagName(String), PersonTagWithMarker(String, String), /// The number of tags on a media file.