Skip to content

Commit

Permalink
feat: htmx-based web ui (#147)
Browse files Browse the repository at this point in the history
This new web ui is written with htmx and maud, with the goal of making the
ui self-contained in the Rust project, which means it requires no
additional build step for the front-end. htmx does most of the heavy lifting,
so the new ui is smaller and hopefully easier to maintain than the old one.

Currently, the new ui has mostly reached feature parity with the old
inspector ui. New features can be expected in the future.

The new ui is enabled by default. You can disable it by setting the
`RSS_FUNNEL_INSPECTOR_UI` environment variable accordingly.

For compatibility reasons, the old ui is still available through the
`/_inspector` route. This route will be removed in a future release. Any
feedback on the new ui is welcome.
  • Loading branch information
shouya authored Sep 18, 2024
1 parent 29141c5 commit d178443
Show file tree
Hide file tree
Showing 37 changed files with 1,760 additions and 116 deletions.
208 changes: 187 additions & 21 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ blake3 = "1.5.1"
clap = { version = "4.5.1", features = ["derive", "env"] }
serde = { version = "1.0.197", features = ["derive", "rc"] }
serde_yaml = "0.9.32"
duration-str = { version = "0.7.1", default-features = false, features = ["serde"] }
duration-str = { version = "0.11.2", default-features = false, features = ["serde"] }

# webserver
axum-macros = "0.4.1"
Expand Down Expand Up @@ -87,6 +87,9 @@ glob-match = "0.2.1"
# Logging
tracing = { version = "0.1.40"}
tracing-subscriber = "0.3.18"
maud = { version = "0.26.0", features = ["axum"] }
ammonia = "4.0.0"
serde_with = "3.9.0"

[patch.crates-io]
ego-tree = { git = "https://github.com/shouya/ego-tree.git" }
9 changes: 9 additions & 0 deletions scripts/watch-dev.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#!/bin/bash

# Development watcher: rebuilds on source changes and restarts the dev
# server when a fresh binary is available.

# On interrupt/termination/exit, kill the entire process group (both
# background cargo-watch jobs) so no watcher is left running.
trap "trap - SIGTERM && kill -- -$$" SIGINT SIGTERM EXIT

# Watcher 1: rebuild on source changes, then touch a trigger file to
# signal that a new binary is ready.
cargo watch -x build -s 'touch /tmp/.trigger' &
# Watcher 2: watch only the trigger file (with zero debounce delay) and
# (re)start the dev server using the freshly built binary.
cargo watch -w /tmp/.trigger -d0 -s 'target/debug/rss-funnel -c ~/.config/rss-funnel-dev/funnel.yaml server' &

# Block until both background watchers exit.
wait


4 changes: 2 additions & 2 deletions src/cli.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ struct TestConfig {
source: Option<Url>,
/// Limit the first N filter steps to run
#[clap(long, short)]
limit_filters: Option<usize>,
filter_skip: Option<usize>,
/// Limit the number of items in the feed
#[clap(long, short('n'))]
limit_posts: Option<usize>,
Expand All @@ -66,7 +66,7 @@ impl TestConfig {
fn to_endpoint_param(&self) -> server::EndpointParam {
server::EndpointParam::new(
self.source.as_ref().cloned(),
self.limit_filters,
self.filter_skip,
self.limit_posts,
self.base.clone(),
)
Expand Down
35 changes: 23 additions & 12 deletions src/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,43 +22,50 @@ struct HttpFixture {
content: String,
}

#[serde_with::skip_serializing_none]
#[derive(
JsonSchema, Serialize, Deserialize, Debug, Clone, Default, PartialEq, Eq, Hash,
)]
pub struct ClientConfig {
/// The "user-agent" header to send with requests
user_agent: Option<String>,
#[serde(default)]
pub user_agent: Option<String>,
/// The "accept" header to send with requests
accept: Option<String>,
#[serde(default)]
pub accept: Option<String>,
/// The "cookie" header to send with requests (Deprecated, specify "cookie" field instead)
set_cookie: Option<String>,
#[serde(default)]
pub set_cookie: Option<String>,
/// The "cookie" header to send with requests
cookie: Option<String>,
#[serde(default)]
pub cookie: Option<String>,
/// The "referer" header to send with requests
referer: Option<String>,
/// The maximum number of cached responses
cache_size: Option<usize>,
#[serde(default)]
pub referer: Option<String>,
/// Ignore tls error
#[serde(default)]
accept_invalid_certs: bool,
pub accept_invalid_certs: bool,
/// The maximum number of cached responses
#[serde(default)]
pub cache_size: Option<usize>,
/// The maximum time a response is kept in the cache (Format: "4s",
/// 10m", "1h", "1d")
#[serde(default)]
#[serde(deserialize_with = "duration_str::deserialize_option_duration")]
#[schemars(with = "String")]
cache_ttl: Option<Duration>,
pub cache_ttl: Option<Duration>,
/// Request timeout (Format: "4s", "10m", "1h", "1d")
#[serde(deserialize_with = "duration_str::deserialize_option_duration")]
#[schemars(with = "String")]
timeout: Option<Duration>,
pub timeout: Option<Duration>,
/// Sometimes the feed doesn't report a correct content type, so we
/// need to override it.
#[serde(default)]
assume_content_type: Option<String>,
pub assume_content_type: Option<String>,
/// The proxy to use for requests
/// (Format: "http://user:pass@host:port", "socks5://user:pass@host:port")
#[serde(default)]
proxy: Option<String>,
pub proxy: Option<String>,
}

impl ClientConfig {
Expand Down Expand Up @@ -124,6 +131,10 @@ impl ClientConfig {
);
Ok(client)
}

/// Serialize this client configuration to a YAML string.
///
/// Fields set to `None` are omitted from the output (see the
/// `#[serde_with::skip_serializing_none]` attribute on the struct).
pub fn to_yaml(&self) -> Result<String, ConfigError> {
  Ok(serde_yaml::to_string(self)?)
}
}

pub struct Client {
Expand Down
9 changes: 9 additions & 0 deletions src/feed.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,15 @@ pub enum FeedFormat {
Atom,
}

impl FeedFormat {
pub fn as_str(&self) -> &'static str {
match self {
FeedFormat::Rss => "rss",
FeedFormat::Atom => "atom",
}
}
}

impl Feed {
pub fn format(&self) -> FeedFormat {
match self {
Expand Down
78 changes: 53 additions & 25 deletions src/filter.rs
Original file line number Diff line number Diff line change
@@ -1,37 +1,57 @@
mod convert;
mod full_text;
mod highlight;
mod html;
mod image_proxy;
mod js;
mod limit;
mod magnet;
mod merge;
mod note;
mod sanitize;
mod select;
mod simplify_html;

use std::collections::HashMap;
pub(crate) mod convert;
pub(crate) mod full_text;
pub(crate) mod highlight;
pub(crate) mod html;
pub(crate) mod image_proxy;
pub(crate) mod js;
pub(crate) mod limit;
pub(crate) mod magnet;
pub(crate) mod merge;
pub(crate) mod note;
pub(crate) mod sanitize;
pub(crate) mod select;
pub(crate) mod simplify_html;

use std::collections::{HashMap, HashSet};
use std::sync::Arc;

use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use serde_with::{formats::CommaSeparator, serde_as, StringWithSeparator};
use url::Url;

use crate::{
feed::Feed,
util::{ConfigError, Error, Result},
};

/// A set of filter indices to be skipped when running a pipeline.
///
/// Deserialized transparently from a comma-separated list of indices
/// (e.g. "0,2,5") via serde_with's `StringWithSeparator`.
#[serde_as]
#[derive(Clone, Debug, Deserialize)]
#[serde(transparent)]
pub struct FilterSkip {
  // Zero-based indices of the filters to skip.
  #[serde_as(as = "StringWithSeparator::<CommaSeparator, usize>")]
  indices: HashSet<usize>,
}

impl FilterSkip {
  /// Build a skip-set covering indices `0..n`, i.e. the first `n`
  /// filters in the pipeline.
  pub(crate) fn upto(n: usize) -> Self {
    let indices = (0..n).collect::<HashSet<usize>>();
    Self { indices }
  }

  /// Whether the filter at `index` is allowed to run (i.e. not in the
  /// skip-set).
  pub fn allows_filter(&self, index: usize) -> bool {
    !self.indices.contains(&index)
  }
}

#[derive(Clone)]
pub struct FilterContext {
/// The base URL of the application. Used to construct absolute URLs
/// from a relative path.
base: Option<Url>,

/// The maximum number of filters to run on this pipeline
limit_filters: Option<usize>,
filter_skip: Option<FilterSkip>,

/// The extra query parameters passed to the endpoint
extra_queries: HashMap<String, String>,
Expand All @@ -42,15 +62,11 @@ impl FilterContext {
pub fn new() -> Self {
Self {
base: None,
limit_filters: None,
filter_skip: None,
extra_queries: HashMap::new(),
}
}

pub fn limit_filters(&self) -> Option<usize> {
self.limit_filters
}

pub fn base(&self) -> Option<&Url> {
self.base.as_ref()
}
Expand All @@ -63,8 +79,8 @@ impl FilterContext {
&self.extra_queries
}

pub fn set_limit_filters(&mut self, limit: usize) {
self.limit_filters = Some(limit);
pub fn set_filter_skip(&mut self, filter_skip: FilterSkip) {
self.filter_skip = Some(filter_skip);
}

pub fn set_base(&mut self, base: Url) {
Expand All @@ -74,18 +90,26 @@ impl FilterContext {
pub fn subcontext(&self) -> Self {
Self {
base: self.base.clone(),
limit_filters: None,
filter_skip: None,
extra_queries: self.extra_queries.clone(),
}
}

pub fn from_param(param: &crate::server::EndpointParam) -> Self {
Self {
base: param.base().cloned(),
limit_filters: param.limit_filters(),
filter_skip: param.filter_skip().cloned(),
extra_queries: param.extra_queries().clone(),
}
}

/// Whether the filter at `index` should run. With no skip-set
/// configured, every filter is allowed.
pub fn allows_filter(&self, index: usize) -> bool {
  match &self.filter_skip {
    Some(skip) => skip.allows_filter(index),
    None => true,
  }
}
}

#[async_trait::async_trait]
Expand Down Expand Up @@ -187,6 +211,10 @@ macro_rules! define_filters {
}
}

/// Serialize this filter configuration to a YAML string.
pub fn to_yaml(&self) -> Result<String, ConfigError> {
  Ok(serde_yaml::to_string(self)?)
}

pub fn name(&self) -> &'static str {
match self {
$(FilterConfig::$variant(_) => paste::paste! {stringify!([<$variant:snake>])},)*
Expand Down
10 changes: 5 additions & 5 deletions src/filter/js.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ use super::{FeedFilter, FeedFilterConfig, FilterContext};
/// See <a href="https://github.com/shouya/rss-funnel/wiki/JavaScript-API" target="_blank">JavaScript API</a>.
pub struct JsConfig {
code: String,
pub code: String,
}

#[derive(
Expand All @@ -30,7 +30,7 @@ pub struct JsConfig {
/// <br><br>
/// Example: <code>modify_post: post.title += " (modified)";</code>
pub struct ModifyPostConfig {
code: String,
pub code: String,
}

#[derive(
Expand All @@ -43,7 +43,7 @@ pub struct ModifyPostConfig {
/// <br><br>
/// Example: <code>modify_feed: feed.title.value = "Modified Feed";</code>
pub struct ModifyFeedConfig {
code: String,
pub code: String,
}

pub struct JsFilter {
Expand Down Expand Up @@ -83,8 +83,8 @@ impl FeedFilterConfig for JsConfig {

async fn build(self) -> Result<Self::Filter, ConfigError> {
let runtime = Runtime::new().await?;
runtime.eval(&self.code).await?;
runtime.eval(MODIFY_POSTS_CODE).await?;
let () = runtime.eval(&self.code).await?;
let () = runtime.eval(MODIFY_POSTS_CODE).await?;

Ok(Self::Filter { runtime })
}
Expand Down
12 changes: 6 additions & 6 deletions src/filter/merge.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use crate::client::{Client, ClientConfig};
use crate::feed::Feed;
use crate::filter_pipeline::{FilterPipeline, FilterPipelineConfig};
use crate::source::{Source, SourceConfig};
use crate::source::{SimpleSourceConfig, Source};
use crate::util::{ConfigError, Result, SingleOrVec};

use super::{FeedFilter, FeedFilterConfig, FilterContext};
Expand All @@ -28,24 +28,24 @@ pub enum MergeConfig {
)]
#[serde(transparent)]
pub struct MergeSimpleConfig {
source: SingleOrVec<SourceConfig>,
pub source: SingleOrVec<SimpleSourceConfig>,
}

#[derive(
JsonSchema, Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Hash,
)]
pub struct MergeFullConfig {
/// Source configuration
source: SingleOrVec<SourceConfig>,
pub source: SingleOrVec<SimpleSourceConfig>,
/// Number of concurrent requests to make for fetching multiple sources (default: 20)
#[serde(default)]
parallelism: Option<usize>,
pub parallelism: Option<usize>,
/// Client configuration
#[serde(default)]
client: Option<ClientConfig>,
pub client: Option<ClientConfig>,
/// Filters to apply to the merged feed
#[serde(default)]
filters: Option<FilterPipelineConfig>,
pub filters: Option<FilterPipelineConfig>,
}

impl From<MergeSimpleConfig> for MergeFullConfig {
Expand Down
15 changes: 5 additions & 10 deletions src/filter_pipeline.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ use crate::{
)]
#[serde(transparent)]
pub struct FilterPipelineConfig {
filters: Vec<FilterConfig>,
pub filters: Vec<FilterConfig>,
}

impl From<Vec<FilterConfig>> for FilterPipelineConfig {
Expand Down Expand Up @@ -88,16 +88,11 @@ impl Inner {
mut context: FilterContext,
mut feed: Feed,
) -> Result<Feed> {
let limit_filters = context
.limit_filters()
.unwrap_or_else(|| self.num_filters());
for filter in self.filters.iter().take(limit_filters) {
feed = filter.run(&mut context, feed).await?;
for (i, filter) in self.filters.iter().enumerate() {
if context.allows_filter(i) {
feed = filter.run(&mut context, feed).await?;
}
}
Ok(feed)
}

fn num_filters(&self) -> usize {
self.filters.len()
}
}
Loading

0 comments on commit d178443

Please sign in to comment.