feat: js logs on webui (#156)
This PR combines two features:

- make `console.{log,error}` output clearer (fixes #152)
- show JS logs on the web UI (fixes #71)

Screenshot:


![image](https://github.com/user-attachments/assets/f696d1e4-a146-4202-b0d9-b620f5d81e4b)

Note: due to [the way caching is implemented](#153), refreshing the page without changing the config may skip JS evaluation, so the logs may not show up as expected.
shouya authored Sep 30, 2024
1 parent 49be74f commit c34a46d
Showing 9 changed files with 162 additions and 57 deletions.
58 changes: 48 additions & 10 deletions src/filter.rs
@@ -57,6 +57,27 @@ pub struct FilterContext {

/// The extra query parameters passed to the endpoint
extra_queries: HashMap<String, String>,

/// Logs collected from the filters. None indicates logging is
/// disabled.
logs: Option<Vec<String>>,
}

pub struct SubContext<'a> {
context: &'a mut FilterContext,
saved_filter_skip: Option<FilterSkip>,
}

impl AsMut<FilterContext> for SubContext<'_> {
fn as_mut(&mut self) -> &mut FilterContext {
self.context
}
}

impl Drop for SubContext<'_> {
fn drop(&mut self) {
self.context.filter_skip = self.saved_filter_skip.take();
}
}
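
The `Drop` impl restores the parent's `filter_skip` when the subcontext goes out of scope. A standalone sketch of this take-and-restore-on-drop pattern, using illustrative types rather than the crate's own:

```rust
// Illustrative types only; not the crate's FilterContext/SubContext.
struct Parent {
  skip: Option<u32>,
}

struct Guard<'a> {
  parent: &'a mut Parent,
  saved: Option<u32>,
}

impl Parent {
  fn subcontext(&mut self) -> Guard<'_> {
    let saved = self.skip.take(); // child scope starts with `skip` cleared
    Guard { parent: self, saved }
  }
}

impl Drop for Guard<'_> {
  fn drop(&mut self) {
    // put the saved value back when the guard goes out of scope
    self.parent.skip = self.saved.take();
  }
}

fn main() {
  let mut parent = Parent { skip: Some(3) };
  {
    let guard = parent.subcontext();
    assert_eq!(guard.parent.skip, None); // cleared inside the subcontext
  }
  assert_eq!(parent.skip, Some(3)); // restored once the guard is dropped
}
```
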

impl FilterContext {
@@ -67,6 +88,7 @@ impl FilterContext {
filter_skip: None,
source: None,
extra_queries: HashMap::new(),
logs: None,
}
}

@@ -86,29 +108,45 @@ impl FilterContext {
self.source.as_ref()
}

pub fn set_filter_skip(&mut self, filter_skip: FilterSkip) {
self.filter_skip = Some(filter_skip);
}

#[cfg(test)]
pub fn set_base(&mut self, base: Url) {
self.base = Some(base);
}

-  pub fn subcontext(&self) -> Self {
-    Self {
-      base: self.base.clone(),
-      source: None,
-      filter_skip: None,
-      extra_queries: self.extra_queries.clone(),
+  pub fn subcontext(&mut self) -> SubContext<'_> {
+    let saved_filter_skip = self.filter_skip.take();
+    SubContext {
+      context: self,
+      saved_filter_skip,
    }
  }

pub fn log<'a, S>(&mut self, msg: S)
where
S: Into<std::borrow::Cow<'a, str>>,
{
if let Some(logs) = &mut self.logs {
logs.push(msg.into().into_owned());
}
}

pub fn enable_logging(&mut self) {
if self.logs.is_none() {
self.logs = Some(Vec::new());
};
}

pub fn logs(&self) -> Option<&[String]> {
self.logs.as_deref()
}

pub fn from_param(param: &crate::server::EndpointParam) -> Self {
Self {
base: param.base().cloned(),
source: param.source().cloned(),
filter_skip: param.filter_skip().cloned(),
extra_queries: param.extra_queries().clone(),
logs: None,
}
}

11 changes: 6 additions & 5 deletions src/filter/js.rs
@@ -189,13 +189,14 @@ impl JsFilter {

#[async_trait::async_trait]
impl FeedFilter for JsFilter {
-  async fn run(
-    &self,
-    _ctx: &mut FilterContext,
-    mut feed: Feed,
-  ) -> Result<Feed> {
+  async fn run(&self, ctx: &mut FilterContext, mut feed: Feed) -> Result<Feed> {
self.modify_feed(&mut feed).await?;
self.modify_posts(&mut feed).await?;

for log in self.runtime.extract_console_logs().await {
ctx.log(log);
}

Ok(feed)
}
}
7 changes: 4 additions & 3 deletions src/filter/merge.rs
@@ -133,11 +133,12 @@ impl FeedFilter for Merge {
async fn run(&self, ctx: &mut FilterContext, mut feed: Feed) -> Result<Feed> {
let (new_feeds, errors) = self.fetch_sources(ctx).await?;

+    let mut subctx = ctx.subcontext();
    for new_feed in new_feeds {
-      let ctx = ctx.subcontext();
-      let filtered_new_feed = self.filters.run(ctx, new_feed).await?;
-      feed.merge(filtered_new_feed)?;
+      let new_feed = self.filters.run(subctx.as_mut(), new_feed).await?;
+      feed.merge(new_feed)?;
    }
+    drop(subctx);

for (source, error) in errors {
let post = post_from_error(source, error, ctx);
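Note that `subctx` holds a mutable borrow of `ctx`, so it is dropped explicitly before `ctx` is used again to build the error posts below; dropping it also restores the parent's saved `filter_skip` via the `Drop` impl added in src/filter.rs.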
10 changes: 7 additions & 3 deletions src/filter_pipeline.rs
@@ -56,7 +56,11 @@ impl FilterPipelineConfig {
}

impl FilterPipeline {
-  pub async fn run(&self, context: FilterContext, feed: Feed) -> Result<Feed> {
+  pub async fn run(
+    &self,
+    context: &mut FilterContext,
+    feed: Feed,
+  ) -> Result<Feed> {
self.inner.lock().await.run(context, feed).await
}

@@ -127,12 +131,12 @@ impl Inner {

async fn run(
&self,
-    mut context: FilterContext,
+    context: &mut FilterContext,
mut feed: Feed,
) -> Result<Feed> {
for (i, filter) in self.filters.iter().enumerate() {
if context.allows_filter(i) {
-        feed = self.step(i, filter, &mut context, feed).await?;
+        feed = self.step(i, filter, context, feed).await?;
}
}

16 changes: 15 additions & 1 deletion src/js.rs
@@ -12,7 +12,7 @@ use rquickjs::module::ModuleData;
use rquickjs::prelude::IntoArgs;
use rquickjs::promise::Promise;
use rquickjs::{
-  async_with, AsyncContext, Ctx, FromJs, Function, IntoJs, Value,
+  async_with, AsyncContext, Class, Ctx, FromJs, Function, IntoJs, Value,
};
use url::Url;

@@ -162,6 +162,20 @@ impl Runtime {
.with(|ctx| handle_exception(&ctx, retval))
.await
}

pub async fn extract_console_logs(&self) -> Vec<String> {
self
.context
.with(|ctx| {
ctx
.globals()
.get::<_, Class<builtin::Console>>("console")
.unwrap()
.borrow_mut()
.extract_logs()
})
.await
}
}
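
`extract_console_logs` reads the global `console` object registered in `builtin::register_builtin` and drains its buffer (see `extract_logs` in src/js/builtin.rs, which uses `std::mem::take`), so each call returns only the logs produced since the previous call.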

struct RemoteResolver;
66 changes: 46 additions & 20 deletions src/js/builtin.rs
@@ -13,13 +13,11 @@ pub(super) fn register_builtin(ctx: &Ctx) -> Result<(), rquickjs::Error> {
Class::<DOM>::define(&ctx.globals())?;
Class::<Node>::define(&ctx.globals())?;

-  ctx
-    .globals()
-    .set("console", Class::instance(ctx.clone(), Console {})?)?;
+  let console = Class::instance(ctx.clone(), Console::new())?;
+  ctx.globals().set("console", console)?;

-  ctx
-    .globals()
-    .set("util", Class::instance(ctx.clone(), Util {})?)?;
+  let util = Class::instance(ctx.clone(), Util {})?;
+  ctx.globals().set("util", util)?;

let fetch_fn = Func::new(Async(fetch));
ctx.globals().set("fetch", fetch_fn)?;
@@ -29,29 +27,57 @@

#[derive(Trace)]
#[rquickjs::class]
-struct Console {}
+pub(super) struct Console {
+  aggregated_logs: Vec<String>,
+}

impl Console {
fn new() -> Self {
Self {
aggregated_logs: Vec::new(),
}
}

pub(super) fn extract_logs(&mut self) -> Vec<String> {
std::mem::take(&mut self.aggregated_logs)
}
}

#[rquickjs::methods]
impl Console {
-  fn log(&self, value: rquickjs::Value<'_>) -> Result<(), rquickjs::Error> {
-    let msg = match value.try_into_string() {
-      Ok(s) => s.to_string()?,
-      Err(v) => format!("[{}] {:?}", v.type_name(), v),
-    };
-    println!("[console.log] {}", msg);
-    Ok(())
-  }
+  fn log(&mut self, value: rquickjs::Value<'_>) -> Result<(), rquickjs::Error> {
+    let ty = value.type_name();
+    let msg = format!("[log] ({ty}) {}", string_repr(value)?);
+    println!("{msg}");
+    self.aggregated_logs.push(msg);
+    Ok(())
+  }

-  fn error(&self, value: rquickjs::Value<'_>) -> Result<(), rquickjs::Error> {
-    let msg = match value.try_into_string() {
-      Ok(s) => s.to_string()?,
-      Err(v) => format!("[{}] {:?}", v.type_name(), v),
-    };
-    eprintln!("[console.error] {}", msg);
-    Ok(())
-  }
+  fn error(
+    &mut self,
+    value: rquickjs::Value<'_>,
+  ) -> Result<(), rquickjs::Error> {
+    let ty = value.type_name();
+    let msg = format!("[error] ({ty}) {}", string_repr(value)?);
+    println!("{msg}");
+    self.aggregated_logs.push(msg);
+    Ok(())
+  }
}

+fn string_repr(value: rquickjs::Value<'_>) -> Result<String, rquickjs::Error> {
+  let ctx = value.ctx();
+  if let Some(json) =
+    ctx.json_stringify_replacer_space(value.clone(), rquickjs::Undefined, 4)?
+  {
+    return Ok(json.to_string().unwrap());
+  }
+
+  if let Some(string) = value.into_string() {
+    return Ok(string.to_string().unwrap());
+  }
+
+  Ok("unknown value".to_owned())
+}

#[derive(Trace)]
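With this change, each `console.log`/`console.error` call produces a line of the form `[log] (<type>) <value>` or `[error] (<type>) <value>`, where the value is rendered by `string_repr` (JSON-stringified with 4-space indentation when possible, falling back to the value's plain string form). Every line is printed via `println!` and also pushed into `aggregated_logs` for the web UI; note that `console.error` now goes to stdout rather than stderr.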
2 changes: 1 addition & 1 deletion src/otf_filter.rs
@@ -64,7 +64,7 @@ impl OnTheFlyFilter {
pub async fn run(
&mut self,
query: OnTheFlyFilterQuery,
-    context: FilterContext,
+    context: &mut FilterContext,
feed: Feed,
) -> Result<Feed, Error> {
let pipeline = self.update(query).await?;
24 changes: 13 additions & 11 deletions src/server/endpoint.rs
@@ -310,21 +310,17 @@ impl EndpointService {
})
}

-  pub async fn run(self, param: EndpointParam) -> Result<Feed> {
-    let mut context = FilterContext::from_param(&param);
+  pub async fn run_with_context(
+    self,
+    context: &mut FilterContext,
+    param: EndpointParam,
+  ) -> Result<Feed> {
    let feed = self
      .source
-      .fetch_feed(&context, Some(&self.client))
+      .fetch_feed(context, Some(&self.client))
      .await
      .map_err(|e| Error::FetchSource(Box::new(e)))?;
-    if let Some(filter_skip) = param.filter_skip {
-      context.set_filter_skip(filter_skip);
-    }
-    if let Some(base) = param.base {
-      context.set_base(base);
-    }
-    // TODO: change filter pipeline to operate on a borrowed context
-    let mut feed = self.filters.run(context.clone(), feed).await?;
+    let mut feed = self.filters.run(context, feed).await?;

if let (Some(on_the_fly_filter), Some(query)) =
(self.on_the_fly_filter, param.query)
@@ -343,6 +339,12 @@
Ok(feed)
}

pub async fn run(self, param: EndpointParam) -> Result<Feed> {
let mut context = FilterContext::from_param(&param);
let feed = self.run_with_context(&mut context, param).await?;
Ok(feed)
}

pub fn config_changed(&self, config: &EndpointServiceConfig) -> bool {
self.config != *config
}
25 changes: 22 additions & 3 deletions src/server/web/endpoint.rs
@@ -6,6 +6,7 @@ use url::Url;

use crate::{
feed::{Feed, NormalizedPost, Post},
filter::FilterContext,
server::{endpoint::EndpointService, web::sprite, EndpointParam},
source::{FromScratch, Source},
};
@@ -266,9 +267,12 @@ async fn fetch_and_render_feed(
endpoint: EndpointService,
params: EndpointParam,
) -> Markup {
let mut context = FilterContext::from_param(&params);
context.enable_logging();

html! {
-    @match endpoint.run(params).await {
-      Ok(feed) => (render_feed(feed)),
+    @match endpoint.run_with_context(&mut context, params).await {
+      Ok(feed) => (render_feed(feed, context.logs())),
Err(e) => {
div .flash.error {
header { b { "Failed to fetch feed" } }
@@ -346,7 +350,7 @@ fn render_post(normalized_post: NormalizedPost, post: Post) -> Markup {
}
}

-fn render_feed(mut feed: Feed) -> Markup {
+fn render_feed(mut feed: Feed, logs: Option<&[String]>) -> Markup {
let normalized_feed = feed.normalize();
let posts = feed.take_posts();

@@ -358,6 +362,21 @@ fn render_feed(mut feed: Feed) -> Markup {
@if let Some(description) = &normalized_feed.description {
p { (description) }
}

@match logs {
Some(logs) if !logs.is_empty() => {
details .flash.error.logs {
summary { "Logs" }
div style="overflow-x:scroll" {
@for log in logs {
{ pre { (log) } }
}
}
}
}
_ => {}
}

p { (format!("Entries ({}):", normalized_feed.posts.len())) }

@for (norm_post, post) in normalized_feed.posts.into_iter().zip(posts) {
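On the preview page, the collected logs render as a collapsible `details` element labelled "Logs", and only when the context actually collected something. Since `enable_logging` is called only on this web UI path, regular feed endpoints keep logging disabled and are unaffected.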
