Skip to content

Commit

Permalink
Merge branch 'main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
bconn98 authored Mar 9, 2024
2 parents 0998442 + 8ab1b34 commit dd1037e
Show file tree
Hide file tree
Showing 7 changed files with 162 additions and 26 deletions.
5 changes: 3 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,13 +62,13 @@ fnv = "1.0"
humantime = { version = "2.1", optional = true }
log = { version = "0.4.20", features = ["std"] }
log-mdc = { version = "0.1", optional = true }
serde = { version = "1.0", optional = true, features = ["derive"] }
serde = { version = "1.0.196", optional = true, features = ["derive"] }
serde-value = { version = "0.7", optional = true }
thread-id = { version = "4", optional = true }
typemap-ors = { version = "1.0.0", optional = true }
serde_json = { version = "1.0", optional = true }
serde_yaml = { version = "0.9", optional = true }
toml = { version = "0.8", optional = true }
toml = { version = "<0.8.10", optional = true }
parking_lot = { version = "0.12.0", optional = true }
rand = { version = "0.8", optional = true}
thiserror = "1.0.15"
Expand All @@ -87,6 +87,7 @@ streaming-stats = "0.2.3"
humantime = "2.1"
tempfile = "3.8"
mock_instant = "0.3"
serde_test = "1.0.176"

[[example]]
name = "json_logger"
Expand Down
34 changes: 17 additions & 17 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,23 +9,6 @@
log4rs is a highly configurable logging framework modeled after Java's Logback
and log4j libraries.

## Warning

If you are using the file rotation in your configuration there is a known
substantial performance issue so listen up! By default the `gzip` feature
is enabled and when rolling files it will zip log archives automatically.
This is a problem when the log archives are large as the zip happens in the
main thread and will halt the process while the zip is completed. Be advised
that the `gzip` feature will be removed from default features as of `1.0`.

The methods to mitigate this are as follows.

1. Use the `background_rotation` feature which spawns an os thread to do the compression.
1. Disable the `gzip` feature with `--no-default-features`.
1. Ensure the archives are small enough that the compression time is acceptable.

For more information see the PR that added [`background_rotation`](https://github.com/estk/log4rs/pull/117).

## Quick Start

log4rs.yaml:
Expand Down Expand Up @@ -82,6 +65,23 @@ fn main() {
* Run the tests for all individual features for windows with
[cross](https://github.com/rust-embedded/cross): `./test.sh win`


## Compression

If you are using file rotation in your configuration, be aware of a known,
substantial performance issue with the `gzip` feature. When a log file rolls,
the archive is compressed automatically. This is a problem for large archives
because the compression runs on the main thread and halts the process until it
completes.

The methods to mitigate this are as follows.

1. Use the `background_rotation` feature, which spawns an OS thread to do the compression.
2. Do not enable the `gzip` feature.
3. Ensure the archives are small enough that the compression time is acceptable.

For more information see the PR that added [`background_rotation`](https://github.com/estk/log4rs/pull/117).

## License

Licensed under either of
Expand Down
2 changes: 1 addition & 1 deletion src/append/rolling_file/policy/compound/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
//!
//! Requires the `compound_policy` feature.
#[cfg(feature = "config_parsing")]
use serde::{self, de};
use serde::de;
#[cfg(feature = "config_parsing")]
use serde_value::Value;
#[cfg(feature = "config_parsing")]
Expand Down
8 changes: 3 additions & 5 deletions src/config/raw.rs
Original file line number Diff line number Diff line change
Expand Up @@ -89,9 +89,7 @@
//! ```
#![allow(deprecated)]

use std::{
borrow::ToOwned, collections::HashMap, fmt, marker::PhantomData, sync::Arc, time::Duration,
};
use std::{collections::HashMap, fmt, marker::PhantomData, sync::Arc, time::Duration};

use anyhow::anyhow;
use log::LevelFilter;
Expand Down Expand Up @@ -287,9 +285,9 @@ impl Deserializers {
}

/// Deserializes a value of a specific type and kind.
pub fn deserialize<T: ?Sized>(&self, kind: &str, config: Value) -> anyhow::Result<Box<T>>
pub fn deserialize<T>(&self, kind: &str, config: Value) -> anyhow::Result<Box<T>>
where
T: Deserializable,
T: Deserializable + ?Sized,
{
match self.0.get::<KeyAdaptor<T>>().and_then(|m| m.get(kind)) {
Some(b) => b.deserialize(config, self),
Expand Down
2 changes: 1 addition & 1 deletion src/config/runtime.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
//! log4rs configuration
use log::LevelFilter;
use std::{collections::HashSet, iter::IntoIterator};
use std::collections::HashSet;
use thiserror::Error;

use crate::{append::Append, filter::Filter};
Expand Down
42 changes: 42 additions & 0 deletions src/filter/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ impl Deserializable for dyn Filter {
}
}

#[derive(PartialEq, Debug)]
/// The response returned by a filter.
pub enum Response {
/// Accept the log event.
Expand Down Expand Up @@ -78,3 +79,44 @@ impl<'de> de::Deserialize<'de> for FilterConfig {
})
}
}

#[cfg(test)]
mod test {
#[cfg(feature = "config_parsing")]
use super::*;
#[cfg(feature = "config_parsing")]
use serde_test::{assert_de_tokens, assert_de_tokens_error, Token};

#[test]
#[cfg(feature = "config_parsing")]
fn test_cfg_deserializer() {
let filter = FilterConfig {
kind: "threshold".to_owned(),
config: Value::Map({
let mut map = BTreeMap::new();
map.insert(
Value::String("level".to_owned()),
Value::String("error".to_owned()),
);
map
}),
};

let mut cfg = vec![
Token::Struct {
name: "FilterConfig",
len: 2,
},
Token::Str("kind"),
Token::Str("threshold"),
Token::Str("level"),
Token::Str("error"),
Token::StructEnd,
];

assert_de_tokens(&filter, &cfg);

cfg[1] = Token::Str("knd");
assert_de_tokens_error::<FilterConfig>(&cfg, "missing field `kind`");
}
}
95 changes: 95 additions & 0 deletions src/filter/threshold.rs
Original file line number Diff line number Diff line change
Expand Up @@ -66,3 +66,98 @@ impl Deserialize for ThresholdFilterDeserializer {
Ok(Box::new(ThresholdFilter::new(config.level)))
}
}

#[cfg(test)]
mod test {
use log::{Level, LevelFilter, Record};

use super::*;

#[cfg(feature = "config_parsing")]
use crate::config::Deserializers;

#[cfg(feature = "config_parsing")]
use serde_test::{assert_de_tokens, assert_de_tokens_error, Token};

#[test]
#[cfg(feature = "config_parsing")]
fn test_cfg_deserialize() {
let filter_cfg = ThresholdFilterConfig {
level: LevelFilter::Off,
};

let mut cfg = vec![
Token::Struct {
name: "ThresholdFilterConfig",
len: 1,
},
Token::Str("level"),
Token::Enum {
name: "LevelFilter",
},
Token::Str("Off"),
Token::Unit,
Token::StructEnd,
];

assert_de_tokens(&filter_cfg, &cfg);

cfg[1] = Token::Str("leel");
assert_de_tokens_error::<ThresholdFilterConfig>(&cfg, "missing field `level`");

cfg[1] = Token::Str("level");
cfg[3] = Token::Str("On");
cfg.remove(4); // No Unit on this one as the Option is invalid
assert_de_tokens_error::<ThresholdFilterConfig>(
&cfg,
"unknown variant `On`, expected one of `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`",
);
}

#[test]
fn test_filter_new_vs_struct() {
assert_eq!(
ThresholdFilter::new(LevelFilter::Info),
ThresholdFilter {
level: LevelFilter::Info
}
);
}

#[test]
fn test_threshold_filter() {
let thres = ThresholdFilter::new(LevelFilter::Info);
let debug_record = Record::builder()
.level(Level::Debug)
.args(format_args!("the message"))
.module_path(Some("path"))
.file(Some("file"))
.line(Some(132))
.build();

assert_eq!(thres.filter(&debug_record), Response::Reject);

let error_record = Record::builder()
.level(Level::Error)
.args(format_args!("the message"))
.module_path(Some("path"))
.file(Some("file"))
.line(Some(132))
.build();

assert_eq!(thres.filter(&error_record), Response::Neutral);
}

#[test]
#[cfg(feature = "config_parsing")]
fn test_cfg_deserializer() {
let filter_cfg = ThresholdFilterConfig {
level: LevelFilter::Off,
};

let deserializer = ThresholdFilterDeserializer;

let res = deserializer.deserialize(filter_cfg, &Deserializers::default());
assert!(res.is_ok());
}
}

0 comments on commit dd1037e

Please sign in to comment.