diff --git a/Cargo.toml b/Cargo.toml index 39ad878b..98fd72cc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -75,7 +75,6 @@ thiserror = "1.0.15" anyhow = "1.0.28" derivative = "2.2" once_cell = "1.17.1" -strum = "0.26.1" [target.'cfg(windows)'.dependencies] winapi = { version = "0.3", optional = true, features = ["handleapi", "minwindef", "processenv", "winbase", "wincon"] } diff --git a/src/append/console.rs b/src/append/console.rs index cf089518..e83ec776 100644 --- a/src/append/console.rs +++ b/src/append/console.rs @@ -263,3 +263,117 @@ impl Deserialize for ConsoleAppenderDeserializer { Ok(Box::new(appender.build())) } } + +#[cfg(test)] +mod test { + use super::*; + use crate::encode::Write; + + #[test] + fn test_append() { + use log::Level; + + // Build a std out appender + let appender = ConsoleAppender::builder() + .tty_only(false) + .target(Target::Stdout) + .encoder(Box::new(PatternEncoder::new("{m}{n}"))) + .build(); + + assert!(appender + .append( + &Record::builder() + .level(Level::Debug) + .target("target") + .module_path(Some("module_path")) + .file(Some("file")) + .line(Some(100)) + .args(format_args!("{}", "message")) + .build() + ) + .is_ok()); + + // No op, but test coverage :) + appender.flush(); + } + + #[test] + fn test_builder() { + // Build a std out appender + let _appender = ConsoleAppender::builder() + .tty_only(false) + .target(Target::Stdout) + .encoder(Box::new(PatternEncoder::new("{m}{n}"))) + .build(); + + // Build a std err appender + let _appender = ConsoleAppender::builder() + .tty_only(false) + .target(Target::Stderr) + .encoder(Box::new(PatternEncoder::new("{m}{n}"))) + .build(); + + // Build a default encoder appender + let _appender = ConsoleAppender::builder() + .tty_only(true) + .target(Target::Stderr) + .build(); + } + + #[test] + #[cfg(feature = "config_parsing")] + fn test_config_deser() { + use crate::{config::Deserializers, encode::EncoderConfig}; + use serde_value::Value; + use std::collections::BTreeMap; + let deserializer = ConsoleAppenderDeserializer; + + let targets = vec![ConfigTarget::Stdout, ConfigTarget::Stderr]; + + for target in targets { + let console_cfg = ConsoleAppenderConfig { + target: Some(target), + encoder: Some(EncoderConfig { + kind: "pattern".to_owned(), + config: Value::Map(BTreeMap::new()), + }), + tty_only: Some(true), + }; + assert!(deserializer + .deserialize(console_cfg, &Deserializers::default()) + .is_ok()); + } + } + + fn write_test(mut writer: WriterLock) { + use std::io::Write; + + assert_eq!(writer.write(b"Write log\n").unwrap(), 10); + assert!(writer.set_style(&Style::new()).is_ok()); + assert!(writer.write_all(b"Write All log\n").is_ok()); + assert!(writer.write_fmt(format_args!("{} \n", "normal")).is_ok()); + assert!(writer.flush().is_ok()); + } + + #[test] + fn test_tty() { + // Note that this fails in GitHub Actions and therefore does not + // show as covered. 
+ let w = match ConsoleWriter::stdout() { + Some(w) => w, + None => return, + }; + + let tty = Writer::Tty(w); + assert!(tty.is_tty()); + + write_test(tty.lock()); + } + + #[test] + fn test_raw() { + let raw = Writer::Raw(StdWriter::stdout()); + assert!(!raw.is_tty()); + write_test(raw.lock()); + } +} diff --git a/src/append/file.rs b/src/append/file.rs index 3f345e7d..5d82a0b8 100644 --- a/src/append/file.rs +++ b/src/append/file.rs @@ -164,7 +164,7 @@ mod test { use super::*; #[test] - fn create_directories() { + fn test_create_directories() { let tempdir = tempfile::tempdir().unwrap(); FileAppender::builder() @@ -173,11 +173,64 @@ mod test { } #[test] - fn append_false() { + fn test_append_trait() { + use log::Level; + let tempdir = tempfile::tempdir().unwrap(); - FileAppender::builder() - .append(false) + let appender = FileAppender::builder() .build(tempdir.path().join("foo.log")) .unwrap(); + + log_mdc::insert("foo", "bar"); + let res = appender.append( + &Record::builder() + .level(Level::Debug) + .target("target") + .module_path(Some("module_path")) + .file(Some("file")) + .line(Some(100)) + .args(format_args!("{}", "message")) + .build(), + ); + assert!(res.is_ok()); + + appender.flush(); + } + + #[test] + fn test_append_builder() { + let append_choices = vec![true, false]; + let tempdir = tempfile::tempdir().unwrap(); + + for do_append in append_choices { + // No actionable test + FileAppender::builder() + .append(do_append) + .build(tempdir.path().join("foo.log")) + .unwrap(); + } + } + + #[test] + #[cfg(feature = "config_parsing")] + fn test_config_deser() { + use crate::config::Deserializers; + use serde_value::Value; + use std::collections::BTreeMap; + + let tempdir = tempfile::tempdir().unwrap(); + let file_cfg = FileAppenderConfig { + path: tempdir.path().join("foo.log").to_str().unwrap().to_owned(), + encoder: Some(EncoderConfig { + kind: "pattern".to_owned(), + config: Value::Map(BTreeMap::new()), + }), + append: Some(true), + }; + + let deserializer = FileAppenderDeserializer; + + let res = deserializer.deserialize(file_cfg, &Deserializers::default()); + assert!(res.is_ok()); } } diff --git a/src/append/mod.rs b/src/append/mod.rs index 73570423..d421c414 100644 --- a/src/append/mod.rs +++ b/src/append/mod.rs @@ -153,12 +153,15 @@ impl<'de> Deserialize<'de> for AppenderConfig { #[cfg(test)] mod test { + #[cfg(feature = "config_parsing")] + use super::*; + #[cfg(any(feature = "file_appender", feature = "rolling_file_appender"))] use std::env::{set_var, var}; #[test] #[cfg(any(feature = "file_appender", feature = "rolling_file_appender"))] - fn expand_env_vars_tests() { + fn test_expand_env_vars() { set_var("HELLO_WORLD", "GOOD BYE"); #[cfg(not(target_os = "windows"))] let test_cases = vec![ @@ -250,4 +253,59 @@ mod test { assert_eq!(res, expected) } } + + #[test] + #[cfg(feature = "config_parsing")] + fn test_config_deser() { + use std::collections::BTreeMap; + + use serde_test::{assert_de_tokens, assert_de_tokens_error, Token}; + use serde_value::Value; + + use crate::filter::FilterConfig; + + let appender = AppenderConfig { + kind: "file".to_owned(), + filters: vec![FilterConfig { + kind: "threshold".to_owned(), + config: Value::Map({ + let mut map = BTreeMap::new(); + map.insert( + Value::String("level".to_owned()), + Value::String("error".to_owned()), + ); + map + }), + }], + config: Value::Map(BTreeMap::new()), + }; + + let mut cfg = vec![ + Token::Struct { + name: "AppenderConfig", + len: 3, + }, + Token::Str("kind"), + Token::Str("file"), + Token::Str("filters"), + 
Token::Seq { len: Some(1) },
+            Token::Struct {
+                name: "FilterConfig",
+                len: 2,
+            },
+            Token::Str("kind"),
+            Token::Str("threshold"),
+            Token::Str("level"),
+            Token::Str("error"),
+            Token::StructEnd,
+            Token::SeqEnd,
+            Token::StructEnd,
+        ];
+
+        assert_de_tokens(&appender, &cfg);
+
+        // Intentional typo on the expected field
+        cfg[1] = Token::Str("kid");
+        assert_de_tokens_error::<AppenderConfig>(&cfg, "missing field `kind`");
+    }
 }
diff --git a/src/append/rolling_file/mod.rs b/src/append/rolling_file/mod.rs
index 9e6d35ee..4588dfef 100644
--- a/src/append/rolling_file/mod.rs
+++ b/src/append/rolling_file/mod.rs
@@ -367,17 +367,54 @@ impl Deserialize for RollingFileAppenderDeserializer {
 
 #[cfg(test)]
 mod test {
-    use std::{
-        fs::File,
-        io::{Read, Write},
-    };
-
     use super::*;
     use crate::append::rolling_file::policy::Policy;
+    use tempfile::NamedTempFile;
+
+    #[cfg(feature = "config_parsing")]
+    use serde_test::{assert_de_tokens, Token};
+
+    #[test]
+    #[cfg(feature = "config_parsing")]
+    fn test_policy_deser() {
+        use super::*;
+        use serde_value::Value;
+        use std::collections::BTreeMap;
+
+        let policy = Policy {
+            kind: "compound".to_owned(),
+            config: Value::Map(BTreeMap::new()),
+        };
+
+        assert_de_tokens(
+            &policy,
+            &[
+                Token::Struct {
+                    name: "Policy",
+                    len: 1,
+                },
+                Token::Str("kind"),
+                Token::Str("compound"),
+                Token::StructEnd,
+            ],
+        );
+
+        assert_de_tokens(
+            &policy,
+            &[
+                Token::Struct {
+                    name: "Policy",
+                    len: 0,
+                },
+                Token::StructEnd,
+            ],
+        );
+    }
+
     #[test]
     #[cfg(feature = "yaml_format")]
-    fn deserialize() {
+    fn test_deserialize_appenders() {
         use crate::config::{Deserializers, RawConfig};
 
         let dir = tempfile::tempdir().unwrap();
@@ -413,14 +450,13 @@ appenders:
         let config = ::serde_yaml::from_str::<RawConfig>(&config).unwrap();
         let errors = config.appenders_lossy(&Deserializers::new()).1;
-        println!("{:?}", errors);
         assert!(errors.is_empty());
     }
 
     #[derive(Debug)]
-    struct NopPolicy;
+    struct NopPostPolicy;
 
-    impl Policy for NopPolicy {
+    impl Policy for NopPostPolicy {
         fn process(&self, _: &mut LogFile) -> anyhow::Result<()> {
             Ok(())
         }
@@ -429,49 +465,146 @@ appenders:
         }
     }
 
+    #[derive(Debug)]
+    struct NopPrePolicy;
+
+    impl Policy for NopPrePolicy {
+        fn process(&self, _: &mut LogFile) -> anyhow::Result<()> {
+            Ok(())
+        }
+        fn is_pre_process(&self) -> bool {
+            true
+        }
+    }
+
     #[test]
-    fn append() {
-        let dir = tempfile::tempdir().unwrap();
-        let path = dir.path().join("append.log");
-        RollingFileAppender::builder()
-            .append(true)
-            .build(&path, Box::new(NopPolicy))
-            .unwrap();
-        assert!(path.exists());
-        File::create(&path).unwrap().write_all(b"hello").unwrap();
-
-        RollingFileAppender::builder()
-            .append(true)
-            .build(&path, Box::new(NopPolicy))
-            .unwrap();
-        let mut contents = vec![];
-        File::open(&path)
-            .unwrap()
-            .read_to_end(&mut contents)
-            .unwrap();
-        assert_eq!(contents, b"hello");
+    fn test_append() {
+        use log::Level;
+
+        let tmp_file = NamedTempFile::new().unwrap();
+        let policies: Vec<Box<dyn Policy>> = vec![Box::new(NopPrePolicy), Box::new(NopPostPolicy)];
+        let record = Record::builder()
+            .level(Level::Debug)
+            .target("target")
+            .module_path(Some("module_path"))
+            .file(Some("file"))
+            .line(Some(100))
+            .build();
+        log_mdc::insert("foo", "bar");
+
+        for policy in policies {
+            let appender = RollingFileAppender::builder()
+                .append(true)
+                .encoder(Box::new(PatternEncoder::new("{m}{n}")))
+                .build(tmp_file.path(), policy)
+                .unwrap();
+
+            assert!(appender.append(&record).is_ok());
+
+            // No-op method, but it gets the test coverage :)
+            appender.flush();
+        }
     }
 
     #[test]
-    fn truncate() {
-
let dir = tempfile::tempdir().unwrap(); - let path = dir.path().join("truncate.log"); - RollingFileAppender::builder() - .append(false) - .build(&path, Box::new(NopPolicy)) - .unwrap(); - assert!(path.exists()); - File::create(&path).unwrap().write_all(b"hello").unwrap(); - - RollingFileAppender::builder() - .append(false) - .build(&path, Box::new(NopPolicy)) - .unwrap(); - let mut contents = vec![]; - File::open(&path) - .unwrap() - .read_to_end(&mut contents) - .unwrap(); - assert_eq!(contents, b""); + fn test_logfile() { + let tmp_file = NamedTempFile::new().unwrap(); + let mut logfile = LogFile { + writer: &mut None, + path: tmp_file.path(), + len: 0, + }; + + assert_eq!(logfile.path(), tmp_file.path()); + assert_eq!(logfile.len_estimate(), 0); + + // No actions to take here, the writer becomes inaccessible but theres + // no getter to verify + logfile.roll(); + } + + #[test] + #[cfg(feature = "config_parsing")] + fn test_deserializer() { + use super::*; + use crate::config::Deserializers; + use serde_value::Value; + use std::collections::BTreeMap; + + let tmp_file = NamedTempFile::new().unwrap(); + + let append_cfg = RollingFileAppenderConfig { + path: tmp_file.path().to_str().unwrap().to_owned(), + append: Some(true), + encoder: Some(EncoderConfig { + kind: "pattern".to_owned(), + config: Value::Map(BTreeMap::new()), + }), + policy: Policy { + kind: "compound".to_owned(), + config: Value::Map({ + let mut map = BTreeMap::new(); + map.insert( + Value::String("trigger".to_owned()), + Value::Map({ + let mut map = BTreeMap::new(); + map.insert( + Value::String("kind".to_owned()), + Value::String("size".to_owned()), + ); + map.insert( + Value::String("limit".to_owned()), + Value::String("1mb".to_owned()), + ); + map + }), + ); + map.insert( + Value::String("roller".to_owned()), + Value::Map({ + let mut map = BTreeMap::new(); + map.insert( + Value::String("kind".to_owned()), + Value::String("fixed_window".to_owned()), + ); + map.insert(Value::String("base".to_owned()), Value::I32(1)); + map.insert(Value::String("count".to_owned()), Value::I32(5)); + map.insert( + Value::String("pattern".to_owned()), + Value::String("logs/test.{}.log".to_owned()), + ); + map + }), + ); + map + }), + }, + }; + + let deserializer = RollingFileAppenderDeserializer; + + let res = deserializer.deserialize(append_cfg, &Deserializers::default()); + assert!(res.is_ok()); + } + + #[test] + fn test_logwriter() { + // Can't use named or unnamed temp file here because of opening + // the file multiple times for reading + let file = tempfile::tempdir().unwrap(); + let file_path = file.path().join("writer.log"); + let file = File::create(&file_path).unwrap(); + let buf_writer = BufWriter::new(file); + let mut log_writer = LogWriter { + file: buf_writer, + len: 0, + }; + + let contents = fs::read_to_string(&file_path).unwrap(); + assert!(contents.is_empty()); + assert_eq!(log_writer.write(b"test").unwrap(), 4); + assert!(log_writer.flush().is_ok()); + let contents = fs::read_to_string(file_path).unwrap(); + assert!(contents.contains("test")); } } diff --git a/src/append/rolling_file/policy/compound/mod.rs b/src/append/rolling_file/policy/compound/mod.rs index 3e14ae06..83ac48cc 100644 --- a/src/append/rolling_file/policy/compound/mod.rs +++ b/src/append/rolling_file/policy/compound/mod.rs @@ -159,3 +159,101 @@ impl Deserialize for CompoundPolicyDeserializer { Ok(Box::new(CompoundPolicy::new(trigger, roller))) } } + +#[cfg(test)] +mod test { + use self::{roll::delete::DeleteRoller, trigger::size::SizeTrigger}; + + use 
super::*;
+    use tempfile::NamedTempFile;
+
+    #[cfg(feature = "config_parsing")]
+    use serde_test::{assert_de_tokens, assert_de_tokens_error, Token};
+    #[cfg(feature = "config_parsing")]
+    use std::collections::BTreeMap;
+
+    fn create_policy() -> CompoundPolicy {
+        let trigger = SizeTrigger::new(1024);
+        let roller = DeleteRoller::new();
+        CompoundPolicy::new(Box::new(trigger), Box::new(roller))
+    }
+
+    #[test]
+    #[cfg(feature = "config_parsing")]
+    fn test_trigger_deser() {
+        let mut cfg = vec![
+            Token::Struct {
+                name: "Trigger",
+                len: 2,
+            },
+            Token::Str("kind"),
+            Token::Str("size"),
+            Token::Str("limit"),
+            Token::U64(1024),
+            Token::StructEnd,
+        ];
+
+        assert_de_tokens(
+            &Trigger {
+                kind: "size".to_owned(),
+                config: Value::Map({
+                    let mut map = BTreeMap::new();
+                    map.insert(Value::String("limit".to_owned()), Value::U64(1024));
+                    map
+                }),
+            },
+            &cfg,
+        );
+
+        // Intentionally break the config
+        cfg[1] = Token::Str("knd");
+        assert_de_tokens_error::<Trigger>(&cfg, "missing field `kind`");
+    }
+
+    #[test]
+    #[cfg(feature = "config_parsing")]
+    fn test_roller_deser() {
+        let mut cfg = vec![
+            Token::Struct {
+                name: "Roller",
+                len: 1,
+            },
+            Token::Str("kind"),
+            Token::Str("delete"),
+            Token::StructEnd,
+        ];
+
+        assert_de_tokens(
+            &Roller {
+                kind: "delete".to_owned(),
+                config: Value::Map(BTreeMap::new()),
+            },
+            &cfg,
+        );
+
+        // Intentionally break the config
+        cfg[1] = Token::Str("knd");
+        assert_de_tokens_error::<Roller>(&cfg, "missing field `kind`");
+    }
+
+    #[test]
+    fn test_pre_process() {
+        let policy = create_policy();
+        assert!(!policy.is_pre_process());
+    }
+
+    #[test]
+    fn test_process() {
+        let policy = create_policy();
+        // Don't roll, then roll
+        let file_sizes = vec![0, 2048];
+        let tmp_file = NamedTempFile::new().unwrap();
+
+        for file_size in file_sizes {
+            let mut logfile = LogFile {
+                writer: &mut None,
+                path: tmp_file.as_ref(),
+                len: file_size,
+            };
+            assert!(policy.process(&mut logfile).is_ok());
+        }
+    }
+}
diff --git a/src/append/rolling_file/policy/compound/roll/fixed_window.rs b/src/append/rolling_file/policy/compound/roll/fixed_window.rs
index 43b0a533..5f67459d 100644
--- a/src/append/rolling_file/policy/compound/roll/fixed_window.rs
+++ b/src/append/rolling_file/policy/compound/roll/fixed_window.rs
@@ -615,7 +615,7 @@ mod test {
     }
 
     #[test]
-    fn test_test_rotate_to_del() {
+    fn test_rotate_to_del() {
         let dir = tempfile::tempdir().unwrap();
         let base = dir.path().to_str().unwrap();
 
diff --git a/src/append/rolling_file/policy/compound/trigger/size.rs b/src/append/rolling_file/policy/compound/trigger/size.rs
index fd102257..46192c1f 100644
--- a/src/append/rolling_file/policy/compound/trigger/size.rs
+++ b/src/append/rolling_file/policy/compound/trigger/size.rs
@@ -165,7 +165,7 @@ mod test {
     static BYTE_MULTIPLIER: u64 = 1024;
 
     #[test]
-    fn pre_process() {
+    fn test_pre_process() {
         let trigger = SizeTrigger::new(2048);
         assert!(!trigger.is_pre_process());
     }
@@ -223,19 +223,29 @@ mod test {
         let trigger = SizeTriggerConfig {
             limit: BYTE_MULTIPLIER,
         };
-        assert_de_tokens(
-            &trigger,
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::I64(1024),
-                Token::StructEnd,
-            ],
+
+        let mut cfg = vec![
+            Token::Struct {
+                name: "SizeTriggerConfig",
+                len: 1,
+            },
+            Token::Str("limit"),
+            Token::I64(1024),
+            Token::StructEnd,
+        ];
+
+        assert_de_tokens(&trigger, &cfg);
+
+        cfg[2] = Token::I64(-1024);
+        assert_de_tokens_error::<SizeTriggerConfig>(
+            &cfg,
+            "invalid value: integer `-1024`, expected a non-negative number",
         );
+    }
 
+    #[test]
+    #[cfg(feature = "config_parsing")]
+    fn test_float_deserialize() {
        assert_de_tokens_error::<SizeTriggerConfig>(
             &[
                 Token::Struct {
@@ -243,10 +253,10 @@ mod test {
                     len: 1,
                 },
                 Token::Str("limit"),
-                Token::I64(-1024),
+                Token::F32(2.0),
                 Token::StructEnd,
             ],
-            "invalid value: integer `-1024`, expected a non-negative number",
+            "invalid type: floating point `2.0`, expected a size",
         );
     }
 
@@ -257,324 +267,182 @@ mod test {
     #[test]
     #[cfg(feature = "config_parsing")]
     fn string_deserialize() {
         let trigger = SizeTriggerConfig {
             limit: BYTE_MULTIPLIER,
         };
-        assert_de_tokens(
-            &trigger,
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str("1024"),
-                Token::StructEnd,
-            ],
-        );
+
+        let mut cfg = vec![
+            Token::Struct {
+                name: "SizeTriggerConfig",
+                len: 1,
+            },
+            Token::Str("limit"),
+            Token::Str("1024"),
+            Token::StructEnd,
+        ];
+
+        assert_de_tokens(&trigger, &cfg);
 
         // Test not an unsigned number
+        cfg[2] = Token::Str("-1024");
         assert_de_tokens_error::<SizeTriggerConfig>(
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str("-1024"),
-                Token::StructEnd,
-            ],
+            &cfg,
             "invalid value: string \"\", expected a number",
         );
 
-        // Test not an unsigned number
+        // Test not a valid unit
+        cfg[2] = Token::Str("1024 pb");
         assert_de_tokens_error::<SizeTriggerConfig>(
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str("1024 pb"),
-                Token::StructEnd,
-            ],
+            &cfg,
             "invalid value: string \"pb\", expected a valid unit",
         );
 
         // u64::MAX which will overflow when converted to bytes
-        let size = "18446744073709551615 kb";
-        // Test not an unsigned number
+        cfg[2] = Token::Str("18446744073709551615 kb");
         assert_de_tokens_error::<SizeTriggerConfig>(
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str(size),
-                Token::StructEnd,
-            ],
+            &cfg,
             "invalid value: string \"18446744073709551615 kb\", expected a byte size",
         );
     }
 
     #[test]
     #[cfg(feature = "config_parsing")]
-    fn byte_deserialize() {
+    fn test_byte_deserialize() {
         let trigger = SizeTriggerConfig {
             limit: BYTE_MULTIPLIER,
         };
 
+        let mut cfg = vec![
+            Token::Struct {
+                name: "SizeTriggerConfig",
+                len: 1,
+            },
+            Token::Str("limit"),
+            Token::Str("1024b"),
+            Token::StructEnd,
+        ];
+
         // Test spacing & b vs B
-        assert_de_tokens(
-            &trigger,
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str("1024b"),
-                Token::StructEnd,
-            ],
-        );
-        assert_de_tokens(
-            &trigger,
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str("1024 B"),
-                Token::StructEnd,
-            ],
-        );
+        assert_de_tokens(&trigger, &cfg);
+
+        cfg[2] = Token::Str("1024 B");
+        assert_de_tokens(&trigger, &cfg);
     }
 
     #[test]
     #[cfg(feature = "config_parsing")]
-    fn kilobyte_deserialize() {
+    fn test_kilobyte_deserialize() {
         let trigger = SizeTriggerConfig {
             limit: BYTE_MULTIPLIER,
         };
 
+        let mut cfg = vec![
+            Token::Struct {
+                name: "SizeTriggerConfig",
+                len: 1,
+            },
+            Token::Str("limit"),
+            Token::Str("1 kb"),
+            Token::StructEnd,
+        ];
+
         // Test kb unit
-        assert_de_tokens(
-            &trigger,
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str("1 kb"),
-                Token::StructEnd,
-            ],
-        );
-        assert_de_tokens(
-            &trigger,
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str("1 KB"),
-                Token::StructEnd,
-            ],
-        );
-        assert_de_tokens(
-            &trigger,
-            &[
-                Token::Struct {
-                    name: "SizeTriggerConfig",
-                    len: 1,
-                },
-                Token::Str("limit"),
-                Token::Str("1 kB"),
-                Token::StructEnd,
-            ],
-        );
-        assert_de_tokens(
-            &trigger,
-            &[
-                Token::Struct {
-                    name:
"SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 Kb"), - Token::StructEnd, - ], - ); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 KB"); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 kB"); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 Kb"); + assert_de_tokens(&trigger, &cfg); } #[test] #[cfg(feature = "config_parsing")] - fn megabyte_deserialize() { + fn test_megabyte_deserialize() { // Test mb unit let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER.pow(2), }; - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 mb"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 MB"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 mB"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 Mb"), - Token::StructEnd, - ], - ); + + let mut cfg = vec![ + Token::Struct { + name: "SizeTriggerConfig", + len: 1, + }, + Token::Str("limit"), + Token::Str("1 mb"), + Token::StructEnd, + ]; + + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 MB"); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 mB"); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 Mb"); + assert_de_tokens(&trigger, &cfg); } #[test] #[cfg(feature = "config_parsing")] - fn gigabyte_deserialize() { + fn test_gigabyte_deserialize() { // Test gb unit let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER.pow(3), }; - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 gb"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 GB"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 gB"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 Gb"), - Token::StructEnd, - ], - ); + + let mut cfg = vec![ + Token::Struct { + name: "SizeTriggerConfig", + len: 1, + }, + Token::Str("limit"), + Token::Str("1 gb"), + Token::StructEnd, + ]; + + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 GB"); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 gB"); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 Gb"); + assert_de_tokens(&trigger, &cfg); } #[test] #[cfg(feature = "config_parsing")] - fn terabyte_deserialize() { + fn test_terabyte_deserialize() { // Test tb unit let trigger = SizeTriggerConfig { limit: BYTE_MULTIPLIER.pow(4), }; - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 tb"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 TB"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: 
"SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 tB"), - Token::StructEnd, - ], - ); - assert_de_tokens( - &trigger, - &[ - Token::Struct { - name: "SizeTriggerConfig", - len: 1, - }, - Token::Str("limit"), - Token::Str("1 Tb"), - Token::StructEnd, - ], - ); + + let mut cfg = vec![ + Token::Struct { + name: "SizeTriggerConfig", + len: 1, + }, + Token::Str("limit"), + Token::Str("1 tb"), + Token::StructEnd, + ]; + + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 TB"); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 tB"); + assert_de_tokens(&trigger, &cfg); + + cfg[2] = Token::Str("1 Tb"); + assert_de_tokens(&trigger, &cfg); } } diff --git a/src/append/rolling_file/policy/compound/trigger/time.rs b/src/append/rolling_file/policy/compound/trigger/time.rs index ac70b87e..92e1590c 100644 --- a/src/append/rolling_file/policy/compound/trigger/time.rs +++ b/src/append/rolling_file/policy/compound/trigger/time.rs @@ -475,72 +475,45 @@ mod test { #[test] #[cfg(feature = "config_parsing")] fn test_interval_deser_errors() { - assert_de_tokens_error::( - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("abc"), - Token::StructEnd, - ], - "invalid value: string \"\", expected a number", - ); - - assert_de_tokens_error::( - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str(""), - Token::StructEnd, - ], - "invalid value: string \"\", expected a number", - ); - - assert_de_tokens_error::( - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("5 das"), - Token::StructEnd, - ], - "invalid value: string \"das\", expected a valid unit", - ); + let mut cfg = vec![ + Token::Struct { + name: "TimeTriggerConfig", + len: 1, + }, + Token::Str("interval"), + Token::Str("abc"), + Token::StructEnd, + ]; // Can't test with a STR "-1" because the negative sign parses as a // non-ascii and the whole value goes into unit - assert_de_tokens_error::( - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), + let tests = vec![ + ( + Token::Str("abc"), + "invalid value: string \"\", expected a number", + ), + ( + Token::Str(""), + "invalid value: string \"\", expected a number", + ), + ( + Token::Str("5 das"), + "invalid value: string \"das\", expected a valid unit", + ), + ( Token::I64(-1), - Token::StructEnd, - ], - "invalid value: integer `-1`, expected a non-negative number", - ); - - assert_de_tokens_error::( - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), + "invalid value: integer `-1`, expected a non-negative number", + ), + ( Token::F32(2.0), - Token::StructEnd, - ], - "invalid type: floating point `2.0`, expected a time", - ); + "invalid type: floating point `2.0`, expected a time", + ), + ]; + + for (cfg_val, err_msg) in tests { + cfg[2] = cfg_val; + assert_de_tokens_error::(&cfg, err_msg); + } } #[test] @@ -552,206 +525,37 @@ mod test { max_random_delay: 0, }; - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::U64(1), - Token::StructEnd, - ], - ); - - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 second"), - Token::StructEnd, - ], - ); - - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - 
Token::Str("interval"), - Token::Str("1 seconds"), - Token::StructEnd, - ], - ); - - trigger_cfg.interval = TimeTriggerInterval::Minute(1); - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 minute"), - Token::StructEnd, - ], - ); - - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 minutes"), - Token::StructEnd, - ], - ); - - trigger_cfg.interval = TimeTriggerInterval::Hour(1); - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 hour"), - Token::StructEnd, - ], - ); - - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 hours"), - Token::StructEnd, - ], - ); - - trigger_cfg.interval = TimeTriggerInterval::Day(1); - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 day"), - Token::StructEnd, - ], - ); - - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 days"), - Token::StructEnd, - ], - ); - - trigger_cfg.interval = TimeTriggerInterval::Week(1); - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 week"), - Token::StructEnd, - ], - ); - - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 weeks"), - Token::StructEnd, - ], - ); - - trigger_cfg.interval = TimeTriggerInterval::Month(1); - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 month"), - Token::StructEnd, - ], - ); - - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 months"), - Token::StructEnd, - ], - ); + let mut cfg = vec![ + Token::Struct { + name: "TimeTriggerConfig", + len: 1, + }, + Token::Str("interval"), + Token::U64(1), + Token::StructEnd, + ]; - trigger_cfg.interval = TimeTriggerInterval::Year(1); - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 year"), - Token::StructEnd, - ], - ); + let tests = vec![ + (TimeTriggerInterval::Second(1), Token::U64(1)), + (TimeTriggerInterval::Second(1), Token::Str("1 second")), + (TimeTriggerInterval::Second(1), Token::Str("1 seconds")), + (TimeTriggerInterval::Minute(1), Token::Str("1 minute")), + (TimeTriggerInterval::Minute(1), Token::Str("1 minutes")), + (TimeTriggerInterval::Hour(1), Token::Str("1 hour")), + (TimeTriggerInterval::Hour(1), Token::Str("1 hours")), + (TimeTriggerInterval::Day(1), Token::Str("1 day")), + (TimeTriggerInterval::Day(1), Token::Str("1 days")), + (TimeTriggerInterval::Week(1), Token::Str("1 week")), + (TimeTriggerInterval::Week(1), Token::Str("1 weeks")), + (TimeTriggerInterval::Year(1), Token::Str("1 year")), + (TimeTriggerInterval::Year(1), Token::Str("1 years")), + ]; - assert_de_tokens( - &trigger_cfg, - &[ - Token::Struct { - name: "TimeTriggerConfig", - len: 1, - }, - Token::Str("interval"), - Token::Str("1 years"), - Token::StructEnd, - ], - 
); + for (interval, cfg_val) in tests { + cfg[2] = cfg_val; + trigger_cfg.interval = interval; + assert_de_tokens(&trigger_cfg, &cfg); + } } #[test] @@ -773,46 +577,30 @@ mod test { #[test] fn test_max_rand_delay() { - let trigger_cfg = TimeTriggerConfig { - interval: TimeTriggerInterval::Second(1), - modulate: false, - max_random_delay: 0, - }; - - let current = { - let now: std::time::Duration = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("system time before Unix epoch"); - NaiveDateTime::from_timestamp_opt(now.as_secs() as i64 + 1, now.subsec_nanos()) - .unwrap() - .and_local_timezone(Local) - .unwrap() - }; - - let trigger = TimeTrigger::new(trigger_cfg); - let trigger_time = trigger.next_roll_time.read().unwrap(); - assert_eq!(*trigger_time, current); - // Using a delay of 1 will test the block, but will always add a 0 allowing us // to bypass the unknown of rand - let trigger_cfg = TimeTriggerConfig { - interval: TimeTriggerInterval::Second(1), - modulate: false, - max_random_delay: 1, - }; - - let current = { - let now: std::time::Duration = SystemTime::now() - .duration_since(UNIX_EPOCH) - .expect("system time before Unix epoch"); - NaiveDateTime::from_timestamp_opt(now.as_secs() as i64 + 1, now.subsec_nanos()) - .unwrap() - .and_local_timezone(Local) - .unwrap() - }; - - let trigger = TimeTrigger::new(trigger_cfg); - let trigger_time = trigger.next_roll_time.read().unwrap(); - assert_eq!(*trigger_time, current); + let delays = vec![0, 1]; + + for delay in delays { + let trigger_cfg = TimeTriggerConfig { + interval: TimeTriggerInterval::Second(1), + modulate: false, + max_random_delay: delay, + }; + + let current = { + let now: std::time::Duration = SystemTime::now() + .duration_since(UNIX_EPOCH) + .expect("system time before Unix epoch"); + NaiveDateTime::from_timestamp_opt(now.as_secs() as i64 + 1, now.subsec_nanos()) + .unwrap() + .and_local_timezone(Local) + .unwrap() + }; + + let trigger = TimeTrigger::new(trigger_cfg); + let trigger_time = trigger.next_roll_time.read().unwrap(); + assert_eq!(*trigger_time, current); + } } } diff --git a/src/config/file.rs b/src/config/file.rs index 4355c0f8..9f02ee26 100644 --- a/src/config/file.rs +++ b/src/config/file.rs @@ -92,7 +92,7 @@ pub enum FormatError { UnknownFormat, } -#[derive(Debug)] +#[derive(Debug, PartialEq)] enum Format { #[cfg(feature = "yaml_format")] Yaml, @@ -230,3 +230,105 @@ impl ConfigReloader { Ok(rate) } } + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn test_format_from_path() { + #[cfg(feature = "yaml_format")] + { + assert_eq!( + Format::from_path(Path::new("test.yml")).unwrap(), + Format::Yaml + ); + assert_eq!( + Format::from_path(Path::new("test.yaml")).unwrap(), + Format::Yaml + ); + } + + #[cfg(not(feature = "yaml_format"))] + assert!(Format::from_path(Path::new("test.yml")).is_err()); + + #[cfg(feature = "json_format")] + assert_eq!( + Format::from_path(Path::new("test.json")).unwrap(), + Format::Json + ); + #[cfg(not(feature = "json_format"))] + assert!(Format::from_path(Path::new("test.json")).is_err()); + + #[cfg(feature = "toml_format")] + assert_eq!( + Format::from_path(Path::new("test.toml")).unwrap(), + Format::Toml + ); + #[cfg(not(feature = "toml_format"))] + assert!(Format::from_path(Path::new("test.toml")).is_err()); + + // Unsupported Type + assert!(Format::from_path(Path::new("test.ini")).is_err()); + + // UnknownFormat + assert!(Format::from_path(Path::new("test")).is_err()); + } + + #[test] + fn test_parse_format() { + #[cfg(feature = "yaml_format")] + { + let cfg = 
read_config(Path::new("./test_cfgs/test.yml")).unwrap();
+            assert!(!cfg.is_empty());
+
+            let cfg = Format::Yaml.parse(&cfg);
+            assert!(cfg.is_ok());
+
+            // No actions to test at this time.
+            deserialize(&cfg.unwrap(), &Deserializers::default());
+        }
+
+        #[cfg(feature = "json_format")]
+        {
+            let cfg = read_config(Path::new("./test_cfgs/test.json")).unwrap();
+            assert!(!cfg.is_empty());
+
+            let cfg = Format::Json.parse(&cfg);
+            assert!(cfg.is_ok());
+
+            // No actions to test at this time.
+            deserialize(&cfg.unwrap(), &Deserializers::default());
+        }
+
+        #[cfg(feature = "toml_format")]
+        {
+            let cfg = read_config(Path::new("./test_cfgs/test.toml")).unwrap();
+            assert!(!cfg.is_empty());
+
+            let cfg = Format::Toml.parse(&cfg);
+            assert!(cfg.is_ok());
+
+            // No actions to test at this time.
+            deserialize(&cfg.unwrap(), &Deserializers::default());
+        }
+    }
+
+    #[test]
+    fn test_load_cfg() {
+        #[cfg(feature = "yaml_format")]
+        assert!(
+            load_config_file(Path::new("./test_cfgs/test.yml"), Deserializers::default()).is_ok()
+        );
+
+        #[cfg(feature = "json_format")]
+        assert!(
+            load_config_file(Path::new("./test_cfgs/test.json"), Deserializers::default()).is_ok()
+        );
+
+        #[cfg(feature = "toml_format")]
+        assert!(
+            load_config_file(Path::new("./test_cfgs/test.toml"), Deserializers::default()).is_ok()
+        );
+    }
+}
diff --git a/src/config/raw.rs b/src/config/raw.rs
index 387d06b6..b5967a4e 100644
--- a/src/config/raw.rs
+++ b/src/config/raw.rs
@@ -286,9 +286,9 @@ impl Deserializers {
     }
 
     /// Deserializes a value of a specific type and kind.
     pub fn deserialize<T>(&self, kind: &str, config: Value) -> anyhow::Result<Box<T>>
     where
-        T: Deserializable,
+        T: Deserializable + ?Sized,
     {
         match self.0.get::<KeyAdaptor<T>>().and_then(|m| m.get(kind)) {
             Some(b) => b.deserialize(config, self),
@@ -462,38 +462,17 @@ fn logger_additive_default() -> bool {
 #[cfg(test)]
 #[allow(unused_imports)]
 mod test {
-    use super::*;
-    use serde_value::Value;
-    use std::fs;
-
-    #[test]
-    #[cfg(feature = "threshold_filter")]
-    fn deserialize_filter() {
-        use crate::filter::{Filter, FilterConfig};
-
-        let d = Deserializers::default();
-        let filter = FilterConfig {
-            kind: "threshold".to_owned(),
-            config: Value::String("foobar".to_owned()),
-        };
+    use crate::filter::FilterConfig;
 
-        let res: Result<Box<dyn Filter>, anyhow::Error> =
-            d.deserialize(&filter.kind, filter.config.clone());
-        assert!(res.is_err());
-    }
+    use super::*;
+    use anyhow::anyhow;
+    use serde_test::{assert_de_tokens, assert_de_tokens_error, Token};
+    use serde_value::Value;
+    use std::{collections::BTreeMap, fs};
 
     #[test]
     #[cfg(all(feature = "yaml_format", feature = "threshold_filter"))]
-    fn full_deserialize() {
+    fn test_full_deserialize() {
         let cfg = r#"
 refresh_rate: 60 seconds
 
@@ -523,14 +502,44 @@ loggers:
 "#;
         let config = ::serde_yaml::from_str::<RawConfig>(cfg).unwrap();
         let errors = config.appenders_lossy(&Deserializers::new()).1;
-        println!("{:?}", errors);
         assert!(errors.is_empty());
+        assert_eq!(config.refresh_rate().unwrap(), Duration::new(60, 0));
     }
 
     #[test]
-    #[cfg(feature = "yaml_format")]
-    fn empty() {
-        ::serde_yaml::from_str::<RawConfig>("{}").unwrap();
+    #[cfg(all(feature = "yaml_format", feature =
"threshold_filter"))] + fn test_appenders_lossy_errs() { + let cfg = r#" +refresh_rate: 60 seconds + +appenders: + console: + kind: console + filters: + - kind: threshold + leve: debug + baz: + kind: file + pah: /tmp/baz.log + encoder: + pattern: "%m" + +root: + appenders: + - console + level: info + +loggers: + foo::bar::baz: + level: warn + appenders: + - baz + additive: false +"#; + let config = ::serde_yaml::from_str::(cfg).unwrap(); + let errors = config.appenders_lossy(&Deserializers::new()).1; + assert_eq!(errors.0.len(), 2); + // TODO look for a way to check the errors } #[cfg(windows)] @@ -542,7 +551,7 @@ loggers: #[test] #[cfg(feature = "yaml_format")] - fn readme_sample_file_is_ok() { + fn test_readme_sample_file_is_ok() { let readme = fs::read_to_string("./README.md").expect("README file exists"); let sample_file = &readme[readme .find("log4rs.yaml:") @@ -558,4 +567,47 @@ loggers: assert!(config.is_ok()); assert!(config::create_raw_config(config.unwrap()).is_ok()); } + + #[test] + #[cfg(feature = "yaml_format")] + fn test_empty_cfg() { + ::serde_yaml::from_str::("{}").unwrap(); + } + + #[test] + fn test_appender_errs() { + let errs = AppenderErrors { 0: vec![] }; + + assert!(errs.is_empty()); + + let mut errs = AppenderErrors { + 0: vec![DeserializingConfigError::Appender( + "example".to_owned(), + anyhow!("test"), + )], + }; + + // Reports to stderr + errs.handle(); + } + + #[test] + fn test_duration_deser() { + let duration = Duration::new(5, 0); + + assert_de_tokens( + &duration, + &[ + Token::Struct { + name: "Duration", + len: 2, + }, + Token::Str("secs"), + Token::U64(5), + Token::Str("nanos"), + Token::U64(0), + Token::StructEnd, + ], + ); + } } diff --git a/src/config/runtime.rs b/src/config/runtime.rs index 886d09f4..3d3ec175 100644 --- a/src/config/runtime.rs +++ b/src/config/runtime.rs @@ -420,7 +420,7 @@ fn check_logger_name(name: &str) -> Result<(), ConfigError> { } /// Errors encountered when validating a log4rs `Config`. -#[derive(Debug, Error)] +#[derive(Debug, Error, PartialEq)] #[error("Configuration errors: {0:#?}")] pub struct ConfigErrors(Vec); @@ -442,7 +442,7 @@ impl ConfigErrors { } /// An error validating a log4rs `Config`. -#[derive(Debug, Error)] +#[derive(Debug, Error, PartialEq)] pub enum ConfigError { /// Multiple appenders were registered with the same name. 
#[error("Duplicate appender name `{0}`")] @@ -490,11 +490,7 @@ mod test { ]; for &(ref name, expected) in &tests { - assert!( - expected == super::check_logger_name(name).is_ok(), - "{}", - name - ); + assert!(expected == check_logger_name(name).is_ok(), "{}", name); } } @@ -518,13 +514,20 @@ mod test { let filter = ThresholdFilter::new(LevelFilter::Warn); + let filters: Vec> = vec![ + Box::new(ThresholdFilter::new(LevelFilter::Trace)), + Box::new(ThresholdFilter::new(LevelFilter::Debug)), + Box::new(ThresholdFilter::new(LevelFilter::Info)), + ]; + let appender = Appender::builder() + .filters(filters) .filter(Box::new(filter)) .build("stdout", Box::new(stdout)); assert_eq!(appender.name(), "stdout"); assert!(!appender.filters().is_empty()); - assert_eq!(appender.filters().len(), 1); + assert_eq!(appender.filters().len(), 4); // Nothing to test on this right now let _appender = appender.appender(); @@ -545,4 +548,143 @@ mod test { assert_ne!(LevelFilter::Debug, root.level()); assert_eq!(LevelFilter::Warn, root.level()); } + + #[test] + fn test_root_appender() { + let appenders = vec!["stdout", "stderr"]; + + let mut root = Root::builder() + .appender("file") + .appenders(appenders) + .build(LevelFilter::Debug); + + // Test level set by builder and is accessible + assert_eq!(LevelFilter::Debug, root.level()); + + // Test appenders were added to builder + assert_eq!(root.appenders().len(), 3); + + // Test level set after root created and is accessible + root.set_level(LevelFilter::Warn); + assert_ne!(LevelFilter::Debug, root.level()); + assert_eq!(LevelFilter::Warn, root.level()); + } + + #[test] + fn test_simple_config() { + let root = Root::builder().build(LevelFilter::Debug); + let cfg = Config::builder().build(root); + + assert!(cfg.is_ok()); + + let mut cfg = cfg.unwrap(); + assert!(cfg.appenders().is_empty()); + assert!(cfg.loggers().is_empty()); + + // No test, just coverage + let _ = cfg.root(); + let _ = cfg.root_mut(); + } + + #[test] + #[cfg(feature = "console_appender")] + fn test_config_full() { + let root = Root::builder().build(LevelFilter::Debug); + let logger = Logger::builder().build("stdout", LevelFilter::Warn); + let appender = + Appender::builder().build("stdout0", Box::new(ConsoleAppender::builder().build())); + + let loggers = vec![ + Logger::builder().build("stdout0", LevelFilter::Trace), + Logger::builder().build("stdout1", LevelFilter::Debug), + Logger::builder().build("stdout2", LevelFilter::Info), + ]; + + let appenders = vec![ + Appender::builder().build("stdout1", Box::new(ConsoleAppender::builder().build())), + Appender::builder().build("stderr", Box::new(ConsoleAppender::builder().build())), + ]; + + let cfg = Config::builder() + .logger(logger) + .loggers(loggers) + .appender(appender) + .appenders(appenders) + .build(root); + + let cfg = cfg.unwrap(); + assert_eq!(cfg.appenders().len(), 3); + assert_eq!(cfg.loggers().len(), 4); + } + + #[test] + fn test_dup_logger() { + let root = Root::builder().build(LevelFilter::Debug); + let loggers = vec![ + Logger::builder().build("stdout", LevelFilter::Trace), + Logger::builder().build("stdout", LevelFilter::Debug), + ]; + + let cfg = Config::builder().loggers(loggers).build(root); + + let error = ConfigErrors { + 0: vec![ConfigError::DuplicateLoggerName("stdout".to_owned())], + }; + + assert_eq!(cfg.unwrap_err(), error); + } + + #[test] + #[cfg(feature = "console_appender")] + fn test_dup_appender() { + let root = Root::builder().build(LevelFilter::Debug); + + let appenders = vec![ + 
Appender::builder().build("stdout", Box::new(ConsoleAppender::builder().build())), + Appender::builder().build("stdout", Box::new(ConsoleAppender::builder().build())), + ]; + + let cfg = Config::builder().appenders(appenders).build(root); + + let error = ConfigErrors { + 0: vec![ConfigError::DuplicateAppenderName("stdout".to_owned())], + }; + + assert_eq!(cfg.unwrap_err(), error); + } + + #[test] + fn test_nonexist_appender() { + let root = Root::builder().appender("file").build(LevelFilter::Debug); + + let logger = Logger::builder() + .appender("stdout") + .build("stdout", LevelFilter::Trace); + + let cfg = Config::builder().logger(logger).build(root); + + let error = ConfigErrors { + 0: vec![ + ConfigError::NonexistentAppender("file".to_owned()), + ConfigError::NonexistentAppender("stdout".to_owned()), + ], + }; + + assert_eq!(cfg.unwrap_err(), error); + } + + #[test] + fn test_logger_name_cfg() { + let root = Root::builder().build(LevelFilter::Debug); + + let logger = Logger::builder().build("", LevelFilter::Trace); + + let cfg = Config::builder().logger(logger).build(root); + + let error = ConfigErrors { + 0: vec![ConfigError::InvalidLoggerName("".to_owned())], + }; + + assert_eq!(cfg.unwrap_err(), error); + } } diff --git a/src/encode/pattern/mod.rs b/src/encode/pattern/mod.rs index c2557e18..823f1132 100644 --- a/src/encode/pattern/mod.rs +++ b/src/encode/pattern/mod.rs @@ -747,17 +747,11 @@ mod tests { #[cfg(feature = "config_parsing")] use crate::config::Deserializers; #[cfg(feature = "simple_writer")] - use crate::encode::Write as EncodeWrite; - #[cfg(feature = "simple_writer")] - use crate::encode::{writer::simple::SimpleWriter, Encode}; + use crate::encode::{writer::simple::SimpleWriter, Encode, Write as EncodeWrite}; #[cfg(feature = "simple_writer")] use log::{Level, Record}; #[cfg(feature = "simple_writer")] - use std::io::Write; - #[cfg(feature = "simple_writer")] - use std::process; - #[cfg(feature = "simple_writer")] - use std::thread; + use std::{io::Write, process, thread}; use super::*; @@ -1074,15 +1068,14 @@ mod tests { let mut buf = vec![]; let mut w = SimpleWriter(&mut buf); - let remaining = 2; let mut w = MaxWidthWriter { - remaining: remaining, + remaining: 2, w: &mut w, }; let res = w.write(b"test write"); assert!(res.is_ok()); - assert_eq!(res.unwrap(), remaining); + assert_eq!(res.unwrap(), 2); assert_eq!(w.remaining, 0); assert!(w.flush().is_ok()); assert!(w.set_style(&Style::new()).is_ok()); @@ -1228,100 +1221,33 @@ mod tests { _ => assert!(false), } - // Test invalid tz - let pattern = "[{d(%Y-%m-%d %H:%M:%S %Z)(zulu)}]"; - let chunks: Vec = Parser::new(pattern).map(From::from).collect(); - match chunks.get(1).unwrap() { - Chunk::Error(err) => assert!(err.contains("invalid timezone")), - _ => assert!(false), - } - - // Test invalid tz - let pattern = "[{d(%Y-%m-%d %H:%M:%S %Z)({l})}]"; - let chunks: Vec = Parser::new(pattern).map(From::from).collect(); - match chunks.get(1).unwrap() { - Chunk::Error(err) => assert!(err.contains("invalid timezone")), - _ => assert!(false), - } - - // Test missing tz - let pattern = "[{d(%Y-%m-%d %H:%M:%S %Z)()}]"; - let chunks: Vec = Parser::new(pattern).map(From::from).collect(); - match chunks.get(1).unwrap() { - Chunk::Error(err) => assert!(err.contains("invalid timezone")), - _ => assert!(false), - } - - // Test extra highlight arg - let pattern = "[{h({l})({M}):<5.5}]"; - let chunks: Vec = Parser::new(pattern).map(From::from).collect(); - match chunks.get(1).unwrap() { - Chunk::Error(err) => 
assert!(err.contains("expected exactly one argument")),
-            _ => assert!(false),
-        }
-
-        // Test extra Debug/Release arg
-        let pattern = "[{D({l})({M}):<5.5}{R({l})({M}):<5.5}]";
-        let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
-        match chunks.get(1).unwrap() {
-            Chunk::Error(err) => assert!(err.contains("expected exactly one argument")),
-            _ => assert!(false),
-        }
-
-        // Test extra mdc arg
-        let pattern = "[{X(user_id)(foobar)(test):<5.5}]";
-        let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
-        match chunks.get(1).unwrap() {
-            Chunk::Error(err) => assert!(err.contains("expected at most two arguments")),
-            _ => assert!(false),
-        }
-
-        // Test mdc error
-        let pattern = "[{X({l user_id):<5.5}]";
-        let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
-        match chunks.get(1).unwrap() {
-            Chunk::Error(err) => assert!(err.contains("expected '}'")),
-            _ => assert!(false),
-        }
-
-        // Test mdc invalid key
-        let pattern = "[{X({l} user_id):<5.5}]";
-        let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
-        match chunks.get(1).unwrap() {
-            Chunk::Error(err) => assert!(err.contains("invalid MDC key")),
-            _ => assert!(false),
-        }
-
-        // Test missing mdc key
-        let pattern = "[{X:<5.5}]";
-        let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
-        match chunks.get(1).unwrap() {
-            Chunk::Error(err) => assert!(err.contains("missing MDC key")),
-            _ => assert!(false),
-        }
-
-        // Test mdc default error
-        let pattern = "[{X(user_id)({l):<5.5}]";
-        let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
-        match chunks.get(1).unwrap() {
-            Chunk::Error(err) => assert!(err.contains("expected '}'")),
-            _ => assert!(false),
-        }
-
-        // Test mdc default unexpected arg
-        let pattern = "[{X(user_id)({l}):<5.5}]";
-        let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
-        match chunks.get(1).unwrap() {
-            Chunk::Error(err) => assert!(err.contains("invalid MDC default")),
-            _ => assert!(false),
-        }
-
-        // Test missing mdc default
-        let pattern = "[{X(user_id)():<5.5} {M}]";
-        let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
-        match chunks.get(1).unwrap() {
-            Chunk::Error(err) => assert!(err.contains("invalid MDC default")),
-            _ => assert!(false),
+        let tests = vec![
+            ("[{d(%Y-%m-%d %H:%M:%S %Z)(zulu)}]", "invalid timezone"),
+            ("[{d(%Y-%m-%d %H:%M:%S %Z)({l})}]", "invalid timezone"),
+            ("[{d(%Y-%m-%d %H:%M:%S %Z)()}]", "invalid timezone"),
+            ("[{h({l})({M}):<5.5}]", "expected exactly one argument"),
+            (
+                "[{D({l})({M}):<5.5}{R({l})({M}):<5.5}]",
+                "expected exactly one argument",
+            ),
+            (
+                "[{X(user_id)(foobar)(test):<5.5}]",
+                "expected at most two arguments",
+            ),
+            ("[{X({l user_id):<5.5}]", "expected '}'"),
+            ("[{X({l} user_id):<5.5}]", "invalid MDC key"),
+            ("[{X:<5.5}]", "missing MDC key"),
+            ("[{X(user_id)({l):<5.5}]", "expected '}'"),
+            ("[{X(user_id)({l}):<5.5}]", "invalid MDC default"),
+            ("[{X(user_id)():<5.5} {M}]", "invalid MDC default"),
+        ];
+
+        for (pattern, error_msg) in tests {
+            let chunks: Vec<Chunk> = Parser::new(pattern).map(From::from).collect();
+            match chunks.get(1).unwrap() {
+                Chunk::Error(err) => assert!(err.contains(error_msg)),
+                _ => assert!(false),
+            }
         }
 
         // Test expected 1 arg
diff --git a/src/encode/pattern/parser.rs b/src/encode/pattern/parser.rs
index d236ba20..397f5d58 100644
--- a/src/encode/pattern/parser.rs
+++ b/src/encode/pattern/parser.rs
@@ -355,12 +355,4 @@ mod test {
             _ => false,
         });
     }
-
-    // #[test]
-    // fn test_bad_alignment_int() {
-    //     let pattern = "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}";
{h({l}):<5.5} {M}] {m}{n}"; - // let mut parser = Parser::new(pattern); - - // println!("{:#?}", parser.argument()); - // } } diff --git a/src/encode/writer/console.rs b/src/encode/writer/console.rs index 6cf255bf..e0e1bdda 100644 --- a/src/encode/writer/console.rs +++ b/src/encode/writer/console.rs @@ -442,7 +442,7 @@ mod test { // Unable to test the non locked Console as by definition, the unlocked // console results in race conditions. Codecov tooling does not seem to // see this test as coverage of the ConsoleWritterLock or WriterLock - // class, however, it should completely cover both. + // class, however, it should completely cover either. #[test] fn test_writers_lock() { let w = match ConsoleWriter::stdout() { diff --git a/src/filter/mod.rs b/src/filter/mod.rs index 2d0bdf55..09566940 100644 --- a/src/filter/mod.rs +++ b/src/filter/mod.rs @@ -82,27 +82,52 @@ impl<'de> de::Deserialize<'de> for FilterConfig { #[cfg(test)] mod test { - #[cfg(all(feature = "config_parsing", feature = "yaml_format"))] + #[cfg(feature = "config_parsing")] use super::*; + #[cfg(feature = "config_parsing")] + use serde_test::{assert_de_tokens, assert_de_tokens_error, Token}; #[test] - #[cfg(all(feature = "config_parsing", feature = "yaml_format"))] + #[cfg(feature = "config_parsing")] fn test_cfg_deserializer() { // This point in the config should have already parsed out the filters portion of the config. - let cfg_str = " - filters: - - kind: threshold - level: error - "; - let filter: Result = serde_yaml::from_str(cfg_str); - assert!(filter.is_err()); - - let cfg_str = " - kind: threshold - level: error - "; - - let filter: Result = serde_yaml::from_str(cfg_str); - assert!(filter.is_ok()); + + // use serde_test::assert_de_tokens_error; + // let cfg_str = " + // filters: + // - kind: threshold + // level: error + // "; + // let filter: Result = serde_yaml::from_str(cfg_str); + // assert!(filter.is_err()); + + let filter = FilterConfig { + kind: "threshold".to_owned(), + config: Value::Map({ + let mut map = BTreeMap::new(); + map.insert( + Value::String("level".to_owned()), + Value::String("error".to_owned()), + ); + map + }), + }; + + let mut cfg = vec![ + Token::Struct { + name: "FilterConfig", + len: 2, + }, + Token::Str("kind"), + Token::Str("threshold"), + Token::Str("level"), + Token::Str("error"), + Token::StructEnd, + ]; + + assert_de_tokens(&filter, &cfg); + + cfg[1] = Token::Str("knd"); + assert_de_tokens_error::(&cfg, "missing field `kind`"); } } diff --git a/src/filter/threshold.rs b/src/filter/threshold.rs index 567b2bf9..8878873f 100644 --- a/src/filter/threshold.rs +++ b/src/filter/threshold.rs @@ -86,52 +86,30 @@ mod test { level: LevelFilter::Off, }; - assert_de_tokens( - &filter_cfg, - &[ - Token::Struct { - name: "ThresholdFilterConfig", - len: 1, - }, - Token::Str("level"), - Token::Enum { - name: "LevelFilter", - }, - Token::Str("Off"), - Token::Unit, - Token::StructEnd, - ], - ); - - assert_de_tokens_error::( - &[ - Token::Struct { - name: "ThresholdFilterConfig", - len: 1, - }, - Token::Str("leel"), - Token::Enum { - name: "LevelFilter", - }, - Token::Str("Off"), - Token::Unit, - Token::StructEnd, - ], - "missing field `level`", - ); - + let mut cfg = vec![ + Token::Struct { + name: "ThresholdFilterConfig", + len: 1, + }, + Token::Str("level"), + Token::Enum { + name: "LevelFilter", + }, + Token::Str("Off"), + Token::Unit, + Token::StructEnd, + ]; + + assert_de_tokens(&filter_cfg, &cfg); + + cfg[1] = Token::Str("leel"); + assert_de_tokens_error::(&cfg, "missing field `level`"); + + 
cfg[1] = Token::Str("level"); + cfg[3] = Token::Str("On"); + cfg.remove(4); // No Unit on this one as the Option is invalid assert_de_tokens_error::( - &[ - Token::Struct { - name: "ThresholdFilterConfig", - len: 1, - }, - Token::Str("level"), - Token::Enum { name: "LevelFilter" }, - Token::Str("On"), - // No Unit on this one as the Option is invalid - Token::StructEnd, - ], + &cfg, "unknown variant `On`, expected one of `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`", ); } diff --git a/src/lib.rs b/src/lib.rs index 96b3b992..bc73f94c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -471,7 +471,7 @@ mod test { #[test] #[cfg(all(feature = "config_parsing", feature = "json_format"))] - fn init_from_raw_config() { + fn test_init_from_raw_config() { let dir = tempfile::tempdir().unwrap(); let path = dir.path().join("append.log"); @@ -507,7 +507,7 @@ mod test { } #[test] - fn enabled() { + fn test_logger_enabled() { let root = config::Root::builder().build(LevelFilter::Debug); let mut config = config::Config::builder(); let logger = config::Logger::builder().build("foo::bar", LevelFilter::Trace); diff --git a/src/priv_io.rs b/src/priv_io.rs index df7d3c06..1d093188 100644 --- a/src/priv_io.rs +++ b/src/priv_io.rs @@ -89,3 +89,23 @@ impl<'a> io::Write for StdWriterLock<'a> { } } } + +#[cfg(test)] +mod test { + use std::io::Write; + + use super::*; + + #[test] + fn test_writer_lock() { + let writer = StdWriter::stderr(); + let mut writer = writer.lock(); + + assert_eq!(writer.write(b"test stdwriter ; ").unwrap(), 17); + assert!(writer.write_all(b"test stdwriter ; ").is_ok()); + assert!(writer + .write_fmt(format_args!("{}\n", "test stdwriter")) + .is_ok()); + assert!(writer.flush().is_ok()); + } +} diff --git a/test_cfgs/malformed_appender.yml b/test_cfgs/malformed_appender.yml new file mode 100755 index 00000000..f9db7d57 --- /dev/null +++ b/test_cfgs/malformed_appender.yml @@ -0,0 +1,13 @@ +refresh_rate: 5 seconds + +appenders: + file: + kind: file + pah: "log/file.log" + encoder: + pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + +root: + level: info + appenders: + - file diff --git a/test_cfgs/test.json b/test_cfgs/test.json new file mode 100644 index 00000000..b93f08c9 --- /dev/null +++ b/test_cfgs/test.json @@ -0,0 +1,51 @@ +{ + "refresh_rate": "5 seconds", + "appenders": { + "stdout": { + "kind": "console", + "encoder": { + "pattern": "{d(%+)(utc)} [{f}:{L}] {h({l})} {M}:{m}{n}" + }, + "filters": [ + { + "kind": "threshold", + "level": "info" + } + ] + }, + "file": { + "kind": "file", + "path": "log/file.log", + "encoder": { + "pattern": "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + } + }, + "rollingfile": { + "kind": "rolling_file", + "path": "log/rolling_file.log", + "encoder": { + "pattern": "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + }, + "policy": { + "trigger": { + "kind": "time", + "interval": "1 minute" + }, + "roller": { + "kind": "fixed_window", + "pattern": "log/old-rolling_file-{}.log", + "base": 0, + "count": 2 + } + } + } + }, + "root": { + "level": "info", + "appenders": [ + "stdout", + "file", + "rollingfile" + ] + } +} diff --git a/test_cfgs/test.toml b/test_cfgs/test.toml new file mode 100644 index 00000000..dc1f10da --- /dev/null +++ b/test_cfgs/test.toml @@ -0,0 +1,39 @@ +refresh_rate = "5 seconds" + +[appenders.stdout] +kind = "console" + + [appenders.stdout.encoder] + pattern = "{d(%+)(utc)} [{f}:{L}] {h({l})} {M}:{m}{n}" + + [[appenders.stdout.filters]] + kind = "threshold" + level = "info" + +[appenders.file] +kind = "file" 
+path = "log/file.log" + + [appenders.file.encoder] + pattern = "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + +[appenders.rollingfile] +kind = "rolling_file" +path = "log/rolling_file.log" + + [appenders.rollingfile.encoder] + pattern = "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + +[appenders.rollingfile.policy.trigger] +kind = "time" +interval = "1 minute" + +[appenders.rollingfile.policy.roller] +kind = "fixed_window" +pattern = "log/old-rolling_file-{}.log" +base = 0 +count = 2 + +[root] +level = "info" +appenders = [ "stdout", "file", "rollingfile" ] diff --git a/test_cfgs/test.yml b/test_cfgs/test.yml new file mode 100644 index 00000000..47d18ccd --- /dev/null +++ b/test_cfgs/test.yml @@ -0,0 +1,36 @@ +refresh_rate: 5 seconds + +appenders: + stdout: + kind: console + encoder: + pattern: "{d(%+)(utc)} [{f}:{L}] {h({l})} {M}:{m}{n}" + filters: + - kind: threshold + level: info + file: + kind: file + path: "log/file.log" + encoder: + pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + rollingfile: + kind: rolling_file + path: "log/rolling_file.log" + encoder: + pattern: "[{d(%Y-%m-%dT%H:%M:%S%.6f)} {h({l}):<5.5} {M}] {m}{n}" + policy: + trigger: + kind: time + interval: 1 minute + roller: + kind: fixed_window + pattern: "log/old-rolling_file-{}.log" + base: 0 + count: 2 + +root: + level: info + appenders: + - stdout + - file + - rollingfile diff --git a/tests/init_cfg_error_handler.rs b/tests/init_cfg_error_handler.rs new file mode 100755 index 00000000..29a0ba3b --- /dev/null +++ b/tests/init_cfg_error_handler.rs @@ -0,0 +1,23 @@ +#[test] +#[cfg(all(feature = "config_parsing", feature = "yaml_format"))] +fn test_cfg_err_hdlr() { + use std::{ + io::{self, Write}, + path::Path, + }; + + let cfg = log4rs::config::load_config_file( + Path::new("./test_cfgs/test.yml"), + log4rs::config::Deserializers::default(), + ); + assert!(cfg.is_ok()); + let cfg = cfg.unwrap(); + + let res = log4rs::config::init_config_with_err_handler( + cfg, + Box::new(|e: &anyhow::Error| { + let _ = writeln!(io::stderr(), "log4rs: {}", e); + }), + ); + assert!(res.is_ok()); +} diff --git a/tests/init_json_config.rs b/tests/init_json_config.rs new file mode 100755 index 00000000..7e6287c5 --- /dev/null +++ b/tests/init_json_config.rs @@ -0,0 +1,12 @@ +#[test] +#[cfg(all(feature = "config_parsing", feature = "json_format"))] +fn test_init_json_cfg() { + use log4rs; + use std::path::Path; + + assert!(log4rs::init_file( + Path::new("./test_cfgs/test.json"), + log4rs::config::Deserializers::default() + ) + .is_ok()); +} diff --git a/tests/init_malformed_config.rs b/tests/init_malformed_config.rs new file mode 100755 index 00000000..c6b9e36d --- /dev/null +++ b/tests/init_malformed_config.rs @@ -0,0 +1,14 @@ +#[test] +#[cfg(all(feature = "config_parsing", feature = "yaml_format"))] +fn test_malformed_appenders() { + use std::fs; + + let config_str = fs::read_to_string("test_cfgs/malformed_appender.yml").unwrap(); + let cfg = ::serde_yaml::from_str::(&config_str); + + assert!(cfg.is_ok()); + let cfg = cfg.unwrap(); + + let res = log4rs::config::create_raw_config(cfg); + assert!(res.is_err()); +} diff --git a/tests/init_toml_config.rs b/tests/init_toml_config.rs new file mode 100755 index 00000000..f5b576ca --- /dev/null +++ b/tests/init_toml_config.rs @@ -0,0 +1,12 @@ +#[test] +#[cfg(all(feature = "config_parsing", feature = "toml_format"))] +fn test_init_toml_cfg() { + use log4rs; + use std::path::Path; + + assert!(log4rs::init_file( + Path::new("./test_cfgs/test.toml"), + 
log4rs::config::Deserializers::default() + ) + .is_ok()); +} diff --git a/tests/init_yaml_config.rs b/tests/init_yaml_config.rs new file mode 100755 index 00000000..b2a5f4da --- /dev/null +++ b/tests/init_yaml_config.rs @@ -0,0 +1,12 @@ +#[test] +#[cfg(all(feature = "config_parsing", feature = "yaml_format"))] +fn test_init_yaml_cfg() { + use log4rs; + use std::path::Path; + + assert!(log4rs::init_file( + Path::new("./test_cfgs/test.yml"), + log4rs::config::Deserializers::default() + ) + .is_ok()); +}
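For context, the three init_*_config integration tests above exercise the same startup path an application would use. A minimal sketch of consuming one of the new config files (not part of this diff; it assumes only the existing `log4rs::init_file` API and the `test_cfgs/test.yml` added above):

```rust
use log::info;

fn main() {
    // Parse test_cfgs/test.yml, build its appenders, and install log4rs as
    // the global logger; a malformed config or appender returns an Err.
    log4rs::init_file("test_cfgs/test.yml", log4rs::config::Deserializers::default())
        .expect("valid log4rs config");

    info!("logging initialized"); // routed to stdout, file, and rollingfile
}
```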