use humantime;
use log::LevelFilter;
use serde::de::{self, Deserialize as SerdeDeserialize, DeserializeOwned};
use serde_value::Value;
use std::borrow::ToOwned;
use std::collections::HashMap;
use std::error;
use std::fmt;
use std::marker::PhantomData;
use std::sync::Arc;
use std::time::Duration;
use typemap::{Key, ShareCloneMap};
use append::AppenderConfig;
use config;
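/// A trait implemented by traits which are deserializable from a config file,
/// such as the appender and filter traits.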
pub trait Deserializable: 'static {
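/// Returns a name for the trait suitable for display in error messages
/// (see `Deserializers::deserialize`).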
fn name() -> &'static str;
}
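/// A trait for objects that can deserialize log4rs components (such as
/// appenders and filters) out of a raw config `Value`.
///
/// # Example
///
/// A minimal sketch of a custom appender deserializer. The paths assume the
/// public `log4rs::file` and `log4rs::append` modules, and `NullAppender` is a
/// hypothetical appender type (assumed to implement `Append`) that is not part
/// of this crate and not shown here.
///
/// ```rust,ignore
/// use log4rs::append::Append;
/// use log4rs::file::{Deserialize, Deserializers};
/// use std::error;
///
/// // Configuration accepted under `kind: null`; this appender takes no
/// // options. Assumes `#[macro_use] extern crate serde_derive;` at the
/// // crate root.
/// #[derive(Deserialize)]
/// struct NullAppenderConfig {}
///
/// struct NullAppenderDeserializer;
///
/// impl Deserialize for NullAppenderDeserializer {
///     type Trait = Append;
///     type Config = NullAppenderConfig;
///
///     fn deserialize(
///         &self,
///         _config: NullAppenderConfig,
///         _deserializers: &Deserializers,
///     ) -> Result<Box<Append>, Box<error::Error + Sync + Send>> {
///         Ok(Box::new(NullAppender))
///     }
/// }
/// ```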
pub trait Deserialize: Send + Sync + 'static {
type Trait: ?Sized + Deserializable;
type Config: DeserializeOwned;
fn deserialize(
&self,
config: Self::Config,
deserializers: &Deserializers,
) -> Result<Box<Self::Trait>, Box<error::Error + Sync + Send>>;
}
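// An object-safe version of `Deserialize`: the typed `Config` associated type
// is replaced with a raw `serde_value::Value` so implementations can be stored
// as trait objects in the registry.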
trait ErasedDeserialize: Send + Sync + 'static {
type Trait: ?Sized;
fn deserialize(
&self,
config: Value,
deserializers: &Deserializers,
) -> Result<Box<Self::Trait>, Box<error::Error + Sync + Send>>;
}
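// Adapts a typed `Deserialize` implementation to `ErasedDeserialize` by
// deserializing the raw `Value` into its `Config` type before delegating.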
struct DeserializeEraser<T>(T);
impl<T> ErasedDeserialize for DeserializeEraser<T>
where
T: Deserialize,
{
type Trait = T::Trait;
fn deserialize(
&self,
config: Value,
deserializers: &Deserializers,
) -> Result<Box<Self::Trait>, Box<error::Error + Sync + Send>> {
let config = config.deserialize_into()?;
self.0.deserialize(config, deserializers)
}
}
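// Typemap key parameterized by the produced trait; its value maps `kind`
// strings to the erased deserializers registered for that trait.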
struct KeyAdaptor<T: ?Sized>(PhantomData<T>);
impl<T: ?Sized + 'static> Key for KeyAdaptor<T> {
type Value = HashMap<String, Arc<ErasedDeserialize<Trait = T>>>;
}
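/// A registry of `Deserialize`rs, keyed by the trait they produce and the
/// `kind` string used to select them in a config file.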
#[derive(Clone)]
pub struct Deserializers(ShareCloneMap);
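// The default registry contains the deserializers built into this crate, each
// gated on its corresponding Cargo feature.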
impl Default for Deserializers {
fn default() -> Deserializers {
let mut d = Deserializers::empty();
#[cfg(feature = "console_appender")]
d.insert("console", ::append::console::ConsoleAppenderDeserializer);
#[cfg(feature = "file_appender")]
d.insert("file", ::append::file::FileAppenderDeserializer);
#[cfg(feature = "rolling_file_appender")]
d.insert(
"rolling_file",
::append::rolling_file::RollingFileAppenderDeserializer,
);
#[cfg(feature = "compound_policy")]
d.insert(
"compound",
::append::rolling_file::policy::compound::CompoundPolicyDeserializer,
);
#[cfg(feature = "delete_roller")]
d.insert(
"delete",
::append::rolling_file::policy::compound::roll::delete::DeleteRollerDeserializer,
);
#[cfg(feature = "fixed_window_roller")]
d.insert(
"fixed_window",
::append::rolling_file::policy::compound::roll::fixed_window::FixedWindowRollerDeserializer,
);
#[cfg(feature = "size_trigger")]
d.insert(
"size",
::append::rolling_file::policy::compound::trigger::size::SizeTriggerDeserializer,
);
#[cfg(feature = "json_encoder")]
d.insert("json", ::encode::json::JsonEncoderDeserializer);
#[cfg(feature = "pattern_encoder")]
d.insert("pattern", ::encode::pattern::PatternEncoderDeserializer);
#[cfg(feature = "threshold_filter")]
d.insert(
"threshold",
::filter::threshold::ThresholdFilterDeserializer,
);
d
}
}
impl Deserializers {
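/// Creates a `Deserializers` with the default mappings (same as
/// `Deserializers::default()`).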
pub fn new() -> Deserializers {
Deserializers::default()
}
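/// Creates a `Deserializers` with no mappings.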
pub fn empty() -> Deserializers {
Deserializers(ShareCloneMap::custom())
}
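/// Adds a mapping from the specified `kind` to a deserializer.
///
/// A hypothetical usage sketch, reusing the `NullAppenderDeserializer` from
/// the `Deserialize` docs above:
///
/// ```rust,ignore
/// let mut deserializers = Deserializers::new();
/// deserializers.insert("null", NullAppenderDeserializer);
/// ```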
pub fn insert<T>(&mut self, kind: &str, deserializer: T)
where
T: Deserialize,
{
self.0
.entry::<KeyAdaptor<T::Trait>>()
.or_insert_with(HashMap::new)
.insert(kind.to_owned(), Arc::new(DeserializeEraser(deserializer)));
}
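/// Deserializes a value of trait `T` using the deserializer registered for
/// `kind`, or returns an error if no such deserializer is registered.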
pub fn deserialize<T: ?Sized>(
&self,
kind: &str,
config: Value,
) -> Result<Box<T>, Box<error::Error + Sync + Send>>
where
T: Deserializable,
{
match self.0.get::<KeyAdaptor<T>>().and_then(|m| m.get(kind)) {
Some(b) => b.deserialize(config, self),
None => Err(format!(
"no {} deserializer for kind `{}` registered",
T::name(),
kind
).into()),
}
}
}
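/// An error deserializing an appender, or a filter attached to an appender,
/// into a log4rs `Config`.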
#[derive(Debug)]
pub struct Error(ErrorKind, Box<error::Error + Sync + Send>);
#[derive(Debug)]
enum ErrorKind {
Appender(String),
Filter(String),
}
impl fmt::Display for Error {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match self.0 {
ErrorKind::Appender(ref name) => {
write!(fmt, "error deserializing appender {}: {}", name, self.1)
}
ErrorKind::Filter(ref name) => write!(
fmt,
"error deserializing filter attached to appender {}: {}",
name,
self.1
),
}
}
}
impl error::Error for Error {
fn description(&self) -> &str {
"error deserializing a log4rs `Config`"
}
fn cause(&self) -> Option<&error::Error> {
Some(&*self.1)
}
}
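/// A raw deserializable log4rs configuration in XML form; converted into a
/// `RawConfig` via its `From` implementation.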
#[cfg(feature = "xml_format")]
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RawConfigXml {
#[serde(deserialize_with = "de_duration", default)] refresh_rate: Option<Duration>,
#[serde(default)] root: Root,
#[serde(default)] appenders: HashMap<String, AppenderConfig>,
#[serde(rename = "loggers", default)] loggers: LoggersXml,
}
#[cfg(feature = "xml_format")]
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct LoggersXml {
#[serde(rename = "logger", default)] loggers: Vec<LoggerXml>,
}
#[cfg(feature = "xml_format")]
impl Default for LoggersXml {
fn default() -> Self {
Self { loggers: vec![] }
}
}
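/// A raw deserializable log4rs configuration, mirroring the layout of a
/// config file (see the YAML example in the tests below).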
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
pub struct RawConfig {
#[serde(deserialize_with = "de_duration", default)] refresh_rate: Option<Duration>,
#[serde(default)] root: Root,
#[serde(default)] appenders: HashMap<String, AppenderConfig>,
#[serde(default)] loggers: HashMap<String, Logger>,
}
impl RawConfig {
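/// Returns the root logger configuration.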
pub fn root(&self) -> config::Root {
config::Root::builder()
.appenders(self.root.appenders.clone())
.build(self.root.level)
}
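/// Returns the configurations of the named loggers.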
pub fn loggers(&self) -> Vec<config::Logger> {
self.loggers
.iter()
.map(|(name, logger)| {
config::Logger::builder()
.appenders(logger.appenders.clone())
.additive(logger.additive)
.build(name.clone(), logger.level)
})
.collect()
}
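/// Returns the appender configurations, collecting per-appender and
/// per-filter deserialization errors instead of aborting on the first one.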
pub fn appenders_lossy(
&self,
deserializers: &Deserializers,
) -> (Vec<config::Appender>, Vec<Error>) {
let mut appenders = vec![];
let mut errors = vec![];
for (name, appender) in &self.appenders {
let mut builder = config::Appender::builder();
for filter in &appender.filters {
match deserializers.deserialize(&filter.kind, filter.config.clone()) {
Ok(filter) => builder = builder.filter(filter),
Err(e) => errors.push(Error(ErrorKind::Filter(name.clone()), e)),
}
}
match deserializers.deserialize(&appender.kind, appender.config.clone()) {
Ok(appender) => appenders.push(builder.build(name.clone(), appender)),
Err(e) => errors.push(Error(ErrorKind::Appender(name.clone()), e)),
}
}
(appenders, errors)
}
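/// Returns the requested refresh rate, if one was specified.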
pub fn refresh_rate(&self) -> Option<Duration> {
self.refresh_rate
}
}
#[cfg(feature = "xml_format")]
impl ::std::convert::From<RawConfigXml> for RawConfig {
fn from(cfg: RawConfigXml) -> Self {
Self {
refresh_rate: cfg.refresh_rate,
root: cfg.root,
appenders: cfg.appenders,
loggers: cfg.loggers
.loggers
.into_iter()
.map(|l| (l.name.clone(), l.into()))
.collect(),
}
}
}
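// Deserializes an optional duration from a human-readable string such as
// "30 seconds", using `humantime::parse_duration`.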
fn de_duration<'de, D>(d: D) -> Result<Option<Duration>, D::Error>
where
D: de::Deserializer<'de>,
{
struct S(Duration);
impl<'de2> de::Deserialize<'de2> for S {
fn deserialize<D>(d: D) -> Result<S, D::Error>
where
D: de::Deserializer<'de2>,
{
struct V;
impl<'de3> de::Visitor<'de3> for V {
type Value = S;
fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("a duration")
}
fn visit_str<E>(self, v: &str) -> Result<S, E>
where
E: de::Error,
{
humantime::parse_duration(v).map(S).map_err(E::custom)
}
}
d.deserialize_any(V)
}
}
Option::<S>::deserialize(d).map(|r| r.map(|s| s.0))
}
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
struct Root {
#[serde(default = "root_level_default")]
level: LevelFilter,
#[serde(default)] appenders: Vec<String>,
}
impl Default for Root {
fn default() -> Root {
Root {
level: root_level_default(),
appenders: vec![],
}
}
}
fn root_level_default() -> LevelFilter {
LevelFilter::Debug
}
#[cfg(feature = "xml_format")]
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
struct LoggerXml {
name: String,
level: LevelFilter,
#[serde(default)] appenders: Vec<String>,
#[serde(default = "logger_additive_default")] additive: bool,
}
#[derive(Deserialize)]
#[serde(deny_unknown_fields)]
struct Logger {
level: LevelFilter,
#[serde(default)] appenders: Vec<String>,
#[serde(default = "logger_additive_default")] additive: bool,
}
#[cfg(feature = "xml_format")]
impl ::std::convert::From<LoggerXml> for Logger {
fn from(logger_xml: LoggerXml) -> Self {
Logger {
level: logger_xml.level,
appenders: logger_xml.appenders,
additive: logger_xml.additive,
}
}
}
fn logger_additive_default() -> bool {
true
}
#[cfg(test)]
#[allow(unused_imports)]
mod test {
use super::*;
#[test]
#[cfg(all(feature = "yaml_format", feature = "threshold_filter"))]
fn full_deserialize() {
let cfg = r#"
refresh_rate: 60 seconds
appenders:
console:
kind: console
filters:
- kind: threshold
level: debug
baz:
kind: file
path: /tmp/baz.log
encoder:
pattern: "%m"
root:
appenders:
- console
level: info
loggers:
foo::bar::baz:
level: warn
appenders:
- baz
additive: false
"#;
let config = ::serde_yaml::from_str::<RawConfig>(cfg).unwrap();
let errors = config.appenders_lossy(&Deserializers::new()).1;
println!("{:?}", errors);
assert!(errors.is_empty());
}
#[test]
#[cfg(feature = "yaml_format")]
fn empty() {
::serde_yaml::from_str::<RawConfig>("{}").unwrap();
}
#[test]
#[cfg(feature = "xml_format")]
fn full_deserialize_xml() {
let cfg = r#"
<?xml version="1.0" encoding="utf-8"?>
<configuration refresh_rate="30 seconds">
<appenders>
<stdout kind="console"/>
<requests kind="file" path="/tmp/requests.log">
<encoder pattern="{d} - {m}{n}" />
</requests>
</appenders>
<root level="warn">
<appenders>stdout</appenders>
</root>
<loggers>
<logger name="foo::bar::baz" level="trace" additive="false" >
<appenders>requests</appenders>
</logger>
</loggers>
</configuration>
"#;
let config: RawConfigXml = ::serde_xml_rs::deserialize(cfg.as_bytes()).unwrap();
let config: RawConfig = config.into();
let errors = config.appenders_lossy(&Deserializers::new()).1;
println!("{:?}", errors);
assert!(errors.is_empty());
assert_eq!(config.refresh_rate, Some(Duration::from_secs(30)));
let logger = config.loggers.get("foo::bar::baz").unwrap();
assert_eq!(logger.appenders[0], "requests");
}
}