diff --git a/foundations/Cargo.toml b/foundations/Cargo.toml index 9faf9b2e..94abaf2d 100644 --- a/foundations/Cargo.toml +++ b/foundations/Cargo.toml @@ -159,6 +159,12 @@ metrics = [ "once_cell", ] +health-check = [ + "scc", + "telemetry-server", + "once_cell", +] + settings = [ "serde", "serde_yaml", diff --git a/foundations/examples/src/http.rs b/foundations/examples/src/http.rs index 7746791f..2380b858 100644 --- a/foundations/examples/src/http.rs +++ b/foundations/examples/src/http.rs @@ -29,6 +29,8 @@ struct Config { } impl Bootstrap for Config { + type Settings = Self; + fn runtime_mode(&self) -> RuntimeSettings { self.runtime.clone() } diff --git a/foundations/examples/src/simple.rs b/foundations/examples/src/simple.rs index 3d7a1082..26aab1ab 100644 --- a/foundations/examples/src/simple.rs +++ b/foundations/examples/src/simple.rs @@ -33,6 +33,8 @@ pub struct HttpServerSettings { } impl Bootstrap for HttpServerSettings { + type Settings = Self; + fn runtime_mode(&self) -> RuntimeSettings { self.runtime.clone() } diff --git a/foundations/src/bootstrap.rs b/foundations/src/bootstrap.rs index 7565577f..4a7820b0 100644 --- a/foundations/src/bootstrap.rs +++ b/foundations/src/bootstrap.rs @@ -2,82 +2,80 @@ use std::future::Future; use anyhow::Context; use scuffle_foundations_macros::auto_settings; - pub use scuffle_foundations_macros::bootstrap; -use crate::{ - settings::{ - cli::{Cli, Matches}, - Settings, - }, - BootstrapResult, -}; +use crate::settings::cli::{Cli, Matches}; +use crate::settings::Settings; +use crate::BootstrapResult; -pub fn bootstrap< - C: Bootstrap + std::fmt::Debug, - F: Fn(Matches) -> Fut, - Fut: Future>, ->( - config: &C, - info: crate::ServiceInfo, - main: F, +pub fn bootstrap) -> Fut, Fut: Future>>( + default_settings: &C::Settings, + info: crate::ServiceInfo, + main: F, ) -> BootstrapResult<()> { - let mut cli = Cli::::new(config).with_service_info(info); + let mut cli = Cli::::new(default_settings).with_service_info(info); + + for 
arg in C::additional_args() { + cli = cli.with_arg(arg); + } - for arg in C::additional_args() { - cli = cli.with_arg(arg); - } + let matches = cli.parse()?; - let matches = cli.parse()?; + let matches = Matches { + settings: C::from(matches.settings), + args: matches.args, + }; - let runtime = match matches.settings.runtime_mode() { - RuntimeSettings::Steal { name, threads } => { - crate::runtime::Runtime::new_steal(threads, &name) - } - RuntimeSettings::NoSteal { name, threads } => { - crate::runtime::Runtime::new_no_steal(threads, &name) - } - } - .context("Failed to create runtime")?; + let runtime = match matches.settings.runtime_mode() { + RuntimeSettings::Steal { name, threads } => crate::runtime::Runtime::new_steal(threads, &name), + RuntimeSettings::NoSteal { name, threads } => crate::runtime::Runtime::new_no_steal(threads, &name), + } + .context("Failed to create runtime")?; - runtime.block_on(async move { - #[cfg(feature = "_telemetry")] - if let Some(telemetry) = matches.settings.telemetry_config() { - crate::telementry::settings::init(info, telemetry).await; - } + runtime.block_on(async move { + #[cfg(feature = "_telemetry")] + if let Some(telemetry) = matches.settings.telemetry_config() { + crate::telementry::settings::init(info, telemetry).await; + } - main(matches).await - }) + main(matches).await + }) } #[auto_settings(crate_path = "crate")] #[serde(tag = "kind", rename_all = "kebab-case")] pub enum RuntimeSettings { - Steal { - threads: usize, - name: String, - }, - #[settings(default)] - NoSteal { - threads: usize, - name: String, - }, + Steal { + threads: usize, + name: String, + }, + #[settings(default)] + NoSteal { + threads: usize, + name: String, + }, } -pub trait Bootstrap: serde::Serialize + serde::de::DeserializeOwned + Settings { - fn runtime_mode(&self) -> RuntimeSettings { - RuntimeSettings::NoSteal { - threads: num_cpus::get(), - name: String::new(), - } - } +pub trait Bootstrap: Sized + From { + type Settings: serde::Serialize + 
serde::de::DeserializeOwned + Settings; + + fn runtime_mode(&self) -> RuntimeSettings { + RuntimeSettings::NoSteal { + threads: num_cpus::get(), + name: String::new(), + } + } - #[cfg(feature = "_telemetry")] - fn telemetry_config(&self) -> Option { - None - } + #[cfg(feature = "_telemetry")] + fn telemetry_config(&self) -> Option { + None + } + + fn additional_args() -> Vec { + vec![] + } +} - fn additional_args() -> Vec { - vec![] - } +impl Bootstrap for () { + type Settings = Self; } diff --git a/foundations/src/context.rs b/foundations/src/context.rs index 1749f986..35aa7574 100644 --- a/foundations/src/context.rs +++ b/foundations/src/context.rs @@ -10,234 +10,225 @@ use tokio_util::sync::{CancellationToken, WaitForCancellationFutureOwned}; struct ContextTracker(Arc); impl Drop for ContextTracker { - fn drop(&mut self) { - if self - .active_count - .fetch_sub(1, std::sync::atomic::Ordering::Relaxed) - == 1 - && self.stopped.load(std::sync::atomic::Ordering::Relaxed) - { - self.notify.notify_waiters(); - } - } + fn drop(&mut self) { + if self.active_count.fetch_sub(1, std::sync::atomic::Ordering::Relaxed) == 1 + && self.stopped.load(std::sync::atomic::Ordering::Relaxed) + { + self.notify.notify_waiters(); + } + } } impl Clone for ContextTracker { - fn clone(&self) -> Self { - self.active_count - .fetch_add(1, std::sync::atomic::Ordering::Relaxed); - Self(self.0.clone()) - } + fn clone(&self) -> Self { + self.active_count.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + Self(self.0.clone()) + } } impl std::ops::Deref for ContextTracker { - type Target = ContextTrackerInner; + type Target = ContextTrackerInner; - fn deref(&self) -> &Self::Target { - &self.0 - } + fn deref(&self) -> &Self::Target { + &self.0 + } } #[derive(Debug)] struct ContextTrackerInner { - stopped: AtomicBool, - active_count: AtomicUsize, - notify: tokio::sync::Notify, + stopped: AtomicBool, + active_count: AtomicUsize, + notify: tokio::sync::Notify, } impl ContextTrackerInner { - fn 
new() -> Arc { - Arc::new(Self { - stopped: AtomicBool::new(false), - active_count: AtomicUsize::new(0), - notify: tokio::sync::Notify::new(), - }) - } + fn new() -> Arc { + Arc::new(Self { + stopped: AtomicBool::new(false), + active_count: AtomicUsize::new(0), + notify: tokio::sync::Notify::new(), + }) + } - fn child(self: &Arc) -> ContextTracker { - self.active_count - .fetch_add(1, std::sync::atomic::Ordering::Relaxed); - ContextTracker(self.clone()) - } + fn child(self: &Arc) -> ContextTracker { + self.active_count.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + ContextTracker(self.clone()) + } - fn stop(&self) { - self.stopped - .store(true, std::sync::atomic::Ordering::Relaxed); - } + fn stop(&self) { + self.stopped.store(true, std::sync::atomic::Ordering::Relaxed); + } - async fn wait(&self) { - let notify = self.notify.notified(); + async fn wait(&self) { + let notify = self.notify.notified(); - // If there are no active children, then the notify will never be called - if self.active_count.load(std::sync::atomic::Ordering::Relaxed) == 0 { - return; - } + // If there are no active children, then the notify will never be called + if self.active_count.load(std::sync::atomic::Ordering::Relaxed) == 0 { + return; + } - notify.await; - } + notify.await; + } } #[derive(Clone, Debug)] pub struct Context { - token: CancellationToken, - _trackers: Vec, + token: CancellationToken, + _trackers: Vec, } impl Context { - #[must_use] - pub fn new() -> (Self, Handler) { - Handler::global().new_child() - } - - #[must_use] - pub fn new_child(&self) -> (Self, Handler) { - let token = self.token.child_token(); - let tracker = ContextTrackerInner::new(); - - ( - Self { - _trackers: { - let mut trackers = self._trackers.clone(); - trackers.push(tracker.child()); - trackers - }, - token: token.clone(), - }, - Handler { - _token: TokenDropGuard(token), - tracker, - }, - ) - } - - #[must_use] - pub fn global() -> Self { - Handler::global().context() - } - - pub async fn 
done(&self) { - self.token.cancelled().await; - } - - pub async fn into_done(self) { - self.done().await; - } - - #[must_use] - pub fn is_done(&self) -> bool { - self.token.is_cancelled() - } + #[must_use] + pub fn new() -> (Self, Handler) { + Handler::global().new_child() + } + + #[must_use] + pub fn new_child(&self) -> (Self, Handler) { + let token = self.token.child_token(); + let tracker = ContextTrackerInner::new(); + + ( + Self { + _trackers: { + let mut trackers = self._trackers.clone(); + trackers.push(tracker.child()); + trackers + }, + token: token.clone(), + }, + Handler { + _token: TokenDropGuard(token), + tracker, + }, + ) + } + + #[must_use] + pub fn global() -> Self { + Handler::global().context() + } + + pub async fn done(&self) { + self.token.cancelled().await; + } + + pub async fn into_done(self) { + self.done().await; + } + + #[must_use] + pub fn is_done(&self) -> bool { + self.token.is_cancelled() + } } struct TokenDropGuard(CancellationToken); impl TokenDropGuard { - #[must_use] - fn child(&self) -> CancellationToken { - self.0.child_token() - } + #[must_use] + fn child(&self) -> CancellationToken { + self.0.child_token() + } - fn cancel(&self) { - self.0.cancel(); - } + fn cancel(&self) { + self.0.cancel(); + } } impl Drop for TokenDropGuard { - fn drop(&mut self) { - self.cancel(); - } + fn drop(&mut self) { + self.cancel(); + } } pub struct Handler { - _token: TokenDropGuard, - tracker: Arc, + _token: TokenDropGuard, + tracker: Arc, } impl Default for Handler { - fn default() -> Self { - Self::new() - } + fn default() -> Self { + Self::new() + } } impl Handler { - #[must_use] - pub fn new() -> Handler { - let token = CancellationToken::new(); - let tracker = ContextTrackerInner::new(); - - Handler { - _token: TokenDropGuard(token), - tracker, - } - } - - #[must_use] - pub fn global() -> &'static Self { - static GLOBAL: once_cell::sync::Lazy = once_cell::sync::Lazy::new(Handler::new); - &GLOBAL - } - - pub async fn shutdown(&self) { - 
self.tracker.stop(); - self.cancel(); - self.tracker.wait().await; - } - - #[must_use] - pub fn context(&self) -> Context { - Context { - token: self._token.child(), - _trackers: vec![self.tracker.child()], - } - } - - #[must_use] - pub fn new_child(&self) -> (Context, Handler) { - self.context().new_child() - } - - pub fn cancel(&self) { - self._token.cancel(); - } + #[must_use] + pub fn new() -> Handler { + let token = CancellationToken::new(); + let tracker = ContextTrackerInner::new(); + + Handler { + _token: TokenDropGuard(token), + tracker, + } + } + + #[must_use] + pub fn global() -> &'static Self { + static GLOBAL: once_cell::sync::Lazy = once_cell::sync::Lazy::new(Handler::new); + &GLOBAL + } + + pub async fn shutdown(&self) { + self.tracker.stop(); + self.cancel(); + self.tracker.wait().await; + } + + #[must_use] + pub fn context(&self) -> Context { + Context { + token: self._token.child(), + _trackers: vec![self.tracker.child()], + } + } + + #[must_use] + pub fn new_child(&self) -> (Context, Handler) { + self.context().new_child() + } + + pub fn cancel(&self) { + self._token.cancel(); + } } pub trait ContextExt { - fn context(self, ctx: Context) -> FutureWithContext - where - Self: Sized; + fn context(self, ctx: Context) -> FutureWithContext + where + Self: Sized; } impl ContextExt for F { - fn context(self, ctx: Context) -> FutureWithContext { - FutureWithContext { - future: self, - _channels: ctx._trackers, - ctx: Box::pin(ctx.token.cancelled_owned()), - } - } + fn context(self, ctx: Context) -> FutureWithContext { + FutureWithContext { + future: self, + _channels: ctx._trackers, + ctx: Box::pin(ctx.token.cancelled_owned()), + } + } } #[pin_project::pin_project] pub struct FutureWithContext { - #[pin] - future: F, - _channels: Vec, - ctx: Pin>, + #[pin] + future: F, + _channels: Vec, + ctx: Pin>, } impl Future for FutureWithContext { - type Output = Option; - - fn poll( - mut self: Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> 
std::task::Poll { - let this = self.as_mut().project(); - - match (this.ctx.as_mut().poll(cx), this.future.poll(cx)) { - (_, Poll::Ready(v)) => std::task::Poll::Ready(Some(v)), - (Poll::Ready(_), Poll::Pending) => std::task::Poll::Ready(None), - _ => std::task::Poll::Pending, - } - } + type Output = Option; + + fn poll(mut self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> std::task::Poll { + let this = self.as_mut().project(); + + match (this.ctx.as_mut().poll(cx), this.future.poll(cx)) { + (_, Poll::Ready(v)) => std::task::Poll::Ready(Some(v)), + (Poll::Ready(_), Poll::Pending) => std::task::Poll::Ready(None), + _ => std::task::Poll::Pending, + } + } } diff --git a/foundations/src/lib.rs b/foundations/src/lib.rs index 6b8786f9..0427d3fb 100644 --- a/foundations/src/lib.rs +++ b/foundations/src/lib.rs @@ -10,16 +10,16 @@ pub use scuffle_foundations_macros::wrapped; #[cfg(feature = "macros")] #[doc(hidden)] pub mod macro_reexports { - #[cfg(feature = "cli")] - pub use const_str; - #[cfg(feature = "metrics")] - pub use once_cell; - #[cfg(feature = "metrics")] - pub use parking_lot; - #[cfg(feature = "metrics")] - pub use prometheus_client; - #[cfg(any(feature = "settings", feature = "metrics"))] - pub use serde; + #[cfg(feature = "cli")] + pub use const_str; + #[cfg(feature = "metrics")] + pub use once_cell; + #[cfg(feature = "metrics")] + pub use parking_lot; + #[cfg(feature = "metrics")] + pub use prometheus_client; + #[cfg(any(feature = "settings", feature = "metrics"))] + pub use serde; } pub type BootstrapResult = anyhow::Result; @@ -42,31 +42,27 @@ pub mod context; #[derive(Debug, Clone, Copy, Default)] /// Information about the service. pub struct ServiceInfo { - /// The name of the service. - pub name: &'static str, - /// The name of the service for metrics. Replaces `-` with `_`. - pub metric_name: &'static str, - /// The version of the service. - pub version: &'static str, - /// The author of the service. 
- pub author: &'static str, - /// A description of the service. - pub description: &'static str, + /// The name of the service. + pub name: &'static str, + /// The name of the service for metrics. Replaces `-` with `_`. + pub metric_name: &'static str, + /// The version of the service. + pub version: &'static str, + /// The author of the service. + pub author: &'static str, + /// A description of the service. + pub description: &'static str, } #[macro_export] macro_rules! service_info { - () => { - $crate::ServiceInfo { - name: env!("CARGO_PKG_NAME"), - metric_name: $crate::macro_reexports::const_str::replace!( - env!("CARGO_PKG_NAME"), - "-", - "_" - ), - version: env!("CARGO_PKG_VERSION"), - author: env!("CARGO_PKG_AUTHORS"), - description: env!("CARGO_PKG_DESCRIPTION"), - } - }; + () => { + $crate::ServiceInfo { + name: env!("CARGO_PKG_NAME"), + metric_name: $crate::macro_reexports::const_str::replace!(env!("CARGO_PKG_NAME"), "-", "_"), + version: env!("CARGO_PKG_VERSION"), + author: env!("CARGO_PKG_AUTHORS"), + description: env!("CARGO_PKG_DESCRIPTION"), + } + }; } diff --git a/foundations/src/runtime.rs b/foundations/src/runtime.rs index a86088a4..805fdbb2 100644 --- a/foundations/src/runtime.rs +++ b/foundations/src/runtime.rs @@ -1,158 +1,154 @@ -use std::{cell::RefCell, future::Future, sync::Arc}; +use std::cell::RefCell; +use std::future::Future; +use std::sync::Arc; use rand::Rng; use tokio::task::JoinHandle; pub enum Runtime { - Steal(tokio::runtime::Runtime), - NoSteal(Arc), + Steal(tokio::runtime::Runtime), + NoSteal(Arc), } impl Runtime { - pub fn new_steal(thread_count: usize, name: &str) -> std::io::Result { - Ok(Self::Steal( - tokio::runtime::Builder::new_multi_thread() - .worker_threads(thread_count) - .thread_name(name) - .enable_all() - .build()?, - )) - } - - pub fn new_no_steal(thread_count: usize, name: &str) -> std::io::Result { - Ok(Self::NoSteal(NoStealRuntime::new(thread_count, name)?)) - } - - pub fn spawn(&self, future: F) -> JoinHandle 
- where - F: Future + Send + 'static, - F::Output: Send + 'static, - { - match self { - Self::Steal(runtime) => runtime.spawn(future), - Self::NoSteal(runtime) => runtime.spawn(future), - } - } - - pub fn block_on(&self, future: F) -> F::Output - where - F: Future, - { - match self { - Self::Steal(runtime) => runtime.block_on(future), - Self::NoSteal(runtime) => runtime.block_on(future), - } - } + pub fn new_steal(thread_count: usize, name: &str) -> std::io::Result { + Ok(Self::Steal( + tokio::runtime::Builder::new_multi_thread() + .worker_threads(thread_count) + .thread_name(name) + .enable_all() + .build()?, + )) + } + + pub fn new_no_steal(thread_count: usize, name: &str) -> std::io::Result { + Ok(Self::NoSteal(NoStealRuntime::new(thread_count, name)?)) + } + + pub fn spawn(&self, future: F) -> JoinHandle + where + F: Future + Send + 'static, + F::Output: Send + 'static, + { + match self { + Self::Steal(runtime) => runtime.spawn(future), + Self::NoSteal(runtime) => runtime.spawn(future), + } + } + + pub fn block_on(&self, future: F) -> F::Output + where + F: Future, + { + match self { + Self::Steal(runtime) => runtime.block_on(future), + Self::NoSteal(runtime) => runtime.block_on(future), + } + } } pub struct NoStealRuntime { - runtimes: Vec, + runtimes: Vec, } struct NoStealRuntimeThreadData { - runtime: Arc, - idx: usize, + runtime: Arc, + idx: usize, } thread_local! 
{ - static NO_STEAL_RUNTIME: RefCell> = const { RefCell::new(None) }; + static NO_STEAL_RUNTIME: RefCell> = const { RefCell::new(None) }; } struct Guard(Option); impl Guard { - fn new(data: Option) -> Self { - Self(NO_STEAL_RUNTIME.replace(data)) - } + fn new(data: Option) -> Self { + Self(NO_STEAL_RUNTIME.replace(data)) + } } impl Drop for Guard { - fn drop(&mut self) { - NO_STEAL_RUNTIME.with(|data| { - data.replace(self.0.take()); - }); - } + fn drop(&mut self) { + NO_STEAL_RUNTIME.with(|data| { + data.replace(self.0.take()); + }); + } } impl NoStealRuntime { - pub fn new(mut thread_count: usize, name: &str) -> Result, std::io::Error> { - if thread_count == 0 { - thread_count = num_cpus::get(); - } - - let this = Arc::new(Self { - runtimes: Vec::new(), - }); - - let runtimes = (0..thread_count) - .map(|i| { - let pool = this.clone(); - - let init_fn = move || { - let pool = pool.clone(); - NO_STEAL_RUNTIME.with(move |data| { - data.replace(Some(NoStealRuntimeThreadData { - runtime: pool, - idx: i, - })) - }); - }; - - tokio::runtime::Builder::new_multi_thread() - .worker_threads(1) - .thread_name(format!("{name}-{i}")) - .on_thread_start(init_fn) - .enable_all() - .build() - }) - .collect::, _>>()?; - - // This is safe because no one is using the runtimes yet - unsafe { - let ptr = Arc::as_ptr(&this) as *mut NoStealRuntime; - let this = &mut *ptr; - this.runtimes = runtimes; - } - - Ok(this) - } - - pub fn spawn(&self, future: F) -> JoinHandle - where - F: Future + Send + 'static, - F::Output: Send + 'static, - { - let idx = rand::thread_rng().gen_range(0..self.runtimes.len()); - self.runtimes[idx].spawn(future) - } - - pub fn block_on(self: &Arc, future: F) -> F::Output - where - F: Future, - { - let _guard = Guard::new(Some(NoStealRuntimeThreadData { - runtime: self.clone(), - idx: 0, - })); - - self.runtimes[0].block_on(future) - } + pub fn new(mut thread_count: usize, name: &str) -> Result, std::io::Error> { + if thread_count == 0 { + thread_count = 
num_cpus::get(); + } + + let this = Arc::new(Self { runtimes: Vec::new() }); + + let runtimes = (0..thread_count) + .map(|i| { + let pool = this.clone(); + + let init_fn = move || { + let pool = pool.clone(); + NO_STEAL_RUNTIME + .with(move |data| data.replace(Some(NoStealRuntimeThreadData { runtime: pool, idx: i }))); + }; + + tokio::runtime::Builder::new_multi_thread() + .worker_threads(1) + .thread_name(format!("{name}-{i}")) + .on_thread_start(init_fn) + .enable_all() + .build() + }) + .collect::, _>>()?; + + // This is safe because no one is using the runtimes yet + unsafe { + let ptr = Arc::as_ptr(&this) as *mut NoStealRuntime; + let this = &mut *ptr; + this.runtimes = runtimes; + } + + Ok(this) + } + + pub fn spawn(&self, future: F) -> JoinHandle + where + F: Future + Send + 'static, + F::Output: Send + 'static, + { + let idx = rand::thread_rng().gen_range(0..self.runtimes.len()); + self.runtimes[idx].spawn(future) + } + + pub fn block_on(self: &Arc, future: F) -> F::Output + where + F: Future, + { + let _guard = Guard::new(Some(NoStealRuntimeThreadData { + runtime: self.clone(), + idx: 0, + })); + + self.runtimes[0].block_on(future) + } } pub fn spawn(future: F) -> JoinHandle where - F: Future + Send + 'static, - F::Output: Send + 'static, + F: Future + Send + 'static, + F::Output: Send + 'static, { - NO_STEAL_RUNTIME.with_borrow(|data| match data { - Some(data) => data.runtime.spawn(future), - None => tokio::spawn(future), - }) + NO_STEAL_RUNTIME.with_borrow(|data| match data { + Some(data) => data.runtime.spawn(future), + None => tokio::spawn(future), + }) } pub fn current_handle() -> tokio::runtime::Handle { - NO_STEAL_RUNTIME.with_borrow(|data| match data.as_ref() { - Some(data) => data.runtime.runtimes[data.idx].handle().clone(), - None => tokio::runtime::Handle::current(), - }) + NO_STEAL_RUNTIME.with_borrow(|data| match data.as_ref() { + Some(data) => data.runtime.runtimes[data.idx].handle().clone(), + None => tokio::runtime::Handle::current(), + }) 
} diff --git a/foundations/src/settings/cli.rs b/foundations/src/settings/cli.rs index 7cec7426..56112267 100644 --- a/foundations/src/settings/cli.rs +++ b/foundations/src/settings/cli.rs @@ -8,123 +8,119 @@ const CONFIG_ARG_ID: &str = "config"; #[derive(Debug)] pub struct Cli { - settings: SettingsParser, - app: clap::Command, + settings: SettingsParser, + app: clap::Command, } fn default_cmd() -> clap::Command { - clap::Command::new("") - .arg( - clap::Arg::new(CONFIG_ARG_ID) - .long(CONFIG_ARG_ID) - .short('c') - .help("The configuration file to use") - .value_name("FILE") - .action(ArgAction::Append), - ) - .arg( - clap::Arg::new(GENERATE_ARG_ID) - .long(GENERATE_ARG_ID) - .help("Generate a configuration file") - .value_name("FILE") - .action(ArgAction::Set) - .num_args(0..=1) - .default_missing_value("./config.yaml"), - ) + clap::Command::new("") + .arg( + clap::Arg::new(CONFIG_ARG_ID) + .long(CONFIG_ARG_ID) + .short('c') + .help("The configuration file to use") + .value_name("FILE") + .action(ArgAction::Append), + ) + .arg( + clap::Arg::new(GENERATE_ARG_ID) + .long(GENERATE_ARG_ID) + .help("Generate a configuration file") + .value_name("FILE") + .action(ArgAction::Set) + .num_args(0..=1) + .default_missing_value("./config.yaml"), + ) } impl Default for Cli { - fn default() -> Self { - Self::new(&Default::default()) - } + fn default() -> Self { + Self::new(&Default::default()) + } } #[derive(Debug, Clone)] pub struct Matches { - pub settings: S, - pub args: clap::ArgMatches, + pub settings: S, + pub args: clap::ArgMatches, } impl Cli { - pub fn new(default: &S) -> Self { - Self { - settings: SettingsParser::new(default).unwrap(), - app: default_cmd(), - } - } - - pub fn with_service_info(mut self, info: crate::ServiceInfo) -> Self { - self.app = self - .app - .name(info.name) - .version(info.version) - .author(info.author) - .about(info.description); - - self - } - - pub fn with_arg(mut self, arg: clap::Arg) -> Self { - self.app = self.app.arg(arg); - self - 
} - - fn load_file(file: &str, optional: bool) -> anyhow::Result> { - let contents = match std::fs::read_to_string(file) { - Ok(contents) => contents, - Err(err) => { - if optional { - return Ok(None); - } - - return Err(err) - .with_context(|| format!("Error reading configuration file: {file}")); - } - }; - - let incoming = serde_yaml::from_str(&contents) - .with_context(|| format!("Error parsing configuration file: {file}"))?; - - Ok(Some(incoming)) - } - - pub fn parse(mut self) -> anyhow::Result> { - let args = self.app.get_matches(); - - if let Some(file) = args.get_one::(GENERATE_ARG_ID) { - let settings = self - .settings - .parse() - .context("failed to construct settings")? - .to_yaml_string() - .context("failed to serialize settings")?; - std::fs::write(file, settings) - .with_context(|| format!("Error writing configuration file: {file}"))?; - println!("Generated configuration file: {file}"); - std::process::exit(0); - } - - let mut files = if let Some(files) = args.get_many::(CONFIG_ARG_ID) { - files.cloned().map(|file| (file, false)).collect::>() - } else { - vec![] - }; - - if files.is_empty() { - files.push(("config.yaml".to_string(), true)); - } - - for (file, optional) in files { - if let Some(value) = Self::load_file(&file, optional)? 
{ - self.settings - .merge(value) - .context("failed to merge configuration file")?; - } - } - - Ok(Matches { - settings: self.settings.parse().context("failed to parse settings")?, - args, - }) - } + pub fn new(default: &S) -> Self { + Self { + settings: SettingsParser::new(default).unwrap(), + app: default_cmd(), + } + } + + pub fn with_service_info(mut self, info: crate::ServiceInfo) -> Self { + self.app = self + .app + .name(info.name) + .version(info.version) + .author(info.author) + .about(info.description); + + self + } + + pub fn with_arg(mut self, arg: clap::Arg) -> Self { + self.app = self.app.arg(arg); + self + } + + fn load_file(file: &str, optional: bool) -> anyhow::Result> { + let contents = match std::fs::read_to_string(file) { + Ok(contents) => contents, + Err(err) => { + if optional { + return Ok(None); + } + + return Err(err).with_context(|| format!("Error reading configuration file: {file}")); + } + }; + + let incoming = + serde_yaml::from_str(&contents).with_context(|| format!("Error parsing configuration file: {file}"))?; + + Ok(Some(incoming)) + } + + pub fn parse(mut self) -> anyhow::Result> { + let args = self.app.get_matches(); + + if let Some(file) = args.get_one::(GENERATE_ARG_ID) { + let settings = self + .settings + .parse() + .context("failed to construct settings")? + .to_yaml_string() + .context("failed to serialize settings")?; + std::fs::write(file, settings).with_context(|| format!("Error writing configuration file: {file}"))?; + println!("Generated configuration file: {file}"); + std::process::exit(0); + } + + let mut files = if let Some(files) = args.get_many::(CONFIG_ARG_ID) { + files.cloned().map(|file| (file, false)).collect::>() + } else { + vec![] + }; + + if files.is_empty() { + files.push(("config.yaml".to_string(), true)); + } + + for (file, optional) in files { + if let Some(value) = Self::load_file(&file, optional)? 
{ + self.settings.merge(value).context("failed to merge configuration file")?; + } + } + + Ok(Matches { + settings: self.settings.parse().context("failed to parse settings")?, + args, + }) + } } diff --git a/foundations/src/settings/mod.rs b/foundations/src/settings/mod.rs index f30f5208..12b20dd5 100644 --- a/foundations/src/settings/mod.rs +++ b/foundations/src/settings/mod.rs @@ -1,4 +1,5 @@ -use std::{borrow::Cow, collections::HashMap}; +use std::borrow::Cow; +use std::collections::HashMap; use serde_yaml::value::Tag; use serde_yaml::Value; @@ -11,161 +12,144 @@ pub use scuffle_foundations_macros::{auto_settings, Settings}; #[derive(Debug, Clone)] pub struct SettingsParser { - root: serde_yaml::Value, - _marker: std::marker::PhantomData, + root: serde_yaml::Value, + _marker: std::marker::PhantomData, } enum MergeDirective { - Unset, - Replace, - Merge, + Unset, + Replace, + Merge, } impl MergeDirective { - fn from_tag(tag: &Tag) -> Self { - if tag == "!replace" { - Self::Replace - } else if tag == "!merge" { - Self::Merge - } else { - Self::Unset - } - } + fn from_tag(tag: &Tag) -> Self { + if tag == "!replace" { + Self::Replace + } else if tag == "!merge" { + Self::Merge + } else { + Self::Unset + } + } } impl SettingsParser { - pub fn new(default: &S) -> serde_yaml::Result - where - S: serde::Serialize, - { - Ok(Self { - root: serde_yaml::to_value(default)?, - _marker: std::marker::PhantomData, - }) - } - - fn merge(&mut self, mut incoming: serde_yaml::Value) -> serde_yaml::Result<()> { - self.root.apply_merge()?; - incoming.apply_merge()?; - - let root = std::mem::take(&mut self.root); - self.root = self.merge_loop(root, incoming, MergeDirective::Unset); - Ok(()) - } - - fn merge_loop( - &self, - root: serde_yaml::Value, - incoming: serde_yaml::Value, - merge: MergeDirective, - ) -> serde_yaml::Value { - match (root, incoming) { - (serde_yaml::Value::Mapping(mut first_map), serde_yaml::Value::Mapping(second_map)) => { - for (key, value) in second_map { - // 
If the key is tagged we should process it - let (key, merge) = match key { - serde_yaml::Value::Tagged(tagged) => { - (tagged.value, MergeDirective::from_tag(&tagged.tag)) - } - _ => (key, MergeDirective::Unset), - }; - - let combined_value = if let Some(existing_value) = first_map.remove(&key) { - if matches!(merge, MergeDirective::Replace) { - value - } else { - self.merge_loop(existing_value, value, merge) - } - } else { - value - }; - first_map.insert(key, combined_value); - } - serde_yaml::Value::Mapping(first_map) - } - ( - serde_yaml::Value::Sequence(mut first_seq), - serde_yaml::Value::Sequence(second_seq), - ) => { - if matches!(merge, MergeDirective::Merge) { - first_seq.extend(second_seq); - } else { - first_seq = second_seq; - } - serde_yaml::Value::Sequence(first_seq) - } - (first, serde_yaml::Value::Tagged(tagged)) => self.handle_tagged(first, *tagged, merge), - (_, second) => second, - } - } - - fn handle_tagged( - &self, - first: serde_yaml::Value, - tagged: serde_yaml::value::TaggedValue, - merge: MergeDirective, - ) -> serde_yaml::Value { - // If the tag is replace it doesn't matter what the first value is - // we just return the tagged value - let merge = match (merge, MergeDirective::from_tag(&tagged.tag)) { - (MergeDirective::Unset, merge) => merge, - (merge, _) => merge, - }; - if matches!(merge, MergeDirective::Replace) { - return tagged.value; - } - // If the first value is tagged then we should compare the tags - // and act accordingly - if let serde_yaml::Value::Tagged(first_tagged) = first { - if first_tagged.tag == tagged.tag { - let value = self.merge_loop(first_tagged.value, tagged.value, merge); - // Retag the value - return serde_yaml::Value::Tagged(Box::new(serde_yaml::value::TaggedValue { - tag: first_tagged.tag, - value, - })); - } else { - return serde_yaml::Value::Tagged(Box::new(tagged)); - } - } - - // Otherwise we do not merge and retag the value - let value = self.merge_loop(first, tagged.value, merge); - if 
matches!(MergeDirective::from_tag(&tagged.tag), MergeDirective::Unset) { - serde_yaml::Value::Tagged(Box::new(serde_yaml::value::TaggedValue { - tag: tagged.tag, - value, - })) - } else { - value - } - } - - pub fn merge_str(&mut self, s: &str) -> serde_yaml::Result<()> { - let incoming = serde_yaml::from_str(s)?; - self.merge(incoming) - } - - pub fn parse(self) -> serde_yaml::Result - where - for<'de> S: serde::Deserialize<'de>, - { - serde_yaml::from_value(self.root) - } + pub fn new(default: &S) -> serde_yaml::Result + where + S: serde::Serialize, + { + Ok(Self { + root: serde_yaml::to_value(default)?, + _marker: std::marker::PhantomData, + }) + } + + fn merge(&mut self, mut incoming: serde_yaml::Value) -> serde_yaml::Result<()> { + self.root.apply_merge()?; + incoming.apply_merge()?; + + let root = std::mem::take(&mut self.root); + self.root = self.merge_loop(root, incoming, MergeDirective::Unset); + Ok(()) + } + + fn merge_loop(&self, root: serde_yaml::Value, incoming: serde_yaml::Value, merge: MergeDirective) -> serde_yaml::Value { + match (root, incoming) { + (serde_yaml::Value::Mapping(mut first_map), serde_yaml::Value::Mapping(second_map)) => { + for (key, value) in second_map { + // If the key is tagged we should process it + let (key, merge) = match key { + serde_yaml::Value::Tagged(tagged) => (tagged.value, MergeDirective::from_tag(&tagged.tag)), + _ => (key, MergeDirective::Unset), + }; + + let combined_value = if let Some(existing_value) = first_map.remove(&key) { + if matches!(merge, MergeDirective::Replace) { + value + } else { + self.merge_loop(existing_value, value, merge) + } + } else { + value + }; + first_map.insert(key, combined_value); + } + serde_yaml::Value::Mapping(first_map) + } + (serde_yaml::Value::Sequence(mut first_seq), serde_yaml::Value::Sequence(second_seq)) => { + if matches!(merge, MergeDirective::Merge) { + first_seq.extend(second_seq); + } else { + first_seq = second_seq; + } + serde_yaml::Value::Sequence(first_seq) + } + 
(first, serde_yaml::Value::Tagged(tagged)) => self.handle_tagged(first, *tagged, merge), + (_, second) => second, + } + } + + fn handle_tagged( + &self, + first: serde_yaml::Value, + tagged: serde_yaml::value::TaggedValue, + merge: MergeDirective, + ) -> serde_yaml::Value { + // If the tag is replace it doesn't matter what the first value is + // we just return the tagged value + let merge = match (merge, MergeDirective::from_tag(&tagged.tag)) { + (MergeDirective::Unset, merge) => merge, + (merge, _) => merge, + }; + if matches!(merge, MergeDirective::Replace) { + return tagged.value; + } + // If the first value is tagged then we should compare the tags + // and act accordingly + if let serde_yaml::Value::Tagged(first_tagged) = first { + if first_tagged.tag == tagged.tag { + let value = self.merge_loop(first_tagged.value, tagged.value, merge); + // Retag the value + return serde_yaml::Value::Tagged(Box::new(serde_yaml::value::TaggedValue { + tag: first_tagged.tag, + value, + })); + } else { + return serde_yaml::Value::Tagged(Box::new(tagged)); + } + } + + // Otherwise we do not merge and retag the value + let value = self.merge_loop(first, tagged.value, merge); + if matches!(MergeDirective::from_tag(&tagged.tag), MergeDirective::Unset) { + serde_yaml::Value::Tagged(Box::new(serde_yaml::value::TaggedValue { tag: tagged.tag, value })) + } else { + value + } + } + + pub fn merge_str(&mut self, s: &str) -> serde_yaml::Result<()> { + let incoming = serde_yaml::from_str(s)?; + self.merge(incoming) + } + + pub fn parse(self) -> serde_yaml::Result + where + for<'de> S: serde::Deserialize<'de>, + { + serde_yaml::from_value(self.root) + } } mod traits; -pub use traits::Wrapped; - -pub use traits::Settings; +pub use traits::{Settings, Wrapped}; /// Converts a settings struct to a YAML string including doc comments. /// If you want to provide doc comments for keys use to_yaml_string_with_docs. 
-pub fn to_yaml_string( - settings: &T, -) -> Result { - to_yaml_string_with_docs(settings, &settings.docs()) +pub fn to_yaml_string(settings: &T) -> Result { + to_yaml_string_with_docs(settings, &settings.docs()) } type CowStr = Cow<'static, str>; @@ -173,154 +157,141 @@ type DocMap = HashMap, Cow<'static, [CowStr]>>; /// Serializes a struct to YAML with documentation comments. /// Documentation comments are provided in a DocMap. -pub fn to_yaml_string_with_docs( - settings: &T, - docs: &DocMap, -) -> Result { - let data = serde_yaml::to_value(settings)?; - let mut result = String::new(); - convert_recursive(docs, &mut Vec::new(), &data, &mut result, 0); - - if result.ends_with("\n\n") { - result.pop(); - } else if !result.ends_with('\n') { - result.push('\n'); - } - - Ok(result) +pub fn to_yaml_string_with_docs(settings: &T, docs: &DocMap) -> Result { + let data = serde_yaml::to_value(settings)?; + let mut result = String::new(); + convert_recursive(docs, &mut Vec::new(), &data, &mut result, 0); + + if result.ends_with("\n\n") { + result.pop(); + } else if !result.ends_with('\n') { + result.push('\n'); + } + + Ok(result) } macro_rules! push_indent { - ($result: expr, $indent: expr) => {{ - for _ in 0..$indent { - $result.push(' '); - } - }}; + ($result:expr, $indent:expr) => {{ + for _ in 0..$indent { + $result.push(' '); + } + }}; } macro_rules! push_docs { - ($result: expr, $docs: expr, $stack: expr, $indent: expr) => {{ - $docs - .get($stack) - .into_iter() - .flat_map(|s| s.iter()) - .for_each(|doc| { - push_indent!($result, $indent); - $result.push_str("# "); - $result.push_str(doc); - push_new_line!($result); - }); - }}; + ($result:expr, $docs:expr, $stack:expr, $indent:expr) => {{ + $docs.get($stack).into_iter().flat_map(|s| s.iter()).for_each(|doc| { + push_indent!($result, $indent); + $result.push_str("# "); + $result.push_str(doc); + push_new_line!($result); + }); + }}; } macro_rules! 
push_key { - ($result: expr, $key: expr, $indent: expr) => {{ - push_indent!($result, $indent); - $result.push_str($key); - $result.push_str(":"); - }}; + ($result:expr, $key:expr, $indent:expr) => {{ + push_indent!($result, $indent); + $result.push_str($key); + $result.push_str(":"); + }}; } macro_rules! push_new_line { - ($result: expr) => {{ - if !$result.ends_with('\n') { - $result.push('\n'); - } - }}; + ($result:expr) => {{ + if !$result.ends_with('\n') { + $result.push('\n'); + } + }}; } -fn convert_recursive( - docs: &DocMap, - stack: &mut Vec, - value: &Value, - result: &mut String, - indent: usize, -) { - // Append doc comments at the current level - if matches!(value, Value::Mapping(_) | Value::Sequence(_)) { - stack.push(">".into()); - push_docs!(result, docs, stack, indent); - stack.pop(); - } - - match value { - Value::Mapping(map) => { - for (key, val) in map { - let key_str = key.as_str().unwrap_or_default(); - stack.push(Cow::from(key_str.to_owned())); - - push_docs!(result, docs, stack, indent); - push_key!(result, key_str, indent); - - // We dont want to push a new line if the item is a Tagged value - if matches!(val, Value::Mapping(_) | Value::Sequence(_)) { - push_new_line!(result); - } - - convert_recursive(docs, stack, val, result, indent + 2); - - push_new_line!(result); - - if (val.is_mapping() || val.is_sequence()) && !result.ends_with("\n\n") { - result.push('\n'); - } - - stack.pop(); - } - - if map.is_empty() { - if result.ends_with('\n') { - result.pop(); - } - result.push_str(" {}"); - } - } - Value::Sequence(seq) => { - for (idx, val) in seq.iter().enumerate() { - stack.push(Cow::from(idx.to_string())); - - push_docs!(result, docs, stack, indent); - - push_indent!(result, indent); - result.push('-'); - - if val.is_sequence() { - push_new_line!(result); - } - - convert_recursive(docs, stack, val, result, indent + 2); - - stack.pop(); - - push_new_line!(result); - } - - if seq.is_empty() { - if result.ends_with('\n') { - result.pop(); 
- } - result.push_str(" []"); - } - } - Value::Tagged(tagged) => { - result.push(' '); - result.push_str(&tagged.tag.to_string()); - - if tagged.value.is_mapping() || tagged.value.is_sequence() { - push_new_line!(result); - } - - convert_recursive(docs, stack, &tagged.value, result, indent); - } - _ => { - result.push(' '); - result.push_str(serde_yaml::to_string(value).unwrap_or_default().trim_end()); - // TODO(troy): figure out a way to do sub-docs for scalars so that the format - // isnt so janky - - // stack.push(">".into()); - // push_docs!(result, docs, stack, indent); - // stack.pop(); - } - } +fn convert_recursive(docs: &DocMap, stack: &mut Vec, value: &Value, result: &mut String, indent: usize) { + // Append doc comments at the current level + if matches!(value, Value::Mapping(_) | Value::Sequence(_)) { + stack.push(">".into()); + push_docs!(result, docs, stack, indent); + stack.pop(); + } + + match value { + Value::Mapping(map) => { + for (key, val) in map { + let key_str = key.as_str().unwrap_or_default(); + stack.push(Cow::from(key_str.to_owned())); + + push_docs!(result, docs, stack, indent); + push_key!(result, key_str, indent); + + // We dont want to push a new line if the item is a Tagged value + if matches!(val, Value::Mapping(_) | Value::Sequence(_)) { + push_new_line!(result); + } + + convert_recursive(docs, stack, val, result, indent + 2); + + push_new_line!(result); + + if (val.is_mapping() || val.is_sequence()) && !result.ends_with("\n\n") { + result.push('\n'); + } + + stack.pop(); + } + + if map.is_empty() { + if result.ends_with('\n') { + result.pop(); + } + result.push_str(" {}"); + } + } + Value::Sequence(seq) => { + for (idx, val) in seq.iter().enumerate() { + stack.push(Cow::from(idx.to_string())); + + push_docs!(result, docs, stack, indent); + + push_indent!(result, indent); + result.push('-'); + + if val.is_sequence() { + push_new_line!(result); + } + + convert_recursive(docs, stack, val, result, indent + 2); + + stack.pop(); + + 
push_new_line!(result); + } + + if seq.is_empty() { + if result.ends_with('\n') { + result.pop(); + } + result.push_str(" []"); + } + } + Value::Tagged(tagged) => { + result.push(' '); + result.push_str(&tagged.tag.to_string()); + + if tagged.value.is_mapping() || tagged.value.is_sequence() { + push_new_line!(result); + } + + convert_recursive(docs, stack, &tagged.value, result, indent); + } + _ => { + result.push(' '); + result.push_str(serde_yaml::to_string(value).unwrap_or_default().trim_end()); + // TODO(troy): figure out a way to do sub-docs for scalars so that + // the format isnt so janky + + // stack.push(">".into()); + // push_docs!(result, docs, stack, indent); + // stack.pop(); + } + } } diff --git a/foundations/src/settings/traits.rs b/foundations/src/settings/traits.rs index f69de15f..1c1faacc 100644 --- a/foundations/src/settings/traits.rs +++ b/foundations/src/settings/traits.rs @@ -1,39 +1,35 @@ -/*! -This module contains an auto-deref specialization to help with adding doc comments to sub-types. -You can read more about how it works here -https://lukaskalbertodt.github.io/2019/12/05/generalized-autoref-based-specialization.html -*/ - -use std::{ - borrow::Cow, - collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, LinkedList, VecDeque}, - hash::Hash, -}; +//! This module contains an auto-deref specialization to help with adding doc +//! comments to sub-types. You can read more about how it works here +//! 
https://lukaskalbertodt.github.io/2019/12/05/generalized-autoref-based-specialization.html + +use std::borrow::Cow; +use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, LinkedList, VecDeque}; +use std::hash::Hash; use super::to_yaml_string; pub trait Settings { - #[doc(hidden)] - fn add_docs( - &self, - parent_key: &[Cow<'static, str>], - docs: &mut HashMap>, Cow<'static, [Cow<'static, str>]>>, - ) { - let (_, _) = (parent_key, docs); - } - - fn docs(&self) -> HashMap>, Cow<'static, [Cow<'static, str>]>> { - let mut docs = HashMap::new(); - self.add_docs(&[], &mut docs); - docs - } - - fn to_yaml_string(&self) -> Result - where - Self: serde::Serialize + Sized, - { - to_yaml_string(self) - } + #[doc(hidden)] + fn add_docs( + &self, + parent_key: &[Cow<'static, str>], + docs: &mut HashMap>, Cow<'static, [Cow<'static, str>]>>, + ) { + let (_, _) = (parent_key, docs); + } + + fn docs(&self) -> HashMap>, Cow<'static, [Cow<'static, str>]>> { + let mut docs = HashMap::new(); + self.add_docs(&[], &mut docs); + docs + } + + fn to_yaml_string(&self) -> Result + where + Self: serde::Serialize + Sized, + { + to_yaml_string(self) + } } #[doc(hidden)] @@ -44,13 +40,13 @@ impl Settings for Wrapped<&T> {} /// Specialization for adding docs to a type that implements SerdeDocs. impl Settings for &Wrapped<&T> { - fn add_docs( - &self, - parent_key: &[Cow<'static, str>], - docs: &mut HashMap>, Cow<'static, [Cow<'static, str>]>>, - ) { - ::add_docs(self.0, parent_key, docs) - } + fn add_docs( + &self, + parent_key: &[Cow<'static, str>], + docs: &mut HashMap>, Cow<'static, [Cow<'static, str>]>>, + ) { + ::add_docs(self.0, parent_key, docs) + } } /// Specialization for adding docs an array type that implements SerdeDocs. @@ -129,29 +125,30 @@ impl_map!( Settings for &Wrapped<&BTreeMap>); /// Specialization for adding docs to an option type that implements SerdeDocs. 
impl Settings for &Wrapped<&Option> { - fn add_docs( - &self, - parent_key: &[Cow<'static, str>], - docs: &mut HashMap>, Cow<'static, [Cow<'static, str>]>>, - ) { - if let Some(inner) = self.0 { - inner.add_docs(parent_key, docs); - } - } + fn add_docs( + &self, + parent_key: &[Cow<'static, str>], + docs: &mut HashMap>, Cow<'static, [Cow<'static, str>]>>, + ) { + if let Some(inner) = self.0 { + inner.add_docs(parent_key, docs); + } + } } -/// Specialization for any type that derefs into a type that implements SerdeDocs. +/// Specialization for any type that derefs into a type that implements +/// SerdeDocs. impl Settings for &&Wrapped<&R> where - R: std::ops::Deref, + R: std::ops::Deref, { - fn add_docs( - &self, - parent_key: &[Cow<'static, str>], - docs: &mut HashMap>, Cow<'static, [Cow<'static, str>]>>, - ) { - (**self.0).add_docs(parent_key, docs); - } + fn add_docs( + &self, + parent_key: &[Cow<'static, str>], + docs: &mut HashMap>, Cow<'static, [Cow<'static, str>]>>, + ) { + (**self.0).add_docs(parent_key, docs); + } } impl Settings for () {} diff --git a/foundations/src/signal.rs b/foundations/src/signal.rs index e4e918a8..c59ccb2e 100644 --- a/foundations/src/signal.rs +++ b/foundations/src/signal.rs @@ -3,38 +3,38 @@ use tokio::signal::unix::{Signal, SignalKind}; #[derive(Default)] pub struct SignalHandler { - signals: Vec<(SignalKind, Signal)>, + signals: Vec<(SignalKind, Signal)>, } impl SignalHandler { - pub fn new() -> Self { - Self::default() - } + pub fn new() -> Self { + Self::default() + } - pub fn with_signal(&mut self, kind: SignalKind) -> &mut Self { - if self.signals.iter().any(|(k, _)| k == &kind) { - return self; - } + pub fn with_signal(&mut self, kind: SignalKind) -> &mut Self { + if self.signals.iter().any(|(k, _)| k == &kind) { + return self; + } - let signal = tokio::signal::unix::signal(kind).expect("failed to create signal"); + let signal = tokio::signal::unix::signal(kind).expect("failed to create signal"); - self.signals.push((kind, 
signal)); + self.signals.push((kind, signal)); - self - } + self + } - pub async fn recv(&mut self) -> Option { - if self.signals.is_empty() { - return None; - } + pub async fn recv(&mut self) -> Option { + if self.signals.is_empty() { + return None; + } - let (item, _, _) = futures::future::select_all( - self.signals - .iter_mut() - .map(|(kind, signal)| Box::pin(signal.recv().map(|_| *kind))), - ) - .await; + let (item, _, _) = futures::future::select_all( + self.signals + .iter_mut() + .map(|(kind, signal)| Box::pin(signal.recv().map(|_| *kind))), + ) + .await; - Some(item) - } + Some(item) + } } diff --git a/foundations/src/telementry/env_filter/directive.rs b/foundations/src/telementry/env_filter/directive.rs index f4a85e78..afc12dc8 100644 --- a/foundations/src/telementry/env_filter/directive.rs +++ b/foundations/src/telementry/env_filter/directive.rs @@ -1,15 +1,18 @@ #![allow(dead_code)] +use core::cmp::Ordering; +use core::iter::FromIterator; +use core::str::FromStr; +use core::{fmt, slice}; use std::vec; -use tracing::metadata::ParseLevelFilterError; -use tracing_subscriber::filter::LevelFilter; -use core::{cmp::Ordering, fmt, iter::FromIterator, slice, str::FromStr}; +use tracing::metadata::ParseLevelFilterError; use tracing::{Level, Metadata}; +use tracing_subscriber::filter::LevelFilter; /// Indicates that a string could not be parsed as a filtering directive. #[derive(Debug)] pub struct ParseError { - kind: ParseErrorKind, + kind: ParseErrorKind, } /// A directive which will statically enable or disable a given callsite. @@ -17,429 +20,405 @@ pub struct ParseError { /// Unlike a dynamic directive, this can be cached by the callsite. 
#[derive(Debug, PartialEq, Eq, Clone)] pub(crate) struct StaticDirective { - pub(crate) target: Option, - pub(crate) field_names: Vec, - pub(crate) level: LevelFilter, + pub(crate) target: Option, + pub(crate) field_names: Vec, + pub(crate) level: LevelFilter, } pub(crate) type FilterVec = Vec; #[derive(Debug, PartialEq, Clone)] pub(crate) struct DirectiveSet { - directives: FilterVec, - pub(crate) max_level: LevelFilter, + directives: FilterVec, + pub(crate) max_level: LevelFilter, } pub(crate) trait Match { - fn cares_about(&self, meta: &Metadata<'_>) -> bool; - fn level(&self) -> &LevelFilter; + fn cares_about(&self, meta: &Metadata<'_>) -> bool; + fn level(&self) -> &LevelFilter; } #[derive(Debug)] enum ParseErrorKind { - Field(Box), - Level(ParseLevelFilterError), - Other(Option<&'static str>), + Field(Box), + Level(ParseLevelFilterError), + Other(Option<&'static str>), } // === impl DirectiveSet === impl DirectiveSet { - // this is only used by `env-filter`. - pub(crate) fn is_empty(&self) -> bool { - self.directives.is_empty() - } - - pub(crate) fn iter(&self) -> slice::Iter<'_, T> { - self.directives.iter() - } + // this is only used by `env-filter`. + pub(crate) fn is_empty(&self) -> bool { + self.directives.is_empty() + } + + pub(crate) fn iter(&self) -> slice::Iter<'_, T> { + self.directives.iter() + } } impl Default for DirectiveSet { - fn default() -> Self { - Self { - directives: FilterVec::new(), - max_level: LevelFilter::OFF, - } - } + fn default() -> Self { + Self { + directives: FilterVec::new(), + max_level: LevelFilter::OFF, + } + } } impl DirectiveSet { - pub(crate) fn directives(&self) -> impl Iterator { - self.directives.iter() - } - - pub(crate) fn directives_for<'a>( - &'a self, - metadata: &'a Metadata<'a>, - ) -> impl Iterator + 'a { - self.directives().filter(move |d| d.cares_about(metadata)) - } - - pub(crate) fn add(&mut self, directive: T) { - // does this directive enable a more verbose level than the current - // max? 
if so, update the max level. - let level = *directive.level(); - if level > self.max_level { - self.max_level = level; - } - // insert the directive into the vec of directives, ordered by - // specificity (length of target + number of field filters). this - // ensures that, when finding a directive to match a span or event, we - // search the directive set in most specific first order. - match self.directives.binary_search(&directive) { - Ok(i) => self.directives[i] = directive, - Err(i) => self.directives.insert(i, directive), - } - } - - #[cfg(test)] - pub(crate) fn into_vec(self) -> FilterVec { - self.directives - } + pub(crate) fn directives(&self) -> impl Iterator { + self.directives.iter() + } + + pub(crate) fn directives_for<'a>(&'a self, metadata: &'a Metadata<'a>) -> impl Iterator + 'a { + self.directives().filter(move |d| d.cares_about(metadata)) + } + + pub(crate) fn add(&mut self, directive: T) { + // does this directive enable a more verbose level than the current + // max? if so, update the max level. + let level = *directive.level(); + if level > self.max_level { + self.max_level = level; + } + // insert the directive into the vec of directives, ordered by + // specificity (length of target + number of field filters). this + // ensures that, when finding a directive to match a span or event, we + // search the directive set in most specific first order. 
+ match self.directives.binary_search(&directive) { + Ok(i) => self.directives[i] = directive, + Err(i) => self.directives.insert(i, directive), + } + } + + #[cfg(test)] + pub(crate) fn into_vec(self) -> FilterVec { + self.directives + } } impl FromIterator for DirectiveSet { - fn from_iter>(iter: I) -> Self { - let mut this = Self::default(); - this.extend(iter); - this - } + fn from_iter>(iter: I) -> Self { + let mut this = Self::default(); + this.extend(iter); + this + } } impl Extend for DirectiveSet { - fn extend>(&mut self, iter: I) { - for directive in iter.into_iter() { - self.add(directive); - } - } + fn extend>(&mut self, iter: I) { + for directive in iter.into_iter() { + self.add(directive); + } + } } impl IntoIterator for DirectiveSet { - type Item = T; - - type IntoIter = vec::IntoIter; + type IntoIter = vec::IntoIter; + type Item = T; - fn into_iter(self) -> Self::IntoIter { - self.directives.into_iter() - } + fn into_iter(self) -> Self::IntoIter { + self.directives.into_iter() + } } // === impl Statics === impl DirectiveSet { - pub(crate) fn enabled(&self, meta: &Metadata<'_>) -> bool { - let level = meta.level(); - match self.directives_for(meta).next() { - Some(d) => d.level >= *level, - None => false, - } - } - - /// Same as `enabled` above, but skips `Directive`'s with fields. - pub(crate) fn target_enabled(&self, target: &str, level: &Level) -> bool { - match self.directives_for_target(target).next() { - Some(d) => d.level >= *level, - None => false, - } - } - - pub(crate) fn directives_for_target<'a>( - &'a self, - target: &'a str, - ) -> impl Iterator + 'a { - self.directives() - .filter(move |d| d.cares_about_target(target)) - } + pub(crate) fn enabled(&self, meta: &Metadata<'_>) -> bool { + let level = meta.level(); + match self.directives_for(meta).next() { + Some(d) => d.level >= *level, + None => false, + } + } + + /// Same as `enabled` above, but skips `Directive`'s with fields. 
+ pub(crate) fn target_enabled(&self, target: &str, level: &Level) -> bool { + match self.directives_for_target(target).next() { + Some(d) => d.level >= *level, + None => false, + } + } + + pub(crate) fn directives_for_target<'a>(&'a self, target: &'a str) -> impl Iterator + 'a { + self.directives().filter(move |d| d.cares_about_target(target)) + } } // === impl StaticDirective === impl StaticDirective { - pub(crate) fn new( - target: Option, - field_names: Vec, - level: LevelFilter, - ) -> Self { - Self { - target, - field_names, - level, - } - } - - pub(crate) fn cares_about_target(&self, to_check: &str) -> bool { - // Does this directive have a target filter, and does it match the - // metadata's target? - if let Some(ref target) = self.target { - if !to_check.starts_with(&target[..]) { - return false; - } - } - - if !self.field_names.is_empty() { - return false; - } - - true - } + pub(crate) fn new(target: Option, field_names: Vec, level: LevelFilter) -> Self { + Self { + target, + field_names, + level, + } + } + + pub(crate) fn cares_about_target(&self, to_check: &str) -> bool { + // Does this directive have a target filter, and does it match the + // metadata's target? + if let Some(ref target) = self.target { + if !to_check.starts_with(&target[..]) { + return false; + } + } + + if !self.field_names.is_empty() { + return false; + } + + true + } } impl Ord for StaticDirective { - fn cmp(&self, other: &StaticDirective) -> Ordering { - // We attempt to order directives by how "specific" they are. This - // ensures that we try the most specific directives first when - // attempting to match a piece of metadata. - - // First, we compare based on whether a target is specified, and the - // lengths of those targets if both have targets. - let ordering = self - .target - .as_ref() - .map(String::len) - .cmp(&other.target.as_ref().map(String::len)) - // Then we compare how many field names are matched by each directive. 
- .then_with(|| self.field_names.len().cmp(&other.field_names.len())) - // Finally, we fall back to lexicographical ordering if the directives are - // equally specific. Although this is no longer semantically important, - // we need to define a total ordering to determine the directive's place - // in the BTreeMap. - .then_with(|| { - self.target - .cmp(&other.target) - .then_with(|| self.field_names[..].cmp(&other.field_names[..])) - }) - .reverse(); - - #[cfg(debug_assertions)] - { - if ordering == Ordering::Equal { - debug_assert_eq!( - self.target, other.target, - "invariant violated: Ordering::Equal must imply a.target == b.target" - ); - debug_assert_eq!( - self.field_names, other.field_names, - "invariant violated: Ordering::Equal must imply a.field_names == b.field_names" - ); - } - } - - ordering - } + fn cmp(&self, other: &StaticDirective) -> Ordering { + // We attempt to order directives by how "specific" they are. This + // ensures that we try the most specific directives first when + // attempting to match a piece of metadata. + + // First, we compare based on whether a target is specified, and the + // lengths of those targets if both have targets. + let ordering = self + .target + .as_ref() + .map(String::len) + .cmp(&other.target.as_ref().map(String::len)) + // Then we compare how many field names are matched by each directive. + .then_with(|| self.field_names.len().cmp(&other.field_names.len())) + // Finally, we fall back to lexicographical ordering if the directives are + // equally specific. Although this is no longer semantically important, + // we need to define a total ordering to determine the directive's place + // in the BTreeMap. 
+ .then_with(|| { + self.target + .cmp(&other.target) + .then_with(|| self.field_names[..].cmp(&other.field_names[..])) + }) + .reverse(); + + #[cfg(debug_assertions)] + { + if ordering == Ordering::Equal { + debug_assert_eq!( + self.target, other.target, + "invariant violated: Ordering::Equal must imply a.target == b.target" + ); + debug_assert_eq!( + self.field_names, other.field_names, + "invariant violated: Ordering::Equal must imply a.field_names == b.field_names" + ); + } + } + + ordering + } } impl PartialOrd for StaticDirective { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } } impl Match for StaticDirective { - fn cares_about(&self, meta: &Metadata<'_>) -> bool { - // Does this directive have a target filter, and does it match the - // metadata's target? - if let Some(ref target) = self.target { - if !meta.target().starts_with(&target[..]) { - return false; - } - } - - if meta.is_event() && !self.field_names.is_empty() { - let fields = meta.fields(); - for name in &self.field_names { - if fields.field(name).is_none() { - return false; - } - } - } - - true - } - - fn level(&self) -> &LevelFilter { - &self.level - } + fn cares_about(&self, meta: &Metadata<'_>) -> bool { + // Does this directive have a target filter, and does it match the + // metadata's target? 
+ if let Some(ref target) = self.target { + if !meta.target().starts_with(&target[..]) { + return false; + } + } + + if meta.is_event() && !self.field_names.is_empty() { + let fields = meta.fields(); + for name in &self.field_names { + if fields.field(name).is_none() { + return false; + } + } + } + + true + } + + fn level(&self) -> &LevelFilter { + &self.level + } } impl Default for StaticDirective { - fn default() -> Self { - StaticDirective { - target: None, - field_names: Vec::new(), - level: LevelFilter::ERROR, - } - } + fn default() -> Self { + StaticDirective { + target: None, + field_names: Vec::new(), + level: LevelFilter::ERROR, + } + } } impl fmt::Display for StaticDirective { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut wrote_any = false; - if let Some(ref target) = self.target { - fmt::Display::fmt(target, f)?; - wrote_any = true; - } - - if !self.field_names.is_empty() { - f.write_str("[")?; - - let mut fields = self.field_names.iter(); - if let Some(field) = fields.next() { - write!(f, "{{{}", field)?; - for field in fields { - write!(f, ",{}", field)?; - } - f.write_str("}")?; - } - - f.write_str("]")?; - wrote_any = true; - } - - if wrote_any { - f.write_str("=")?; - } - - fmt::Display::fmt(&self.level, f) - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut wrote_any = false; + if let Some(ref target) = self.target { + fmt::Display::fmt(target, f)?; + wrote_any = true; + } + + if !self.field_names.is_empty() { + f.write_str("[")?; + + let mut fields = self.field_names.iter(); + if let Some(field) = fields.next() { + write!(f, "{{{}", field)?; + for field in fields { + write!(f, ",{}", field)?; + } + f.write_str("}")?; + } + + f.write_str("]")?; + wrote_any = true; + } + + if wrote_any { + f.write_str("=")?; + } + + fmt::Display::fmt(&self.level, f) + } } impl FromStr for StaticDirective { - type Err = ParseError; - - fn from_str(s: &str) -> Result { - // This method parses a filtering directive in one 
of the following - // forms: - // - // * `foo=trace` (TARGET=LEVEL) - // * `foo[{bar,baz}]=info` (TARGET[{FIELD,+}]=LEVEL) - // * `trace` (bare LEVEL) - // * `foo` (bare TARGET) - let mut split = s.split('='); - let part0 = split - .next() - .ok_or_else(|| ParseError::msg("string must not be empty"))?; - - // Directive includes an `=`: - // * `foo=trace` - // * `foo[{bar}]=trace` - // * `foo[{bar,baz}]=trace` - if let Some(part1) = split.next() { - if split.next().is_some() { - return Err(ParseError::msg( - "too many '=' in filter directive, expected 0 or 1", - )); - } - - let mut split = part0.split("[{"); - let target = split.next().map(String::from); - let mut field_names = Vec::new(); - // Directive includes fields: - // * `foo[{bar}]=trace` - // * `foo[{bar,baz}]=trace` - if let Some(maybe_fields) = split.next() { - if split.next().is_some() { - return Err(ParseError::msg( - "too many '[{' in filter directive, expected 0 or 1", - )); - } - - if !maybe_fields.ends_with("}]") { - return Err(ParseError::msg("expected fields list to end with '}]'")); - } - - let fields = maybe_fields - .trim_end_matches("}]") - .split(',') - .filter_map(|s| { - if s.is_empty() { - None - } else { - Some(String::from(s)) - } - }); - field_names.extend(fields); - }; - let level = part1.parse()?; - return Ok(Self { - level, - field_names, - target, - }); - } - - // Okay, the part after the `=` was empty, the directive is either a - // bare level or a bare target. 
- // * `foo` - // * `info` - Ok(match part0.parse::() { - Ok(level) => Self { - level, - target: None, - field_names: Vec::new(), - }, - Err(_) => Self { - target: Some(String::from(part0)), - level: LevelFilter::TRACE, - field_names: Vec::new(), - }, - }) - } + type Err = ParseError; + + fn from_str(s: &str) -> Result { + // This method parses a filtering directive in one of the following + // forms: + // + // * `foo=trace` (TARGET=LEVEL) + // * `foo[{bar,baz}]=info` (TARGET[{FIELD,+}]=LEVEL) + // * `trace` (bare LEVEL) + // * `foo` (bare TARGET) + let mut split = s.split('='); + let part0 = split.next().ok_or_else(|| ParseError::msg("string must not be empty"))?; + + // Directive includes an `=`: + // * `foo=trace` + // * `foo[{bar}]=trace` + // * `foo[{bar,baz}]=trace` + if let Some(part1) = split.next() { + if split.next().is_some() { + return Err(ParseError::msg("too many '=' in filter directive, expected 0 or 1")); + } + + let mut split = part0.split("[{"); + let target = split.next().map(String::from); + let mut field_names = Vec::new(); + // Directive includes fields: + // * `foo[{bar}]=trace` + // * `foo[{bar,baz}]=trace` + if let Some(maybe_fields) = split.next() { + if split.next().is_some() { + return Err(ParseError::msg("too many '[{' in filter directive, expected 0 or 1")); + } + + if !maybe_fields.ends_with("}]") { + return Err(ParseError::msg("expected fields list to end with '}]'")); + } + + let fields = maybe_fields + .trim_end_matches("}]") + .split(',') + .filter_map(|s| if s.is_empty() { None } else { Some(String::from(s)) }); + field_names.extend(fields); + }; + let level = part1.parse()?; + return Ok(Self { + level, + field_names, + target, + }); + } + + // Okay, the part after the `=` was empty, the directive is either a + // bare level or a bare target. 
+ // * `foo` + // * `info` + Ok(match part0.parse::() { + Ok(level) => Self { + level, + target: None, + field_names: Vec::new(), + }, + Err(_) => Self { + target: Some(String::from(part0)), + level: LevelFilter::TRACE, + field_names: Vec::new(), + }, + }) + } } // === impl ParseError === impl ParseError { - pub(crate) fn new() -> Self { - ParseError { - kind: ParseErrorKind::Other(None), - } - } - - pub(crate) fn msg(s: &'static str) -> Self { - ParseError { - kind: ParseErrorKind::Other(Some(s)), - } - } + pub(crate) fn new() -> Self { + ParseError { + kind: ParseErrorKind::Other(None), + } + } + + pub(crate) fn msg(s: &'static str) -> Self { + ParseError { + kind: ParseErrorKind::Other(Some(s)), + } + } } impl fmt::Display for ParseError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.kind { - ParseErrorKind::Other(None) => f.pad("invalid filter directive"), - ParseErrorKind::Other(Some(msg)) => write!(f, "invalid filter directive: {}", msg), - ParseErrorKind::Level(ref l) => l.fmt(f), - ParseErrorKind::Field(ref e) => write!(f, "invalid field filter: {}", e), - } - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.kind { + ParseErrorKind::Other(None) => f.pad("invalid filter directive"), + ParseErrorKind::Other(Some(msg)) => write!(f, "invalid filter directive: {}", msg), + ParseErrorKind::Level(ref l) => l.fmt(f), + ParseErrorKind::Field(ref e) => write!(f, "invalid field filter: {}", e), + } + } } impl std::error::Error for ParseError { - fn description(&self) -> &str { - "invalid filter directive" - } - - fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { - match self.kind { - ParseErrorKind::Other(_) => None, - ParseErrorKind::Level(ref l) => Some(l), - ParseErrorKind::Field(ref n) => Some(n.as_ref()), - } - } + fn description(&self) -> &str { + "invalid filter directive" + } + + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + match self.kind { + ParseErrorKind::Other(_) 
=> None, + ParseErrorKind::Level(ref l) => Some(l), + ParseErrorKind::Field(ref n) => Some(n.as_ref()), + } + } } impl From> for ParseError { - fn from(e: Box) -> Self { - Self { - kind: ParseErrorKind::Field(e), - } - } + fn from(e: Box) -> Self { + Self { + kind: ParseErrorKind::Field(e), + } + } } impl From for ParseError { - fn from(l: ParseLevelFilterError) -> Self { - Self { - kind: ParseErrorKind::Level(l), - } - } + fn from(l: ParseLevelFilterError) -> Self { + Self { + kind: ParseErrorKind::Level(l), + } + } } diff --git a/foundations/src/telementry/env_filter/env/builder.rs b/foundations/src/telementry/env_filter/env/builder.rs index 8ed552c3..46b433c8 100644 --- a/foundations/src/telementry/env_filter/env/builder.rs +++ b/foundations/src/telementry/env_filter/env/builder.rs @@ -1,325 +1,317 @@ -use super::{ - directive::{self, Directive}, - EnvFilter, FromEnvError, -}; use std::env; + use thread_local::ThreadLocal; use tracing::level_filters::STATIC_MAX_LEVEL; +use super::directive::{self, Directive}; +use super::{EnvFilter, FromEnvError}; + /// A [builder] for constructing new [`EnvFilter`]s. /// /// [builder]: https://rust-unofficial.github.io/patterns/patterns/creational/builder.html #[derive(Debug, Clone)] #[must_use] pub struct Builder { - regex: bool, - env: Option, - default_directive: Option, + regex: bool, + env: Option, + default_directive: Option, } impl Builder { - /// Sets whether span field values can be matched with regular expressions. - /// - /// If this is `true`, field filter directives will be interpreted as - /// regular expressions if they are not able to be interpreted as a `bool`, - /// `i64`, `u64`, or `f64` literal. If this is `false,` those field values - /// will be interpreted as literal [`std::fmt::Debug`] output instead. - /// - /// By default, regular expressions are enabled. - /// - /// **Note**: when [`EnvFilter`]s are constructed from untrusted inputs, - /// disabling regular expressions is strongly encouraged. 
- pub fn with_regex(self, regex: bool) -> Self { - Self { regex, ..self } - } + /// Sets whether span field values can be matched with regular expressions. + /// + /// If this is `true`, field filter directives will be interpreted as + /// regular expressions if they are not able to be interpreted as a `bool`, + /// `i64`, `u64`, or `f64` literal. If this is `false,` those field values + /// will be interpreted as literal [`std::fmt::Debug`] output instead. + /// + /// By default, regular expressions are enabled. + /// + /// **Note**: when [`EnvFilter`]s are constructed from untrusted inputs, + /// disabling regular expressions is strongly encouraged. + pub fn with_regex(self, regex: bool) -> Self { + Self { regex, ..self } + } - /// Sets a default [filtering directive] that will be added to the filter if - /// the parsed string or environment variable contains no filter directives. - /// - /// By default, there is no default directive. - /// - /// # Examples - /// - /// If [`parse`], [`parse_lossy`], [`from_env`], or [`from_env_lossy`] are - /// called with an empty string or environment variable, the default - /// directive is used instead: - /// - /// ```rust - /// # fn main() -> Result<(), Box> { - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// - /// let filter = EnvFilter::builder() - /// .with_default_directive(LevelFilter::INFO.into()) - /// .parse("")?; - /// - /// assert_eq!(format!("{}", filter), "info"); - /// # Ok(()) } - /// ``` - /// - /// Note that the `lossy` variants ([`parse_lossy`] and [`from_env_lossy`]) - /// will ignore any invalid directives. 
If all directives in a filter - /// string or environment variable are invalid, those methods will also use - /// the default directive: - /// - /// ```rust - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// - /// let filter = EnvFilter::builder() - /// .with_default_directive(LevelFilter::INFO.into()) - /// .parse_lossy("some_target=fake level,foo::bar=lolwut"); - /// - /// assert_eq!(format!("{}", filter), "info"); - /// ``` - /// - /// - /// If the string or environment variable contains valid filtering - /// directives, the default directive is not used: - /// - /// ```rust - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// - /// let filter = EnvFilter::builder() - /// .with_default_directive(LevelFilter::INFO.into()) - /// .parse_lossy("foo=trace"); - /// - /// // The default directive is *not* used: - /// assert_eq!(format!("{}", filter), "foo=trace"); - /// ``` - /// - /// Parsing a more complex default directive from a string: - /// - /// ```rust - /// # fn main() -> Result<(), Box> { - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// - /// let default = "myapp=debug".parse() - /// .expect("hard-coded default directive should be valid"); - /// - /// let filter = EnvFilter::builder() - /// .with_default_directive(default) - /// .parse("")?; - /// - /// assert_eq!(format!("{}", filter), "myapp=debug"); - /// # Ok(()) } - /// ``` - /// - /// [`parse_lossy`]: Self::parse_lossy - /// [`from_env_lossy`]: Self::from_env_lossy - /// [`parse`]: Self::parse - /// [`from_env`]: Self::from_env - pub fn with_default_directive(self, default_directive: Directive) -> Self { - Self { - default_directive: Some(default_directive), - ..self - } - } + /// Sets a default [filtering directive] that will be added to the filter if + /// the parsed string or environment variable contains no filter directives. + /// + /// By default, there is no default directive. 
+ /// + /// # Examples + /// + /// If [`parse`], [`parse_lossy`], [`from_env`], or [`from_env_lossy`] are + /// called with an empty string or environment variable, the default + /// directive is used instead: + /// + /// ```rust + /// # fn main() -> Result<(), Box> { + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// + /// let filter = EnvFilter::builder() + /// .with_default_directive(LevelFilter::INFO.into()) + /// .parse("")?; + /// + /// assert_eq!(format!("{}", filter), "info"); + /// # Ok(()) } + /// ``` + /// + /// Note that the `lossy` variants ([`parse_lossy`] and [`from_env_lossy`]) + /// will ignore any invalid directives. If all directives in a filter + /// string or environment variable are invalid, those methods will also use + /// the default directive: + /// + /// ```rust + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// + /// let filter = EnvFilter::builder() + /// .with_default_directive(LevelFilter::INFO.into()) + /// .parse_lossy("some_target=fake level,foo::bar=lolwut"); + /// + /// assert_eq!(format!("{}", filter), "info"); + /// ``` + /// + /// + /// If the string or environment variable contains valid filtering + /// directives, the default directive is not used: + /// + /// ```rust + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// + /// let filter = EnvFilter::builder() + /// .with_default_directive(LevelFilter::INFO.into()) + /// .parse_lossy("foo=trace"); + /// + /// // The default directive is *not* used: + /// assert_eq!(format!("{}", filter), "foo=trace"); + /// ``` + /// + /// Parsing a more complex default directive from a string: + /// + /// ```rust + /// # fn main() -> Result<(), Box> { + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// + /// let default = "myapp=debug".parse() + /// .expect("hard-coded default directive should be valid"); + /// + /// let filter = EnvFilter::builder() + /// .with_default_directive(default) + /// .parse("")?; + /// + /// 
assert_eq!(format!("{}", filter), "myapp=debug"); + /// # Ok(()) } + /// ``` + /// + /// [`parse_lossy`]: Self::parse_lossy + /// [`from_env_lossy`]: Self::from_env_lossy + /// [`parse`]: Self::parse + /// [`from_env`]: Self::from_env + pub fn with_default_directive(self, default_directive: Directive) -> Self { + Self { + default_directive: Some(default_directive), + ..self + } + } - /// Sets the name of the environment variable used by the [`from_env`], - /// [`from_env_lossy`], and [`try_from_env`] methods. - /// - /// By default, this is the value of [`EnvFilter::DEFAULT_ENV`] - /// (`RUST_LOG`). - /// - /// [`from_env`]: Self::from_env - /// [`from_env_lossy`]: Self::from_env_lossy - /// [`try_from_env`]: Self::try_from_env - pub fn with_env_var(self, var: impl ToString) -> Self { - Self { - env: Some(var.to_string()), - ..self - } - } + /// Sets the name of the environment variable used by the [`from_env`], + /// [`from_env_lossy`], and [`try_from_env`] methods. + /// + /// By default, this is the value of [`EnvFilter::DEFAULT_ENV`] + /// (`RUST_LOG`). + /// + /// [`from_env`]: Self::from_env + /// [`from_env_lossy`]: Self::from_env_lossy + /// [`try_from_env`]: Self::try_from_env + pub fn with_env_var(self, var: impl ToString) -> Self { + Self { + env: Some(var.to_string()), + ..self + } + } - /// Returns a new [`EnvFilter`] from the directives in the given string, - /// *ignoring* any that are invalid. - pub fn parse_lossy>(&self, dirs: S) -> EnvFilter { - let directives = dirs - .as_ref() - .split(',') - .filter(|s| !s.is_empty()) - .filter_map(|s| match Directive::parse(s, self.regex) { - Ok(d) => Some(d), - Err(err) => { - eprintln!("ignoring `{}`: {}", s, err); - None - } - }); - self.from_directives(directives) - } + /// Returns a new [`EnvFilter`] from the directives in the given string, + /// *ignoring* any that are invalid. 
+ pub fn parse_lossy>(&self, dirs: S) -> EnvFilter { + let directives = + dirs.as_ref() + .split(',') + .filter(|s| !s.is_empty()) + .filter_map(|s| match Directive::parse(s, self.regex) { + Ok(d) => Some(d), + Err(err) => { + eprintln!("ignoring `{}`: {}", s, err); + None + } + }); + self.from_directives(directives) + } - /// Returns a new [`EnvFilter`] from the directives in the given string, - /// or an error if any are invalid. - pub fn parse>(&self, dirs: S) -> Result { - let dirs = dirs.as_ref(); - if dirs.is_empty() { - return Ok(self.from_directives(std::iter::empty())); - } - let directives = dirs - .split(',') - .filter(|s| !s.is_empty()) - .map(|s| Directive::parse(s, self.regex)) - .collect::, _>>()?; - Ok(self.from_directives(directives)) - } + /// Returns a new [`EnvFilter`] from the directives in the given string, + /// or an error if any are invalid. + pub fn parse>(&self, dirs: S) -> Result { + let dirs = dirs.as_ref(); + if dirs.is_empty() { + return Ok(self.from_directives(std::iter::empty())); + } + let directives = dirs + .split(',') + .filter(|s| !s.is_empty()) + .map(|s| Directive::parse(s, self.regex)) + .collect::, _>>()?; + Ok(self.from_directives(directives)) + } - /// Returns a new [`EnvFilter`] from the directives in the configured - /// environment variable, ignoring any directives that are invalid. - pub fn from_env_lossy(&self) -> EnvFilter { - let var = env::var(self.env_var_name()).unwrap_or_default(); - self.parse_lossy(var) - } + /// Returns a new [`EnvFilter`] from the directives in the configured + /// environment variable, ignoring any directives that are invalid. + pub fn from_env_lossy(&self) -> EnvFilter { + let var = env::var(self.env_var_name()).unwrap_or_default(); + self.parse_lossy(var) + } - /// Returns a new [`EnvFilter`] from the directives in the configured - /// environment variable. If the environment variable is unset, no directive is added. 
- /// - /// An error is returned if the environment contains invalid directives. - pub fn from_env(&self) -> Result { - let var = env::var(self.env_var_name()).unwrap_or_default(); - self.parse(var).map_err(Into::into) - } + /// Returns a new [`EnvFilter`] from the directives in the configured + /// environment variable. If the environment variable is unset, no directive + /// is added. + /// + /// An error is returned if the environment contains invalid directives. + pub fn from_env(&self) -> Result { + let var = env::var(self.env_var_name()).unwrap_or_default(); + self.parse(var).map_err(Into::into) + } - /// Returns a new [`EnvFilter`] from the directives in the configured - /// environment variable, or an error if the environment variable is not set - /// or contains invalid directives. - pub fn try_from_env(&self) -> Result { - let var = env::var(self.env_var_name())?; - self.parse(var).map_err(Into::into) - } + /// Returns a new [`EnvFilter`] from the directives in the configured + /// environment variable, or an error if the environment variable is not set + /// or contains invalid directives. + pub fn try_from_env(&self) -> Result { + let var = env::var(self.env_var_name())?; + self.parse(var).map_err(Into::into) + } - // TODO(eliza): consider making this a public API? - // Clippy doesn't love this naming, because it suggests that `from_` methods - // should not take a `Self`...but in this case, it's the `EnvFilter` that is - // being constructed "from" the directives, rather than the builder itself. - #[allow(clippy::wrong_self_convention)] - pub(super) fn from_directives( - &self, - directives: impl IntoIterator, - ) -> EnvFilter { - use tracing::Level; + // TODO(eliza): consider making this a public API? + // Clippy doesn't love this naming, because it suggests that `from_` methods + // should not take a `Self`...but in this case, it's the `EnvFilter` that is + // being constructed "from" the directives, rather than the builder itself. 
+ #[allow(clippy::wrong_self_convention)] + pub(super) fn from_directives(&self, directives: impl IntoIterator) -> EnvFilter { + use tracing::Level; - let mut directives: Vec<_> = directives.into_iter().collect(); - let mut disabled = Vec::new(); - for directive in &mut directives { - if directive.level > STATIC_MAX_LEVEL { - disabled.push(directive.clone()); - } - if !self.regex { - directive.deregexify(); - } - } + let mut directives: Vec<_> = directives.into_iter().collect(); + let mut disabled = Vec::new(); + for directive in &mut directives { + if directive.level > STATIC_MAX_LEVEL { + disabled.push(directive.clone()); + } + if !self.regex { + directive.deregexify(); + } + } - if !disabled.is_empty() { - #[cfg(feature = "nu_ansi_term")] - use nu_ansi_term::{Color, Style}; - // NOTE: We can't use a configured `MakeWriter` because the EnvFilter - // has no knowledge of any underlying subscriber or subscriber, which - // may or may not use a `MakeWriter`. - let warn = |msg: &str| { - #[cfg(not(feature = "nu_ansi_term"))] - let msg = format!("warning: {}", msg); - #[cfg(feature = "nu_ansi_term")] - let msg = { - let bold = Style::new().bold(); - let mut warning = Color::Yellow.paint("warning"); - warning.style_ref_mut().is_bold = true; - format!("{}{} {}", warning, bold.paint(":"), bold.paint(msg)) - }; - eprintln!("{}", msg); - }; - let ctx_prefixed = |prefix: &str, msg: &str| { - #[cfg(not(feature = "nu_ansi_term"))] - let msg = format!("{} {}", prefix, msg); - #[cfg(feature = "nu_ansi_term")] - let msg = { - let mut equal = Color::Fixed(21).paint("="); // dark blue - equal.style_ref_mut().is_bold = true; - format!(" {} {} {}", equal, Style::new().bold().paint(prefix), msg) - }; - eprintln!("{}", msg); - }; - let ctx_help = |msg| ctx_prefixed("help:", msg); - let ctx_note = |msg| ctx_prefixed("note:", msg); - let ctx = |msg: &str| { - #[cfg(not(feature = "nu_ansi_term"))] - let msg = format!("note: {}", msg); - #[cfg(feature = "nu_ansi_term")] - let msg = { - 
let mut pipe = Color::Fixed(21).paint("|"); - pipe.style_ref_mut().is_bold = true; - format!(" {} {}", pipe, msg) - }; - eprintln!("{}", msg); - }; - warn("some trace filter directives would enable traces that are disabled statically"); - for directive in disabled { - let target = if let Some(target) = &directive.target { - format!("the `{}` target", target) - } else { - "all targets".into() - }; - let level = directive - .level - .into_level() - .expect("=off would not have enabled any filters"); - ctx(&format!( - "`{}` would enable the {} level for {}", - directive, level, target - )); - } - ctx_note(&format!("the static max level is `{}`", STATIC_MAX_LEVEL)); - let help_msg = || { - let (feature, filter) = match STATIC_MAX_LEVEL.into_level() { - Some(Level::TRACE) => unreachable!( - "if the max level is trace, no static filtering features are enabled" - ), - Some(Level::DEBUG) => ("max_level_debug", Level::TRACE), - Some(Level::INFO) => ("max_level_info", Level::DEBUG), - Some(Level::WARN) => ("max_level_warn", Level::INFO), - Some(Level::ERROR) => ("max_level_error", Level::WARN), - None => return ("max_level_off", String::new()), - }; - (feature, format!("{} ", filter)) - }; - let (feature, earlier_level) = help_msg(); - ctx_help(&format!( - "to enable {}logging, remove the `{}` feature from the `tracing` crate", - earlier_level, feature - )); - } + if !disabled.is_empty() { + #[cfg(feature = "nu_ansi_term")] + use nu_ansi_term::{Color, Style}; + // NOTE: We can't use a configured `MakeWriter` because the EnvFilter + // has no knowledge of any underlying subscriber or subscriber, which + // may or may not use a `MakeWriter`. 
+ let warn = |msg: &str| { + #[cfg(not(feature = "nu_ansi_term"))] + let msg = format!("warning: {}", msg); + #[cfg(feature = "nu_ansi_term")] + let msg = { + let bold = Style::new().bold(); + let mut warning = Color::Yellow.paint("warning"); + warning.style_ref_mut().is_bold = true; + format!("{}{} {}", warning, bold.paint(":"), bold.paint(msg)) + }; + eprintln!("{}", msg); + }; + let ctx_prefixed = |prefix: &str, msg: &str| { + #[cfg(not(feature = "nu_ansi_term"))] + let msg = format!("{} {}", prefix, msg); + #[cfg(feature = "nu_ansi_term")] + let msg = { + let mut equal = Color::Fixed(21).paint("="); // dark blue + equal.style_ref_mut().is_bold = true; + format!(" {} {} {}", equal, Style::new().bold().paint(prefix), msg) + }; + eprintln!("{}", msg); + }; + let ctx_help = |msg| ctx_prefixed("help:", msg); + let ctx_note = |msg| ctx_prefixed("note:", msg); + let ctx = |msg: &str| { + #[cfg(not(feature = "nu_ansi_term"))] + let msg = format!("note: {}", msg); + #[cfg(feature = "nu_ansi_term")] + let msg = { + let mut pipe = Color::Fixed(21).paint("|"); + pipe.style_ref_mut().is_bold = true; + format!(" {} {}", pipe, msg) + }; + eprintln!("{}", msg); + }; + warn("some trace filter directives would enable traces that are disabled statically"); + for directive in disabled { + let target = if let Some(target) = &directive.target { + format!("the `{}` target", target) + } else { + "all targets".into() + }; + let level = directive.level.into_level().expect("=off would not have enabled any filters"); + ctx(&format!("`{}` would enable the {} level for {}", directive, level, target)); + } + ctx_note(&format!("the static max level is `{}`", STATIC_MAX_LEVEL)); + let help_msg = || { + let (feature, filter) = match STATIC_MAX_LEVEL.into_level() { + Some(Level::TRACE) => { + unreachable!("if the max level is trace, no static filtering features are enabled") + } + Some(Level::DEBUG) => ("max_level_debug", Level::TRACE), + Some(Level::INFO) => ("max_level_info", Level::DEBUG), + 
Some(Level::WARN) => ("max_level_warn", Level::INFO), + Some(Level::ERROR) => ("max_level_error", Level::WARN), + None => return ("max_level_off", String::new()), + }; + (feature, format!("{} ", filter)) + }; + let (feature, earlier_level) = help_msg(); + ctx_help(&format!( + "to enable {}logging, remove the `{}` feature from the `tracing` crate", + earlier_level, feature + )); + } - let (dynamics, statics) = Directive::make_tables(directives); - let has_dynamics = !dynamics.is_empty(); + let (dynamics, statics) = Directive::make_tables(directives); + let has_dynamics = !dynamics.is_empty(); - let mut filter = EnvFilter { - statics, - dynamics, - has_dynamics, - by_id: Default::default(), - by_cs: Default::default(), - scope: ThreadLocal::new(), - regex: self.regex, - }; + let mut filter = EnvFilter { + statics, + dynamics, + has_dynamics, + by_id: Default::default(), + by_cs: Default::default(), + scope: ThreadLocal::new(), + regex: self.regex, + }; - if !has_dynamics && filter.statics.is_empty() { - if let Some(ref default) = self.default_directive { - filter = filter.add_directive(default.clone()); - } - } + if !has_dynamics && filter.statics.is_empty() { + if let Some(ref default) = self.default_directive { + filter = filter.add_directive(default.clone()); + } + } - filter - } + filter + } - fn env_var_name(&self) -> &str { - self.env.as_deref().unwrap_or(EnvFilter::DEFAULT_ENV) - } + fn env_var_name(&self) -> &str { + self.env.as_deref().unwrap_or(EnvFilter::DEFAULT_ENV) + } } impl Default for Builder { - fn default() -> Self { - Self { - regex: true, - env: None, - default_directive: None, - } - } + fn default() -> Self { + Self { + regex: true, + env: None, + default_directive: None, + } + } } diff --git a/foundations/src/telementry/env_filter/env/directive.rs b/foundations/src/telementry/env_filter/env/directive.rs index 548c6207..abdc27fa 100644 --- a/foundations/src/telementry/env_filter/env/directive.rs +++ 
b/foundations/src/telementry/env_filter/env/directive.rs @@ -1,23 +1,26 @@ -pub(crate) use super::super::directive::{FilterVec, ParseError, StaticDirective}; -use super::super::{ - directive::{DirectiveSet, Match}, - env::{field, FieldMap}, -}; +use std::cmp::Ordering; +use std::fmt; +use std::iter::FromIterator; +use std::str::FromStr; + use once_cell::sync::Lazy; use regex::Regex; -use std::{cmp::Ordering, fmt, iter::FromIterator, str::FromStr}; use tracing::{span, Level, Metadata}; use tracing_subscriber::filter::LevelFilter; +use super::super::directive::{DirectiveSet, Match}; +pub(crate) use super::super::directive::{FilterVec, ParseError, StaticDirective}; +use super::super::env::{field, FieldMap}; + /// A single filtering directive. // TODO(eliza): add a builder for programmatically constructing directives? #[derive(Clone, Debug, Eq, PartialEq)] #[cfg_attr(docsrs, doc(cfg(feature = "env-filter")))] pub struct Directive { - in_span: Option, - fields: Vec, - pub(crate) target: Option, - pub(crate) level: LevelFilter, + in_span: Option, + fields: Vec, + pub(crate) target: Option, + pub(crate) level: LevelFilter, } /// A set of dynamic filtering directives. @@ -31,98 +34,84 @@ pub(crate) type SpanMatcher = MatchSet; #[derive(Debug, PartialEq, Eq)] pub(crate) struct MatchSet { - field_matches: FilterVec, - base_level: LevelFilter, + field_matches: FilterVec, + base_level: LevelFilter, } impl Directive { - pub(super) fn has_name(&self) -> bool { - self.in_span.is_some() - } - - pub(super) fn has_fields(&self) -> bool { - !self.fields.is_empty() - } - - pub(super) fn to_static(&self) -> Option { - if !self.is_static() { - return None; - } - - // TODO(eliza): these strings are all immutable; we should consider - // `Arc`ing them to make this more efficient... 
- let field_names = self.fields.iter().map(field::Match::name).collect(); - - Some(StaticDirective::new( - self.target.clone(), - field_names, - self.level, - )) - } - - fn is_static(&self) -> bool { - !self.has_name() && !self.fields.iter().any(field::Match::has_value) - } - - pub(super) fn is_dynamic(&self) -> bool { - self.has_name() || self.has_fields() - } - - pub(crate) fn field_matcher(&self, meta: &Metadata<'_>) -> Option { - let fieldset = meta.fields(); - let fields = self - .fields - .iter() - .filter_map( - |field::Match { - ref name, - ref value, - }| { - if let Some(field) = fieldset.field(name) { - let value = value.as_ref().cloned()?; - Some(Ok((field, value))) - } else { - Some(Err(())) - } - }, - ) - .collect::, ()>>() - .ok()?; - Some(field::CallsiteMatch { - fields, - level: self.level, - }) - } - - pub(super) fn make_tables( - directives: impl IntoIterator, - ) -> (Dynamics, Statics) { - // TODO(eliza): this could be made more efficient... - let (dyns, stats): (Vec, Vec) = - directives.into_iter().partition(Directive::is_dynamic); - let statics = stats - .into_iter() - .filter_map(|d| d.to_static()) - .chain(dyns.iter().filter_map(Directive::to_static)) - .collect(); - (Dynamics::from_iter(dyns), statics) - } - - pub(super) fn deregexify(&mut self) { - for field in &mut self.fields { - field.value = match field.value.take() { - Some(field::ValueMatch::Pat(pat)) => { - Some(field::ValueMatch::Debug(pat.into_debug_match())) - } - x => x, - } - } - } - - pub(super) fn parse(from: &str, regex: bool) -> Result { - static DIRECTIVE_RE: Lazy = Lazy::new(|| { - Regex::new( - r"(?x) + pub(super) fn has_name(&self) -> bool { + self.in_span.is_some() + } + + pub(super) fn has_fields(&self) -> bool { + !self.fields.is_empty() + } + + pub(super) fn to_static(&self) -> Option { + if !self.is_static() { + return None; + } + + // TODO(eliza): these strings are all immutable; we should consider + // `Arc`ing them to make this more efficient... 
+ let field_names = self.fields.iter().map(field::Match::name).collect(); + + Some(StaticDirective::new(self.target.clone(), field_names, self.level)) + } + + fn is_static(&self) -> bool { + !self.has_name() && !self.fields.iter().any(field::Match::has_value) + } + + pub(super) fn is_dynamic(&self) -> bool { + self.has_name() || self.has_fields() + } + + pub(crate) fn field_matcher(&self, meta: &Metadata<'_>) -> Option { + let fieldset = meta.fields(); + let fields = self + .fields + .iter() + .filter_map(|field::Match { ref name, ref value }| { + if let Some(field) = fieldset.field(name) { + let value = value.as_ref().cloned()?; + Some(Ok((field, value))) + } else { + Some(Err(())) + } + }) + .collect::, ()>>() + .ok()?; + Some(field::CallsiteMatch { + fields, + level: self.level, + }) + } + + pub(super) fn make_tables(directives: impl IntoIterator) -> (Dynamics, Statics) { + // TODO(eliza): this could be made more efficient... + let (dyns, stats): (Vec, Vec) = directives.into_iter().partition(Directive::is_dynamic); + let statics = stats + .into_iter() + .filter_map(|d| d.to_static()) + .chain(dyns.iter().filter_map(Directive::to_static)) + .collect(); + (Dynamics::from_iter(dyns), statics) + } + + pub(super) fn deregexify(&mut self) { + for field in &mut self.fields { + field.value = match field.value.take() { + Some(field::ValueMatch::Pat(pat)) => Some(field::ValueMatch::Debug(pat.into_debug_match())), + x => x, + } + } + } + + pub(super) fn parse(from: &str, regex: bool) -> Result { + static DIRECTIVE_RE: Lazy = Lazy::new(|| { + Regex::new( + r"(?x) ^(?P(?i:trace|debug|info|warn|error|off|[0-5]))$ | # ^^^. # `note: we match log level names case-insensitively @@ -137,17 +126,17 @@ impl Directive { )? $ ", - ) - .unwrap() - }); - static SPAN_PART_RE: Lazy = - Lazy::new(|| Regex::new(r"(?P[^\]\{]+)?(?:\{(?P[^\}]*)\})?").unwrap()); - static FIELD_FILTER_RE: Lazy = - // TODO(eliza): this doesn't _currently_ handle value matchers that include comma - // characters. 
We should fix that. - Lazy::new(|| { - Regex::new( - r"(?x) + ) + .unwrap() + }); + static SPAN_PART_RE: Lazy = + Lazy::new(|| Regex::new(r"(?P[^\]\{]+)?(?:\{(?P[^\}]*)\})?").unwrap()); + static FIELD_FILTER_RE: Lazy = + // TODO(eliza): this doesn't _currently_ handle value matchers that include comma + // characters. We should fix that. + Lazy::new(|| { + Regex::new( + r"(?x) ( # field name [[:word:]][[[:word:]]\.]* @@ -157,711 +146,678 @@ impl Directive { # trailing comma or EOS (?:,\s?|$) ", - ) - .unwrap() - }); - - let caps = DIRECTIVE_RE.captures(from).ok_or_else(ParseError::new)?; - - if let Some(level) = caps - .name("global_level") - .and_then(|s| s.as_str().parse().ok()) - { - return Ok(Directive { - level, - ..Default::default() - }); - } - - let target = caps.name("target").and_then(|c| { - let s = c.as_str(); - if s.parse::().is_ok() { - None - } else { - Some(s.to_owned()) - } - }); - - let (in_span, fields) = caps - .name("span") - .and_then(|cap| { - let cap = cap.as_str().trim_matches(|c| c == '[' || c == ']'); - let caps = SPAN_PART_RE.captures(cap)?; - let span = caps.name("name").map(|c| c.as_str().to_owned()); - let fields = caps - .name("fields") - .map(|c| { - FIELD_FILTER_RE - .find_iter(c.as_str()) - .map(|c| field::Match::parse(c.as_str(), regex)) - .collect::, _>>() - }) - .unwrap_or_else(|| Ok(Vec::new())); - Some((span, fields)) - }) - .unwrap_or_else(|| (None, Ok(Vec::new()))); - - let level = caps - .name("level") - .and_then(|l| l.as_str().parse().ok()) - // Setting the target without the level enables every level for that target - .unwrap_or(LevelFilter::TRACE); - - Ok(Self { - level, - target, - in_span, - fields: fields?, - }) - } + ) + .unwrap() + }); + + let caps = DIRECTIVE_RE.captures(from).ok_or_else(ParseError::new)?; + + if let Some(level) = caps.name("global_level").and_then(|s| s.as_str().parse().ok()) { + return Ok(Directive { + level, + ..Default::default() + }); + } + + let target = caps.name("target").and_then(|c| { + 
let s = c.as_str(); + if s.parse::().is_ok() { + None + } else { + Some(s.to_owned()) + } + }); + + let (in_span, fields) = caps + .name("span") + .and_then(|cap| { + let cap = cap.as_str().trim_matches(|c| c == '[' || c == ']'); + let caps = SPAN_PART_RE.captures(cap)?; + let span = caps.name("name").map(|c| c.as_str().to_owned()); + let fields = caps + .name("fields") + .map(|c| { + FIELD_FILTER_RE + .find_iter(c.as_str()) + .map(|c| field::Match::parse(c.as_str(), regex)) + .collect::, _>>() + }) + .unwrap_or_else(|| Ok(Vec::new())); + Some((span, fields)) + }) + .unwrap_or_else(|| (None, Ok(Vec::new()))); + + let level = caps + .name("level") + .and_then(|l| l.as_str().parse().ok()) + // Setting the target without the level enables every level for that target + .unwrap_or(LevelFilter::TRACE); + + Ok(Self { + level, + target, + in_span, + fields: fields?, + }) + } } impl Match for Directive { - fn cares_about(&self, meta: &Metadata<'_>) -> bool { - // Does this directive have a target filter, and does it match the - // metadata's target? - if let Some(ref target) = self.target { - if !meta.target().starts_with(&target[..]) { - return false; - } - } - - // Do we have a name filter, and does it match the metadata's name? - // TODO(eliza): put name globbing here? - if let Some(ref name) = self.in_span { - if name != meta.name() { - return false; - } - } - - // Does the metadata define all the fields that this directive cares about? - let actual_fields = meta.fields(); - for expected_field in &self.fields { - // Does the actual field set (from the metadata) contain this field? - if actual_fields.field(&expected_field.name).is_none() { - return false; - } - } - - true - } - - fn level(&self) -> &LevelFilter { - &self.level - } + fn cares_about(&self, meta: &Metadata<'_>) -> bool { + // Does this directive have a target filter, and does it match the + // metadata's target? 
+ if let Some(ref target) = self.target { + if !meta.target().starts_with(&target[..]) { + return false; + } + } + + // Do we have a name filter, and does it match the metadata's name? + // TODO(eliza): put name globbing here? + if let Some(ref name) = self.in_span { + if name != meta.name() { + return false; + } + } + + // Does the metadata define all the fields that this directive cares about? + let actual_fields = meta.fields(); + for expected_field in &self.fields { + // Does the actual field set (from the metadata) contain this field? + if actual_fields.field(&expected_field.name).is_none() { + return false; + } + } + + true + } + + fn level(&self) -> &LevelFilter { + &self.level + } } impl FromStr for Directive { - type Err = ParseError; - fn from_str(from: &str) -> Result { - Directive::parse(from, true) - } + type Err = ParseError; + + fn from_str(from: &str) -> Result { + Directive::parse(from, true) + } } impl Default for Directive { - fn default() -> Self { - Directive { - level: LevelFilter::OFF, - target: None, - in_span: None, - fields: Vec::new(), - } - } + fn default() -> Self { + Directive { + level: LevelFilter::OFF, + target: None, + in_span: None, + fields: Vec::new(), + } + } } impl PartialOrd for Directive { - fn partial_cmp(&self, other: &Directive) -> Option { - Some(self.cmp(other)) - } + fn partial_cmp(&self, other: &Directive) -> Option { + Some(self.cmp(other)) + } } impl Ord for Directive { - fn cmp(&self, other: &Directive) -> Ordering { - // We attempt to order directives by how "specific" they are. This - // ensures that we try the most specific directives first when - // attempting to match a piece of metadata. - - // First, we compare based on whether a target is specified, and the - // lengths of those targets if both have targets. - let ordering = self - .target - .as_ref() - .map(String::len) - .cmp(&other.target.as_ref().map(String::len)) - // Next compare based on the presence of span names. 
- .then_with(|| self.in_span.is_some().cmp(&other.in_span.is_some())) - // Then we compare how many fields are defined by each - // directive. - .then_with(|| self.fields.len().cmp(&other.fields.len())) - // Finally, we fall back to lexicographical ordering if the directives are - // equally specific. Although this is no longer semantically important, - // we need to define a total ordering to determine the directive's place - // in the BTreeMap. - .then_with(|| { - self.target - .cmp(&other.target) - .then_with(|| self.in_span.cmp(&other.in_span)) - .then_with(|| self.fields[..].cmp(&other.fields[..])) - }) - .reverse(); - - #[cfg(debug_assertions)] - { - if ordering == Ordering::Equal { - debug_assert_eq!( - self.target, other.target, - "invariant violated: Ordering::Equal must imply a.target == b.target" - ); - debug_assert_eq!( - self.in_span, other.in_span, - "invariant violated: Ordering::Equal must imply a.in_span == b.in_span" - ); - debug_assert_eq!( - self.fields, other.fields, - "invariant violated: Ordering::Equal must imply a.fields == b.fields" - ); - } - } - - ordering - } + fn cmp(&self, other: &Directive) -> Ordering { + // We attempt to order directives by how "specific" they are. This + // ensures that we try the most specific directives first when + // attempting to match a piece of metadata. + + // First, we compare based on whether a target is specified, and the + // lengths of those targets if both have targets. + let ordering = self + .target + .as_ref() + .map(String::len) + .cmp(&other.target.as_ref().map(String::len)) + // Next compare based on the presence of span names. + .then_with(|| self.in_span.is_some().cmp(&other.in_span.is_some())) + // Then we compare how many fields are defined by each + // directive. + .then_with(|| self.fields.len().cmp(&other.fields.len())) + // Finally, we fall back to lexicographical ordering if the directives are + // equally specific. 
Although this is no longer semantically important, + // we need to define a total ordering to determine the directive's place + // in the BTreeMap. + .then_with(|| { + self.target + .cmp(&other.target) + .then_with(|| self.in_span.cmp(&other.in_span)) + .then_with(|| self.fields[..].cmp(&other.fields[..])) + }) + .reverse(); + + #[cfg(debug_assertions)] + { + if ordering == Ordering::Equal { + debug_assert_eq!( + self.target, other.target, + "invariant violated: Ordering::Equal must imply a.target == b.target" + ); + debug_assert_eq!( + self.in_span, other.in_span, + "invariant violated: Ordering::Equal must imply a.in_span == b.in_span" + ); + debug_assert_eq!( + self.fields, other.fields, + "invariant violated: Ordering::Equal must imply a.fields == b.fields" + ); + } + } + + ordering + } } impl fmt::Display for Directive { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut wrote_any = false; - if let Some(ref target) = self.target { - fmt::Display::fmt(target, f)?; - wrote_any = true; - } - - if self.in_span.is_some() || !self.fields.is_empty() { - f.write_str("[")?; - - if let Some(ref span) = self.in_span { - fmt::Display::fmt(span, f)?; - } - - let mut fields = self.fields.iter(); - if let Some(field) = fields.next() { - write!(f, "{{{}", field)?; - for field in fields { - write!(f, ",{}", field)?; - } - f.write_str("}")?; - } - - f.write_str("]")?; - wrote_any = true; - } - - if wrote_any { - f.write_str("=")?; - } - - fmt::Display::fmt(&self.level, f) - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut wrote_any = false; + if let Some(ref target) = self.target { + fmt::Display::fmt(target, f)?; + wrote_any = true; + } + + if self.in_span.is_some() || !self.fields.is_empty() { + f.write_str("[")?; + + if let Some(ref span) = self.in_span { + fmt::Display::fmt(span, f)?; + } + + let mut fields = self.fields.iter(); + if let Some(field) = fields.next() { + write!(f, "{{{}", field)?; + for field in fields { + write!(f, 
",{}", field)?; + } + f.write_str("}")?; + } + + f.write_str("]")?; + wrote_any = true; + } + + if wrote_any { + f.write_str("=")?; + } + + fmt::Display::fmt(&self.level, f) + } } impl From for Directive { - fn from(level: LevelFilter) -> Self { - Self { - level, - ..Self::default() - } - } + fn from(level: LevelFilter) -> Self { + Self { + level, + ..Self::default() + } + } } impl From for Directive { - fn from(level: Level) -> Self { - LevelFilter::from_level(level).into() - } + fn from(level: Level) -> Self { + LevelFilter::from_level(level).into() + } } // === impl Dynamics === impl Dynamics { - pub(crate) fn matcher(&self, metadata: &Metadata<'_>) -> Option { - let mut base_level = None; - let field_matches = self - .directives_for(metadata) - .filter_map(|d| { - if let Some(f) = d.field_matcher(metadata) { - return Some(f); - } - match base_level { - Some(ref b) if d.level > *b => base_level = Some(d.level), - None => base_level = Some(d.level), - _ => {} - } - None - }) - .collect(); - - if let Some(base_level) = base_level { - Some(CallsiteMatcher { - field_matches, - base_level, - }) - } else if !field_matches.is_empty() { - Some(CallsiteMatcher { - field_matches, - base_level: base_level.unwrap_or(LevelFilter::OFF), - }) - } else { - None - } - } - - pub(crate) fn has_value_filters(&self) -> bool { - self.directives() - .any(|d| d.fields.iter().any(|f| f.value.is_some())) - } + pub(crate) fn matcher(&self, metadata: &Metadata<'_>) -> Option { + let mut base_level = None; + let field_matches = self + .directives_for(metadata) + .filter_map(|d| { + if let Some(f) = d.field_matcher(metadata) { + return Some(f); + } + match base_level { + Some(ref b) if d.level > *b => base_level = Some(d.level), + None => base_level = Some(d.level), + _ => {} + } + None + }) + .collect(); + + if let Some(base_level) = base_level { + Some(CallsiteMatcher { + field_matches, + base_level, + }) + } else if !field_matches.is_empty() { + Some(CallsiteMatcher { + field_matches, + 
base_level: base_level.unwrap_or(LevelFilter::OFF), + }) + } else { + None + } + } + + pub(crate) fn has_value_filters(&self) -> bool { + self.directives().any(|d| d.fields.iter().any(|f| f.value.is_some())) + } } // ===== impl DynamicMatch ===== impl CallsiteMatcher { - /// Create a new `SpanMatch` for a given instance of the matched callsite. - pub(crate) fn to_span_match(&self, attrs: &span::Attributes<'_>) -> SpanMatcher { - let field_matches = self - .field_matches - .iter() - .map(|m| { - let m = m.to_span_match(); - attrs.record(&mut m.visitor()); - m - }) - .collect(); - SpanMatcher { - field_matches, - base_level: self.base_level, - } - } + /// Create a new `SpanMatch` for a given instance of the matched callsite. + pub(crate) fn to_span_match(&self, attrs: &span::Attributes<'_>) -> SpanMatcher { + let field_matches = self + .field_matches + .iter() + .map(|m| { + let m = m.to_span_match(); + attrs.record(&mut m.visitor()); + m + }) + .collect(); + SpanMatcher { + field_matches, + base_level: self.base_level, + } + } } impl SpanMatcher { - /// Returns the level currently enabled for this callsite. - pub(crate) fn level(&self) -> LevelFilter { - self.field_matches - .iter() - .filter_map(field::SpanMatch::filter) - .max() - .unwrap_or(self.base_level) - } - - pub(crate) fn record_update(&self, record: &span::Record<'_>) { - for m in &self.field_matches { - record.record(&mut m.visitor()) - } - } + /// Returns the level currently enabled for this callsite. 
+ pub(crate) fn level(&self) -> LevelFilter { + self.field_matches + .iter() + .filter_map(field::SpanMatch::filter) + .max() + .unwrap_or(self.base_level) + } + + pub(crate) fn record_update(&self, record: &span::Record<'_>) { + for m in &self.field_matches { + record.record(&mut m.visitor()) + } + } } #[cfg(test)] mod test { - use super::*; - - fn parse_directives(dirs: impl AsRef) -> Vec { - dirs.as_ref() - .split(',') - .filter_map(|s| s.parse().ok()) - .collect() - } - - fn expect_parse(dirs: impl AsRef) -> Vec { - dirs.as_ref() - .split(',') - .map(|s| { - s.parse() - .unwrap_or_else(|err| panic!("directive '{:?}' should parse: {}", s, err)) - }) - .collect() - } - - #[test] - fn directive_ordering_by_target_len() { - // TODO(eliza): it would be nice to have a property-based test for this - // instead. - let mut dirs = expect_parse( - "foo::bar=debug,foo::bar::baz=trace,foo=info,a_really_long_name_with_no_colons=warn", - ); - dirs.sort_unstable(); - - let expected = vec![ - "a_really_long_name_with_no_colons", - "foo::bar::baz", - "foo::bar", - "foo", - ]; - let sorted = dirs - .iter() - .map(|d| d.target.as_ref().unwrap()) - .collect::>(); - - assert_eq!(expected, sorted); - } - #[test] - fn directive_ordering_by_span() { - // TODO(eliza): it would be nice to have a property-based test for this - // instead. - let mut dirs = expect_parse("bar[span]=trace,foo=debug,baz::quux=info,a[span]=warn"); - dirs.sort_unstable(); - - let expected = vec!["baz::quux", "bar", "foo", "a"]; - let sorted = dirs - .iter() - .map(|d| d.target.as_ref().unwrap()) - .collect::>(); - - assert_eq!(expected, sorted); - } - - #[test] - fn directive_ordering_uses_lexicographic_when_equal() { - // TODO(eliza): it would be nice to have a property-based test for this - // instead. 
- let mut dirs = expect_parse("span[b]=debug,b=debug,a=trace,c=info,span[a]=info"); - dirs.sort_unstable(); - - let expected = vec![ - ("span", Some("b")), - ("span", Some("a")), - ("c", None), - ("b", None), - ("a", None), - ]; - let sorted = dirs - .iter() - .map(|d| { - ( - d.target.as_ref().unwrap().as_ref(), - d.in_span.as_ref().map(String::as_ref), - ) - }) - .collect::>(); - - assert_eq!(expected, sorted); - } - - // TODO: this test requires the parser to support directives with multiple - // fields, which it currently can't handle. We should enable this test when - // that's implemented. - #[test] - #[ignore] - fn directive_ordering_by_field_num() { - // TODO(eliza): it would be nice to have a property-based test for this - // instead. - let mut dirs = expect_parse( - "b[{foo,bar}]=info,c[{baz,quuux,quuux}]=debug,a[{foo}]=warn,bar[{field}]=trace,foo=debug,baz::quux=info" - ); - dirs.sort_unstable(); - - let expected = vec!["baz::quux", "bar", "foo", "c", "b", "a"]; - let sorted = dirs - .iter() - .map(|d| d.target.as_ref().unwrap()) - .collect::>(); - - assert_eq!(expected, sorted); - } - - #[test] - fn parse_directives_ralith() { - let dirs = parse_directives("common=trace,server=trace"); - assert_eq!(dirs.len(), 2, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("common".to_string())); - assert_eq!(dirs[0].level, LevelFilter::TRACE); - assert_eq!(dirs[0].in_span, None); - - assert_eq!(dirs[1].target, Some("server".to_string())); - assert_eq!(dirs[1].level, LevelFilter::TRACE); - assert_eq!(dirs[1].in_span, None); - } - - #[test] - fn parse_directives_ralith_uc() { - let dirs = parse_directives("common=INFO,server=DEBUG"); - assert_eq!(dirs.len(), 2, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("common".to_string())); - assert_eq!(dirs[0].level, LevelFilter::INFO); - assert_eq!(dirs[0].in_span, None); - - assert_eq!(dirs[1].target, Some("server".to_string())); - assert_eq!(dirs[1].level, LevelFilter::DEBUG); - 
assert_eq!(dirs[1].in_span, None); - } - - #[test] - fn parse_directives_ralith_mixed() { - let dirs = parse_directives("common=iNfo,server=dEbUg"); - assert_eq!(dirs.len(), 2, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("common".to_string())); - assert_eq!(dirs[0].level, LevelFilter::INFO); - assert_eq!(dirs[0].in_span, None); - - assert_eq!(dirs[1].target, Some("server".to_string())); - assert_eq!(dirs[1].level, LevelFilter::DEBUG); - assert_eq!(dirs[1].in_span, None); - } - - #[test] - fn parse_directives_valid() { - let dirs = parse_directives("crate1::mod1=error,crate1::mod2,crate2=debug,crate3=off"); - assert_eq!(dirs.len(), 4, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); - assert_eq!(dirs[0].level, LevelFilter::ERROR); - assert_eq!(dirs[0].in_span, None); - - assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); - assert_eq!(dirs[1].level, LevelFilter::TRACE); - assert_eq!(dirs[1].in_span, None); - - assert_eq!(dirs[2].target, Some("crate2".to_string())); - assert_eq!(dirs[2].level, LevelFilter::DEBUG); - assert_eq!(dirs[2].in_span, None); - - assert_eq!(dirs[3].target, Some("crate3".to_string())); - assert_eq!(dirs[3].level, LevelFilter::OFF); - assert_eq!(dirs[3].in_span, None); - } - - #[test] - - fn parse_level_directives() { - let dirs = parse_directives( - "crate1::mod1=error,crate1::mod2=warn,crate1::mod2::mod3=info,\ + use super::*; + + fn parse_directives(dirs: impl AsRef) -> Vec { + dirs.as_ref().split(',').filter_map(|s| s.parse().ok()).collect() + } + + fn expect_parse(dirs: impl AsRef) -> Vec { + dirs.as_ref() + .split(',') + .map(|s| { + s.parse() + .unwrap_or_else(|err| panic!("directive '{:?}' should parse: {}", s, err)) + }) + .collect() + } + + #[test] + fn directive_ordering_by_target_len() { + // TODO(eliza): it would be nice to have a property-based test for this + // instead. 
+ let mut dirs = expect_parse("foo::bar=debug,foo::bar::baz=trace,foo=info,a_really_long_name_with_no_colons=warn"); + dirs.sort_unstable(); + + let expected = vec!["a_really_long_name_with_no_colons", "foo::bar::baz", "foo::bar", "foo"]; + let sorted = dirs.iter().map(|d| d.target.as_ref().unwrap()).collect::>(); + + assert_eq!(expected, sorted); + } + #[test] + fn directive_ordering_by_span() { + // TODO(eliza): it would be nice to have a property-based test for this + // instead. + let mut dirs = expect_parse("bar[span]=trace,foo=debug,baz::quux=info,a[span]=warn"); + dirs.sort_unstable(); + + let expected = vec!["baz::quux", "bar", "foo", "a"]; + let sorted = dirs.iter().map(|d| d.target.as_ref().unwrap()).collect::>(); + + assert_eq!(expected, sorted); + } + + #[test] + fn directive_ordering_uses_lexicographic_when_equal() { + // TODO(eliza): it would be nice to have a property-based test for this + // instead. + let mut dirs = expect_parse("span[b]=debug,b=debug,a=trace,c=info,span[a]=info"); + dirs.sort_unstable(); + + let expected = vec![ + ("span", Some("b")), + ("span", Some("a")), + ("c", None), + ("b", None), + ("a", None), + ]; + let sorted = dirs + .iter() + .map(|d| (d.target.as_ref().unwrap().as_ref(), d.in_span.as_ref().map(String::as_ref))) + .collect::>(); + + assert_eq!(expected, sorted); + } + + // TODO: this test requires the parser to support directives with multiple + // fields, which it currently can't handle. We should enable this test when + // that's implemented. + #[test] + #[ignore] + fn directive_ordering_by_field_num() { + // TODO(eliza): it would be nice to have a property-based test for this + // instead. 
+ let mut dirs = expect_parse( + "b[{foo,bar}]=info,c[{baz,quuux,quuux}]=debug,a[{foo}]=warn,bar[{field}]=trace,foo=debug,baz::quux=info", + ); + dirs.sort_unstable(); + + let expected = vec!["baz::quux", "bar", "foo", "c", "b", "a"]; + let sorted = dirs.iter().map(|d| d.target.as_ref().unwrap()).collect::>(); + + assert_eq!(expected, sorted); + } + + #[test] + fn parse_directives_ralith() { + let dirs = parse_directives("common=trace,server=trace"); + assert_eq!(dirs.len(), 2, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("common".to_string())); + assert_eq!(dirs[0].level, LevelFilter::TRACE); + assert_eq!(dirs[0].in_span, None); + + assert_eq!(dirs[1].target, Some("server".to_string())); + assert_eq!(dirs[1].level, LevelFilter::TRACE); + assert_eq!(dirs[1].in_span, None); + } + + #[test] + fn parse_directives_ralith_uc() { + let dirs = parse_directives("common=INFO,server=DEBUG"); + assert_eq!(dirs.len(), 2, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("common".to_string())); + assert_eq!(dirs[0].level, LevelFilter::INFO); + assert_eq!(dirs[0].in_span, None); + + assert_eq!(dirs[1].target, Some("server".to_string())); + assert_eq!(dirs[1].level, LevelFilter::DEBUG); + assert_eq!(dirs[1].in_span, None); + } + + #[test] + fn parse_directives_ralith_mixed() { + let dirs = parse_directives("common=iNfo,server=dEbUg"); + assert_eq!(dirs.len(), 2, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("common".to_string())); + assert_eq!(dirs[0].level, LevelFilter::INFO); + assert_eq!(dirs[0].in_span, None); + + assert_eq!(dirs[1].target, Some("server".to_string())); + assert_eq!(dirs[1].level, LevelFilter::DEBUG); + assert_eq!(dirs[1].in_span, None); + } + + #[test] + fn parse_directives_valid() { + let dirs = parse_directives("crate1::mod1=error,crate1::mod2,crate2=debug,crate3=off"); + assert_eq!(dirs.len(), 4, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); + assert_eq!(dirs[0].level, 
LevelFilter::ERROR); + assert_eq!(dirs[0].in_span, None); + + assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::TRACE); + assert_eq!(dirs[1].in_span, None); + + assert_eq!(dirs[2].target, Some("crate2".to_string())); + assert_eq!(dirs[2].level, LevelFilter::DEBUG); + assert_eq!(dirs[2].in_span, None); + + assert_eq!(dirs[3].target, Some("crate3".to_string())); + assert_eq!(dirs[3].level, LevelFilter::OFF); + assert_eq!(dirs[3].in_span, None); + } + + #[test] + + fn parse_level_directives() { + let dirs = parse_directives( + "crate1::mod1=error,crate1::mod2=warn,crate1::mod2::mod3=info,\ crate2=debug,crate3=trace,crate3::mod2::mod1=off", - ); - assert_eq!(dirs.len(), 6, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); - assert_eq!(dirs[0].level, LevelFilter::ERROR); - assert_eq!(dirs[0].in_span, None); - - assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); - assert_eq!(dirs[1].level, LevelFilter::WARN); - assert_eq!(dirs[1].in_span, None); - - assert_eq!(dirs[2].target, Some("crate1::mod2::mod3".to_string())); - assert_eq!(dirs[2].level, LevelFilter::INFO); - assert_eq!(dirs[2].in_span, None); - - assert_eq!(dirs[3].target, Some("crate2".to_string())); - assert_eq!(dirs[3].level, LevelFilter::DEBUG); - assert_eq!(dirs[3].in_span, None); - - assert_eq!(dirs[4].target, Some("crate3".to_string())); - assert_eq!(dirs[4].level, LevelFilter::TRACE); - assert_eq!(dirs[4].in_span, None); - - assert_eq!(dirs[5].target, Some("crate3::mod2::mod1".to_string())); - assert_eq!(dirs[5].level, LevelFilter::OFF); - assert_eq!(dirs[5].in_span, None); - } - - #[test] - fn parse_uppercase_level_directives() { - let dirs = parse_directives( - "crate1::mod1=ERROR,crate1::mod2=WARN,crate1::mod2::mod3=INFO,\ + ); + assert_eq!(dirs.len(), 6, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); + assert_eq!(dirs[0].level, LevelFilter::ERROR); + 
assert_eq!(dirs[0].in_span, None); + + assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::WARN); + assert_eq!(dirs[1].in_span, None); + + assert_eq!(dirs[2].target, Some("crate1::mod2::mod3".to_string())); + assert_eq!(dirs[2].level, LevelFilter::INFO); + assert_eq!(dirs[2].in_span, None); + + assert_eq!(dirs[3].target, Some("crate2".to_string())); + assert_eq!(dirs[3].level, LevelFilter::DEBUG); + assert_eq!(dirs[3].in_span, None); + + assert_eq!(dirs[4].target, Some("crate3".to_string())); + assert_eq!(dirs[4].level, LevelFilter::TRACE); + assert_eq!(dirs[4].in_span, None); + + assert_eq!(dirs[5].target, Some("crate3::mod2::mod1".to_string())); + assert_eq!(dirs[5].level, LevelFilter::OFF); + assert_eq!(dirs[5].in_span, None); + } + + #[test] + fn parse_uppercase_level_directives() { + let dirs = parse_directives( + "crate1::mod1=ERROR,crate1::mod2=WARN,crate1::mod2::mod3=INFO,\ crate2=DEBUG,crate3=TRACE,crate3::mod2::mod1=OFF", - ); - assert_eq!(dirs.len(), 6, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); - assert_eq!(dirs[0].level, LevelFilter::ERROR); - assert_eq!(dirs[0].in_span, None); - - assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); - assert_eq!(dirs[1].level, LevelFilter::WARN); - assert_eq!(dirs[1].in_span, None); - - assert_eq!(dirs[2].target, Some("crate1::mod2::mod3".to_string())); - assert_eq!(dirs[2].level, LevelFilter::INFO); - assert_eq!(dirs[2].in_span, None); - - assert_eq!(dirs[3].target, Some("crate2".to_string())); - assert_eq!(dirs[3].level, LevelFilter::DEBUG); - assert_eq!(dirs[3].in_span, None); - - assert_eq!(dirs[4].target, Some("crate3".to_string())); - assert_eq!(dirs[4].level, LevelFilter::TRACE); - assert_eq!(dirs[4].in_span, None); - - assert_eq!(dirs[5].target, Some("crate3::mod2::mod1".to_string())); - assert_eq!(dirs[5].level, LevelFilter::OFF); - assert_eq!(dirs[5].in_span, None); - } - - #[test] - fn 
parse_numeric_level_directives() { - let dirs = parse_directives( - "crate1::mod1=1,crate1::mod2=2,crate1::mod2::mod3=3,crate2=4,\ + ); + assert_eq!(dirs.len(), 6, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); + assert_eq!(dirs[0].level, LevelFilter::ERROR); + assert_eq!(dirs[0].in_span, None); + + assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::WARN); + assert_eq!(dirs[1].in_span, None); + + assert_eq!(dirs[2].target, Some("crate1::mod2::mod3".to_string())); + assert_eq!(dirs[2].level, LevelFilter::INFO); + assert_eq!(dirs[2].in_span, None); + + assert_eq!(dirs[3].target, Some("crate2".to_string())); + assert_eq!(dirs[3].level, LevelFilter::DEBUG); + assert_eq!(dirs[3].in_span, None); + + assert_eq!(dirs[4].target, Some("crate3".to_string())); + assert_eq!(dirs[4].level, LevelFilter::TRACE); + assert_eq!(dirs[4].in_span, None); + + assert_eq!(dirs[5].target, Some("crate3::mod2::mod1".to_string())); + assert_eq!(dirs[5].level, LevelFilter::OFF); + assert_eq!(dirs[5].in_span, None); + } + + #[test] + fn parse_numeric_level_directives() { + let dirs = parse_directives( + "crate1::mod1=1,crate1::mod2=2,crate1::mod2::mod3=3,crate2=4,\ crate3=5,crate3::mod2::mod1=0", - ); - assert_eq!(dirs.len(), 6, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); - assert_eq!(dirs[0].level, LevelFilter::ERROR); - assert_eq!(dirs[0].in_span, None); - - assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); - assert_eq!(dirs[1].level, LevelFilter::WARN); - assert_eq!(dirs[1].in_span, None); - - assert_eq!(dirs[2].target, Some("crate1::mod2::mod3".to_string())); - assert_eq!(dirs[2].level, LevelFilter::INFO); - assert_eq!(dirs[2].in_span, None); - - assert_eq!(dirs[3].target, Some("crate2".to_string())); - assert_eq!(dirs[3].level, LevelFilter::DEBUG); - assert_eq!(dirs[3].in_span, None); - - assert_eq!(dirs[4].target, 
Some("crate3".to_string())); - assert_eq!(dirs[4].level, LevelFilter::TRACE); - assert_eq!(dirs[4].in_span, None); - - assert_eq!(dirs[5].target, Some("crate3::mod2::mod1".to_string())); - assert_eq!(dirs[5].level, LevelFilter::OFF); - assert_eq!(dirs[5].in_span, None); - } - - #[test] - fn parse_directives_invalid_crate() { - // test parse_directives with multiple = in specification - let dirs = parse_directives("crate1::mod1=warn=info,crate2=debug"); - assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate2".to_string())); - assert_eq!(dirs[0].level, LevelFilter::DEBUG); - assert_eq!(dirs[0].in_span, None); - } - - #[test] - fn parse_directives_invalid_level() { - // test parse_directives with 'noNumber' as log level - let dirs = parse_directives("crate1::mod1=noNumber,crate2=debug"); - assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate2".to_string())); - assert_eq!(dirs[0].level, LevelFilter::DEBUG); - assert_eq!(dirs[0].in_span, None); - } - - #[test] - fn parse_directives_string_level() { - // test parse_directives with 'warn' as log level - let dirs = parse_directives("crate1::mod1=wrong,crate2=warn"); - assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate2".to_string())); - assert_eq!(dirs[0].level, LevelFilter::WARN); - assert_eq!(dirs[0].in_span, None); - } - - #[test] - fn parse_directives_empty_level() { - // test parse_directives with '' as log level - let dirs = parse_directives("crate1::mod1=wrong,crate2="); - assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate2".to_string())); - assert_eq!(dirs[0].level, LevelFilter::TRACE); - assert_eq!(dirs[0].in_span, None); - } - - #[test] - fn parse_directives_global() { - // test parse_directives with no crate - let dirs = parse_directives("warn,crate2=debug"); - assert_eq!(dirs.len(), 2, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, None); - 
assert_eq!(dirs[0].level, LevelFilter::WARN); - assert_eq!(dirs[1].in_span, None); - - assert_eq!(dirs[1].target, Some("crate2".to_string())); - assert_eq!(dirs[1].level, LevelFilter::DEBUG); - assert_eq!(dirs[1].in_span, None); - } - - // helper function for tests below - fn test_parse_bare_level(directive_to_test: &str, level_expected: LevelFilter) { - let dirs = parse_directives(directive_to_test); - assert_eq!( - dirs.len(), - 1, - "\ninput: \"{}\"; parsed: {:#?}", - directive_to_test, - dirs - ); - assert_eq!(dirs[0].target, None); - assert_eq!(dirs[0].level, level_expected); - assert_eq!(dirs[0].in_span, None); - } - - #[test] - fn parse_directives_global_bare_warn_lc() { - // test parse_directives with no crate, in isolation, all lowercase - test_parse_bare_level("warn", LevelFilter::WARN); - } - - #[test] - fn parse_directives_global_bare_warn_uc() { - // test parse_directives with no crate, in isolation, all uppercase - test_parse_bare_level("WARN", LevelFilter::WARN); - } - - #[test] - fn parse_directives_global_bare_warn_mixed() { - // test parse_directives with no crate, in isolation, mixed case - test_parse_bare_level("wArN", LevelFilter::WARN); - } - - #[test] - fn parse_directives_valid_with_spans() { - let dirs = parse_directives("crate1::mod1[foo]=error,crate1::mod2[bar],crate2[baz]=debug"); - assert_eq!(dirs.len(), 3, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); - assert_eq!(dirs[0].level, LevelFilter::ERROR); - assert_eq!(dirs[0].in_span, Some("foo".to_string())); - - assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); - assert_eq!(dirs[1].level, LevelFilter::TRACE); - assert_eq!(dirs[1].in_span, Some("bar".to_string())); - - assert_eq!(dirs[2].target, Some("crate2".to_string())); - assert_eq!(dirs[2].level, LevelFilter::DEBUG); - assert_eq!(dirs[2].in_span, Some("baz".to_string())); - } - - #[test] - fn parse_directives_with_dash_in_target_name() { - let dirs = 
parse_directives("target-name=info"); - assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("target-name".to_string())); - assert_eq!(dirs[0].level, LevelFilter::INFO); - assert_eq!(dirs[0].in_span, None); - } - - #[test] - fn parse_directives_with_dash_in_span_name() { - // Reproduces https://github.com/tokio-rs/tracing/issues/1367 - - let dirs = parse_directives("target[span-name]=info"); - assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("target".to_string())); - assert_eq!(dirs[0].level, LevelFilter::INFO); - assert_eq!(dirs[0].in_span, Some("span-name".to_string())); - } - - #[test] - fn parse_directives_with_special_characters_in_span_name() { - let span_name = "!\"#$%&'()*+-./:;<=>?@^_`|~[}"; - - let dirs = parse_directives(format!("target[{}]=info", span_name)); - assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); - assert_eq!(dirs[0].target, Some("target".to_string())); - assert_eq!(dirs[0].level, LevelFilter::INFO); - assert_eq!(dirs[0].in_span, Some(span_name.to_string())); - } - - #[test] - fn parse_directives_with_invalid_span_chars() { - let invalid_span_name = "]{"; - - let dirs = parse_directives(format!("target[{}]=info", invalid_span_name)); - assert_eq!(dirs.len(), 0, "\nparsed: {:#?}", dirs); - } + ); + assert_eq!(dirs.len(), 6, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); + assert_eq!(dirs[0].level, LevelFilter::ERROR); + assert_eq!(dirs[0].in_span, None); + + assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::WARN); + assert_eq!(dirs[1].in_span, None); + + assert_eq!(dirs[2].target, Some("crate1::mod2::mod3".to_string())); + assert_eq!(dirs[2].level, LevelFilter::INFO); + assert_eq!(dirs[2].in_span, None); + + assert_eq!(dirs[3].target, Some("crate2".to_string())); + assert_eq!(dirs[3].level, LevelFilter::DEBUG); + assert_eq!(dirs[3].in_span, None); + + 
assert_eq!(dirs[4].target, Some("crate3".to_string())); + assert_eq!(dirs[4].level, LevelFilter::TRACE); + assert_eq!(dirs[4].in_span, None); + + assert_eq!(dirs[5].target, Some("crate3::mod2::mod1".to_string())); + assert_eq!(dirs[5].level, LevelFilter::OFF); + assert_eq!(dirs[5].in_span, None); + } + + #[test] + fn parse_directives_invalid_crate() { + // test parse_directives with multiple = in specification + let dirs = parse_directives("crate1::mod1=warn=info,crate2=debug"); + assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::DEBUG); + assert_eq!(dirs[0].in_span, None); + } + + #[test] + fn parse_directives_invalid_level() { + // test parse_directives with 'noNumber' as log level + let dirs = parse_directives("crate1::mod1=noNumber,crate2=debug"); + assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::DEBUG); + assert_eq!(dirs[0].in_span, None); + } + + #[test] + fn parse_directives_string_level() { + // test parse_directives with 'warn' as log level + let dirs = parse_directives("crate1::mod1=wrong,crate2=warn"); + assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::WARN); + assert_eq!(dirs[0].in_span, None); + } + + #[test] + fn parse_directives_empty_level() { + // test parse_directives with '' as log level + let dirs = parse_directives("crate1::mod1=wrong,crate2="); + assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate2".to_string())); + assert_eq!(dirs[0].level, LevelFilter::TRACE); + assert_eq!(dirs[0].in_span, None); + } + + #[test] + fn parse_directives_global() { + // test parse_directives with no crate + let dirs = parse_directives("warn,crate2=debug"); + assert_eq!(dirs.len(), 2, "\nparsed: {:#?}", dirs); + 
assert_eq!(dirs[0].target, None); + assert_eq!(dirs[0].level, LevelFilter::WARN); + assert_eq!(dirs[1].in_span, None); + + assert_eq!(dirs[1].target, Some("crate2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::DEBUG); + assert_eq!(dirs[1].in_span, None); + } + + // helper function for tests below + fn test_parse_bare_level(directive_to_test: &str, level_expected: LevelFilter) { + let dirs = parse_directives(directive_to_test); + assert_eq!(dirs.len(), 1, "\ninput: \"{}\"; parsed: {:#?}", directive_to_test, dirs); + assert_eq!(dirs[0].target, None); + assert_eq!(dirs[0].level, level_expected); + assert_eq!(dirs[0].in_span, None); + } + + #[test] + fn parse_directives_global_bare_warn_lc() { + // test parse_directives with no crate, in isolation, all lowercase + test_parse_bare_level("warn", LevelFilter::WARN); + } + + #[test] + fn parse_directives_global_bare_warn_uc() { + // test parse_directives with no crate, in isolation, all uppercase + test_parse_bare_level("WARN", LevelFilter::WARN); + } + + #[test] + fn parse_directives_global_bare_warn_mixed() { + // test parse_directives with no crate, in isolation, mixed case + test_parse_bare_level("wArN", LevelFilter::WARN); + } + + #[test] + fn parse_directives_valid_with_spans() { + let dirs = parse_directives("crate1::mod1[foo]=error,crate1::mod2[bar],crate2[baz]=debug"); + assert_eq!(dirs.len(), 3, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("crate1::mod1".to_string())); + assert_eq!(dirs[0].level, LevelFilter::ERROR); + assert_eq!(dirs[0].in_span, Some("foo".to_string())); + + assert_eq!(dirs[1].target, Some("crate1::mod2".to_string())); + assert_eq!(dirs[1].level, LevelFilter::TRACE); + assert_eq!(dirs[1].in_span, Some("bar".to_string())); + + assert_eq!(dirs[2].target, Some("crate2".to_string())); + assert_eq!(dirs[2].level, LevelFilter::DEBUG); + assert_eq!(dirs[2].in_span, Some("baz".to_string())); + } + + #[test] + fn parse_directives_with_dash_in_target_name() { + let dirs = 
parse_directives("target-name=info"); + assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("target-name".to_string())); + assert_eq!(dirs[0].level, LevelFilter::INFO); + assert_eq!(dirs[0].in_span, None); + } + + #[test] + fn parse_directives_with_dash_in_span_name() { + // Reproduces https://github.com/tokio-rs/tracing/issues/1367 + + let dirs = parse_directives("target[span-name]=info"); + assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("target".to_string())); + assert_eq!(dirs[0].level, LevelFilter::INFO); + assert_eq!(dirs[0].in_span, Some("span-name".to_string())); + } + + #[test] + fn parse_directives_with_special_characters_in_span_name() { + let span_name = "!\"#$%&'()*+-./:;<=>?@^_`|~[}"; + + let dirs = parse_directives(format!("target[{}]=info", span_name)); + assert_eq!(dirs.len(), 1, "\nparsed: {:#?}", dirs); + assert_eq!(dirs[0].target, Some("target".to_string())); + assert_eq!(dirs[0].level, LevelFilter::INFO); + assert_eq!(dirs[0].in_span, Some(span_name.to_string())); + } + + #[test] + fn parse_directives_with_invalid_span_chars() { + let invalid_span_name = "]{"; + + let dirs = parse_directives(format!("target[{}]=info", invalid_span_name)); + assert_eq!(dirs.len(), 0, "\nparsed: {:#?}", dirs); + } } diff --git a/foundations/src/telementry/env_filter/env/field.rs b/foundations/src/telementry/env_filter/env/field.rs index 5e425e40..6d7b51f1 100644 --- a/foundations/src/telementry/env_filter/env/field.rs +++ b/foundations/src/telementry/env_filter/env/field.rs @@ -1,122 +1,118 @@ +use std::cmp::Ordering; +use std::error::Error; +use std::fmt::{self, Write}; +use std::str::FromStr; +use std::sync::atomic::AtomicBool; +use std::sync::atomic::Ordering::*; +use std::sync::Arc; + use matchers::Pattern; -use std::{ - cmp::Ordering, - error::Error, - fmt::{self, Write}, - str::FromStr, - sync::{ - atomic::{AtomicBool, Ordering::*}, - Arc, - }, -}; +use tracing::field::{Field, Visit}; 
use super::{FieldMap, LevelFilter}; -use tracing::field::{Field, Visit}; #[derive(Clone, Debug, Eq, PartialEq)] pub(crate) struct Match { - pub(crate) name: String, // TODO: allow match patterns for names? - pub(crate) value: Option, + pub(crate) name: String, // TODO: allow match patterns for names? + pub(crate) value: Option, } #[derive(Debug, Eq, PartialEq)] pub(crate) struct CallsiteMatch { - pub(crate) fields: FieldMap, - pub(crate) level: LevelFilter, + pub(crate) fields: FieldMap, + pub(crate) level: LevelFilter, } #[derive(Debug)] pub(crate) struct SpanMatch { - fields: FieldMap<(ValueMatch, AtomicBool)>, - level: LevelFilter, - has_matched: AtomicBool, + fields: FieldMap<(ValueMatch, AtomicBool)>, + level: LevelFilter, + has_matched: AtomicBool, } pub(crate) struct MatchVisitor<'a> { - inner: &'a SpanMatch, + inner: &'a SpanMatch, } #[derive(Debug, Clone)] pub(crate) enum ValueMatch { - /// Matches a specific `bool` value. - Bool(bool), - /// Matches a specific `f64` value. - F64(f64), - /// Matches a specific `u64` value. - U64(u64), - /// Matches a specific `i64` value. - I64(i64), - /// Matches any `NaN` `f64` value. - NaN, - /// Matches any field whose `fmt::Debug` output is equal to a fixed string. - Debug(MatchDebug), - /// Matches any field whose `fmt::Debug` output matches a regular expression - /// pattern. - Pat(Box), + /// Matches a specific `bool` value. + Bool(bool), + /// Matches a specific `f64` value. + F64(f64), + /// Matches a specific `u64` value. + U64(u64), + /// Matches a specific `i64` value. + I64(i64), + /// Matches any `NaN` `f64` value. + NaN, + /// Matches any field whose `fmt::Debug` output is equal to a fixed string. + Debug(MatchDebug), + /// Matches any field whose `fmt::Debug` output matches a regular expression + /// pattern. 
+ Pat(Box), } impl Eq for ValueMatch {} impl PartialEq for ValueMatch { - fn eq(&self, other: &Self) -> bool { - use ValueMatch::*; - match (self, other) { - (Bool(a), Bool(b)) => a.eq(b), - (F64(a), F64(b)) => { - debug_assert!(!a.is_nan()); - debug_assert!(!b.is_nan()); - - a.eq(b) - } - (U64(a), U64(b)) => a.eq(b), - (I64(a), I64(b)) => a.eq(b), - (NaN, NaN) => true, - (Pat(a), Pat(b)) => a.eq(b), - _ => false, - } - } + fn eq(&self, other: &Self) -> bool { + use ValueMatch::*; + match (self, other) { + (Bool(a), Bool(b)) => a.eq(b), + (F64(a), F64(b)) => { + debug_assert!(!a.is_nan()); + debug_assert!(!b.is_nan()); + + a.eq(b) + } + (U64(a), U64(b)) => a.eq(b), + (I64(a), I64(b)) => a.eq(b), + (NaN, NaN) => true, + (Pat(a), Pat(b)) => a.eq(b), + _ => false, + } + } } impl Ord for ValueMatch { - fn cmp(&self, other: &Self) -> Ordering { - use ValueMatch::*; - match (self, other) { - (Bool(this), Bool(that)) => this.cmp(that), - (Bool(_), _) => Ordering::Less, + fn cmp(&self, other: &Self) -> Ordering { + use ValueMatch::*; + match (self, other) { + (Bool(this), Bool(that)) => this.cmp(that), + (Bool(_), _) => Ordering::Less, - (F64(this), F64(that)) => this - .partial_cmp(that) - .expect("`ValueMatch::F64` may not contain `NaN` values"), - (F64(_), Bool(_)) => Ordering::Greater, - (F64(_), _) => Ordering::Less, + (F64(this), F64(that)) => this + .partial_cmp(that) + .expect("`ValueMatch::F64` may not contain `NaN` values"), + (F64(_), Bool(_)) => Ordering::Greater, + (F64(_), _) => Ordering::Less, - (NaN, NaN) => Ordering::Equal, - (NaN, Bool(_)) | (NaN, F64(_)) => Ordering::Greater, - (NaN, _) => Ordering::Less, + (NaN, NaN) => Ordering::Equal, + (NaN, Bool(_)) | (NaN, F64(_)) => Ordering::Greater, + (NaN, _) => Ordering::Less, - (U64(this), U64(that)) => this.cmp(that), - (U64(_), Bool(_)) | (U64(_), F64(_)) | (U64(_), NaN) => Ordering::Greater, - (U64(_), _) => Ordering::Less, + (U64(this), U64(that)) => this.cmp(that), + (U64(_), Bool(_)) | (U64(_), F64(_)) 
| (U64(_), NaN) => Ordering::Greater, + (U64(_), _) => Ordering::Less, - (I64(this), I64(that)) => this.cmp(that), - (I64(_), Bool(_)) | (I64(_), F64(_)) | (I64(_), NaN) | (I64(_), U64(_)) => { - Ordering::Greater - } - (I64(_), _) => Ordering::Less, + (I64(this), I64(that)) => this.cmp(that), + (I64(_), Bool(_)) | (I64(_), F64(_)) | (I64(_), NaN) | (I64(_), U64(_)) => Ordering::Greater, + (I64(_), _) => Ordering::Less, - (Pat(this), Pat(that)) => this.cmp(that), - (Pat(_), _) => Ordering::Greater, + (Pat(this), Pat(that)) => this.cmp(that), + (Pat(_), _) => Ordering::Greater, - (Debug(this), Debug(that)) => this.cmp(that), - (Debug(_), _) => Ordering::Greater, - } - } + (Debug(this), Debug(that)) => this.cmp(that), + (Debug(_), _) => Ordering::Greater, + } + } } impl PartialOrd for ValueMatch { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } } /// Matches a field's `fmt::Debug` output against a regular expression pattern. @@ -125,8 +121,8 @@ impl PartialOrd for ValueMatch { /// expressions are enabled. #[derive(Debug, Clone)] pub(crate) struct MatchPattern { - pub(crate) matcher: Pattern, - pattern: Arc, + pub(crate) matcher: Pattern, + pattern: Arc, } /// Matches a field's `fmt::Debug` output against a fixed string pattern. @@ -135,309 +131,297 @@ pub(crate) struct MatchPattern { /// expressions are disabled. #[derive(Debug, Clone)] pub(crate) struct MatchDebug { - pattern: Arc, + pattern: Arc, } /// Indicates that a field name specified in a filter directive was invalid. #[derive(Clone, Debug)] #[cfg_attr(docsrs, doc(cfg(feature = "env-filter")))] pub struct BadName { - name: String, + name: String, } // === impl Match === impl Match { - pub(crate) fn has_value(&self) -> bool { - self.value.is_some() - } - - // TODO: reference count these strings? 
- pub(crate) fn name(&self) -> String { - self.name.clone() - } - - pub(crate) fn parse(s: &str, regex: bool) -> Result> { - let mut parts = s.split('='); - let name = parts - .next() - .ok_or_else(|| BadName { - name: "".to_string(), - })? - // TODO: validate field name - .to_string(); - let value = parts - .next() - .map(|part| match regex { - true => ValueMatch::parse_regex(part), - false => Ok(ValueMatch::parse_non_regex(part)), - }) - .transpose()?; - Ok(Match { name, value }) - } + pub(crate) fn has_value(&self) -> bool { + self.value.is_some() + } + + // TODO: reference count these strings? + pub(crate) fn name(&self) -> String { + self.name.clone() + } + + pub(crate) fn parse(s: &str, regex: bool) -> Result> { + let mut parts = s.split('='); + let name = parts + .next() + .ok_or_else(|| BadName { name: "".to_string() })? + // TODO: validate field name + .to_string(); + let value = parts + .next() + .map(|part| match regex { + true => ValueMatch::parse_regex(part), + false => Ok(ValueMatch::parse_non_regex(part)), + }) + .transpose()?; + Ok(Match { name, value }) + } } impl fmt::Display for Match { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&self.name, f)?; - if let Some(ref value) = self.value { - write!(f, "={}", value)?; - } - Ok(()) - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.name, f)?; + if let Some(ref value) = self.value { + write!(f, "={}", value)?; + } + Ok(()) + } } impl Ord for Match { - fn cmp(&self, other: &Self) -> Ordering { - // Ordering for `Match` directives is based first on _whether_ a value - // is matched or not. This is semantically meaningful --- we would - // prefer to check directives that match values first as they are more - // specific. 
- let has_value = match (self.value.as_ref(), other.value.as_ref()) { - (Some(_), None) => Ordering::Greater, - (None, Some(_)) => Ordering::Less, - _ => Ordering::Equal, - }; - // If both directives match a value, we fall back to the field names in - // length + lexicographic ordering, and if these are equal as well, we - // compare the match directives. - // - // This ordering is no longer semantically meaningful but is necessary - // so that the directives can be stored in the `BTreeMap` in a defined - // order. - has_value - .then_with(|| self.name.cmp(&other.name)) - .then_with(|| self.value.cmp(&other.value)) - } + fn cmp(&self, other: &Self) -> Ordering { + // Ordering for `Match` directives is based first on _whether_ a value + // is matched or not. This is semantically meaningful --- we would + // prefer to check directives that match values first as they are more + // specific. + let has_value = match (self.value.as_ref(), other.value.as_ref()) { + (Some(_), None) => Ordering::Greater, + (None, Some(_)) => Ordering::Less, + _ => Ordering::Equal, + }; + // If both directives match a value, we fall back to the field names in + // length + lexicographic ordering, and if these are equal as well, we + // compare the match directives. + // + // This ordering is no longer semantically meaningful but is necessary + // so that the directives can be stored in the `BTreeMap` in a defined + // order. 
+ has_value + .then_with(|| self.name.cmp(&other.name)) + .then_with(|| self.value.cmp(&other.value)) + } } impl PartialOrd for Match { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } } // === impl ValueMatch === fn value_match_f64(v: f64) -> ValueMatch { - if v.is_nan() { - ValueMatch::NaN - } else { - ValueMatch::F64(v) - } + if v.is_nan() { ValueMatch::NaN } else { ValueMatch::F64(v) } } impl ValueMatch { - /// Parse a `ValueMatch` that will match `fmt::Debug` fields using regular - /// expressions. - /// - /// This returns an error if the string didn't contain a valid `bool`, - /// `u64`, `i64`, or `f64` literal, and couldn't be parsed as a regular - /// expression. - fn parse_regex(s: &str) -> Result { - s.parse::() - .map(ValueMatch::Bool) - .or_else(|_| s.parse::().map(ValueMatch::U64)) - .or_else(|_| s.parse::().map(ValueMatch::I64)) - .or_else(|_| s.parse::().map(value_match_f64)) - .or_else(|_| { - s.parse::() - .map(|p| ValueMatch::Pat(Box::new(p))) - }) - } - - /// Parse a `ValueMatch` that will match `fmt::Debug` against a fixed - /// string. - /// - /// This does *not* return an error, because any string that isn't a valid - /// `bool`, `u64`, `i64`, or `f64` literal is treated as expected - /// `fmt::Debug` output. - fn parse_non_regex(s: &str) -> Self { - s.parse::() - .map(ValueMatch::Bool) - .or_else(|_| s.parse::().map(ValueMatch::U64)) - .or_else(|_| s.parse::().map(ValueMatch::I64)) - .or_else(|_| s.parse::().map(value_match_f64)) - .unwrap_or_else(|_| ValueMatch::Debug(MatchDebug::new(s))) - } + /// Parse a `ValueMatch` that will match `fmt::Debug` fields using regular + /// expressions. + /// + /// This returns an error if the string didn't contain a valid `bool`, + /// `u64`, `i64`, or `f64` literal, and couldn't be parsed as a regular + /// expression. 
+ fn parse_regex(s: &str) -> Result { + s.parse::() + .map(ValueMatch::Bool) + .or_else(|_| s.parse::().map(ValueMatch::U64)) + .or_else(|_| s.parse::().map(ValueMatch::I64)) + .or_else(|_| s.parse::().map(value_match_f64)) + .or_else(|_| s.parse::().map(|p| ValueMatch::Pat(Box::new(p)))) + } + + /// Parse a `ValueMatch` that will match `fmt::Debug` against a fixed + /// string. + /// + /// This does *not* return an error, because any string that isn't a valid + /// `bool`, `u64`, `i64`, or `f64` literal is treated as expected + /// `fmt::Debug` output. + fn parse_non_regex(s: &str) -> Self { + s.parse::() + .map(ValueMatch::Bool) + .or_else(|_| s.parse::().map(ValueMatch::U64)) + .or_else(|_| s.parse::().map(ValueMatch::I64)) + .or_else(|_| s.parse::().map(value_match_f64)) + .unwrap_or_else(|_| ValueMatch::Debug(MatchDebug::new(s))) + } } impl fmt::Display for ValueMatch { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - ValueMatch::Bool(ref inner) => fmt::Display::fmt(inner, f), - ValueMatch::F64(ref inner) => fmt::Display::fmt(inner, f), - ValueMatch::NaN => fmt::Display::fmt(&f64::NAN, f), - ValueMatch::I64(ref inner) => fmt::Display::fmt(inner, f), - ValueMatch::U64(ref inner) => fmt::Display::fmt(inner, f), - ValueMatch::Debug(ref inner) => fmt::Display::fmt(inner, f), - ValueMatch::Pat(ref inner) => fmt::Display::fmt(inner, f), - } - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ValueMatch::Bool(ref inner) => fmt::Display::fmt(inner, f), + ValueMatch::F64(ref inner) => fmt::Display::fmt(inner, f), + ValueMatch::NaN => fmt::Display::fmt(&f64::NAN, f), + ValueMatch::I64(ref inner) => fmt::Display::fmt(inner, f), + ValueMatch::U64(ref inner) => fmt::Display::fmt(inner, f), + ValueMatch::Debug(ref inner) => fmt::Display::fmt(inner, f), + ValueMatch::Pat(ref inner) => fmt::Display::fmt(inner, f), + } + } } // === impl MatchPattern === impl FromStr for MatchPattern { - type Err = matchers::Error; - fn 
from_str(s: &str) -> Result { - let matcher = s.parse::()?; - Ok(Self { - matcher, - pattern: s.to_owned().into(), - }) - } + type Err = matchers::Error; + + fn from_str(s: &str) -> Result { + let matcher = s.parse::()?; + Ok(Self { + matcher, + pattern: s.to_owned().into(), + }) + } } impl fmt::Display for MatchPattern { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&*self.pattern, f) - } + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&*self.pattern, f) + } } impl AsRef for MatchPattern { - #[inline] - fn as_ref(&self) -> &str { - self.pattern.as_ref() - } + #[inline] + fn as_ref(&self) -> &str { + self.pattern.as_ref() + } } impl MatchPattern { - #[inline] - fn str_matches(&self, s: &impl AsRef) -> bool { - self.matcher.matches(s) - } + #[inline] + fn str_matches(&self, s: &impl AsRef) -> bool { + self.matcher.matches(s) + } - #[inline] - fn debug_matches(&self, d: &impl fmt::Debug) -> bool { - self.matcher.debug_matches(d) - } + #[inline] + fn debug_matches(&self, d: &impl fmt::Debug) -> bool { + self.matcher.debug_matches(d) + } - pub(super) fn into_debug_match(self) -> MatchDebug { - MatchDebug { - pattern: self.pattern, - } - } + pub(super) fn into_debug_match(self) -> MatchDebug { + MatchDebug { pattern: self.pattern } + } } impl PartialEq for MatchPattern { - #[inline] - fn eq(&self, other: &Self) -> bool { - self.pattern == other.pattern - } + #[inline] + fn eq(&self, other: &Self) -> bool { + self.pattern == other.pattern + } } impl Eq for MatchPattern {} impl PartialOrd for MatchPattern { - #[inline] - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.pattern.cmp(&other.pattern)) - } + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.pattern.cmp(&other.pattern)) + } } impl Ord for MatchPattern { - #[inline] - fn cmp(&self, other: &Self) -> Ordering { - self.pattern.cmp(&other.pattern) - } + #[inline] + fn cmp(&self, other: &Self) 
-> Ordering { + self.pattern.cmp(&other.pattern) + } } // === impl MatchDebug === impl MatchDebug { - fn new(s: &str) -> Self { - Self { - pattern: s.to_owned().into(), - } - } - - #[inline] - fn debug_matches(&self, d: &impl fmt::Debug) -> bool { - // Naively, we would probably match a value's `fmt::Debug` output by - // formatting it to a string, and then checking if the string is equal - // to the expected pattern. However, this would require allocating every - // time we want to match a field value against a `Debug` matcher, which - // can be avoided. - // - // Instead, we implement `fmt::Write` for a type that, rather than - // actually _writing_ the strings to something, matches them against the - // expected pattern, and returns an error if the pattern does not match. - struct Matcher<'a> { - pattern: &'a str, - } - - impl fmt::Write for Matcher<'_> { - fn write_str(&mut self, s: &str) -> fmt::Result { - // If the string is longer than the remaining expected string, - // we know it won't match, so bail. - if s.len() > self.pattern.len() { - return Err(fmt::Error); - } - - // If the expected string begins with the string that was - // written, we are still potentially a match. Advance the - // position in the expected pattern to chop off the matched - // output, and continue. - if self.pattern.starts_with(s) { - self.pattern = &self.pattern[s.len()..]; - return Ok(()); - } - - // Otherwise, the expected string doesn't include the string - // that was written at the current position, so the `fmt::Debug` - // output doesn't match! Return an error signalling that this - // doesn't match. - Err(fmt::Error) - } - } - let mut matcher = Matcher { - pattern: &self.pattern, - }; - - // Try to "write" the value's `fmt::Debug` output to a `Matcher`. This - // returns an error if the `fmt::Debug` implementation wrote any - // characters that did not match the expected pattern. 
- write!(matcher, "{:?}", d).is_ok() - } + fn new(s: &str) -> Self { + Self { + pattern: s.to_owned().into(), + } + } + + #[inline] + fn debug_matches(&self, d: &impl fmt::Debug) -> bool { + // Naively, we would probably match a value's `fmt::Debug` output by + // formatting it to a string, and then checking if the string is equal + // to the expected pattern. However, this would require allocating every + // time we want to match a field value against a `Debug` matcher, which + // can be avoided. + // + // Instead, we implement `fmt::Write` for a type that, rather than + // actually _writing_ the strings to something, matches them against the + // expected pattern, and returns an error if the pattern does not match. + struct Matcher<'a> { + pattern: &'a str, + } + + impl fmt::Write for Matcher<'_> { + fn write_str(&mut self, s: &str) -> fmt::Result { + // If the string is longer than the remaining expected string, + // we know it won't match, so bail. + if s.len() > self.pattern.len() { + return Err(fmt::Error); + } + + // If the expected string begins with the string that was + // written, we are still potentially a match. Advance the + // position in the expected pattern to chop off the matched + // output, and continue. + if self.pattern.starts_with(s) { + self.pattern = &self.pattern[s.len()..]; + return Ok(()); + } + + // Otherwise, the expected string doesn't include the string + // that was written at the current position, so the `fmt::Debug` + // output doesn't match! Return an error signalling that this + // doesn't match. + Err(fmt::Error) + } + } + let mut matcher = Matcher { pattern: &self.pattern }; + + // Try to "write" the value's `fmt::Debug` output to a `Matcher`. This + // returns an error if the `fmt::Debug` implementation wrote any + // characters that did not match the expected pattern. 
+ write!(matcher, "{:?}", d).is_ok() + } } impl fmt::Display for MatchDebug { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt(&*self.pattern, f) - } + #[inline] + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&*self.pattern, f) + } } impl AsRef for MatchDebug { - #[inline] - fn as_ref(&self) -> &str { - self.pattern.as_ref() - } + #[inline] + fn as_ref(&self) -> &str { + self.pattern.as_ref() + } } impl PartialEq for MatchDebug { - #[inline] - fn eq(&self, other: &Self) -> bool { - self.pattern == other.pattern - } + #[inline] + fn eq(&self, other: &Self) -> bool { + self.pattern == other.pattern + } } impl Eq for MatchDebug {} impl PartialOrd for MatchDebug { - #[inline] - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.pattern.cmp(&other.pattern)) - } + #[inline] + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.pattern.cmp(&other.pattern)) + } } impl Ord for MatchDebug { - #[inline] - fn cmp(&self, other: &Self) -> Ordering { - self.pattern.cmp(&other.pattern) - } + #[inline] + fn cmp(&self, other: &Self) -> Ordering { + self.pattern.cmp(&other.pattern) + } } // === impl BadName === @@ -445,178 +429,163 @@ impl Ord for MatchDebug { impl Error for BadName {} impl fmt::Display for BadName { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "invalid field name `{}`", self.name) - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "invalid field name `{}`", self.name) + } } impl CallsiteMatch { - pub(crate) fn to_span_match(&self) -> SpanMatch { - let fields = self - .fields - .iter() - .map(|(k, v)| (k.clone(), (v.clone(), AtomicBool::new(false)))) - .collect(); - SpanMatch { - fields, - level: self.level, - has_matched: AtomicBool::new(false), - } - } + pub(crate) fn to_span_match(&self) -> SpanMatch { + let fields = self + .fields + .iter() + .map(|(k, v)| (k.clone(), (v.clone(), AtomicBool::new(false)))) + 
.collect(); + SpanMatch { + fields, + level: self.level, + has_matched: AtomicBool::new(false), + } + } } impl SpanMatch { - pub(crate) fn visitor(&self) -> MatchVisitor<'_> { - MatchVisitor { inner: self } - } - - #[inline] - pub(crate) fn is_matched(&self) -> bool { - if self.has_matched.load(Acquire) { - return true; - } - self.is_matched_slow() - } - - #[inline(never)] - fn is_matched_slow(&self) -> bool { - let matched = self - .fields - .values() - .all(|(_, matched)| matched.load(Acquire)); - if matched { - self.has_matched.store(true, Release); - } - matched - } - - #[inline] - pub(crate) fn filter(&self) -> Option { - if self.is_matched() { - Some(self.level) - } else { - None - } - } + pub(crate) fn visitor(&self) -> MatchVisitor<'_> { + MatchVisitor { inner: self } + } + + #[inline] + pub(crate) fn is_matched(&self) -> bool { + if self.has_matched.load(Acquire) { + return true; + } + self.is_matched_slow() + } + + #[inline(never)] + fn is_matched_slow(&self) -> bool { + let matched = self.fields.values().all(|(_, matched)| matched.load(Acquire)); + if matched { + self.has_matched.store(true, Release); + } + matched + } + + #[inline] + pub(crate) fn filter(&self) -> Option { + if self.is_matched() { Some(self.level) } else { None } + } } impl<'a> Visit for MatchVisitor<'a> { - fn record_f64(&mut self, field: &Field, value: f64) { - match self.inner.fields.get(field) { - Some((ValueMatch::NaN, ref matched)) if value.is_nan() => { - matched.store(true, Release); - } - Some((ValueMatch::F64(ref e), ref matched)) if (value - *e).abs() < f64::EPSILON => { - matched.store(true, Release); - } - _ => {} - } - } - - fn record_i64(&mut self, field: &Field, value: i64) { - match self.inner.fields.get(field) { - Some((ValueMatch::I64(ref e), ref matched)) if value == *e => { - matched.store(true, Release); - } - Some((ValueMatch::U64(ref e), ref matched)) if Ok(value) == (*e).try_into() => { - matched.store(true, Release); - } - _ => {} - } - } - - fn record_u64(&mut 
self, field: &Field, value: u64) { - match self.inner.fields.get(field) { - Some((ValueMatch::U64(ref e), ref matched)) if value == *e => { - matched.store(true, Release); - } - _ => {} - } - } - - fn record_bool(&mut self, field: &Field, value: bool) { - match self.inner.fields.get(field) { - Some((ValueMatch::Bool(ref e), ref matched)) if value == *e => { - matched.store(true, Release); - } - _ => {} - } - } - - fn record_str(&mut self, field: &Field, value: &str) { - match self.inner.fields.get(field) { - Some((ValueMatch::Pat(ref e), ref matched)) if e.str_matches(&value) => { - matched.store(true, Release); - } - Some((ValueMatch::Debug(ref e), ref matched)) if e.debug_matches(&value) => { - matched.store(true, Release) - } - _ => {} - } - } - - fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) { - match self.inner.fields.get(field) { - Some((ValueMatch::Pat(ref e), ref matched)) if e.debug_matches(&value) => { - matched.store(true, Release); - } - Some((ValueMatch::Debug(ref e), ref matched)) if e.debug_matches(&value) => { - matched.store(true, Release) - } - _ => {} - } - } + fn record_f64(&mut self, field: &Field, value: f64) { + match self.inner.fields.get(field) { + Some((ValueMatch::NaN, ref matched)) if value.is_nan() => { + matched.store(true, Release); + } + Some((ValueMatch::F64(ref e), ref matched)) if (value - *e).abs() < f64::EPSILON => { + matched.store(true, Release); + } + _ => {} + } + } + + fn record_i64(&mut self, field: &Field, value: i64) { + match self.inner.fields.get(field) { + Some((ValueMatch::I64(ref e), ref matched)) if value == *e => { + matched.store(true, Release); + } + Some((ValueMatch::U64(ref e), ref matched)) if Ok(value) == (*e).try_into() => { + matched.store(true, Release); + } + _ => {} + } + } + + fn record_u64(&mut self, field: &Field, value: u64) { + match self.inner.fields.get(field) { + Some((ValueMatch::U64(ref e), ref matched)) if value == *e => { + matched.store(true, Release); + } + _ => {} + } 
+ } + + fn record_bool(&mut self, field: &Field, value: bool) { + match self.inner.fields.get(field) { + Some((ValueMatch::Bool(ref e), ref matched)) if value == *e => { + matched.store(true, Release); + } + _ => {} + } + } + + fn record_str(&mut self, field: &Field, value: &str) { + match self.inner.fields.get(field) { + Some((ValueMatch::Pat(ref e), ref matched)) if e.str_matches(&value) => { + matched.store(true, Release); + } + Some((ValueMatch::Debug(ref e), ref matched)) if e.debug_matches(&value) => matched.store(true, Release), + _ => {} + } + } + + fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) { + match self.inner.fields.get(field) { + Some((ValueMatch::Pat(ref e), ref matched)) if e.debug_matches(&value) => { + matched.store(true, Release); + } + Some((ValueMatch::Debug(ref e), ref matched)) if e.debug_matches(&value) => matched.store(true, Release), + _ => {} + } + } } #[cfg(test)] mod tests { - use super::*; - #[derive(Debug)] - #[allow(dead_code)] - struct MyStruct { - answer: usize, - question: &'static str, - } - - #[test] - fn debug_struct_match() { - let my_struct = MyStruct { - answer: 42, - question: "life, the universe, and everything", - }; - - let pattern = "MyStruct { answer: 42, question: \"life, the universe, and everything\" }"; - - assert_eq!( - format!("{:?}", my_struct), - pattern, - "`MyStruct`'s `Debug` impl doesn't output the expected string" - ); - - let matcher = MatchDebug { - pattern: pattern.into(), - }; - assert!(matcher.debug_matches(&my_struct)) - } - - #[test] - fn debug_struct_not_match() { - let my_struct = MyStruct { - answer: 42, - question: "what shall we have for lunch?", - }; - - let pattern = "MyStruct { answer: 42, question: \"life, the universe, and everything\" }"; - - assert_eq!( - format!("{:?}", my_struct), - "MyStruct { answer: 42, question: \"what shall we have for lunch?\" }", - "`MyStruct`'s `Debug` impl doesn't output the expected string" - ); - - let matcher = MatchDebug { - pattern: 
pattern.into(), - }; - assert!(!matcher.debug_matches(&my_struct)) - } + use super::*; + #[derive(Debug)] + #[allow(dead_code)] + struct MyStruct { + answer: usize, + question: &'static str, + } + + #[test] + fn debug_struct_match() { + let my_struct = MyStruct { + answer: 42, + question: "life, the universe, and everything", + }; + + let pattern = "MyStruct { answer: 42, question: \"life, the universe, and everything\" }"; + + assert_eq!( + format!("{:?}", my_struct), + pattern, + "`MyStruct`'s `Debug` impl doesn't output the expected string" + ); + + let matcher = MatchDebug { pattern: pattern.into() }; + assert!(matcher.debug_matches(&my_struct)) + } + + #[test] + fn debug_struct_not_match() { + let my_struct = MyStruct { + answer: 42, + question: "what shall we have for lunch?", + }; + + let pattern = "MyStruct { answer: 42, question: \"life, the universe, and everything\" }"; + + assert_eq!( + format!("{:?}", my_struct), + "MyStruct { answer: 42, question: \"what shall we have for lunch?\" }", + "`MyStruct`'s `Debug` impl doesn't output the expected string" + ); + + let matcher = MatchDebug { pattern: pattern.into() }; + assert!(!matcher.debug_matches(&my_struct)) + } } diff --git a/foundations/src/telementry/env_filter/env/mod.rs b/foundations/src/telementry/env_filter/env/mod.rs index 76455db4..ef6d692d 100644 --- a/foundations/src/telementry/env_filter/env/mod.rs +++ b/foundations/src/telementry/env_filter/env/mod.rs @@ -9,29 +9,28 @@ mod builder; mod directive; mod field; +use std::cell::RefCell; +use std::collections::HashMap; +use std::error::Error; +use std::str::FromStr; +use std::{env, fmt}; + use directive::ParseError; -use std::{cell::RefCell, collections::HashMap, env, error::Error, fmt, str::FromStr}; use thread_local::ThreadLocal; -use tracing::{ - callsite, - field::Field, - span, - subscriber::{Interest, Subscriber}, - Metadata, -}; -use tracing_subscriber::{ - filter::LevelFilter, - layer::{Context, Layer}, - registry::LookupSpan, -}; +use 
tracing::field::Field; +use tracing::subscriber::{Interest, Subscriber}; +use tracing::{callsite, span, Metadata}; +use tracing_subscriber::filter::LevelFilter; +use tracing_subscriber::layer::{Context, Layer}; +use tracing_subscriber::registry::LookupSpan; /// A [`Layer`] which filters spans and events based on a set of filter /// directives. /// -/// `EnvFilter` implements both the [`Layer`](#impl-Layer) and [`Filter`] traits, so it may -/// be used for both [global filtering][global] and [per-layer filtering][plf], -/// respectively. See [the documentation on filtering with `Layer`s][filtering] -/// for details. +/// `EnvFilter` implements both the [`Layer`](#impl-Layer) and [`Filter`] +/// traits, so it may be used for both [global filtering][global] and [per-layer +/// filtering][plf], respectively. See [the documentation on filtering with +/// `Layer`s][filtering] for details. /// /// The [`Targets`] type implements a similar form of filtering, but without the /// ability to dynamically enable events based on the current span context, and @@ -40,36 +39,41 @@ use tracing_subscriber::{ /// /// # Directives /// -/// A filter consists of one or more comma-separated directives which match on [`Span`]s and [`Event`]s. -/// Each directive may have a corresponding maximum verbosity [`level`] which -/// enables (e.g., _selects for_) spans and events that match. Like `log`, -/// `tracing` considers less exclusive levels (like `trace` or `info`) to be more -/// verbose than more exclusive levels (like `error` or `warn`). +/// A filter consists of one or more comma-separated directives which match on +/// [`Span`]s and [`Event`]s. Each directive may have a corresponding maximum +/// verbosity [`level`] which enables (e.g., _selects for_) spans and events +/// that match. Like `log`, `tracing` considers less exclusive levels (like +/// `trace` or `info`) to be more verbose than more exclusive levels (like +/// `error` or `warn`). 
/// -/// The directive syntax is similar to that of [`env_logger`]'s. At a high level, the syntax for directives -/// consists of several parts: +/// The directive syntax is similar to that of [`env_logger`]'s. At a high +/// level, the syntax for directives consists of several parts: /// /// ```text /// target[span{field=value}]=level /// ``` /// -/// Each component (`target`, `span`, `field`, `value`, and `level`) will be covered in turn. +/// Each component (`target`, `span`, `field`, `value`, and `level`) will be +/// covered in turn. /// -/// - `target` matches the event or span's target. In general, this is the module path and/or crate name. -/// Examples of targets `h2`, `tokio::net`, or `tide::server`. For more information on targets, -/// please refer to [`Metadata`]'s documentation. -/// - `span` matches on the span's name. If a `span` directive is provided alongside a `target`, -/// the `span` directive will match on spans _within_ the `target`. -/// - `field` matches on [fields] within spans. Field names can also be supplied without a `value` -/// and will match on any [`Span`] or [`Event`] that has a field with that name. -/// For example: `[span{field=\"value\"}]=debug`, `[{field}]=trace`. -/// - `value` matches on the value of a span's field. If a value is a numeric literal or a bool, -/// it will match _only_ on that value. Otherwise, this filter matches the -/// [`std::fmt::Debug`] output from the value. +/// - `target` matches the event or span's target. In general, this is the +/// module path and/or crate name. Examples of targets `h2`, `tokio::net`, or +/// `tide::server`. For more information on targets, please refer to +/// [`Metadata`]'s documentation. +/// - `span` matches on the span's name. If a `span` directive is provided +/// alongside a `target`, the `span` directive will match on spans _within_ +/// the `target`. +/// - `field` matches on [fields] within spans. 
Field names can also be supplied +/// without a `value` and will match on any [`Span`] or [`Event`] that has a +/// field with that name. For example: `[span{field=\"value\"}]=debug`, +/// `[{field}]=trace`. +/// - `value` matches on the value of a span's field. If a value is a numeric +/// literal or a bool, it will match _only_ on that value. Otherwise, this +/// filter matches the [`std::fmt::Debug`] output from the value. /// - `level` sets a maximum verbosity level accepted by this directive. /// -/// When a field value directive (`[{=}]=...`) matches a -/// value's [`std::fmt::Debug`] output (i.e., the field value in the directive +/// When a field value directive (`[{=}]=...`) matches +/// a value's [`std::fmt::Debug`] output (i.e., the field value in the directive /// is not a `bool`, `i64`, `u64`, or `f64` literal), the matched pattern may be /// interpreted as either a regular expression or as the precise expected /// output of the field's [`std::fmt::Debug`] implementation. By default, these @@ -87,17 +91,19 @@ use tracing_subscriber::{ /// /// ## Usage Notes /// -/// - The portion of the directive which is included within the square brackets is `tracing`-specific. +/// - The portion of the directive which is included within the square brackets +/// is `tracing`-specific. /// - Any portion of the directive can be omitted. -/// - The sole exception are the `field` and `value` directives. If a `value` is provided, -/// a `field` must _also_ be provided. However, the converse does not hold, as fields can -/// be matched without a value. -/// - If only a level is provided, it will set the maximum level for all `Span`s and `Event`s -/// that are not enabled by other filters. -/// - A directive without a level will enable anything that it matches. This is equivalent to `=trace`. -/// - When a crate has a dash in its name, the default target for events will be the -/// crate's module path as it appears in Rust. 
This means every dash will be replaced -/// with an underscore. +/// - The sole exception are the `field` and `value` directives. If a +/// `value` is provided, a `field` must _also_ be provided. However, the +/// converse does not hold, as fields can be matched without a value. +/// - If only a level is provided, it will set the maximum level for all `Span`s +/// and `Event`s that are not enabled by other filters. +/// - A directive without a level will enable anything that it matches. This is +/// equivalent to `=trace`. +/// - When a crate has a dash in its name, the default target for events will be +/// the crate's module path as it appears in Rust. This means every dash will +/// be replaced with an underscore. /// - A dash in a target will only appear when being specified explicitly: /// `tracing::info!(target: "target-name", ...);` /// @@ -110,7 +116,8 @@ use tracing_subscriber::{ /// - are at the level `warn` or above, *or* /// - have the `tokio::net` target at the level `info` or above. /// - `my_crate[span_a]=trace` will enable all spans and events that: -/// - are within the `span_a` span or named `span_a` _if_ `span_a` has the target `my_crate`, +/// - are within the `span_a` span or named `span_a` _if_ `span_a` has the +/// target `my_crate`, /// - at the level `trace` or above. /// - `[span_b{name=\"bob\"}]` will enable all spans or event that: /// - have _any_ target, @@ -194,13 +201,13 @@ use tracing_subscriber::{ /// [filtering]: crate::layer#filtering-with-layers #[derive(Debug)] pub struct EnvFilter { - statics: directive::Statics, - dynamics: directive::Dynamics, - has_dynamics: bool, - by_id: scc::HashMap, - by_cs: scc::HashMap, - scope: ThreadLocal>>, - regex: bool, + statics: directive::Statics, + dynamics: directive::Dynamics, + has_dynamics: bool, + by_id: scc::HashMap, + by_cs: scc::HashMap, + scope: ThreadLocal>>, + regex: bool, } type FieldMap = HashMap; @@ -209,791 +216,778 @@ type FieldMap = HashMap; /// environment variable. 
#[derive(Debug)] pub struct FromEnvError { - kind: ErrorKind, + kind: ErrorKind, } #[derive(Debug)] enum ErrorKind { - Parse(ParseError), - Env(env::VarError), + Parse(ParseError), + Env(env::VarError), } impl EnvFilter { - /// `RUST_LOG` is the default environment variable used by - /// [`EnvFilter::from_default_env`] and [`EnvFilter::try_from_default_env`]. - /// - /// [`EnvFilter::from_default_env`]: EnvFilter::from_default_env() - /// [`EnvFilter::try_from_default_env`]: EnvFilter::try_from_default_env() - pub const DEFAULT_ENV: &'static str = "RUST_LOG"; - - // === constructors, etc === - - /// Returns a [builder] that can be used to configure a new [`EnvFilter`] - /// instance. - /// - /// The [`Builder`] type is used to set additional configurations, such as - /// [whether regular expressions are enabled](Builder::with_regex) or [the - /// default directive](Builder::with_default_directive) before parsing an - /// [`EnvFilter`] from a string or environment variable. - /// - /// [builder]: https://rust-unofficial.github.io/patterns/patterns/creational/builder.html - pub fn builder() -> Builder { - Builder::default() - } - - /// Returns a new `EnvFilter` from the value of the `RUST_LOG` environment - /// variable, ignoring any invalid filter directives. - /// - /// If the environment variable is empty or not set, or if it contains only - /// invalid directives, a default directive enabling the [`ERROR`] level is - /// added. - /// - /// To set additional configuration options prior to parsing the filter, use - /// the [`Builder`] type instead. 
- /// - /// This function is equivalent to the following: - /// - /// ```rust - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// - /// # fn docs() -> EnvFilter { - /// EnvFilter::builder() - /// .with_default_directive(LevelFilter::ERROR.into()) - /// .from_env_lossy() - /// # } - /// ``` - /// - /// [`ERROR`]: tracing::Level::ERROR - pub fn from_default_env() -> Self { - Self::builder() - .with_default_directive(LevelFilter::ERROR.into()) - .from_env_lossy() - } - - /// Returns a new `EnvFilter` from the value of the given environment - /// variable, ignoring any invalid filter directives. - /// - /// If the environment variable is empty or not set, or if it contains only - /// invalid directives, a default directive enabling the [`ERROR`] level is - /// added. - /// - /// To set additional configuration options prior to parsing the filter, use - /// the [`Builder`] type instead. - /// - /// This function is equivalent to the following: - /// - /// ```rust - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// - /// # fn docs() -> EnvFilter { - /// # let env = ""; - /// EnvFilter::builder() - /// .with_default_directive(LevelFilter::ERROR.into()) - /// .with_env_var(env) - /// .from_env_lossy() - /// # } - /// ``` - /// - /// [`ERROR`]: tracing::Level::ERROR - pub fn from_env>(env: A) -> Self { - Self::builder() - .with_default_directive(LevelFilter::ERROR.into()) - .with_env_var(env.as_ref()) - .from_env_lossy() - } - - /// Returns a new `EnvFilter` from the directives in the given string, - /// ignoring any that are invalid. - /// - /// If the string is empty or contains only invalid directives, a default - /// directive enabling the [`ERROR`] level is added. - /// - /// To set additional configuration options prior to parsing the filter, use - /// the [`Builder`] type instead. 
- /// - /// This function is equivalent to the following: - /// - /// ```rust - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// - /// # fn docs() -> EnvFilter { - /// # let directives = ""; - /// EnvFilter::builder() - /// .with_default_directive(LevelFilter::ERROR.into()) - /// .parse_lossy(directives) - /// # } - /// ``` - /// - /// [`ERROR`]: tracing::Level::ERROR - pub fn new>(directives: S) -> Self { - Self::builder() - .with_default_directive(LevelFilter::ERROR.into()) - .parse_lossy(directives) - } - - /// Returns a new `EnvFilter` from the directives in the given string, - /// or an error if any are invalid. - /// - /// If the string is empty, a default directive enabling the [`ERROR`] level - /// is added. - /// - /// To set additional configuration options prior to parsing the filter, use - /// the [`Builder`] type instead. - /// - /// This function is equivalent to the following: - /// - /// ```rust - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// - /// # fn docs() -> Result { - /// # let directives = ""; - /// EnvFilter::builder() - /// .with_default_directive(LevelFilter::ERROR.into()) - /// .parse(directives) - /// # } - /// ``` - /// - /// [`ERROR`]: tracing::Level::ERROR - pub fn try_new>(dirs: S) -> Result { - Self::builder().parse(dirs) - } - - /// Returns a new `EnvFilter` from the value of the `RUST_LOG` environment - /// variable, or an error if the environment variable is unset or contains - /// any invalid filter directives. - /// - /// To set additional configuration options prior to parsing the filter, use - /// the [`Builder`] type instead. 
- /// - /// This function is equivalent to the following: - /// - /// ```rust - /// use tracing_subscriber::EnvFilter; - /// - /// # fn docs() -> Result { - /// EnvFilter::builder().try_from_env() - /// # } - /// ``` - pub fn try_from_default_env() -> Result { - Self::builder().try_from_env() - } - - /// Returns a new `EnvFilter` from the value of the given environment - /// variable, or an error if the environment variable is unset or contains - /// any invalid filter directives. - /// - /// To set additional configuration options prior to parsing the filter, use - /// the [`Builder`] type instead. - /// - /// This function is equivalent to the following: - /// - /// ```rust - /// use tracing_subscriber::EnvFilter; - /// - /// # fn docs() -> Result { - /// # let env = ""; - /// EnvFilter::builder().with_env_var(env).try_from_env() - /// # } - /// ``` - pub fn try_from_env>(env: A) -> Result { - Self::builder().with_env_var(env.as_ref()).try_from_env() - } - - /// Add a filtering directive to this `EnvFilter`. - /// - /// The added directive will be used in addition to any previously set - /// directives, either added using this method or provided when the filter - /// is constructed. - /// - /// Filters may be created from [`LevelFilter`] or [`Level`], which will - /// enable all traces at or below a certain verbosity level, or - /// parsed from a string specifying a directive. - /// - /// If a filter directive is inserted that matches exactly the same spans - /// and events as a previous filter, but sets a different level for those - /// spans and events, the previous directive is overwritten. 
- /// - /// [`LevelFilter`]: super::LevelFilter - /// [`Level`]: tracing_core::Level - /// - /// # Examples - /// - /// From [`LevelFilter`]: - /// - /// ```rust - /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// let mut filter = EnvFilter::from_default_env() - /// .add_directive(LevelFilter::INFO.into()); - /// ``` - /// - /// Or from [`Level`]: - /// - /// ```rust - /// # use tracing_subscriber::filter::{EnvFilter, LevelFilter}; - /// # use tracing::Level; - /// let mut filter = EnvFilter::from_default_env() - /// .add_directive(Level::INFO.into()); - /// ``` - /// - /// Parsed from a string: - /// - /// ```rust - /// use tracing_subscriber::filter::{EnvFilter, Directive}; - /// - /// # fn try_mk_filter() -> Result<(), Box> { - /// let mut filter = EnvFilter::try_from_default_env()? - /// .add_directive("my_crate::module=trace".parse()?) - /// .add_directive("my_crate::my_other_module::something=info".parse()?); - /// # Ok(()) - /// # } - /// ``` - /// In the above example, substitute `my_crate`, `module`, etc. with the - /// name your target crate/module is imported with. This might be - /// different from the package name in Cargo.toml (`-` is replaced by `_`). - /// Example, if the package name in your Cargo.toml is `MY-FANCY-LIB`, then - /// the corresponding Rust identifier would be `MY_FANCY_LIB`: - pub fn add_directive(mut self, mut directive: Directive) -> Self { - if !self.regex { - directive.deregexify(); - } - if let Some(stat) = directive.to_static() { - self.statics.add(stat) - } else { - self.has_dynamics = true; - self.dynamics.add(directive); - } - self - } - - // === filtering methods === - - /// Returns `true` if this `EnvFilter` would enable the provided `metadata` - /// in the current context. - /// - /// This is equivalent to calling the [`Layer::enabled`] or - /// [`Filter::enabled`] methods on `EnvFilter`'s implementations of those - /// traits, but it does not require the trait to be in scope. 
- pub fn enabled(&self, metadata: &Metadata<'_>, _: Context<'_, S>) -> bool { - let level = metadata.level(); - - // is it possible for a dynamic filter directive to enable this event? - // if not, we can avoid the thread loca'l access + iterating over the - // spans in the current scope. - if self.has_dynamics && self.dynamics.max_level >= *level { - if metadata.is_span() { - // If the metadata is a span, see if we care about its callsite. - let enabled_by_cs = self.by_cs.contains(&metadata.callsite()); - if enabled_by_cs { - return true; - } - } - - let enabled_by_scope = { - let scope = self.scope.get_or_default().borrow(); - for filter in &*scope { - if filter >= level { - return true; - } - } - false - }; - if enabled_by_scope { - return true; - } - } - - // is it possible for a static filter directive to enable this event? - if self.statics.max_level >= *level { - // Otherwise, fall back to checking if the callsite is - // statically enabled. - return self.statics.enabled(metadata); - } - - false - } - - /// Returns an optional hint of the highest [verbosity level][level] that - /// this `EnvFilter` will enable. - /// - /// This is equivalent to calling the [`Layer::max_level_hint`] or - /// [`Filter::max_level_hint`] methods on `EnvFilter`'s implementations of those - /// traits, but it does not require the trait to be in scope. - /// - /// [level]: tracing_core::metadata::Level - pub fn max_level_hint(&self) -> Option { - if self.dynamics.has_value_filters() { - // If we perform any filtering on span field *values*, we will - // enable *all* spans, because their field values are not known - // until recording. - return Some(LevelFilter::TRACE); - } - std::cmp::max( - self.statics.max_level.into(), - self.dynamics.max_level.into(), - ) - } - - /// Informs the filter that a new span was created. 
- /// - /// This is equivalent to calling the [`Layer::on_new_span`] or - /// [`Filter::on_new_span`] methods on `EnvFilter`'s implementations of those - /// traits, but it does not require the trait to be in scope. - pub fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, _: Context<'_, S>) { - if !self.has_dynamics { - return; - } - - if let Some(cs) = self.by_cs.get(&attrs.metadata().callsite()) { - let span = cs.get().to_span_match(attrs); - self.by_id - .insert(id.clone(), span) - .expect("a span should only be registered once"); - } - } - - /// Informs the filter that the span with the provided `id` was entered. - /// - /// This is equivalent to calling the [`Layer::on_enter`] or - /// [`Filter::on_enter`] methods on `EnvFilter`'s implementations of those - /// traits, but it does not require the trait to be in scope. - pub fn on_enter(&self, id: &span::Id, _: Context<'_, S>) { - if !self.has_dynamics { - return; - } - - // XXX: This is where _we_ could push IDs to the stack instead, and use - // that to allow changing the filter while a span is already entered. - // But that might be much less efficient... - if let Some(span) = self.by_id.get(id) { - self.scope - .get_or_default() - .borrow_mut() - .push(span.get().level()); - } - } - - /// Informs the filter that the span with the provided `id` was exited. - /// - /// This is equivalent to calling the [`Layer::on_exit`] or - /// [`Filter::on_exit`] methods on `EnvFilter`'s implementations of those - /// traits, but it does not require the trait to be in scope. - pub fn on_exit(&self, id: &span::Id, _: Context<'_, S>) { - if !self.has_dynamics { - return; - } - - if self.cares_about_span(id) { - self.scope.get_or_default().borrow_mut().pop(); - } - } - - /// Informs the filter that the span with the provided `id` was closed. 
- /// - /// This is equivalent to calling the [`Layer::on_close`] or - /// [`Filter::on_close`] methods on `EnvFilter`'s implementations of those - /// traits, but it does not require the trait to be in scope. - pub fn on_close(&self, id: span::Id, _: Context<'_, S>) { - if !self.has_dynamics { - return; - } - - // If we don't need to acquire a write lock, avoid doing so. - if !self.cares_about_span(&id) { - return; - } - - self.by_id.remove(&id); - } - - /// Informs the filter that the span with the provided `id` recorded the - /// provided field `values`. - /// - /// This is equivalent to calling the [`Layer::on_record`] or - /// [`Filter::on_record`] methods on `EnvFilter`'s implementations of those - /// traits, but it does not require the trait to be in scope - pub fn on_record(&self, id: &span::Id, values: &span::Record<'_>, _: Context<'_, S>) { - if !self.has_dynamics { - return; - } - - if let Some(span) = self.by_id.get(id) { - span.get().record_update(values); - } - } - - fn cares_about_span(&self, span: &span::Id) -> bool { - self.by_id.contains(span) - } - - fn base_interest(&self) -> Interest { - if self.has_dynamics { - Interest::sometimes() - } else { - Interest::never() - } - } - - fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest { - if self.has_dynamics && metadata.is_span() { - // If this metadata describes a span, first, check if there is a - // dynamic filter that should be constructed for it. If so, it - // should always be enabled, since it influences filtering. - if let Some(matcher) = self.dynamics.matcher(metadata) { - self.by_cs - .insert(metadata.callsite(), matcher) - .expect("a callsite should only be registered once"); - return Interest::always(); - } - } - - // Otherwise, check if any of our static filters enable this metadata. 
- if self.statics.enabled(metadata) { - Interest::always() - } else { - self.base_interest() - } - } + /// `RUST_LOG` is the default environment variable used by + /// [`EnvFilter::from_default_env`] and [`EnvFilter::try_from_default_env`]. + /// + /// [`EnvFilter::from_default_env`]: EnvFilter::from_default_env() + /// [`EnvFilter::try_from_default_env`]: EnvFilter::try_from_default_env() + pub const DEFAULT_ENV: &'static str = "RUST_LOG"; + + // === constructors, etc === + + /// Returns a [builder] that can be used to configure a new [`EnvFilter`] + /// instance. + /// + /// The [`Builder`] type is used to set additional configurations, such as + /// [whether regular expressions are enabled](Builder::with_regex) or [the + /// default directive](Builder::with_default_directive) before parsing an + /// [`EnvFilter`] from a string or environment variable. + /// + /// [builder]: https://rust-unofficial.github.io/patterns/patterns/creational/builder.html + pub fn builder() -> Builder { + Builder::default() + } + + /// Returns a new `EnvFilter` from the value of the `RUST_LOG` environment + /// variable, ignoring any invalid filter directives. + /// + /// If the environment variable is empty or not set, or if it contains only + /// invalid directives, a default directive enabling the [`ERROR`] level is + /// added. + /// + /// To set additional configuration options prior to parsing the filter, use + /// the [`Builder`] type instead. 
+ /// + /// This function is equivalent to the following: + /// + /// ```rust + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// + /// # fn docs() -> EnvFilter { + /// EnvFilter::builder() + /// .with_default_directive(LevelFilter::ERROR.into()) + /// .from_env_lossy() + /// # } + /// ``` + /// + /// [`ERROR`]: tracing::Level::ERROR + pub fn from_default_env() -> Self { + Self::builder() + .with_default_directive(LevelFilter::ERROR.into()) + .from_env_lossy() + } + + /// Returns a new `EnvFilter` from the value of the given environment + /// variable, ignoring any invalid filter directives. + /// + /// If the environment variable is empty or not set, or if it contains only + /// invalid directives, a default directive enabling the [`ERROR`] level is + /// added. + /// + /// To set additional configuration options prior to parsing the filter, use + /// the [`Builder`] type instead. + /// + /// This function is equivalent to the following: + /// + /// ```rust + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// + /// # fn docs() -> EnvFilter { + /// # let env = ""; + /// EnvFilter::builder() + /// .with_default_directive(LevelFilter::ERROR.into()) + /// .with_env_var(env) + /// .from_env_lossy() + /// # } + /// ``` + /// + /// [`ERROR`]: tracing::Level::ERROR + pub fn from_env>(env: A) -> Self { + Self::builder() + .with_default_directive(LevelFilter::ERROR.into()) + .with_env_var(env.as_ref()) + .from_env_lossy() + } + + /// Returns a new `EnvFilter` from the directives in the given string, + /// ignoring any that are invalid. + /// + /// If the string is empty or contains only invalid directives, a default + /// directive enabling the [`ERROR`] level is added. + /// + /// To set additional configuration options prior to parsing the filter, use + /// the [`Builder`] type instead. 
+ /// + /// This function is equivalent to the following: + /// + /// ```rust + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// + /// # fn docs() -> EnvFilter { + /// # let directives = ""; + /// EnvFilter::builder() + /// .with_default_directive(LevelFilter::ERROR.into()) + /// .parse_lossy(directives) + /// # } + /// ``` + /// + /// [`ERROR`]: tracing::Level::ERROR + pub fn new>(directives: S) -> Self { + Self::builder() + .with_default_directive(LevelFilter::ERROR.into()) + .parse_lossy(directives) + } + + /// Returns a new `EnvFilter` from the directives in the given string, + /// or an error if any are invalid. + /// + /// If the string is empty, a default directive enabling the [`ERROR`] level + /// is added. + /// + /// To set additional configuration options prior to parsing the filter, use + /// the [`Builder`] type instead. + /// + /// This function is equivalent to the following: + /// + /// ```rust + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// + /// # fn docs() -> Result { + /// # let directives = ""; + /// EnvFilter::builder() + /// .with_default_directive(LevelFilter::ERROR.into()) + /// .parse(directives) + /// # } + /// ``` + /// + /// [`ERROR`]: tracing::Level::ERROR + pub fn try_new>(dirs: S) -> Result { + Self::builder().parse(dirs) + } + + /// Returns a new `EnvFilter` from the value of the `RUST_LOG` environment + /// variable, or an error if the environment variable is unset or contains + /// any invalid filter directives. + /// + /// To set additional configuration options prior to parsing the filter, use + /// the [`Builder`] type instead. 
+ /// + /// This function is equivalent to the following: + /// + /// ```rust + /// use tracing_subscriber::EnvFilter; + /// + /// # fn docs() -> Result { + /// EnvFilter::builder().try_from_env() + /// # } + /// ``` + pub fn try_from_default_env() -> Result { + Self::builder().try_from_env() + } + + /// Returns a new `EnvFilter` from the value of the given environment + /// variable, or an error if the environment variable is unset or contains + /// any invalid filter directives. + /// + /// To set additional configuration options prior to parsing the filter, use + /// the [`Builder`] type instead. + /// + /// This function is equivalent to the following: + /// + /// ```rust + /// use tracing_subscriber::EnvFilter; + /// + /// # fn docs() -> Result { + /// # let env = ""; + /// EnvFilter::builder().with_env_var(env).try_from_env() + /// # } + /// ``` + pub fn try_from_env>(env: A) -> Result { + Self::builder().with_env_var(env.as_ref()).try_from_env() + } + + /// Add a filtering directive to this `EnvFilter`. + /// + /// The added directive will be used in addition to any previously set + /// directives, either added using this method or provided when the filter + /// is constructed. + /// + /// Filters may be created from [`LevelFilter`] or [`Level`], which will + /// enable all traces at or below a certain verbosity level, or + /// parsed from a string specifying a directive. + /// + /// If a filter directive is inserted that matches exactly the same spans + /// and events as a previous filter, but sets a different level for those + /// spans and events, the previous directive is overwritten. 
+ /// + /// [`LevelFilter`]: super::LevelFilter + /// [`Level`]: tracing_core::Level + /// + /// # Examples + /// + /// From [`LevelFilter`]: + /// + /// ```rust + /// use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// let mut filter = EnvFilter::from_default_env() + /// .add_directive(LevelFilter::INFO.into()); + /// ``` + /// + /// Or from [`Level`]: + /// + /// ```rust + /// # use tracing_subscriber::filter::{EnvFilter, LevelFilter}; + /// # use tracing::Level; + /// let mut filter = EnvFilter::from_default_env() + /// .add_directive(Level::INFO.into()); + /// ``` + /// + /// Parsed from a string: + /// + /// ```rust + /// use tracing_subscriber::filter::{EnvFilter, Directive}; + /// + /// # fn try_mk_filter() -> Result<(), Box> { + /// let mut filter = EnvFilter::try_from_default_env()? + /// .add_directive("my_crate::module=trace".parse()?) + /// .add_directive("my_crate::my_other_module::something=info".parse()?); + /// # Ok(()) + /// # } + /// ``` + /// In the above example, substitute `my_crate`, `module`, etc. with the + /// name your target crate/module is imported with. This might be + /// different from the package name in Cargo.toml (`-` is replaced by `_`). + /// Example, if the package name in your Cargo.toml is `MY-FANCY-LIB`, then + /// the corresponding Rust identifier would be `MY_FANCY_LIB`: + pub fn add_directive(mut self, mut directive: Directive) -> Self { + if !self.regex { + directive.deregexify(); + } + if let Some(stat) = directive.to_static() { + self.statics.add(stat) + } else { + self.has_dynamics = true; + self.dynamics.add(directive); + } + self + } + + // === filtering methods === + + /// Returns `true` if this `EnvFilter` would enable the provided `metadata` + /// in the current context. + /// + /// This is equivalent to calling the [`Layer::enabled`] or + /// [`Filter::enabled`] methods on `EnvFilter`'s implementations of those + /// traits, but it does not require the trait to be in scope. 
+ pub fn enabled(&self, metadata: &Metadata<'_>, _: Context<'_, S>) -> bool { + let level = metadata.level(); + + // is it possible for a dynamic filter directive to enable this event? + // if not, we can avoid the thread loca'l access + iterating over the + // spans in the current scope. + if self.has_dynamics && self.dynamics.max_level >= *level { + if metadata.is_span() { + // If the metadata is a span, see if we care about its callsite. + let enabled_by_cs = self.by_cs.contains(&metadata.callsite()); + if enabled_by_cs { + return true; + } + } + + let enabled_by_scope = { + let scope = self.scope.get_or_default().borrow(); + for filter in &*scope { + if filter >= level { + return true; + } + } + false + }; + if enabled_by_scope { + return true; + } + } + + // is it possible for a static filter directive to enable this event? + if self.statics.max_level >= *level { + // Otherwise, fall back to checking if the callsite is + // statically enabled. + return self.statics.enabled(metadata); + } + + false + } + + /// Returns an optional hint of the highest [verbosity level][level] that + /// this `EnvFilter` will enable. + /// + /// This is equivalent to calling the [`Layer::max_level_hint`] or + /// [`Filter::max_level_hint`] methods on `EnvFilter`'s implementations of + /// those traits, but it does not require the trait to be in scope. + /// + /// [level]: tracing_core::metadata::Level + pub fn max_level_hint(&self) -> Option { + if self.dynamics.has_value_filters() { + // If we perform any filtering on span field *values*, we will + // enable *all* spans, because their field values are not known + // until recording. + return Some(LevelFilter::TRACE); + } + std::cmp::max(self.statics.max_level.into(), self.dynamics.max_level.into()) + } + + /// Informs the filter that a new span was created. 
+ /// + /// This is equivalent to calling the [`Layer::on_new_span`] or + /// [`Filter::on_new_span`] methods on `EnvFilter`'s implementations of + /// those traits, but it does not require the trait to be in scope. + pub fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, _: Context<'_, S>) { + if !self.has_dynamics { + return; + } + + if let Some(cs) = self.by_cs.get(&attrs.metadata().callsite()) { + let span = cs.get().to_span_match(attrs); + self.by_id + .insert(id.clone(), span) + .expect("a span should only be registered once"); + } + } + + /// Informs the filter that the span with the provided `id` was entered. + /// + /// This is equivalent to calling the [`Layer::on_enter`] or + /// [`Filter::on_enter`] methods on `EnvFilter`'s implementations of those + /// traits, but it does not require the trait to be in scope. + pub fn on_enter(&self, id: &span::Id, _: Context<'_, S>) { + if !self.has_dynamics { + return; + } + + // XXX: This is where _we_ could push IDs to the stack instead, and use + // that to allow changing the filter while a span is already entered. + // But that might be much less efficient... + if let Some(span) = self.by_id.get(id) { + self.scope.get_or_default().borrow_mut().push(span.get().level()); + } + } + + /// Informs the filter that the span with the provided `id` was exited. + /// + /// This is equivalent to calling the [`Layer::on_exit`] or + /// [`Filter::on_exit`] methods on `EnvFilter`'s implementations of those + /// traits, but it does not require the trait to be in scope. + pub fn on_exit(&self, id: &span::Id, _: Context<'_, S>) { + if !self.has_dynamics { + return; + } + + if self.cares_about_span(id) { + self.scope.get_or_default().borrow_mut().pop(); + } + } + + /// Informs the filter that the span with the provided `id` was closed. 
+ /// + /// This is equivalent to calling the [`Layer::on_close`] or + /// [`Filter::on_close`] methods on `EnvFilter`'s implementations of those + /// traits, but it does not require the trait to be in scope. + pub fn on_close(&self, id: span::Id, _: Context<'_, S>) { + if !self.has_dynamics { + return; + } + + // If we don't need to acquire a write lock, avoid doing so. + if !self.cares_about_span(&id) { + return; + } + + self.by_id.remove(&id); + } + + /// Informs the filter that the span with the provided `id` recorded the + /// provided field `values`. + /// + /// This is equivalent to calling the [`Layer::on_record`] or + /// [`Filter::on_record`] methods on `EnvFilter`'s implementations of those + /// traits, but it does not require the trait to be in scope + pub fn on_record(&self, id: &span::Id, values: &span::Record<'_>, _: Context<'_, S>) { + if !self.has_dynamics { + return; + } + + if let Some(span) = self.by_id.get(id) { + span.get().record_update(values); + } + } + + fn cares_about_span(&self, span: &span::Id) -> bool { + self.by_id.contains(span) + } + + fn base_interest(&self) -> Interest { + if self.has_dynamics { + Interest::sometimes() + } else { + Interest::never() + } + } + + fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest { + if self.has_dynamics && metadata.is_span() { + // If this metadata describes a span, first, check if there is a + // dynamic filter that should be constructed for it. If so, it + // should always be enabled, since it influences filtering. + if let Some(matcher) = self.dynamics.matcher(metadata) { + self.by_cs + .insert(metadata.callsite(), matcher) + .expect("a callsite should only be registered once"); + return Interest::always(); + } + } + + // Otherwise, check if any of our static filters enable this metadata. 
+ if self.statics.enabled(metadata) { + Interest::always() + } else { + self.base_interest() + } + } } impl Layer for EnvFilter where - S: for<'a> LookupSpan<'a>, + S: for<'a> LookupSpan<'a>, { - #[inline] - fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest { - EnvFilter::register_callsite(self, metadata) - } - - #[inline] - fn max_level_hint(&self) -> Option { - EnvFilter::max_level_hint(self) - } - - #[inline] - fn enabled(&self, metadata: &Metadata<'_>, ctx: Context<'_, S>) -> bool { - self.enabled(metadata, ctx) - } - - #[inline] - fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) { - self.on_new_span(attrs, id, ctx) - } - - #[inline] - fn on_record(&self, id: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) { - self.on_record(id, values, ctx); - } - - #[inline] - fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) { - self.on_enter(id, ctx); - } - - #[inline] - fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) { - self.on_exit(id, ctx); - } - - #[inline] - fn on_close(&self, id: span::Id, ctx: Context<'_, S>) { - self.on_close(id, ctx); - } + #[inline] + fn register_callsite(&self, metadata: &'static Metadata<'static>) -> Interest { + EnvFilter::register_callsite(self, metadata) + } + + #[inline] + fn max_level_hint(&self) -> Option { + EnvFilter::max_level_hint(self) + } + + #[inline] + fn enabled(&self, metadata: &Metadata<'_>, ctx: Context<'_, S>) -> bool { + self.enabled(metadata, ctx) + } + + #[inline] + fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) { + self.on_new_span(attrs, id, ctx) + } + + #[inline] + fn on_record(&self, id: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) { + self.on_record(id, values, ctx); + } + + #[inline] + fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) { + self.on_enter(id, ctx); + } + + #[inline] + fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) { + self.on_exit(id, ctx); + } + + 
#[inline] + fn on_close(&self, id: span::Id, ctx: Context<'_, S>) { + self.on_close(id, ctx); + } } use tracing_subscriber::layer::Filter; impl Filter for EnvFilter where - S: Subscriber, + S: Subscriber, { - #[inline] - fn enabled(&self, meta: &Metadata<'_>, ctx: &Context<'_, S>) -> bool { - self.enabled(meta, ctx.clone()) - } - - #[inline] - fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest { - self.register_callsite(meta) - } - - #[inline] - fn max_level_hint(&self) -> Option { - EnvFilter::max_level_hint(self) - } - - #[inline] - fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) { - self.on_new_span(attrs, id, ctx) - } - - #[inline] - fn on_record(&self, id: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) { - self.on_record(id, values, ctx); - } - - #[inline] - fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) { - self.on_enter(id, ctx); - } - - #[inline] - fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) { - self.on_exit(id, ctx); - } - - #[inline] - fn on_close(&self, id: span::Id, ctx: Context<'_, S>) { - self.on_close(id, ctx); - } + #[inline] + fn enabled(&self, meta: &Metadata<'_>, ctx: &Context<'_, S>) -> bool { + self.enabled(meta, ctx.clone()) + } + + #[inline] + fn callsite_enabled(&self, meta: &'static Metadata<'static>) -> Interest { + self.register_callsite(meta) + } + + #[inline] + fn max_level_hint(&self) -> Option { + EnvFilter::max_level_hint(self) + } + + #[inline] + fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: Context<'_, S>) { + self.on_new_span(attrs, id, ctx) + } + + #[inline] + fn on_record(&self, id: &span::Id, values: &span::Record<'_>, ctx: Context<'_, S>) { + self.on_record(id, values, ctx); + } + + #[inline] + fn on_enter(&self, id: &span::Id, ctx: Context<'_, S>) { + self.on_enter(id, ctx); + } + + #[inline] + fn on_exit(&self, id: &span::Id, ctx: Context<'_, S>) { + self.on_exit(id, ctx); + } + + #[inline] + fn 
on_close(&self, id: span::Id, ctx: Context<'_, S>) { + self.on_close(id, ctx); + } } impl FromStr for EnvFilter { - type Err = directive::ParseError; + type Err = directive::ParseError; - fn from_str(spec: &str) -> Result { - Self::try_new(spec) - } + fn from_str(spec: &str) -> Result { + Self::try_new(spec) + } } impl From for EnvFilter where - S: AsRef, + S: AsRef, { - fn from(s: S) -> Self { - Self::new(s) - } + fn from(s: S) -> Self { + Self::new(s) + } } impl Default for EnvFilter { - fn default() -> Self { - Builder::default().from_directives(std::iter::empty()) - } + fn default() -> Self { + Builder::default().from_directives(std::iter::empty()) + } } impl fmt::Display for EnvFilter { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut statics = self.statics.iter(); - let wrote_statics = if let Some(next) = statics.next() { - fmt::Display::fmt(next, f)?; - for directive in statics { - write!(f, ",{}", directive)?; - } - true - } else { - false - }; - - let mut dynamics = self.dynamics.iter(); - if let Some(next) = dynamics.next() { - if wrote_statics { - f.write_str(",")?; - } - fmt::Display::fmt(next, f)?; - for directive in dynamics { - write!(f, ",{}", directive)?; - } - } - Ok(()) - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut statics = self.statics.iter(); + let wrote_statics = if let Some(next) = statics.next() { + fmt::Display::fmt(next, f)?; + for directive in statics { + write!(f, ",{}", directive)?; + } + true + } else { + false + }; + + let mut dynamics = self.dynamics.iter(); + if let Some(next) = dynamics.next() { + if wrote_statics { + f.write_str(",")?; + } + fmt::Display::fmt(next, f)?; + for directive in dynamics { + write!(f, ",{}", directive)?; + } + } + Ok(()) + } } // ===== impl FromEnvError ===== impl From for FromEnvError { - fn from(p: directive::ParseError) -> Self { - Self { - kind: ErrorKind::Parse(p), - } - } + fn from(p: directive::ParseError) -> Self { + Self { + kind: 
ErrorKind::Parse(p), + } + } } impl From for FromEnvError { - fn from(v: env::VarError) -> Self { - Self { - kind: ErrorKind::Env(v), - } - } + fn from(v: env::VarError) -> Self { + Self { kind: ErrorKind::Env(v) } + } } impl fmt::Display for FromEnvError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self.kind { - ErrorKind::Parse(ref p) => p.fmt(f), - ErrorKind::Env(ref e) => e.fmt(f), - } - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self.kind { + ErrorKind::Parse(ref p) => p.fmt(f), + ErrorKind::Env(ref e) => e.fmt(f), + } + } } impl Error for FromEnvError { - fn source(&self) -> Option<&(dyn Error + 'static)> { - match self.kind { - ErrorKind::Parse(ref p) => Some(p), - ErrorKind::Env(ref e) => Some(e), - } - } + fn source(&self) -> Option<&(dyn Error + 'static)> { + match self.kind { + ErrorKind::Parse(ref p) => Some(p), + ErrorKind::Env(ref e) => Some(e), + } + } } #[cfg(test)] mod tests { - use metadata::Kind; - use tracing_subscriber::Registry; - - #[macro_export] - macro_rules! 
identify_callsite { - ($callsite:expr) => { - tracing::callsite::Identifier($callsite) - }; - } - - use super::*; - use tracing::field::FieldSet; - use tracing::*; - - struct Cs; - impl Callsite for Cs { - fn set_interest(&self, _interest: Interest) {} - fn metadata(&self) -> &Metadata<'_> { - unimplemented!() - } - } - - #[test] - fn callsite_enabled_no_span_directive() { - let filter = EnvFilter::new("app=debug").with_subscriber(Registry::default()); - static META: &Metadata<'static> = &Metadata::new( - "mySpan", - "app", - Level::TRACE, - None, - None, - None, - FieldSet::new(&[], identify_callsite!(&Cs)), - Kind::SPAN, - ); - - let interest = filter.register_callsite(META); - assert!(interest.is_never()); - } - - #[test] - fn callsite_off() { - let filter = EnvFilter::new("app=off").with_subscriber(Registry::default()); - static META: &Metadata<'static> = &Metadata::new( - "mySpan", - "app", - Level::ERROR, - None, - None, - None, - FieldSet::new(&[], identify_callsite!(&Cs)), - Kind::SPAN, - ); - - let interest = filter.register_callsite(META); - assert!(interest.is_never()); - } - - #[test] - fn callsite_enabled_includes_span_directive() { - let filter = EnvFilter::new("app[mySpan]=debug").with_subscriber(Registry::default()); - static META: &Metadata<'static> = &Metadata::new( - "mySpan", - "app", - Level::TRACE, - None, - None, - None, - FieldSet::new(&[], identify_callsite!(&Cs)), - Kind::SPAN, - ); - - let interest = filter.register_callsite(META); - assert!(interest.is_always()); - } - - #[test] - fn callsite_enabled_includes_span_directive_field() { - let filter = EnvFilter::new("app[mySpan{field=\"value\"}]=debug") - .with_subscriber(Registry::default()); - static META: &Metadata<'static> = &Metadata::new( - "mySpan", - "app", - Level::TRACE, - None, - None, - None, - FieldSet::new(&["field"], identify_callsite!(&Cs)), - Kind::SPAN, - ); - - let interest = filter.register_callsite(META); - assert!(interest.is_always()); - } - - #[test] - fn 
callsite_enabled_includes_span_directive_multiple_fields() { - let filter = EnvFilter::new("app[mySpan{field=\"value\",field2=2}]=debug") - .with_subscriber(Registry::default()); - static META: &Metadata<'static> = &Metadata::new( - "mySpan", - "app", - Level::TRACE, - None, - None, - None, - FieldSet::new(&["field"], identify_callsite!(&Cs)), - Kind::SPAN, - ); - - let interest = filter.register_callsite(META); - assert!(interest.is_never()); - } - - #[test] - fn roundtrip() { - let f1: EnvFilter = - "[span1{foo=1}]=error,[span2{bar=2 baz=false}],crate2[{quux=\"quuux\"}]=debug" - .parse() - .unwrap(); - let f2: EnvFilter = format!("{}", f1).parse().unwrap(); - assert_eq!(f1.statics, f2.statics); - assert_eq!(f1.dynamics, f2.dynamics); - } - - #[test] - fn size_of_filters() { - fn print_sz(s: &str) { - let filter = s.parse::().expect("filter should parse"); - println!( - "size_of_val({:?})\n -> {}B", - s, - std::mem::size_of_val(&filter) - ); - } - - print_sz("info"); - - print_sz("foo=debug"); - - print_sz( - "crate1::mod1=error,crate1::mod2=warn,crate1::mod2::mod3=info,\ + use metadata::Kind; + use tracing_subscriber::Registry; + + #[macro_export] + macro_rules! 
identify_callsite { + ($callsite:expr) => { + tracing::callsite::Identifier($callsite) + }; + } + + use tracing::field::FieldSet; + use tracing::*; + + use super::*; + + struct Cs; + impl Callsite for Cs { + fn set_interest(&self, _interest: Interest) {} + + fn metadata(&self) -> &Metadata<'_> { + unimplemented!() + } + } + + #[test] + fn callsite_enabled_no_span_directive() { + let filter = EnvFilter::new("app=debug").with_subscriber(Registry::default()); + static META: &Metadata<'static> = &Metadata::new( + "mySpan", + "app", + Level::TRACE, + None, + None, + None, + FieldSet::new(&[], identify_callsite!(&Cs)), + Kind::SPAN, + ); + + let interest = filter.register_callsite(META); + assert!(interest.is_never()); + } + + #[test] + fn callsite_off() { + let filter = EnvFilter::new("app=off").with_subscriber(Registry::default()); + static META: &Metadata<'static> = &Metadata::new( + "mySpan", + "app", + Level::ERROR, + None, + None, + None, + FieldSet::new(&[], identify_callsite!(&Cs)), + Kind::SPAN, + ); + + let interest = filter.register_callsite(META); + assert!(interest.is_never()); + } + + #[test] + fn callsite_enabled_includes_span_directive() { + let filter = EnvFilter::new("app[mySpan]=debug").with_subscriber(Registry::default()); + static META: &Metadata<'static> = &Metadata::new( + "mySpan", + "app", + Level::TRACE, + None, + None, + None, + FieldSet::new(&[], identify_callsite!(&Cs)), + Kind::SPAN, + ); + + let interest = filter.register_callsite(META); + assert!(interest.is_always()); + } + + #[test] + fn callsite_enabled_includes_span_directive_field() { + let filter = EnvFilter::new("app[mySpan{field=\"value\"}]=debug").with_subscriber(Registry::default()); + static META: &Metadata<'static> = &Metadata::new( + "mySpan", + "app", + Level::TRACE, + None, + None, + None, + FieldSet::new(&["field"], identify_callsite!(&Cs)), + Kind::SPAN, + ); + + let interest = filter.register_callsite(META); + assert!(interest.is_always()); + } + + #[test] + fn 
callsite_enabled_includes_span_directive_multiple_fields() { + let filter = EnvFilter::new("app[mySpan{field=\"value\",field2=2}]=debug").with_subscriber(Registry::default()); + static META: &Metadata<'static> = &Metadata::new( + "mySpan", + "app", + Level::TRACE, + None, + None, + None, + FieldSet::new(&["field"], identify_callsite!(&Cs)), + Kind::SPAN, + ); + + let interest = filter.register_callsite(META); + assert!(interest.is_never()); + } + + #[test] + fn roundtrip() { + let f1: EnvFilter = "[span1{foo=1}]=error,[span2{bar=2 baz=false}],crate2[{quux=\"quuux\"}]=debug" + .parse() + .unwrap(); + let f2: EnvFilter = format!("{}", f1).parse().unwrap(); + assert_eq!(f1.statics, f2.statics); + assert_eq!(f1.dynamics, f2.dynamics); + } + + #[test] + fn size_of_filters() { + fn print_sz(s: &str) { + let filter = s.parse::().expect("filter should parse"); + println!("size_of_val({:?})\n -> {}B", s, std::mem::size_of_val(&filter)); + } + + print_sz("info"); + + print_sz("foo=debug"); + + print_sz( + "crate1::mod1=error,crate1::mod2=warn,crate1::mod2::mod3=info,\ crate2=debug,crate3=trace,crate3::mod2::mod1=off", - ); + ); - print_sz("[span1{foo=1}]=error,[span2{bar=2 baz=false}],crate2[{quux=\"quuux\"}]=debug"); + print_sz("[span1{foo=1}]=error,[span2{bar=2 baz=false}],crate2[{quux=\"quuux\"}]=debug"); - print_sz( - "crate1::mod1=error,crate1::mod2=warn,crate1::mod2::mod3=info,\ + print_sz( + "crate1::mod1=error,crate1::mod2=warn,crate1::mod2::mod3=info,\ crate2=debug,crate3=trace,crate3::mod2::mod1=off,[span1{foo=1}]=error,\ [span2{bar=2 baz=false}],crate2[{quux=\"quuux\"}]=debug", - ); - } - - #[test] - fn parse_empty_string() { - // There is no corresponding test for [`Builder::parse_lossy`] as failed - // parsing does not produce any observable side effects. If this test fails - // check that [`Builder::parse_lossy`] is behaving correctly as well. 
- assert!(EnvFilter::builder().parse("").is_ok()); - } + ); + } + + #[test] + fn parse_empty_string() { + // There is no corresponding test for [`Builder::parse_lossy`] as failed + // parsing does not produce any observable side effects. If this test fails + // check that [`Builder::parse_lossy`] is behaving correctly as well. + assert!(EnvFilter::builder().parse("").is_ok()); + } } diff --git a/foundations/src/telementry/logging.rs b/foundations/src/telementry/logging.rs index f4f38490..b05ba27b 100644 --- a/foundations/src/telementry/logging.rs +++ b/foundations/src/telementry/logging.rs @@ -1,17 +1,17 @@ use tracing_subscriber::fmt::time::{ChronoLocal, ChronoUtc, FormatTime}; pub enum TimeFormatter { - Local(ChronoLocal), - Utc(ChronoUtc), - None, + Local(ChronoLocal), + Utc(ChronoUtc), + None, } impl FormatTime for TimeFormatter { - fn format_time(&self, w: &mut tracing_subscriber::fmt::format::Writer<'_>) -> std::fmt::Result { - match self { - TimeFormatter::Local(formatter) => formatter.format_time(w), - TimeFormatter::Utc(formatter) => formatter.format_time(w), - TimeFormatter::None => ().format_time(w), - } - } + fn format_time(&self, w: &mut tracing_subscriber::fmt::format::Writer<'_>) -> std::fmt::Result { + match self { + TimeFormatter::Local(formatter) => formatter.format_time(w), + TimeFormatter::Utc(formatter) => formatter.format_time(w), + TimeFormatter::None => ().format_time(w), + } + } } diff --git a/foundations/src/telementry/metrics/mod.rs b/foundations/src/telementry/metrics/mod.rs index bf23a4de..49ea2981 100644 --- a/foundations/src/telementry/metrics/mod.rs +++ b/foundations/src/telementry/metrics/mod.rs @@ -1,7 +1,6 @@ use std::collections::HashMap; pub use prometheus_client; - #[cfg(all(feature = "macros", feature = "metrics"))] pub use scuffle_foundations_macros::metrics; @@ -12,48 +11,44 @@ pub mod registries; pub mod serde; pub fn init(service_info: crate::ServiceInfo, labels: &HashMap) { - registries::Registries::init(service_info, 
labels) + registries::Registries::init(service_info, labels) } pub fn collect(collect_optional: bool) -> anyhow::Result { - let mut buffer = String::new(); - registries::Registries::collect(&mut buffer, collect_optional)?; - Ok(buffer) + let mut buffer = String::new(); + registries::Registries::collect(&mut buffer, collect_optional)?; + Ok(buffer) } pub trait MetricBuilder { - fn build(&self) -> M; + fn build(&self) -> M; } #[derive(Debug, Clone, Copy)] pub struct HistogramBuilder { - pub buckets: [f64; N], + pub buckets: [f64; N], } impl Default for HistogramBuilder<11> { - fn default() -> Self { - Self { - buckets: [ - 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0, - ], - } - } + fn default() -> Self { + Self { + buckets: [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1.0, 2.5, 5.0, 10.0], + } + } } -impl MetricBuilder - for HistogramBuilder -{ - fn build(&self) -> prometheus_client::metrics::histogram::Histogram { - prometheus_client::metrics::histogram::Histogram::new(self.buckets.iter().copied()) - } +impl MetricBuilder for HistogramBuilder { + fn build(&self) -> prometheus_client::metrics::histogram::Histogram { + prometheus_client::metrics::histogram::Histogram::new(self.buckets.iter().copied()) + } } impl MetricBuilder for F where - F: Fn() -> M, - M: prometheus_client::metrics::TypedMetric, + F: Fn() -> M, + M: prometheus_client::metrics::TypedMetric, { - fn build(&self) -> M { - self() - } + fn build(&self) -> M { + self() + } } diff --git a/foundations/src/telementry/metrics/registries.rs b/foundations/src/telementry/metrics/registries.rs index fee8b1d3..b0ada5b0 100644 --- a/foundations/src/telementry/metrics/registries.rs +++ b/foundations/src/telementry/metrics/registries.rs @@ -1,4 +1,6 @@ -use std::{borrow::Cow, collections::HashMap, ops::DerefMut}; +use std::borrow::Cow; +use std::collections::HashMap; +use std::ops::DerefMut; use anyhow::Context; use once_cell::sync::OnceCell; @@ -9,93 +11,82 @@ use crate::ServiceInfo; #[doc(hidden)] 
pub struct Registries { - main: RwLock, - optional: RwLock, + main: RwLock, + optional: RwLock, } static REGISTRIES: OnceCell = OnceCell::new(); impl Registries { - pub(super) fn init(service_info: ServiceInfo, labels: &HashMap) { - REGISTRIES.get_or_init(|| Registries { - main: new_registry( - service_info.metric_name, - labels - .iter() - .map(|(k, v)| (k.clone().into(), v.clone().into())), - ), - optional: new_registry( - service_info.metric_name, - labels - .iter() - .map(|(k, v)| (k.clone().into(), v.clone().into())), - ), - }); - } - - pub(super) fn collect(buffer: &mut String, collect_optional: bool) -> anyhow::Result<()> { - let registries = Self::get(); - - if collect_optional { - encode_registry(®istries.optional.read(), buffer)?; - } - - encode_registry(®istries.main.read(), buffer)?; - - Ok(()) - } - - pub fn get_main_sub_registry(name: &str) -> impl DerefMut { - let registries = Self::get(); - get_subsystem(registries.main.write(), name) - } - - pub fn get_optional_sub_registry(name: &str) -> impl DerefMut { - let registries = Self::get(); - get_subsystem(registries.optional.write(), name) - } - - pub(super) fn get() -> &'static Registries { - REGISTRIES.get_or_init(|| Registries { - main: new_registry("", []), - optional: new_registry("", []), - }) - } + pub(super) fn init(service_info: ServiceInfo, labels: &HashMap) { + REGISTRIES.get_or_init(|| Registries { + main: new_registry( + service_info.metric_name, + labels.iter().map(|(k, v)| (k.clone().into(), v.clone().into())), + ), + optional: new_registry( + service_info.metric_name, + labels.iter().map(|(k, v)| (k.clone().into(), v.clone().into())), + ), + }); + } + + pub(super) fn collect(buffer: &mut String, collect_optional: bool) -> anyhow::Result<()> { + let registries = Self::get(); + + if collect_optional { + encode_registry(®istries.optional.read(), buffer)?; + } + + encode_registry(®istries.main.read(), buffer)?; + + Ok(()) + } + + pub fn get_main_sub_registry(name: &str) -> impl DerefMut { + 
let registries = Self::get(); + get_subsystem(registries.main.write(), name) + } + + pub fn get_optional_sub_registry(name: &str) -> impl DerefMut { + let registries = Self::get(); + get_subsystem(registries.optional.write(), name) + } + + pub(super) fn get() -> &'static Registries { + REGISTRIES.get_or_init(|| Registries { + main: new_registry("", []), + optional: new_registry("", []), + }) + } } -fn new_registry( - name: &str, - labels: impl IntoIterator, Cow<'static, str>)>, -) -> RwLock { - RwLock::new({ - if name.is_empty() { - Registry::with_labels(labels.into_iter()) - } else { - Registry::with_prefix_and_labels(name, labels.into_iter()) - } - }) +fn new_registry(name: &str, labels: impl IntoIterator, Cow<'static, str>)>) -> RwLock { + RwLock::new({ + if name.is_empty() { + Registry::with_labels(labels.into_iter()) + } else { + Registry::with_prefix_and_labels(name, labels.into_iter()) + } + }) } -fn get_subsystem<'a>( - registry: RwLockWriteGuard<'a, Registry>, - subsystem: &str, -) -> impl DerefMut + 'a { - RwLockWriteGuard::map(registry, |registry| { - if subsystem.is_empty() { - registry - } else { - registry.sub_registry_with_prefix(subsystem) - } - }) +fn get_subsystem<'a>(registry: RwLockWriteGuard<'a, Registry>, subsystem: &str) -> impl DerefMut + 'a { + RwLockWriteGuard::map(registry, |registry| { + if subsystem.is_empty() { + registry + } else { + registry.sub_registry_with_prefix(subsystem) + } + }) } fn encode_registry(registry: &Registry, buffer: &mut String) -> anyhow::Result<()> { - prometheus_client::encoding::text::encode(buffer, registry) - .context("failed to encode registry")?; + prometheus_client::encoding::text::encode(buffer, registry).context("failed to encode registry")?; - if buffer.ends_with("# EOF\n") { - buffer.truncate(buffer.len() - "# EOF\n".len()); - } + if buffer.ends_with("# EOF\n") { + buffer.truncate(buffer.len() - "# EOF\n".len()); + } - Ok(()) + Ok(()) } diff --git a/foundations/src/telementry/metrics/serde/mod.rs 
b/foundations/src/telementry/metrics/serde/mod.rs index 79419841..cfac75f1 100644 --- a/foundations/src/telementry/metrics/serde/mod.rs +++ b/foundations/src/telementry/metrics/serde/mod.rs @@ -1,40 +1,38 @@ //! Serde bridge. +use std::fmt; +use std::hash::Hash; + use parking_lot::MappedRwLockReadGuard; -use prometheus_client::{ - encoding::{EncodeLabelSet, EncodeMetric, LabelSetEncoder, MetricEncoder}, - metrics::{ - family::{Family as InnerFamily, MetricConstructor}, - MetricType, TypedMetric, - }, -}; +use prometheus_client::encoding::{EncodeLabelSet, EncodeMetric, LabelSetEncoder, MetricEncoder}; +use prometheus_client::metrics::family::{Family as InnerFamily, MetricConstructor}; +use prometheus_client::metrics::{MetricType, TypedMetric}; use serde::ser::Serialize; -use std::{fmt, hash::Hash}; mod top; mod value; #[derive(Debug)] enum Error { - Unexpected(String), - Fmt(std::fmt::Error), + Unexpected(String), + Fmt(std::fmt::Error), } impl std::fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Self::Unexpected(msg) => write!(f, "unexpected error: {}", msg), - Self::Fmt(_) => write!(f, "formatting error"), - } - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Unexpected(msg) => write!(f, "unexpected error: {}", msg), + Self::Fmt(_) => write!(f, "formatting error"), + } + } } impl std::error::Error for Error {} impl serde::ser::Error for Error { - fn custom(msg: T) -> Self { - Self::Unexpected(msg.to_string()) - } + fn custom(msg: T) -> Self { + Self::Unexpected(msg.to_string()) + } } /// A wrapper around [`prometheus_client::metrics::family::Family`] which @@ -99,73 +97,73 @@ impl serde::ser::Error for Error { /// ``` #[derive(Debug)] pub struct Family M> { - inner: InnerFamily, M, C>, + inner: InnerFamily, M, C>, } impl Family where - S: Clone + Eq + Hash, + S: Clone + Eq + Hash, { - pub fn new_with_constructor(constructor: C) -> Self { - Self { - inner: 
InnerFamily::new_with_constructor(constructor), - } - } + pub fn new_with_constructor(constructor: C) -> Self { + Self { + inner: InnerFamily::new_with_constructor(constructor), + } + } } impl Default for Family where - S: Clone + Eq + Hash, - M: Default, + S: Clone + Eq + Hash, + M: Default, { - fn default() -> Self { - Self { - inner: Default::default(), - } - } + fn default() -> Self { + Self { + inner: Default::default(), + } + } } impl Family where - S: Clone + Eq + Hash, - C: MetricConstructor, + S: Clone + Eq + Hash, + C: MetricConstructor, { - pub fn get_or_create(&self, label_set: &S) -> MappedRwLockReadGuard { - self.inner.get_or_create(Bridge::from_ref(label_set)) - } + pub fn get_or_create(&self, label_set: &S) -> MappedRwLockReadGuard { + self.inner.get_or_create(Bridge::from_ref(label_set)) + } } impl EncodeMetric for Family where - S: Clone + Eq + Hash + Serialize, - M: EncodeMetric + TypedMetric, - C: MetricConstructor, + S: Clone + Eq + Hash + Serialize, + M: EncodeMetric + TypedMetric, + C: MetricConstructor, { - fn encode(&self, encoder: MetricEncoder) -> fmt::Result { - self.inner.encode(encoder) - } + fn encode(&self, encoder: MetricEncoder) -> fmt::Result { + self.inner.encode(encoder) + } - fn metric_type(&self) -> MetricType { - M::TYPE - } + fn metric_type(&self) -> MetricType { + M::TYPE + } } impl TypedMetric for Family where - M: TypedMetric, + M: TypedMetric, { - const TYPE: MetricType = ::TYPE; + const TYPE: MetricType = ::TYPE; } impl Clone for Family where - C: Clone, + C: Clone, { - fn clone(&self) -> Self { - Self { - inner: self.inner.clone(), - } - } + fn clone(&self) -> Self { + Self { + inner: self.inner.clone(), + } + } } #[derive(Clone, Eq, Hash, PartialEq)] @@ -173,29 +171,29 @@ where struct Bridge(S); impl Bridge { - fn from_ref(label_set: &S) -> &Self { - // SAFETY: `Self` is a transparent newtype wrapper. 
- unsafe { &*(label_set as *const S as *const Bridge) } - } + fn from_ref(label_set: &S) -> &Self { + // SAFETY: `Self` is a transparent newtype wrapper. + unsafe { &*(label_set as *const S as *const Bridge) } + } } impl EncodeLabelSet for Bridge where - S: Serialize, + S: Serialize, { - fn encode(&self, encoder: LabelSetEncoder) -> fmt::Result { - self.0.serialize(top::serializer(encoder)).map_err(|err| { - tracing::error!("failed to serialize labels: {}", err); - fmt::Error - }) - } + fn encode(&self, encoder: LabelSetEncoder) -> fmt::Result { + self.0.serialize(top::serializer(encoder)).map_err(|err| { + tracing::error!("failed to serialize labels: {}", err); + fmt::Error + }) + } } impl fmt::Debug for Bridge where - S: fmt::Debug, + S: fmt::Debug, { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } } diff --git a/foundations/src/telementry/metrics/serde/top.rs b/foundations/src/telementry/metrics/serde/top.rs index 40e465be..310f1133 100644 --- a/foundations/src/telementry/metrics/serde/top.rs +++ b/foundations/src/telementry/metrics/serde/top.rs @@ -1,14 +1,15 @@ -use super::value; use prometheus_client::encoding::{EncodeLabelKey, LabelSetEncoder}; use serde::ser::{Impossible, Serialize, SerializeStruct, Serializer}; +use super::value; + #[inline] pub(super) fn serializer(writer: LabelSetEncoder<'_>) -> TopSerializer<'_> { - TopSerializer { writer } + TopSerializer { writer } } pub(super) struct TopSerializer<'w> { - writer: LabelSetEncoder<'w>, + writer: LabelSetEncoder<'w>, } macro_rules! unsupported_scalars { @@ -21,172 +22,151 @@ macro_rules! 
unsupported_scalars { } impl<'w> Serializer for TopSerializer<'w> { - type Ok = (); - type Error = super::Error; - type SerializeSeq = Impossible; - type SerializeTuple = Impossible; - type SerializeTupleStruct = Impossible; - type SerializeTupleVariant = Impossible; - type SerializeMap = Impossible; - type SerializeStruct = StructSerializer<'w>; - type SerializeStructVariant = Impossible; - - unsupported_scalars! { - serialize_bool: bool, - serialize_i8: i8, - serialize_i16: i16, - serialize_i32: i32, - serialize_i64: i64, - serialize_u8: u8, - serialize_u16: u16, - serialize_u32: u32, - serialize_u64: u64, - serialize_f32: f32, - serialize_f64: f64, - serialize_char: char, - serialize_str: &str, - serialize_bytes: &[u8], - } - - #[inline] - fn serialize_unit(self) -> Result<(), Self::Error> { - Ok(()) - } - - #[inline] - fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> { - Ok(()) - } - - #[inline] - fn serialize_unit_variant( - self, - ty: &'static str, - _index: u32, - name: &'static str, - ) -> Result<(), Self::Error> { - Err(Self::Error::Unexpected(format!( - "unit variant: {ty}::{name}" - ))) - } - - #[inline] - fn serialize_newtype_struct(self, _name: &'static str, value: &T) -> Result<(), Self::Error> - where - T: ?Sized + Serialize, - { - value.serialize(self) - } - - #[inline] - fn serialize_newtype_variant( - self, - ty: &'static str, - _index: u32, - name: &'static str, - _value: &T, - ) -> Result<(), Self::Error> - where - T: ?Sized + Serialize, - { - Err(Self::Error::Unexpected(format!( - "newtype variant: {ty}::{name}" - ))) - } - - #[inline] - fn serialize_none(self) -> Result<(), Self::Error> { - Ok(()) - } - - #[inline] - fn serialize_some(self, value: &T) -> Result<(), Self::Error> - where - T: ?Sized + Serialize, - { - value.serialize(self) - } - - #[inline] - fn serialize_seq(self, len: Option) -> Result { - Err(Self::Error::Unexpected(format!("sequence: {:?}", len))) - } - - #[inline] - fn serialize_tuple(self, 
len: usize) -> Result { - Err(Self::Error::Unexpected(format!("tuple: {:?}", len))) - } - - #[inline] - fn serialize_tuple_struct( - self, - ty: &'static str, - _len: usize, - ) -> Result { - Err(Self::Error::Unexpected(format!("tuple struct: {ty}"))) - } - - #[inline] - fn serialize_tuple_variant( - self, - ty: &'static str, - _index: u32, - name: &'static str, - _len: usize, - ) -> Result { - Err(Self::Error::Unexpected(format!( - "tuple variant: {ty}::{name}" - ))) - } - - #[inline] - fn serialize_map(self, len: Option) -> Result { - Err(Self::Error::Unexpected(format!("map: {:?}", len))) - } - - #[inline] - fn serialize_struct( - self, - _ty: &'static str, - _len: usize, - ) -> Result { - Ok(StructSerializer(self.writer)) - } - - #[inline] - fn serialize_struct_variant( - self, - ty: &'static str, - _index: u32, - name: &'static str, - _len: usize, - ) -> Result { - Err(Self::Error::Unexpected(format!( - "struct variant: {ty}::{name}" - ))) - } + type Error = super::Error; + type Ok = (); + type SerializeMap = Impossible; + type SerializeSeq = Impossible; + type SerializeStruct = StructSerializer<'w>; + type SerializeStructVariant = Impossible; + type SerializeTuple = Impossible; + type SerializeTupleStruct = Impossible; + type SerializeTupleVariant = Impossible; + + unsupported_scalars! 
{ + serialize_bool: bool, + serialize_i8: i8, + serialize_i16: i16, + serialize_i32: i32, + serialize_i64: i64, + serialize_u8: u8, + serialize_u16: u16, + serialize_u32: u32, + serialize_u64: u64, + serialize_f32: f32, + serialize_f64: f64, + serialize_char: char, + serialize_str: &str, + serialize_bytes: &[u8], + } + + #[inline] + fn serialize_unit(self) -> Result<(), Self::Error> { + Ok(()) + } + + #[inline] + fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> { + Ok(()) + } + + #[inline] + fn serialize_unit_variant(self, ty: &'static str, _index: u32, name: &'static str) -> Result<(), Self::Error> { + Err(Self::Error::Unexpected(format!("unit variant: {ty}::{name}"))) + } + + #[inline] + fn serialize_newtype_struct(self, _name: &'static str, value: &T) -> Result<(), Self::Error> + where + T: ?Sized + Serialize, + { + value.serialize(self) + } + + #[inline] + fn serialize_newtype_variant( + self, + ty: &'static str, + _index: u32, + name: &'static str, + _value: &T, + ) -> Result<(), Self::Error> + where + T: ?Sized + Serialize, + { + Err(Self::Error::Unexpected(format!("newtype variant: {ty}::{name}"))) + } + + #[inline] + fn serialize_none(self) -> Result<(), Self::Error> { + Ok(()) + } + + #[inline] + fn serialize_some(self, value: &T) -> Result<(), Self::Error> + where + T: ?Sized + Serialize, + { + value.serialize(self) + } + + #[inline] + fn serialize_seq(self, len: Option) -> Result { + Err(Self::Error::Unexpected(format!("sequence: {:?}", len))) + } + + #[inline] + fn serialize_tuple(self, len: usize) -> Result { + Err(Self::Error::Unexpected(format!("tuple: {:?}", len))) + } + + #[inline] + fn serialize_tuple_struct(self, ty: &'static str, _len: usize) -> Result { + Err(Self::Error::Unexpected(format!("tuple struct: {ty}"))) + } + + #[inline] + fn serialize_tuple_variant( + self, + ty: &'static str, + _index: u32, + name: &'static str, + _len: usize, + ) -> Result { + Err(Self::Error::Unexpected(format!("tuple variant: 
{ty}::{name}"))) + } + + #[inline] + fn serialize_map(self, len: Option) -> Result { + Err(Self::Error::Unexpected(format!("map: {:?}", len))) + } + + #[inline] + fn serialize_struct(self, _ty: &'static str, _len: usize) -> Result { + Ok(StructSerializer(self.writer)) + } + + #[inline] + fn serialize_struct_variant( + self, + ty: &'static str, + _index: u32, + name: &'static str, + _len: usize, + ) -> Result { + Err(Self::Error::Unexpected(format!("struct variant: {ty}::{name}"))) + } } pub(super) struct StructSerializer<'w>(LabelSetEncoder<'w>); impl SerializeStruct for StructSerializer<'_> { - type Ok = (); - type Error = super::Error; - - fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error> - where - T: ?Sized + Serialize, - { - let mut encoder = self.0.encode_label(); - let mut label_enc = encoder.encode_label_key().map_err(super::Error::Fmt)?; - key.encode(&mut label_enc).map_err(super::Error::Fmt)?; - let value_enc = label_enc.encode_label_value().map_err(super::Error::Fmt)?; - value.serialize(value::serializer(value_enc))?; - - Ok(()) - } - - fn end(self) -> Result { - Ok(()) - } + type Error = super::Error; + type Ok = (); + + fn serialize_field(&mut self, key: &'static str, value: &T) -> Result<(), Self::Error> + where + T: ?Sized + Serialize, + { + let mut encoder = self.0.encode_label(); + let mut label_enc = encoder.encode_label_key().map_err(super::Error::Fmt)?; + key.encode(&mut label_enc).map_err(super::Error::Fmt)?; + let value_enc = label_enc.encode_label_value().map_err(super::Error::Fmt)?; + value.serialize(value::serializer(value_enc))?; + + Ok(()) + } + + fn end(self) -> Result { + Ok(()) + } } diff --git a/foundations/src/telementry/metrics/serde/value.rs b/foundations/src/telementry/metrics/serde/value.rs index f985f42d..8a196363 100644 --- a/foundations/src/telementry/metrics/serde/value.rs +++ b/foundations/src/telementry/metrics/serde/value.rs @@ -1,16 +1,15 @@ +use std::{fmt, str}; + use 
prometheus_client::encoding::{EncodeLabelValue, LabelValueEncoder}; use serde::ser::{Impossible, Serialize, Serializer}; -use std::{fmt, str}; #[inline] -pub(super) fn serializer( - writer: LabelValueEncoder<'_>, -) -> impl Serializer + '_ { - ValueSerializer { writer } +pub(super) fn serializer(writer: LabelValueEncoder<'_>) -> impl Serializer + '_ { + ValueSerializer { writer } } struct ValueSerializer<'w> { - writer: LabelValueEncoder<'w>, + writer: LabelValueEncoder<'w>, } macro_rules! delegate { @@ -23,203 +22,169 @@ macro_rules! delegate { } impl Serializer for ValueSerializer<'_> { - type Ok = (); - type Error = super::Error; - type SerializeSeq = Impossible; - type SerializeTuple = Impossible; - type SerializeTupleStruct = Impossible; - type SerializeTupleVariant = Impossible; - type SerializeMap = Impossible; - type SerializeStruct = Impossible; - type SerializeStructVariant = Impossible; - - fn serialize_bool(mut self, v: bool) -> Result { - if v { - "true".encode(&mut self.writer).map_err(Self::Error::Fmt)?; - } else { - "false".encode(&mut self.writer).map_err(Self::Error::Fmt)?; - } - - self.writer.finish().map_err(Self::Error::Fmt)?; - - Ok(()) - } - - delegate! 
{ - serialize_i8: i8, - serialize_i16: i16, - serialize_i32: i32, - serialize_i64: i64, - serialize_u8: u8, - serialize_u16: u16, - serialize_u32: u32, - serialize_u64: u64, - serialize_u128: u128, - serialize_i128: i128, - serialize_f64: f64, - } - - fn serialize_f32(mut self, v: f32) -> Result { - (v as f64) - .encode(&mut self.writer) - .map_err(Self::Error::Fmt)?; - - self.writer.finish().map_err(Self::Error::Fmt)?; - - Ok(()) - } - - fn serialize_char(mut self, v: char) -> Result { - format!("{v}") - .encode(&mut self.writer) - .map_err(Self::Error::Fmt)?; - - self.writer.finish().map_err(Self::Error::Fmt)?; - - Ok(()) - } - - fn serialize_str(mut self, value: &str) -> Result { - value.encode(&mut self.writer).map_err(Self::Error::Fmt)?; - - self.writer.finish().map_err(Self::Error::Fmt)?; - - Ok(()) - } - - fn serialize_bytes(self, _value: &[u8]) -> Result { - Err(Self::Error::Unexpected("bytes".to_string())) - } - - fn serialize_unit(mut self) -> Result { - None:: - .encode(&mut self.writer) - .map_err(Self::Error::Fmt)?; - self.writer.finish().map_err(Self::Error::Fmt)?; - Ok(()) - } - - fn serialize_unit_struct(mut self, _ty: &'static str) -> Result { - None:: - .encode(&mut self.writer) - .map_err(Self::Error::Fmt)?; - self.writer.finish().map_err(Self::Error::Fmt)?; - Ok(()) - } - - fn serialize_unit_variant( - self, - _ty: &'static str, - _index: u32, - name: &'static str, - ) -> Result { - self.serialize_str(name) - } - - fn serialize_newtype_struct( - self, - _ty: &'static str, - value: &T, - ) -> Result - where - T: ?Sized + Serialize, - { - value.serialize(self) - } - - fn serialize_newtype_variant( - self, - ty: &'static str, - _index: u32, - name: &'static str, - _value: &T, - ) -> Result - where - T: ?Sized + Serialize, - { - Err(Self::Error::Unexpected(format!( - "newtype variant: {ty}::{name}" - ))) - } - - fn serialize_none(self) -> Result { - Ok(()) - } - - fn serialize_some(self, value: &T) -> Result - where - T: ?Sized + Serialize, - { - 
value.serialize(self) - } - - fn serialize_seq(self, len: Option) -> Result { - Err(Self::Error::Unexpected(format!("seq: {:?}", len))) - } - - fn serialize_tuple(self, len: usize) -> Result { - Err(Self::Error::Unexpected(format!("tuple: {:?}", len))) - } - - fn serialize_tuple_struct( - self, - ty: &'static str, - _len: usize, - ) -> Result { - Err(Self::Error::Unexpected(format!("tuple struct: {ty}"))) - } - - fn serialize_tuple_variant( - self, - ty: &'static str, - _index: u32, - name: &'static str, - _len: usize, - ) -> Result { - Err(Self::Error::Unexpected(format!( - "tuple variant: {ty}::{name}" - ))) - } - - fn serialize_map(self, len: Option) -> Result { - Err(Self::Error::Unexpected(format!("map: {:?}", len))) - } - - fn serialize_struct( - self, - ty: &'static str, - _len: usize, - ) -> Result { - Err(Self::Error::Unexpected(format!("struct: {ty}"))) - } - - fn serialize_struct_variant( - self, - ty: &'static str, - _index: u32, - name: &'static str, - _len: usize, - ) -> Result { - Err(Self::Error::Unexpected(format!( - "struct variant: {ty}::{name}" - ))) - } - - fn collect_str(mut self, value: &T) -> Result - where - T: ?Sized + fmt::Display, - { - value - .to_string() - .encode(&mut self.writer) - .map_err(Self::Error::Fmt)?; - - self.writer.finish().map_err(Self::Error::Fmt)?; - - Ok(()) - } - - fn is_human_readable(&self) -> bool { - true - } + type Error = super::Error; + type Ok = (); + type SerializeMap = Impossible; + type SerializeSeq = Impossible; + type SerializeStruct = Impossible; + type SerializeStructVariant = Impossible; + type SerializeTuple = Impossible; + type SerializeTupleStruct = Impossible; + type SerializeTupleVariant = Impossible; + + delegate! 
{ + serialize_i8: i8, + serialize_i16: i16, + serialize_i32: i32, + serialize_i64: i64, + serialize_u8: u8, + serialize_u16: u16, + serialize_u32: u32, + serialize_u64: u64, + serialize_u128: u128, + serialize_i128: i128, + serialize_f64: f64, + } + + fn serialize_bool(mut self, v: bool) -> Result { + if v { + "true".encode(&mut self.writer).map_err(Self::Error::Fmt)?; + } else { + "false".encode(&mut self.writer).map_err(Self::Error::Fmt)?; + } + + self.writer.finish().map_err(Self::Error::Fmt)?; + + Ok(()) + } + + fn serialize_f32(mut self, v: f32) -> Result { + (v as f64).encode(&mut self.writer).map_err(Self::Error::Fmt)?; + + self.writer.finish().map_err(Self::Error::Fmt)?; + + Ok(()) + } + + fn serialize_char(mut self, v: char) -> Result { + format!("{v}").encode(&mut self.writer).map_err(Self::Error::Fmt)?; + + self.writer.finish().map_err(Self::Error::Fmt)?; + + Ok(()) + } + + fn serialize_str(mut self, value: &str) -> Result { + value.encode(&mut self.writer).map_err(Self::Error::Fmt)?; + + self.writer.finish().map_err(Self::Error::Fmt)?; + + Ok(()) + } + + fn serialize_bytes(self, _value: &[u8]) -> Result { + Err(Self::Error::Unexpected("bytes".to_string())) + } + + fn serialize_unit(mut self) -> Result { + None::.encode(&mut self.writer).map_err(Self::Error::Fmt)?; + self.writer.finish().map_err(Self::Error::Fmt)?; + Ok(()) + } + + fn serialize_unit_struct(mut self, _ty: &'static str) -> Result { + None::.encode(&mut self.writer).map_err(Self::Error::Fmt)?; + self.writer.finish().map_err(Self::Error::Fmt)?; + Ok(()) + } + + fn serialize_unit_variant(self, _ty: &'static str, _index: u32, name: &'static str) -> Result { + self.serialize_str(name) + } + + fn serialize_newtype_struct(self, _ty: &'static str, value: &T) -> Result + where + T: ?Sized + Serialize, + { + value.serialize(self) + } + + fn serialize_newtype_variant( + self, + ty: &'static str, + _index: u32, + name: &'static str, + _value: &T, + ) -> Result + where + T: ?Sized + Serialize, + { + 
Err(Self::Error::Unexpected(format!("newtype variant: {ty}::{name}"))) + } + + fn serialize_none(self) -> Result { + Ok(()) + } + + fn serialize_some(self, value: &T) -> Result + where + T: ?Sized + Serialize, + { + value.serialize(self) + } + + fn serialize_seq(self, len: Option) -> Result { + Err(Self::Error::Unexpected(format!("seq: {:?}", len))) + } + + fn serialize_tuple(self, len: usize) -> Result { + Err(Self::Error::Unexpected(format!("tuple: {:?}", len))) + } + + fn serialize_tuple_struct(self, ty: &'static str, _len: usize) -> Result { + Err(Self::Error::Unexpected(format!("tuple struct: {ty}"))) + } + + fn serialize_tuple_variant( + self, + ty: &'static str, + _index: u32, + name: &'static str, + _len: usize, + ) -> Result { + Err(Self::Error::Unexpected(format!("tuple variant: {ty}::{name}"))) + } + + fn serialize_map(self, len: Option) -> Result { + Err(Self::Error::Unexpected(format!("map: {:?}", len))) + } + + fn serialize_struct(self, ty: &'static str, _len: usize) -> Result { + Err(Self::Error::Unexpected(format!("struct: {ty}"))) + } + + fn serialize_struct_variant( + self, + ty: &'static str, + _index: u32, + name: &'static str, + _len: usize, + ) -> Result { + Err(Self::Error::Unexpected(format!("struct variant: {ty}::{name}"))) + } + + fn collect_str(mut self, value: &T) -> Result + where + T: ?Sized + fmt::Display, + { + value.to_string().encode(&mut self.writer).map_err(Self::Error::Fmt)?; + + self.writer.finish().map_err(Self::Error::Fmt)?; + + Ok(()) + } + + fn is_human_readable(&self) -> bool { + true + } } diff --git a/foundations/src/telementry/mod.rs b/foundations/src/telementry/mod.rs index 32a17e94..8dc95bd7 100644 --- a/foundations/src/telementry/mod.rs +++ b/foundations/src/telementry/mod.rs @@ -32,33 +32,33 @@ type Underlying = crate::telementry::EnvFilter; pub struct LevelFilter(Underlying); impl LevelFilter { - #[cfg(not(feature = "env-filter"))] - pub fn new(level: &str) -> Self { - match level.to_lowercase().as_str() { - 
"trace" => Self(Underlying::from(tracing::Level::TRACE)), - "debug" => Self(Underlying::from(tracing::Level::DEBUG)), - "info" => Self(Underlying::from(tracing::Level::INFO)), - "warn" => Self(Underlying::from(tracing::Level::WARN)), - "error" => Self(Underlying::from(tracing::Level::ERROR)), - _ => { - eprintln!("log level '{level}' is not recognized, defaulting to 'info'"); - Self(Underlying::from(tracing::Level::INFO)) - } - } - } + #[cfg(not(feature = "env-filter"))] + pub fn new(level: &str) -> Self { + match level.to_lowercase().as_str() { + "trace" => Self(Underlying::from(tracing::Level::TRACE)), + "debug" => Self(Underlying::from(tracing::Level::DEBUG)), + "info" => Self(Underlying::from(tracing::Level::INFO)), + "warn" => Self(Underlying::from(tracing::Level::WARN)), + "error" => Self(Underlying::from(tracing::Level::ERROR)), + _ => { + eprintln!("log level '{level}' is not recognized, defaulting to 'info'"); + Self(Underlying::from(tracing::Level::INFO)) + } + } + } - #[cfg(feature = "env-filter")] - pub fn new(level: &str) -> Self { - Self(Underlying::new(level)) - } + #[cfg(feature = "env-filter")] + pub fn new(level: &str) -> Self { + Self(Underlying::new(level)) + } - pub fn filter(self) -> Underlying { - self.0 - } + pub fn filter(self) -> Underlying { + self.0 + } } impl Default for LevelFilter { - fn default() -> Self { - Self::new("info") - } + fn default() -> Self { + Self::new("info") + } } diff --git a/foundations/src/telementry/opentelemetry/exporter.rs b/foundations/src/telementry/opentelemetry/exporter.rs index e0ca8773..3f9ff48f 100644 --- a/foundations/src/telementry/opentelemetry/exporter.rs +++ b/foundations/src/telementry/opentelemetry/exporter.rs @@ -6,310 +6,294 @@ use opentelemetry_otlp::SpanExporter; use opentelemetry_sdk::Resource; use thread_local::ThreadLocal; use tokio::sync::{Mutex, OwnedSemaphorePermit}; - -#[cfg(feature = "runtime")] -use crate::runtime::spawn; #[cfg(not(feature = "runtime"))] use tokio::task::spawn; -use 
super::{layer::SpanHolder, node::SpanNode}; +use super::layer::SpanHolder; +use super::node::SpanNode; +#[cfg(feature = "runtime")] +use crate::runtime::spawn; #[cfg(feature = "metrics")] #[crate::telementry::metrics::metrics(crate_path = "crate")] mod opentelementry { - use prometheus_client::metrics::counter::Counter; - - #[derive(serde::Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] - #[serde(rename_all = "snake_case")] - pub enum SpanDroppedReason { - ExportFailed, - ExportTimeout, - ThreadBackpressure, - PendingExportBackpressure, - } - - pub fn spans_exported() -> Counter; - pub fn spans_dropped(reason: SpanDroppedReason) -> Counter; + use prometheus_client::metrics::counter::Counter; + + #[derive(serde::Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] + #[serde(rename_all = "snake_case")] + pub enum SpanDroppedReason { + ExportFailed, + ExportTimeout, + ThreadBackpressure, + PendingExportBackpressure, + } + + pub fn spans_exported() -> Counter; + pub fn spans_dropped(reason: SpanDroppedReason) -> Counter; } pub struct BatchExporter { - pub interval: tokio::time::Duration, - pub resource: Resource, - pub batch_size: usize, - pub max_concurrent_exports: usize, - pub max_pending_exports: usize, - #[cfg(feature = "metrics")] - pub metrics: bool, - pub error_handler: Box, - pub drop_handler: Box, - pub export_handler: Box, + pub interval: tokio::time::Duration, + pub resource: Resource, + pub batch_size: usize, + pub max_concurrent_exports: usize, + pub max_pending_exports: usize, + #[cfg(feature = "metrics")] + pub metrics: bool, + pub error_handler: Box, + pub drop_handler: Box, + pub export_handler: Box, } impl BatchExporter { - pub fn with_error_handler(&mut self, handler: F) -> &mut Self - where - F: Fn(TraceError, usize) + Send + Sync + 'static, - { - self.error_handler = Box::new(handler); - self - } - - pub fn with_drop_handler(&mut self, handler: F) -> &mut Self - where - F: Fn(usize) + Send + Sync + 'static, - { - self.drop_handler = 
Box::new(handler); - self - } - - pub fn with_export_handler(&mut self, handler: F) -> &mut Self - where - F: Fn(usize) + Send + Sync + 'static, - { - self.export_handler = Box::new(handler); - self - } - - pub fn with_interval(&mut self, interval: tokio::time::Duration) -> &mut Self { - self.interval = interval; - self - } - - pub fn with_resource(&mut self, resource: Resource) -> &mut Self { - self.resource = resource; - self - } - - pub fn with_batch_size(&mut self, batch_size: usize) -> &mut Self { - self.batch_size = batch_size; - self - } - - pub fn with_max_concurrent_exports(&mut self, max_concurrent_exports: usize) -> &mut Self { - self.max_concurrent_exports = max_concurrent_exports; - self - } - - pub fn with_max_pending_exports(&mut self, max_pending_exports: usize) -> &mut Self { - self.max_pending_exports = max_pending_exports; - self - } - - pub fn with_service_info(&mut self, info: crate::ServiceInfo) -> &mut Self { - self.resource.merge(&Resource::new(vec![ - opentelemetry::KeyValue::new("service.name", info.metric_name), - opentelemetry::KeyValue::new("service.version", info.version), - ])); - - self - } - - pub fn build(&mut self) -> Self { - std::mem::take(self) - } + pub fn with_error_handler(&mut self, handler: F) -> &mut Self + where + F: Fn(TraceError, usize) + Send + Sync + 'static, + { + self.error_handler = Box::new(handler); + self + } + + pub fn with_drop_handler(&mut self, handler: F) -> &mut Self + where + F: Fn(usize) + Send + Sync + 'static, + { + self.drop_handler = Box::new(handler); + self + } + + pub fn with_export_handler(&mut self, handler: F) -> &mut Self + where + F: Fn(usize) + Send + Sync + 'static, + { + self.export_handler = Box::new(handler); + self + } + + pub fn with_interval(&mut self, interval: tokio::time::Duration) -> &mut Self { + self.interval = interval; + self + } + + pub fn with_resource(&mut self, resource: Resource) -> &mut Self { + self.resource = resource; + self + } + + pub fn with_batch_size(&mut self, 
batch_size: usize) -> &mut Self { + self.batch_size = batch_size; + self + } + + pub fn with_max_concurrent_exports(&mut self, max_concurrent_exports: usize) -> &mut Self { + self.max_concurrent_exports = max_concurrent_exports; + self + } + + pub fn with_max_pending_exports(&mut self, max_pending_exports: usize) -> &mut Self { + self.max_pending_exports = max_pending_exports; + self + } + + pub fn with_service_info(&mut self, info: crate::ServiceInfo) -> &mut Self { + self.resource.merge(&Resource::new(vec![ + opentelemetry::KeyValue::new("service.name", info.metric_name), + opentelemetry::KeyValue::new("service.version", info.version), + ])); + + self + } + + pub fn build(&mut self) -> Self { + std::mem::take(self) + } } impl std::fmt::Debug for BatchExporter { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ExporterConfig") - .field("interval", &self.interval) - .field("resource", &self.resource) - .field("batch_size", &self.batch_size) - .field("max_concurrent_exports", &self.max_concurrent_exports) - .field("max_pending_exports", &self.max_pending_exports) - .finish() - } + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ExporterConfig") + .field("interval", &self.interval) + .field("resource", &self.resource) + .field("batch_size", &self.batch_size) + .field("max_concurrent_exports", &self.max_concurrent_exports) + .field("max_pending_exports", &self.max_pending_exports) + .finish() + } } impl Default for BatchExporter { - fn default() -> Self { - Self { - interval: tokio::time::Duration::from_secs(2), - resource: Resource::empty(), - batch_size: 10_000, - max_concurrent_exports: 10, - max_pending_exports: 15, - error_handler: Box::new(|err, count| { - tracing::error!(err = %err, count, "failed to export spans"); - }), - drop_handler: Box::new(|count| { - tracing::warn!(count, "dropped spans"); - }), - export_handler: Box::new(|count| { - tracing::debug!(count, "exported spans"); 
- }), - #[cfg(feature = "metrics")] - metrics: true, - } - } + fn default() -> Self { + Self { + interval: tokio::time::Duration::from_secs(2), + resource: Resource::empty(), + batch_size: 10_000, + max_concurrent_exports: 10, + max_pending_exports: 15, + error_handler: Box::new(|err, count| { + tracing::error!(err = %err, count, "failed to export spans"); + }), + drop_handler: Box::new(|count| { + tracing::warn!(count, "dropped spans"); + }), + export_handler: Box::new(|count| { + tracing::debug!(count, "exported spans"); + }), + #[cfg(feature = "metrics")] + metrics: true, + } + } } pub(super) struct Exporter { - internal: Arc, - span_buffer: Vec>, - spans: Arc>>, + internal: Arc, + span_buffer: Vec>, + spans: Arc>>, } struct ExportInternal { - channel: Mutex, - config: BatchExporter, - concurrent_semaphore: tokio::sync::Semaphore, - pending_semaphore: Arc, + channel: Mutex, + config: BatchExporter, + concurrent_semaphore: tokio::sync::Semaphore, + pending_semaphore: Arc, } -fn export_batch( - internal: Arc, - batch: Vec, - pending_permit: OwnedSemaphorePermit, -) { - use opentelemetry_sdk::export::trace::SpanExporter; - - spawn(async move { - let _permit = internal.concurrent_semaphore.acquire().await.unwrap(); - drop(pending_permit); - - let batch = batch - .into_iter() - .map(|data| data.into_data(internal.config.resource.clone())) - .collect_vec(); - - let size = batch.len(); - - let fut = { internal.channel.lock().await.export(batch) }; - - if let Err(err) = fut.await { - #[cfg(feature = "metrics")] - if internal.config.metrics { - let reason = match err { - TraceError::ExportTimedOut(_) => { - opentelementry::SpanDroppedReason::ExportTimeout - } - _ => opentelementry::SpanDroppedReason::ExportFailed, - }; - - opentelementry::spans_dropped(reason).inc_by(size as u64); - } - - (internal.config.error_handler)(err, size); - } else { - #[cfg(feature = "metrics")] - if internal.config.metrics { - opentelementry::spans_exported().inc_by(size as u64); - } - - 
(internal.config.export_handler)(size); - } - }); +fn export_batch(internal: Arc, batch: Vec, pending_permit: OwnedSemaphorePermit) { + use opentelemetry_sdk::export::trace::SpanExporter; + + spawn(async move { + let _permit = internal.concurrent_semaphore.acquire().await.unwrap(); + drop(pending_permit); + + let batch = batch + .into_iter() + .map(|data| data.into_data(internal.config.resource.clone())) + .collect_vec(); + + let size = batch.len(); + + let fut = { internal.channel.lock().await.export(batch) }; + + if let Err(err) = fut.await { + #[cfg(feature = "metrics")] + if internal.config.metrics { + let reason = match err { + TraceError::ExportTimedOut(_) => opentelementry::SpanDroppedReason::ExportTimeout, + _ => opentelementry::SpanDroppedReason::ExportFailed, + }; + + opentelementry::spans_dropped(reason).inc_by(size as u64); + } + + (internal.config.error_handler)(err, size); + } else { + #[cfg(feature = "metrics")] + if internal.config.metrics { + opentelementry::spans_exported().inc_by(size as u64); + } + + (internal.config.export_handler)(size); + } + }); } impl Exporter { - pub fn new( - channel: SpanExporter, - config: BatchExporter, - spans: Arc>>, - ) -> Self { - Self { - internal: Arc::new(ExportInternal { - channel: Mutex::new(channel), - concurrent_semaphore: tokio::sync::Semaphore::new(config.max_concurrent_exports), - pending_semaphore: Arc::new(tokio::sync::Semaphore::new( - config - .max_pending_exports - .max(config.max_concurrent_exports), - )), - config, - }), - spans, - span_buffer: Vec::new(), - } - } - - pub fn fetch_spans(&mut self) -> usize { - let buffers = std::mem::take(&mut self.span_buffer) - .into_iter() - .chain(std::iter::repeat(Vec::new())); - - self.span_buffer.iter_mut().for_each(|spans| { - spans.clear(); - spans.reserve_exact(self.internal.config.batch_size); - }); - - let mut total_dropped = 0; - - self.span_buffer = self - .spans - .iter() - .zip(buffers) - .map(|(spans, buffer)| { - let mut spans = spans.lock(); - 
total_dropped += spans.drop_count(); - spans.reset_drop_count(); - - spans.drain(buffer) - }) - .collect(); - - #[cfg(feature = "metrics")] - if self.internal.config.metrics { - opentelementry::spans_dropped(opentelementry::SpanDroppedReason::ThreadBackpressure) - .inc_by(total_dropped as u64); - } - - total_dropped - } - - pub async fn run(mut self) { - tracing::debug!("starting exporter"); - - loop { - tokio::time::sleep(self.internal.config.interval).await; - - let thread_total_dropped = self.fetch_spans(); - - let mut drop_pending = false; - - for chunk in self - .span_buffer - .iter_mut() - .flat_map(|spans| spans.drain(..)) - .flat_map(|s| s.flatten()) - .chunks(self.internal.config.batch_size) - .into_iter() - { - let Ok(pending_permit) = - self.internal.pending_semaphore.clone().try_acquire_owned() - else { - drop_pending = true; - break; - }; - - let chunk = chunk.collect_vec(); - tracing::debug!("exporting batch of {} spans", chunk.len()); - export_batch(self.internal.clone(), chunk, pending_permit); - } - - let mut pending_total_dropped = 0; - - if drop_pending { - self.span_buffer.iter_mut().for_each(|spans| { - pending_total_dropped += spans.len(); - spans.clear(); - }); - } - - #[cfg(feature = "metrics")] - if self.internal.config.metrics { - opentelementry::spans_dropped( - opentelementry::SpanDroppedReason::PendingExportBackpressure, - ) - .inc_by(pending_total_dropped as u64); - } - - let total_dropped = thread_total_dropped + pending_total_dropped; - - if total_dropped > 0 { - (self.internal.config.drop_handler)(total_dropped); - } - } - } + pub fn new(channel: SpanExporter, config: BatchExporter, spans: Arc>>) -> Self { + Self { + internal: Arc::new(ExportInternal { + channel: Mutex::new(channel), + concurrent_semaphore: tokio::sync::Semaphore::new(config.max_concurrent_exports), + pending_semaphore: Arc::new(tokio::sync::Semaphore::new( + config.max_pending_exports.max(config.max_concurrent_exports), + )), + config, + }), + spans, + span_buffer: 
Vec::new(), + } + } + + pub fn fetch_spans(&mut self) -> usize { + let buffers = std::mem::take(&mut self.span_buffer) + .into_iter() + .chain(std::iter::repeat(Vec::new())); + + self.span_buffer.iter_mut().for_each(|spans| { + spans.clear(); + spans.reserve_exact(self.internal.config.batch_size); + }); + + let mut total_dropped = 0; + + self.span_buffer = self + .spans + .iter() + .zip(buffers) + .map(|(spans, buffer)| { + let mut spans = spans.lock(); + total_dropped += spans.drop_count(); + spans.reset_drop_count(); + + spans.drain(buffer) + }) + .collect(); + + #[cfg(feature = "metrics")] + if self.internal.config.metrics { + opentelementry::spans_dropped(opentelementry::SpanDroppedReason::ThreadBackpressure) + .inc_by(total_dropped as u64); + } + + total_dropped + } + + pub async fn run(mut self) { + tracing::debug!("starting exporter"); + + loop { + tokio::time::sleep(self.internal.config.interval).await; + + let thread_total_dropped = self.fetch_spans(); + + let mut drop_pending = false; + + for chunk in self + .span_buffer + .iter_mut() + .flat_map(|spans| spans.drain(..)) + .flat_map(|s| s.flatten()) + .chunks(self.internal.config.batch_size) + .into_iter() + { + let Ok(pending_permit) = self.internal.pending_semaphore.clone().try_acquire_owned() else { + drop_pending = true; + break; + }; + + let chunk = chunk.collect_vec(); + tracing::debug!("exporting batch of {} spans", chunk.len()); + export_batch(self.internal.clone(), chunk, pending_permit); + } + + let mut pending_total_dropped = 0; + + if drop_pending { + self.span_buffer.iter_mut().for_each(|spans| { + pending_total_dropped += spans.len(); + spans.clear(); + }); + } + + #[cfg(feature = "metrics")] + if self.internal.config.metrics { + opentelementry::spans_dropped(opentelementry::SpanDroppedReason::PendingExportBackpressure) + .inc_by(pending_total_dropped as u64); + } + + let total_dropped = thread_total_dropped + pending_total_dropped; + + if total_dropped > 0 { + 
(self.internal.config.drop_handler)(total_dropped); + } + } + } } diff --git a/foundations/src/telementry/opentelemetry/layer.rs b/foundations/src/telementry/opentelemetry/layer.rs index cec46fc9..9e1fecca 100644 --- a/foundations/src/telementry/opentelemetry/layer.rs +++ b/foundations/src/telementry/opentelemetry/layer.rs @@ -1,478 +1,435 @@ -use std::{ - hash::{Hash, Hasher}, - sync::{ - atomic::{AtomicU64, AtomicUsize}, - Arc, - }, -}; +use std::hash::{Hash, Hasher}; +use std::sync::atomic::{AtomicU64, AtomicUsize}; +use std::sync::Arc; use opentelemetry::trace::SpanId; use opentelemetry_otlp::SpanExporter; use rand::Rng; use thread_local::ThreadLocal; use tracing::{span, Subscriber}; -use tracing_subscriber::{registry::LookupSpan, Layer}; +use tracing_subscriber::registry::LookupSpan; +use tracing_subscriber::Layer; +use super::exporter::{BatchExporter, Exporter}; +use super::node::SpanNode; use crate::runtime::spawn; -use super::{ - exporter::{BatchExporter, Exporter}, - node::SpanNode, -}; - pub(super) struct SpanHolder { - spans: Vec, - max_unprocessed_spans: usize, - drop_count: usize, + spans: Vec, + max_unprocessed_spans: usize, + drop_count: usize, } impl SpanHolder { - pub fn new(max_unprocessed_spans: usize) -> Self { - Self { - spans: Vec::with_capacity(max_unprocessed_spans), - max_unprocessed_spans, - drop_count: 0, - } - } - - pub fn push(&mut self, span: SpanNode) { - if self.spans.len() < self.max_unprocessed_spans { - self.spans.push(span); - } else { - self.drop_count += 1; - } - } - - pub fn drain(&mut self, mut new: Vec) -> Vec { - new.clear(); - new.reserve_exact(self.max_unprocessed_spans); - - std::mem::replace(&mut self.spans, new) - } - - pub fn drop_count(&self) -> usize { - self.drop_count - } - - pub fn reset_drop_count(&mut self) { - self.drop_count = 0; - } - - pub fn register_drop(&mut self) { - self.drop_count += 1; - } + pub fn new(max_unprocessed_spans: usize) -> Self { + Self { + spans: 
Vec::with_capacity(max_unprocessed_spans), + max_unprocessed_spans, + drop_count: 0, + } + } + + pub fn push(&mut self, span: SpanNode) { + if self.spans.len() < self.max_unprocessed_spans { + self.spans.push(span); + } else { + self.drop_count += 1; + } + } + + pub fn drain(&mut self, mut new: Vec) -> Vec { + new.clear(); + new.reserve_exact(self.max_unprocessed_spans); + + std::mem::replace(&mut self.spans, new) + } + + pub fn drop_count(&self) -> usize { + self.drop_count + } + + pub fn reset_drop_count(&mut self) { + self.drop_count = 0; + } + + pub fn register_drop(&mut self) { + self.drop_count += 1; + } } pub struct SpanObserverLayer { - seed: u64, - config: SpanObserver, - spans: Arc>>, - with_context: WithContext, - _subscriber: std::marker::PhantomData, + seed: u64, + config: SpanObserver, + spans: Arc>>, + with_context: WithContext, + _subscriber: std::marker::PhantomData, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum SampleResult { - Sample, - Dropped, - NotSampled, + Sample, + Dropped, + NotSampled, } pub enum Sampler { - Always, - Never, - TraceIdRatio(f64), - Custom(Box), + Always, + Never, + TraceIdRatio(f64), + Custom(Box), } impl Default for Sampler { - fn default() -> Self { - Self::Always - } + fn default() -> Self { + Self::Always + } } impl std::fmt::Debug for Sampler { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Sampler::Always => write!(f, "Always"), - Sampler::Never => write!(f, "Never"), - Sampler::TraceIdRatio(prob) => write!(f, "TraceIdRatio({})", prob), - Sampler::Custom(_) => write!(f, "Custom"), - } - } + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Sampler::Always => write!(f, "Always"), + Sampler::Never => write!(f, "Never"), + Sampler::TraceIdRatio(prob) => write!(f, "TraceIdRatio({})", prob), + Sampler::Custom(_) => write!(f, "Custom"), + } + } } impl Sampler { - pub const fn trace_id_ratio(prob: f64) -> Self { - Self::TraceIdRatio(prob) 
- } - - pub const fn always() -> Self { - Self::Always - } - - pub const fn never() -> Self { - Self::Never - } - - pub fn custom(s: impl ShouldSample + 'static) -> Self { - Self::Custom(Box::new(s)) - } - - pub fn cull_children_trace_id_ratio(prob: f64) -> Self { - let sampler = Sampler::trace_id_ratio(prob); - - Self::function(move |node| { - if node.is_child() { - SampleResult::Sample - } else if node.is_root() { - if !node.contains_error() - && !matches!(sampler.should_sample(node), SampleResult::Sample) - { - node.clear_children(); - } - - SampleResult::Sample - } else { - sampler.should_sample(node) - } - }) - } - - pub fn function SampleResult + Send + Sync + 'static>(f: F) -> Self { - Self::custom(SampleFunction::new(f)) - } - - pub fn ratelimit(self, rate: usize, per: std::time::Duration) -> Self { - Self::custom(RatelimitSampler::new(self, rate, per)) - } + pub const fn trace_id_ratio(prob: f64) -> Self { + Self::TraceIdRatio(prob) + } + + pub const fn always() -> Self { + Self::Always + } + + pub const fn never() -> Self { + Self::Never + } + + pub fn custom(s: impl ShouldSample + 'static) -> Self { + Self::Custom(Box::new(s)) + } + + pub fn cull_children_trace_id_ratio(prob: f64) -> Self { + let sampler = Sampler::trace_id_ratio(prob); + + Self::function(move |node| { + if node.is_child() { + SampleResult::Sample + } else if node.is_root() { + if !node.contains_error() && !matches!(sampler.should_sample(node), SampleResult::Sample) { + node.clear_children(); + } + + SampleResult::Sample + } else { + sampler.should_sample(node) + } + }) + } + + pub fn function SampleResult + Send + Sync + 'static>(f: F) -> Self { + Self::custom(SampleFunction::new(f)) + } + + pub fn ratelimit(self, rate: usize, per: std::time::Duration) -> Self { + Self::custom(RatelimitSampler::new(self, rate, per)) + } } pub struct RatelimitSampler { - parent: Sampler, - rate: usize, - per: std::time::Duration, - base: std::time::Instant, - last_sample: AtomicU64, - count: AtomicUsize, 
+ parent: Sampler, + rate: usize, + per: std::time::Duration, + base: std::time::Instant, + last_sample: AtomicU64, + count: AtomicUsize, } pub struct SampleFunction(F); impl SampleResult + Send + Sync + 'static> SampleFunction { - pub fn new(f: F) -> Self { - Self(f) - } + pub fn new(f: F) -> Self { + Self(f) + } } -impl SampleResult + Send + Sync + 'static> ShouldSample - for SampleFunction -{ - fn should_sample(&self, node: &mut SpanNode) -> SampleResult { - (self.0)(node) - } +impl SampleResult + Send + Sync + 'static> ShouldSample for SampleFunction { + fn should_sample(&self, node: &mut SpanNode) -> SampleResult { + (self.0)(node) + } } impl RatelimitSampler { - pub fn new(parent: Sampler, rate: usize, per: std::time::Duration) -> Self { - Self { - parent, - rate, - per, - base: std::time::Instant::now(), - last_sample: AtomicU64::new(0), - count: AtomicUsize::new(0), - } - } + pub fn new(parent: Sampler, rate: usize, per: std::time::Duration) -> Self { + Self { + parent, + rate, + per, + base: std::time::Instant::now(), + last_sample: AtomicU64::new(0), + count: AtomicUsize::new(0), + } + } } impl ShouldSample for RatelimitSampler { - fn should_sample(&self, node: &mut SpanNode) -> SampleResult { - match self.parent.should_sample(node) { - SampleResult::Sample => {} - r => return r, - }; - - let now = std::time::Instant::now(); - let elapsed = now.duration_since(self.base) - + std::time::Duration::from_nanos( - self.last_sample.load(std::sync::atomic::Ordering::Relaxed), - ); - - if elapsed >= self.per { - self.last_sample.store( - now.duration_since(self.base).as_nanos() as u64, - std::sync::atomic::Ordering::Relaxed, - ); - self.count.store(0, std::sync::atomic::Ordering::Relaxed); - } - - let count = self - .count - .fetch_add(1, std::sync::atomic::Ordering::Relaxed); - - if count < self.rate { - SampleResult::Sample - } else { - SampleResult::Dropped - } - } + fn should_sample(&self, node: &mut SpanNode) -> SampleResult { + match 
self.parent.should_sample(node) { + SampleResult::Sample => {} + r => return r, + }; + + let now = std::time::Instant::now(); + let elapsed = now.duration_since(self.base) + + std::time::Duration::from_nanos(self.last_sample.load(std::sync::atomic::Ordering::Relaxed)); + + if elapsed >= self.per { + self.last_sample.store( + now.duration_since(self.base).as_nanos() as u64, + std::sync::atomic::Ordering::Relaxed, + ); + self.count.store(0, std::sync::atomic::Ordering::Relaxed); + } + + let count = self.count.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + + if count < self.rate { + SampleResult::Sample + } else { + SampleResult::Dropped + } + } } impl ShouldSample for Sampler { - fn should_sample(&self, node: &mut SpanNode) -> SampleResult { - match self { - Sampler::Always => SampleResult::Sample, - Sampler::Never => SampleResult::NotSampled, - Sampler::TraceIdRatio(prob) => { - if prob >= &1.0 { - return SampleResult::Sample; - } else if prob <= &0.0 { - return SampleResult::NotSampled; - } - - let prob_upper_bound = (prob.max(0.0) * (1u64 << 63) as f64) as u64; - let bytes = node.trace_id().to_bytes(); - let (_, low) = bytes.split_at(8); - let trace_id_low = u64::from_be_bytes(low.try_into().unwrap()); - let rnd_from_trace_id = trace_id_low >> 1; - - if rnd_from_trace_id < prob_upper_bound { - SampleResult::Sample - } else { - SampleResult::NotSampled - } - } - Sampler::Custom(s) => s.should_sample(node), - } - } + fn should_sample(&self, node: &mut SpanNode) -> SampleResult { + match self { + Sampler::Always => SampleResult::Sample, + Sampler::Never => SampleResult::NotSampled, + Sampler::TraceIdRatio(prob) => { + if prob >= &1.0 { + return SampleResult::Sample; + } else if prob <= &0.0 { + return SampleResult::NotSampled; + } + + let prob_upper_bound = (prob.max(0.0) * (1u64 << 63) as f64) as u64; + let bytes = node.trace_id().to_bytes(); + let (_, low) = bytes.split_at(8); + let trace_id_low = u64::from_be_bytes(low.try_into().unwrap()); + let 
rnd_from_trace_id = trace_id_low >> 1; + + if rnd_from_trace_id < prob_upper_bound { + SampleResult::Sample + } else { + SampleResult::NotSampled + } + } + Sampler::Custom(s) => s.should_sample(node), + } + } } pub trait ShouldSample: Send + Sync { - fn should_sample(&self, node: &mut SpanNode) -> SampleResult; + fn should_sample(&self, node: &mut SpanNode) -> SampleResult; } #[derive(Debug)] pub struct SpanObserver { - pub max_unprocessed_spans_per_thread: usize, - pub sampler: Sampler, + pub max_unprocessed_spans_per_thread: usize, + pub sampler: Sampler, } impl Default for SpanObserver { - fn default() -> Self { - Self { - max_unprocessed_spans_per_thread: 500, - sampler: Sampler::Always, - } - } + fn default() -> Self { + Self { + max_unprocessed_spans_per_thread: 500, + sampler: Sampler::Always, + } + } } impl SpanObserverLayer where - S: Subscriber + for<'a> LookupSpan<'a>, + S: Subscriber + for<'a> LookupSpan<'a>, { - pub fn new(config: SpanObserver, batch_config: BatchExporter, exporter: SpanExporter) -> Self { - let spans = Arc::new(ThreadLocal::new()); - - let exporter = Exporter::new(exporter, batch_config, spans.clone()); - - spawn(Box::pin(exporter.run())); - - Self { - config, - seed: rand::thread_rng().gen(), - spans, - with_context: WithContext(Self::get_context), - _subscriber: std::marker::PhantomData, - } - } - - fn get_context( - dispatch: &tracing::Dispatch, - span_id: &span::Id, - f: &mut dyn FnMut(&mut SpanNode), - ) { - let subscriber = dispatch.downcast_ref::().unwrap(); - let span = subscriber.span(span_id).unwrap(); - - let mut extensions = span.extensions_mut(); - if let Some(node) = extensions.get_mut::() { - f(node); - } - } - - fn hash(&self, id: &span::Id) -> SpanId { - let mut hasher = std::collections::hash_map::DefaultHasher::new(); - self.seed.hash(&mut hasher); - id.hash(&mut hasher); - SpanId::from_bytes(hasher.finish().to_be_bytes()) - } + pub fn new(config: SpanObserver, batch_config: BatchExporter, exporter: SpanExporter) -> 
Self { + let spans = Arc::new(ThreadLocal::new()); + + let exporter = Exporter::new(exporter, batch_config, spans.clone()); + + spawn(Box::pin(exporter.run())); + + Self { + config, + seed: rand::thread_rng().gen(), + spans, + with_context: WithContext(Self::get_context), + _subscriber: std::marker::PhantomData, + } + } + + fn get_context(dispatch: &tracing::Dispatch, span_id: &span::Id, f: &mut dyn FnMut(&mut SpanNode)) { + let subscriber = dispatch.downcast_ref::().unwrap(); + let span = subscriber.span(span_id).unwrap(); + + let mut extensions = span.extensions_mut(); + if let Some(node) = extensions.get_mut::() { + f(node); + } + } + + fn hash(&self, id: &span::Id) -> SpanId { + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + self.seed.hash(&mut hasher); + id.hash(&mut hasher); + SpanId::from_bytes(hasher.finish().to_be_bytes()) + } } #[allow(clippy::type_complexity)] pub(super) struct WithContext(fn(&tracing::Dispatch, &span::Id, f: &mut dyn FnMut(&mut SpanNode))); impl WithContext { - pub fn with_context( - &self, - dispatch: &tracing::Dispatch, - span_id: &span::Id, - mut f: impl FnMut(&mut SpanNode), - ) { - (self.0)(dispatch, span_id, &mut f); - } + pub fn with_context(&self, dispatch: &tracing::Dispatch, span_id: &span::Id, mut f: impl FnMut(&mut SpanNode)) { + (self.0)(dispatch, span_id, &mut f); + } } impl Layer for SpanObserverLayer where - S: Subscriber + for<'a> LookupSpan<'a>, + S: Subscriber + for<'a> LookupSpan<'a>, { - fn on_new_span( - &self, - attrs: &span::Attributes<'_>, - id: &span::Id, - ctx: tracing_subscriber::layer::Context<'_, S>, - ) { - let span = ctx.span(id).unwrap(); - if span.extensions().get::().is_some() { - return; - } - - let mut parent = ctx.current_span().id().cloned(); - - let trace_id = parent - .as_ref() - .and_then(|id| Some(ctx.span(id)?.extensions().get::()?.trace_id())); - - if trace_id.is_none() { - parent = None; - } - - let root_id = parent.as_ref().and_then(|id| { - ctx.span(id)? 
- .extensions() - .get::()? - .root_id() - .cloned() - }); - - span.extensions_mut().insert(SpanNode::new( - id.clone(), - trace_id, - self.hash(id), - parent.map(|id| self.hash(&id)), - attrs, - root_id, - )); - } - - fn on_close(&self, id: span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { - let span = ctx.span(&id).unwrap(); - - let mut node = span.extensions_mut().remove::().unwrap(); - node.close(); - - let spans = self.spans.get_or(|| { - spin::Mutex::new(SpanHolder::new( - self.config.max_unprocessed_spans_per_thread, - )) - }); - - match self.config.sampler.should_sample(&mut node) { - SampleResult::Sample => { - if node.is_child() { - let parent_id = node.root_id().unwrap(); - let parent = ctx.span(parent_id).unwrap(); - let mut extensions = parent.extensions_mut(); - if let Some(parent_node) = extensions.get_mut::() { - parent_node.add_child(node); - return; - } - } - - spans.lock().push(node); - } - SampleResult::NotSampled => {} - SampleResult::Dropped => spans.lock().register_drop(), - } - } - - fn on_enter(&self, id: &span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { - let span = ctx.span(id).unwrap(); - - let mut ext = span.extensions_mut(); - if let Some(node) = ext.get_mut::() { - node.enter(); - } - } - - fn on_exit(&self, id: &span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { - let span = ctx.span(id).unwrap(); - - let mut ext = span.extensions_mut(); - if let Some(node) = ext.get_mut::() { - node.exit(); - } - } - - fn on_event(&self, event: &tracing::Event<'_>, ctx: tracing_subscriber::layer::Context<'_, S>) { - let span = ctx.current_span(); - let Some(id) = span.id() else { - return; - }; - - let span = ctx.span(id).unwrap(); - - let mut ext = span.extensions_mut(); - if let Some(node) = ext.get_mut::() { - node.event(event); - } - } - - fn on_record( - &self, - span: &span::Id, - values: &span::Record<'_>, - ctx: tracing_subscriber::layer::Context<'_, S>, - ) { - let span = ctx.span(span).unwrap(); - - let mut 
ext = span.extensions_mut(); - if let Some(node) = ext.get_mut::() { - node.record(values); - } - } - - fn on_follows_from( - &self, - id: &span::Id, - follow_id: &span::Id, - ctx: tracing_subscriber::layer::Context<'_, S>, - ) { - let span = ctx.span(id).unwrap(); - let Some(follow_span) = ctx.span(follow_id) else { - return; - }; - - let mut extensions = span.extensions_mut(); - - let Some(span_data) = extensions.get_mut::() else { - return; - }; - - let follow_span_id = self.hash(follow_id); - - span_data.follows_from(follow_span_id, follow_span.extensions().get::()); - } - - /// Safety: The lifetime of the with_context is tied to the lifetime of the layer. - unsafe fn downcast_raw(&self, id: std::any::TypeId) -> Option<*const ()> { - if id == std::any::TypeId::of::() { - return Some(self as *const Self as *const ()); - } else if id == std::any::TypeId::of::() { - return Some(&self.with_context as *const WithContext as *const ()); - } - - None - } + fn on_new_span(&self, attrs: &span::Attributes<'_>, id: &span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { + let span = ctx.span(id).unwrap(); + if span.extensions().get::().is_some() { + return; + } + + let mut parent = ctx.current_span().id().cloned(); + + let trace_id = parent + .as_ref() + .and_then(|id| Some(ctx.span(id)?.extensions().get::()?.trace_id())); + + if trace_id.is_none() { + parent = None; + } + + let root_id = parent + .as_ref() + .and_then(|id| ctx.span(id)?.extensions().get::()?.root_id().cloned()); + + span.extensions_mut().insert(SpanNode::new( + id.clone(), + trace_id, + self.hash(id), + parent.map(|id| self.hash(&id)), + attrs, + root_id, + )); + } + + fn on_close(&self, id: span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { + let span = ctx.span(&id).unwrap(); + + let mut node = span.extensions_mut().remove::().unwrap(); + node.close(); + + let spans = self + .spans + .get_or(|| spin::Mutex::new(SpanHolder::new(self.config.max_unprocessed_spans_per_thread))); + + match 
self.config.sampler.should_sample(&mut node) { + SampleResult::Sample => { + if node.is_child() { + let parent_id = node.root_id().unwrap(); + let parent = ctx.span(parent_id).unwrap(); + let mut extensions = parent.extensions_mut(); + if let Some(parent_node) = extensions.get_mut::() { + parent_node.add_child(node); + return; + } + } + + spans.lock().push(node); + } + SampleResult::NotSampled => {} + SampleResult::Dropped => spans.lock().register_drop(), + } + } + + fn on_enter(&self, id: &span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { + let span = ctx.span(id).unwrap(); + + let mut ext = span.extensions_mut(); + if let Some(node) = ext.get_mut::() { + node.enter(); + } + } + + fn on_exit(&self, id: &span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { + let span = ctx.span(id).unwrap(); + + let mut ext = span.extensions_mut(); + if let Some(node) = ext.get_mut::() { + node.exit(); + } + } + + fn on_event(&self, event: &tracing::Event<'_>, ctx: tracing_subscriber::layer::Context<'_, S>) { + let span = ctx.current_span(); + let Some(id) = span.id() else { + return; + }; + + let span = ctx.span(id).unwrap(); + + let mut ext = span.extensions_mut(); + if let Some(node) = ext.get_mut::() { + node.event(event); + } + } + + fn on_record(&self, span: &span::Id, values: &span::Record<'_>, ctx: tracing_subscriber::layer::Context<'_, S>) { + let span = ctx.span(span).unwrap(); + + let mut ext = span.extensions_mut(); + if let Some(node) = ext.get_mut::() { + node.record(values); + } + } + + fn on_follows_from(&self, id: &span::Id, follow_id: &span::Id, ctx: tracing_subscriber::layer::Context<'_, S>) { + let span = ctx.span(id).unwrap(); + let Some(follow_span) = ctx.span(follow_id) else { + return; + }; + + let mut extensions = span.extensions_mut(); + + let Some(span_data) = extensions.get_mut::() else { + return; + }; + + let follow_span_id = self.hash(follow_id); + + span_data.follows_from(follow_span_id, follow_span.extensions().get::()); + } + + /// 
Safety: The lifetime of the with_context is tied to the lifetime of the + /// layer. + unsafe fn downcast_raw(&self, id: std::any::TypeId) -> Option<*const ()> { + if id == std::any::TypeId::of::() { + return Some(self as *const Self as *const ()); + } else if id == std::any::TypeId::of::() { + return Some(&self.with_context as *const WithContext as *const ()); + } + + None + } } diff --git a/foundations/src/telementry/opentelemetry/mod.rs b/foundations/src/telementry/opentelemetry/mod.rs index 40790b65..521c14ad 100644 --- a/foundations/src/telementry/opentelemetry/mod.rs +++ b/foundations/src/telementry/opentelemetry/mod.rs @@ -4,67 +4,53 @@ mod node; mod span_ext; pub use exporter::BatchExporter; -pub use layer::{ - RatelimitSampler, SampleFunction, SampleResult, Sampler, ShouldSample, SpanObserver, - SpanObserverLayer, -}; +pub use layer::{RatelimitSampler, SampleFunction, SampleResult, Sampler, ShouldSample, SpanObserver, SpanObserverLayer}; pub use node::SpanNode; use opentelemetry_otlp::SpanExporter; pub use span_ext::OpenTelemetrySpanExt; -pub fn layer( - span_observer: SpanObserver, - batch_config: BatchExporter, - exporter: SpanExporter, -) -> SpanObserverLayer +pub fn layer(span_observer: SpanObserver, batch_config: BatchExporter, exporter: SpanExporter) -> SpanObserverLayer where - S: tracing::Subscriber + for<'a> tracing_subscriber::registry::LookupSpan<'a>, + S: tracing::Subscriber + for<'a> tracing_subscriber::registry::LookupSpan<'a>, { - SpanObserverLayer::new(span_observer, batch_config, exporter) + SpanObserverLayer::new(span_observer, batch_config, exporter) } pub fn complex_rate_sampler( - head_rate: f64, - tail_rate: Option, - error_rate: Option, - sample_on_error: bool, + head_rate: f64, + tail_rate: Option, + error_rate: Option, + sample_on_error: bool, ) -> Sampler { - Sampler::function(move |node| { - let tail_rate = tail_rate.unwrap_or(head_rate); - let rate_to_use = if node.contains_error() { - error_rate.unwrap_or(if node.is_child() { - 
tail_rate - } else { - head_rate - }) - } else { - head_rate - }; + Sampler::function(move |node| { + let tail_rate = tail_rate.unwrap_or(head_rate); + let rate_to_use = if node.contains_error() { + error_rate.unwrap_or(if node.is_child() { tail_rate } else { head_rate }) + } else { + head_rate + }; - if node.is_child() { - if sample_on_error { - // always sample children because we dont know if there are errors yet. - SampleResult::Sample - } else { - Sampler::TraceIdRatio(rate_to_use).should_sample(node) - } - } else if node.is_root() { - match Sampler::TraceIdRatio(rate_to_use).should_sample(node) { - SampleResult::Sample - if (!sample_on_error || !node.contains_error()) && tail_rate != head_rate => - { - let should_sample_children = - Sampler::TraceIdRatio(tail_rate).should_sample(node); - if should_sample_children != SampleResult::Sample { - node.clear_children(); - } + if node.is_child() { + if sample_on_error { + // always sample children because we dont know if there are errors yet. 
+ SampleResult::Sample + } else { + Sampler::TraceIdRatio(rate_to_use).should_sample(node) + } + } else if node.is_root() { + match Sampler::TraceIdRatio(rate_to_use).should_sample(node) { + SampleResult::Sample if (!sample_on_error || !node.contains_error()) && tail_rate != head_rate => { + let should_sample_children = Sampler::TraceIdRatio(tail_rate).should_sample(node); + if should_sample_children != SampleResult::Sample { + node.clear_children(); + } - SampleResult::Sample - } - r => r, - } - } else { - Sampler::TraceIdRatio(head_rate).should_sample(node) - } - }) + SampleResult::Sample + } + r => r, + } + } else { + Sampler::TraceIdRatio(head_rate).should_sample(node) + } + }) } diff --git a/foundations/src/telementry/opentelemetry/node.rs b/foundations/src/telementry/opentelemetry/node.rs index b71be7d2..f26471e4 100644 --- a/foundations/src/telementry/opentelemetry/node.rs +++ b/foundations/src/telementry/opentelemetry/node.rs @@ -1,352 +1,328 @@ use std::borrow::Cow; -pub use opentelemetry::{ - trace::{Link, SpanContext, SpanId, SpanKind, Status, TraceFlags, TraceId, TraceState}, - KeyValue, -}; -pub use opentelemetry_sdk::{export::trace::SpanData, trace::SpanEvents, Resource}; +pub use opentelemetry::trace::{Link, SpanContext, SpanId, SpanKind, Status, TraceFlags, TraceId, TraceState}; +pub use opentelemetry::KeyValue; +pub use opentelemetry_sdk::export::trace::SpanData; +pub use opentelemetry_sdk::trace::SpanEvents; +pub use opentelemetry_sdk::Resource; use rand::Rng; use spin::Lazy; use tracing::{span, Metadata}; #[derive(Debug, Clone)] pub struct SpanNode { - pub id: span::Id, - pub status: Status, - pub trace_id: TraceId, - pub mapped_id: SpanId, - pub metadata: &'static Metadata<'static>, - pub attributes: Vec, - pub end: Option, - pub last_event_time: std::time::Instant, - pub active_time: std::time::Duration, - pub idle_time: std::time::Duration, - pub events: Vec, - pub links: Vec, - pub start: Option, - pub mapped_parent_id: Option, - pub root: 
Option, + pub id: span::Id, + pub status: Status, + pub trace_id: TraceId, + pub mapped_id: SpanId, + pub metadata: &'static Metadata<'static>, + pub attributes: Vec, + pub end: Option, + pub last_event_time: std::time::Instant, + pub active_time: std::time::Duration, + pub idle_time: std::time::Duration, + pub events: Vec, + pub links: Vec, + pub start: Option, + pub mapped_parent_id: Option, + pub root: Option, } #[derive(Debug, Clone)] pub enum RootNode { - Root(Vec), - Child(span::Id), + Root(Vec), + Child(span::Id), } #[derive(Debug, Clone)] pub struct SpanEvent { - pub time: std::time::SystemTime, - pub metadata: &'static Metadata<'static>, - pub attributes: Vec, + pub time: std::time::SystemTime, + pub metadata: &'static Metadata<'static>, + pub attributes: Vec, } impl SpanEvent { - fn into_data(mut self) -> opentelemetry::trace::Event { - if let Some(file) = self.metadata.file() { - self.attributes.push(KeyValue::new("code.filepath", file)) - } - if let Some(line) = self.metadata.line() { - self.attributes - .push(KeyValue::new("code.lineno", line as i64)) - } - if let Some(module) = self.metadata.module_path() { - self.attributes - .push(KeyValue::new("code.namespace", module)) - } - self.attributes - .push(KeyValue::new("level", self.metadata.level().as_str())); - - opentelemetry::trace::Event::new(self.metadata.name(), self.time, self.attributes, 0) - } + fn into_data(mut self) -> opentelemetry::trace::Event { + if let Some(file) = self.metadata.file() { + self.attributes.push(KeyValue::new("code.filepath", file)) + } + if let Some(line) = self.metadata.line() { + self.attributes.push(KeyValue::new("code.lineno", line as i64)) + } + if let Some(module) = self.metadata.module_path() { + self.attributes.push(KeyValue::new("code.namespace", module)) + } + self.attributes.push(KeyValue::new("level", self.metadata.level().as_str())); + + opentelemetry::trace::Event::new(self.metadata.name(), self.time, self.attributes, 0) + } } fn gen_trace_id() -> TraceId { 
- let mut bytes = [0u8; 16]; - rand::thread_rng().fill(&mut bytes); - TraceId::from_bytes(bytes) + let mut bytes = [0u8; 16]; + rand::thread_rng().fill(&mut bytes); + TraceId::from_bytes(bytes) } impl SpanNode { - pub fn new( - id: span::Id, - trace_id: Option, - mapped_id: SpanId, - mapped_parent_id: Option, - attrs: &span::Attributes<'_>, - root_child: Option, - ) -> Self { - let mut this = Self { - id, - status: Status::Unset, - trace_id: trace_id.unwrap_or_else(gen_trace_id), - mapped_id, - metadata: attrs.metadata(), - attributes: Vec::new(), - start: Some(std::time::SystemTime::now()), - end: None, - last_event_time: std::time::Instant::now(), - active_time: std::time::Duration::default(), - idle_time: std::time::Duration::default(), - mapped_parent_id, - events: Vec::new(), - links: Vec::new(), - root: root_child.map(RootNode::Child), - }; - - attrs.record(&mut FieldVisitor(&mut this.attributes)); - - this - } - - pub fn contains_error(&self) -> bool { - matches!(self.status, Status::Error { .. 
}) - || self - .events - .iter() - .any(|e| e.metadata.level() >= &tracing::Level::ERROR) - || self.children().any(|c| c.contains_error()) - } - - pub fn children(&self) -> impl Iterator { - match &self.root { - Some(RootNode::Root(children)) => children.iter(), - _ => [].iter(), - } - } - - pub fn root_id(&self) -> Option<&span::Id> { - match &self.root { - Some(RootNode::Root(_)) => Some(&self.id), - Some(RootNode::Child(id)) => Some(id), - None => None, - } - } - - pub fn event(&mut self, event: &tracing::Event<'_>) { - let mut attributes = Vec::new(); - event.record(&mut FieldVisitor(&mut attributes)); - self.events.push(SpanEvent { - metadata: event.metadata(), - time: std::time::SystemTime::now(), - attributes, - }); - } - - pub fn record(&mut self, record: &span::Record<'_>) { - record.record(&mut FieldVisitor(&mut self.attributes)); - } - - pub fn follows_from(&mut self, id: SpanId, span: Option<&SpanNode>) { - self.links.push(SpanContext::new( - span.map_or(TraceId::INVALID, |s| s.trace_id), - id, - span.map_or(TraceFlags::NOT_SAMPLED, |_| TraceFlags::SAMPLED), - false, - TraceState::NONE, - )); - } - - pub fn follows_from_context(&mut self, context: SpanContext) { - self.links.push(context); - } - - pub fn close(&mut self) { - self.end = Some(std::time::SystemTime::now()); - } - - pub fn enter(&mut self) { - self.idle_time += self.last_event_time.elapsed(); - self.last_event_time = std::time::Instant::now(); - } - - pub fn exit(&mut self) { - self.active_time += self.last_event_time.elapsed(); - self.last_event_time = std::time::Instant::now(); - } - - pub fn trace_id(&self) -> TraceId { - self.trace_id - } - - pub fn parent(&self) -> Option { - self.mapped_parent_id - } - - pub fn is_root(&self) -> bool { - matches!(self.root, Some(RootNode::Root(_))) - } - - pub fn is_child(&self) -> bool { - matches!(self.root, Some(RootNode::Child(_))) - } - - pub fn make_root(&mut self) { - if self.is_root() { - return; - } - - if let Some(parent_id) = 
self.mapped_parent_id.take() { - self.links.push(SpanContext::new( - self.trace_id, - parent_id, - TraceFlags::SAMPLED, - false, - TraceState::NONE, - )); - - // Since we are making this a root span - // We will need a new trace id - self.trace_id = gen_trace_id(); - } - - self.root = Some(RootNode::Root(Vec::new())); - } - - pub fn set_status(&mut self, status: Status) { - self.status = status; - } - - pub fn add_child(&mut self, child: SpanNode) { - if let Some(RootNode::Root(children)) = &mut self.root { - children.push(child); - } - } - - pub fn clear_children(&mut self) { - if let Some(RootNode::Root(children)) = &mut self.root { - children.clear(); - } - } - - pub fn span_count(&self) -> usize { - match &self.root { - Some(RootNode::Root(children)) => children.len() + 1, - _ => 1, - } - } - - pub fn flatten(mut self) -> impl Iterator { - let children = match self.root.take() { - Some(RootNode::Root(children)) => Some(children), - _ => None, - } - .into_iter() - .flatten(); - - std::iter::once(self).chain(children) - } - - pub fn into_data(mut self, resource: Resource) -> SpanData { - static DEFAULT_SPAN: Lazy = Lazy::new(|| SpanData { - start_time: std::time::SystemTime::UNIX_EPOCH, - end_time: std::time::SystemTime::UNIX_EPOCH, - dropped_attributes_count: 0, - name: Cow::Borrowed(""), - status: Default::default(), - instrumentation_lib: Default::default(), - events: SpanEvents::default(), - links: Default::default(), - span_kind: SpanKind::Internal, - resource: Cow::Owned(Resource::empty()), - attributes: Vec::new(), - parent_span_id: SpanId::INVALID, - span_context: SpanContext::new( - TraceId::INVALID, - SpanId::INVALID, - TraceFlags::default(), - false, - TraceState::NONE, - ), - }); - - self.attributes - .push(KeyValue::new("busy_ns", self.active_time.as_nanos() as i64)); - self.attributes - .push(KeyValue::new("idle_ns", self.idle_time.as_nanos() as i64)); - if let Some(file) = self.metadata.file() { - self.attributes.push(KeyValue::new("code.filepath", 
file)) - } - if let Some(line) = self.metadata.line() { - self.attributes - .push(KeyValue::new("code.lineno", line as i64)) - } - if let Some(module) = self.metadata.module_path() { - self.attributes - .push(KeyValue::new("code.namespace", module)) - } - self.attributes - .push(KeyValue::new("level", self.metadata.level().as_str())); - - let mut span = DEFAULT_SPAN.clone(); - - span.status = self.status; - span.start_time = self.start.unwrap(); - span.end_time = self.end.unwrap(); - span.dropped_attributes_count = 0; - span.name = self.metadata.name().into(); - span.attributes = self.attributes; - span.resource = Cow::Owned(resource.clone()); - span.parent_span_id = self.mapped_parent_id.unwrap_or(SpanId::INVALID); - span.span_context = SpanContext::new( - self.trace_id, - self.mapped_id, - TraceFlags::SAMPLED, - false, - TraceState::NONE, - ); - span.events.events = self.events.into_iter().map(|e| e.into_data()).collect(); - span.links.links = self - .links - .into_iter() - .map(|link| Link::new(link, Vec::new())) - .collect(); - - span - } + pub fn new( + id: span::Id, + trace_id: Option, + mapped_id: SpanId, + mapped_parent_id: Option, + attrs: &span::Attributes<'_>, + root_child: Option, + ) -> Self { + let mut this = Self { + id, + status: Status::Unset, + trace_id: trace_id.unwrap_or_else(gen_trace_id), + mapped_id, + metadata: attrs.metadata(), + attributes: Vec::new(), + start: Some(std::time::SystemTime::now()), + end: None, + last_event_time: std::time::Instant::now(), + active_time: std::time::Duration::default(), + idle_time: std::time::Duration::default(), + mapped_parent_id, + events: Vec::new(), + links: Vec::new(), + root: root_child.map(RootNode::Child), + }; + + attrs.record(&mut FieldVisitor(&mut this.attributes)); + + this + } + + pub fn contains_error(&self) -> bool { + matches!(self.status, Status::Error { .. 
}) + || self.events.iter().any(|e| e.metadata.level() <= &tracing::Level::ERROR) + || self.children().any(|c| c.contains_error()) + } + + pub fn children(&self) -> impl Iterator<Item = &SpanNode> { + match &self.root { + Some(RootNode::Root(children)) => children.iter(), + _ => [].iter(), + } + } + + pub fn root_id(&self) -> Option<&span::Id> { + match &self.root { + Some(RootNode::Root(_)) => Some(&self.id), + Some(RootNode::Child(id)) => Some(id), + None => None, + } + } + + pub fn event(&mut self, event: &tracing::Event<'_>) { + let mut attributes = Vec::new(); + event.record(&mut FieldVisitor(&mut attributes)); + self.events.push(SpanEvent { + metadata: event.metadata(), + time: std::time::SystemTime::now(), + attributes, + }); + } + + pub fn record(&mut self, record: &span::Record<'_>) { + record.record(&mut FieldVisitor(&mut self.attributes)); + } + + pub fn follows_from(&mut self, id: SpanId, span: Option<&SpanNode>) { + self.links.push(SpanContext::new( + span.map_or(TraceId::INVALID, |s| s.trace_id), + id, + span.map_or(TraceFlags::NOT_SAMPLED, |_| TraceFlags::SAMPLED), + false, + TraceState::NONE, + )); + } + + pub fn follows_from_context(&mut self, context: SpanContext) { + self.links.push(context); + } + + pub fn close(&mut self) { + self.end = Some(std::time::SystemTime::now()); + } + + pub fn enter(&mut self) { + self.idle_time += self.last_event_time.elapsed(); + self.last_event_time = std::time::Instant::now(); + } + + pub fn exit(&mut self) { + self.active_time += self.last_event_time.elapsed(); + self.last_event_time = std::time::Instant::now(); + } + + pub fn trace_id(&self) -> TraceId { + self.trace_id + } + + pub fn parent(&self) -> Option<SpanId> { + self.mapped_parent_id + } + + pub fn is_root(&self) -> bool { + matches!(self.root, Some(RootNode::Root(_))) + } + + pub fn is_child(&self) -> bool { + matches!(self.root, Some(RootNode::Child(_))) + } + + pub fn make_root(&mut self) { + if self.is_root() { + return; + } + + if let Some(parent_id) = 
self.mapped_parent_id.take() { + self.links.push(SpanContext::new( + self.trace_id, + parent_id, + TraceFlags::SAMPLED, + false, + TraceState::NONE, + )); + + // Since we are making this a root span + // We will need a new trace id + self.trace_id = gen_trace_id(); + } + + self.root = Some(RootNode::Root(Vec::new())); + } + + pub fn set_status(&mut self, status: Status) { + self.status = status; + } + + pub fn add_child(&mut self, child: SpanNode) { + if let Some(RootNode::Root(children)) = &mut self.root { + children.push(child); + } + } + + pub fn clear_children(&mut self) { + if let Some(RootNode::Root(children)) = &mut self.root { + children.clear(); + } + } + + pub fn span_count(&self) -> usize { + match &self.root { + Some(RootNode::Root(children)) => children.len() + 1, + _ => 1, + } + } + + pub fn flatten(mut self) -> impl Iterator { + let children = match self.root.take() { + Some(RootNode::Root(children)) => Some(children), + _ => None, + } + .into_iter() + .flatten(); + + std::iter::once(self).chain(children) + } + + pub fn into_data(mut self, resource: Resource) -> SpanData { + static DEFAULT_SPAN: Lazy = Lazy::new(|| SpanData { + start_time: std::time::SystemTime::UNIX_EPOCH, + end_time: std::time::SystemTime::UNIX_EPOCH, + dropped_attributes_count: 0, + name: Cow::Borrowed(""), + status: Default::default(), + instrumentation_lib: Default::default(), + events: SpanEvents::default(), + links: Default::default(), + span_kind: SpanKind::Internal, + resource: Cow::Owned(Resource::empty()), + attributes: Vec::new(), + parent_span_id: SpanId::INVALID, + span_context: SpanContext::new( + TraceId::INVALID, + SpanId::INVALID, + TraceFlags::default(), + false, + TraceState::NONE, + ), + }); + + self.attributes + .push(KeyValue::new("busy_ns", self.active_time.as_nanos() as i64)); + self.attributes + .push(KeyValue::new("idle_ns", self.idle_time.as_nanos() as i64)); + if let Some(file) = self.metadata.file() { + self.attributes.push(KeyValue::new("code.filepath", 
file)) + } + if let Some(line) = self.metadata.line() { + self.attributes.push(KeyValue::new("code.lineno", line as i64)) + } + if let Some(module) = self.metadata.module_path() { + self.attributes.push(KeyValue::new("code.namespace", module)) + } + self.attributes.push(KeyValue::new("level", self.metadata.level().as_str())); + + let mut span = DEFAULT_SPAN.clone(); + + span.status = self.status; + span.start_time = self.start.unwrap(); + span.end_time = self.end.unwrap(); + span.dropped_attributes_count = 0; + span.name = self.metadata.name().into(); + span.attributes = self.attributes; + span.resource = Cow::Owned(resource.clone()); + span.parent_span_id = self.mapped_parent_id.unwrap_or(SpanId::INVALID); + span.span_context = SpanContext::new(self.trace_id, self.mapped_id, TraceFlags::SAMPLED, false, TraceState::NONE); + span.events.events = self.events.into_iter().map(|e| e.into_data()).collect(); + span.links.links = self.links.into_iter().map(|link| Link::new(link, Vec::new())).collect(); + + span + } } struct FieldVisitor<'a>(&'a mut Vec); impl tracing::field::Visit for FieldVisitor<'_> { - fn record_bool(&mut self, field: &tracing::field::Field, value: bool) { - self.0.push(KeyValue::new(field.name(), value)); - } - - fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) { - self.0 - .push(KeyValue::new(field.name(), format!("{:?}", value))); - } - - fn record_u64(&mut self, field: &tracing::field::Field, value: u64) { - self.0.push(KeyValue::new(field.name(), value.to_string())); - } - - fn record_error( - &mut self, - field: &tracing::field::Field, - value: &(dyn std::error::Error + 'static), - ) { - self.0.push(KeyValue::new(field.name(), value.to_string())); - } - - fn record_f64(&mut self, field: &tracing::field::Field, value: f64) { - self.0.push(KeyValue::new(field.name(), value)); - } - - fn record_i128(&mut self, field: &tracing::field::Field, value: i128) { - self.0.push(KeyValue::new(field.name(), 
value.to_string())); - } - - fn record_i64(&mut self, field: &tracing::field::Field, value: i64) { - self.0.push(KeyValue::new(field.name(), value)); - } - - fn record_str(&mut self, field: &tracing::field::Field, value: &str) { - self.0.push(KeyValue::new(field.name(), value.to_string())); - } - - fn record_u128(&mut self, field: &tracing::field::Field, value: u128) { - self.0.push(KeyValue::new(field.name(), value.to_string())); - } + fn record_bool(&mut self, field: &tracing::field::Field, value: bool) { + self.0.push(KeyValue::new(field.name(), value)); + } + + fn record_debug(&mut self, field: &tracing::field::Field, value: &dyn std::fmt::Debug) { + self.0.push(KeyValue::new(field.name(), format!("{:?}", value))); + } + + fn record_u64(&mut self, field: &tracing::field::Field, value: u64) { + self.0.push(KeyValue::new(field.name(), value.to_string())); + } + + fn record_error(&mut self, field: &tracing::field::Field, value: &(dyn std::error::Error + 'static)) { + self.0.push(KeyValue::new(field.name(), value.to_string())); + } + + fn record_f64(&mut self, field: &tracing::field::Field, value: f64) { + self.0.push(KeyValue::new(field.name(), value)); + } + + fn record_i128(&mut self, field: &tracing::field::Field, value: i128) { + self.0.push(KeyValue::new(field.name(), value.to_string())); + } + + fn record_i64(&mut self, field: &tracing::field::Field, value: i64) { + self.0.push(KeyValue::new(field.name(), value)); + } + + fn record_str(&mut self, field: &tracing::field::Field, value: &str) { + self.0.push(KeyValue::new(field.name(), value.to_string())); + } + + fn record_u128(&mut self, field: &tracing::field::Field, value: u128) { + self.0.push(KeyValue::new(field.name(), value.to_string())); + } } diff --git a/foundations/src/telementry/opentelemetry/span_ext.rs b/foundations/src/telementry/opentelemetry/span_ext.rs index 5ecfd21f..3c3722d1 100644 --- a/foundations/src/telementry/opentelemetry/span_ext.rs +++ 
b/foundations/src/telementry/opentelemetry/span_ext.rs @@ -3,69 +3,69 @@ use opentelemetry::trace::{SpanContext, Status, TraceId}; use super::layer::WithContext; pub trait OpenTelemetrySpanExt { - fn link_span(&self, context: SpanContext); - fn make_root(&self); - fn set_status(&self, status: Status); - fn trace_id(&self) -> Option; + fn link_span(&self, context: SpanContext); + fn make_root(&self); + fn set_status(&self, status: Status); + fn trace_id(&self) -> Option; } impl OpenTelemetrySpanExt for tracing::Span { - fn link_span(&self, context: SpanContext) { - let mut context = Some(context); - self.with_subscriber(|(id, dispatch)| { - let Some(ctx) = dispatch.downcast_ref::() else { - return; - }; + fn link_span(&self, context: SpanContext) { + let mut context = Some(context); + self.with_subscriber(|(id, dispatch)| { + let Some(ctx) = dispatch.downcast_ref::() else { + return; + }; - ctx.with_context(dispatch, id, |node| { - if let Some(context) = context.take() { - node.follows_from_context(context); - } - }); - }); - } + ctx.with_context(dispatch, id, |node| { + if let Some(context) = context.take() { + node.follows_from_context(context); + } + }); + }); + } - fn make_root(&self) { - self.with_subscriber(|(id, dispatch)| { - let Some(ctx) = dispatch.downcast_ref::() else { - return; - }; + fn make_root(&self) { + self.with_subscriber(|(id, dispatch)| { + let Some(ctx) = dispatch.downcast_ref::() else { + return; + }; - ctx.with_context(dispatch, id, |node| { - node.make_root(); - }); - }); - } + ctx.with_context(dispatch, id, |node| { + node.make_root(); + }); + }); + } - fn set_status(&self, status: Status) { - let mut status = Some(status); + fn set_status(&self, status: Status) { + let mut status = Some(status); - self.with_subscriber(|(id, dispatch)| { - let Some(ctx) = dispatch.downcast_ref::() else { - return; - }; + self.with_subscriber(|(id, dispatch)| { + let Some(ctx) = dispatch.downcast_ref::() else { + return; + }; - ctx.with_context(dispatch, 
id, |node| { - if let Some(status) = status.take() { - node.set_status(status); - } - }); - }); - } + ctx.with_context(dispatch, id, |node| { + if let Some(status) = status.take() { + node.set_status(status); + } + }); + }); + } - fn trace_id(&self) -> Option { - let mut trace_id = None; + fn trace_id(&self) -> Option { + let mut trace_id = None; - self.with_subscriber(|(id, dispatch)| { - let Some(ctx) = dispatch.downcast_ref::() else { - return; - }; + self.with_subscriber(|(id, dispatch)| { + let Some(ctx) = dispatch.downcast_ref::() else { + return; + }; - ctx.with_context(dispatch, id, |node| { - trace_id = Some(node.trace_id()); - }); - }); + ctx.with_context(dispatch, id, |node| { + trace_id = Some(node.trace_id()); + }); + }); - trace_id - } + trace_id + } } diff --git a/foundations/src/telementry/pprof/cpu.rs b/foundations/src/telementry/pprof/cpu.rs index 8aac17e2..fbe6f6a6 100644 --- a/foundations/src/telementry/pprof/cpu.rs +++ b/foundations/src/telementry/pprof/cpu.rs @@ -1,44 +1,38 @@ use std::io::Write; use anyhow::Context; -use flate2::{write::GzEncoder, Compression}; +use flate2::write::GzEncoder; +use flate2::Compression; use pprof::protos::Message; pub struct Cpu(pprof::ProfilerGuardBuilder); impl Cpu { - pub fn new>(frequency: i32, blocklist: &[S]) -> Self { - Self( - pprof::ProfilerGuardBuilder::default() - .frequency(frequency) - .blocklist(blocklist), - ) - } - - /// Capture a pprof profile for the given duration. - /// The profile is compressed using gzip. - /// The profile can be analyzed using the `pprof` tool. - /// Warning: This method is blocking and may take a long time to complete. It is recommended to run it in a separate thread. 
- pub fn capture(&self, duration: std::time::Duration) -> anyhow::Result> { - let profiler = self - .0 - .clone() - .build() - .context("failed to build pprof profiler")?; - - std::thread::sleep(duration); - - let report = profiler - .report() - .build() - .context("failed to build pprof report")?; - - let pprof = report.pprof().context("failed to build pprof profile")?; - - let mut gz = GzEncoder::new(Vec::new(), Compression::default()); - gz.write_all(&pprof.encode_to_vec()) - .context("failed to compress pprof profile")?; - gz.finish() - .context("failed to finish compressing pprof profile") - } + pub fn new>(frequency: i32, blocklist: &[S]) -> Self { + Self( + pprof::ProfilerGuardBuilder::default() + .frequency(frequency) + .blocklist(blocklist), + ) + } + + /// Capture a pprof profile for the given duration. + /// The profile is compressed using gzip. + /// The profile can be analyzed using the `pprof` tool. + /// Warning: This method is blocking and may take a long time to complete. + /// It is recommended to run it in a separate thread. 
+ pub fn capture(&self, duration: std::time::Duration) -> anyhow::Result> { + let profiler = self.0.clone().build().context("failed to build pprof profiler")?; + + std::thread::sleep(duration); + + let report = profiler.report().build().context("failed to build pprof report")?; + + let pprof = report.pprof().context("failed to build pprof profile")?; + + let mut gz = GzEncoder::new(Vec::new(), Compression::default()); + gz.write_all(&pprof.encode_to_vec()) + .context("failed to compress pprof profile")?; + gz.finish().context("failed to finish compressing pprof profile") + } } diff --git a/foundations/src/telementry/pprof/heap.rs b/foundations/src/telementry/pprof/heap.rs index b442ebd4..37878ef5 100644 --- a/foundations/src/telementry/pprof/heap.rs +++ b/foundations/src/telementry/pprof/heap.rs @@ -8,34 +8,31 @@ pub static malloc_conf: &[u8] = b"prof:true,prof_active:true,lg_prof_sample:19,a pub struct Heap; impl Default for Heap { - fn default() -> Self { - Self::new() - } + fn default() -> Self { + Self::new() + } } impl Heap { - pub fn new() -> Self { - Self - } + pub fn new() -> Self { + Self + } - /// Capture a heap profile for the given duration. - /// The profile can be analyzed using the `pprof` tool. - /// Warning: This method is blocking and may take a long time to complete. It is recommended to run it in a separate thread. - pub fn capture(&mut self) -> anyhow::Result> { - let mut profiler = jemalloc_pprof::PROF_CTL - .as_ref() - .ok_or_else(|| anyhow::anyhow!("jemalloc profiling is not available"))? - .blocking_lock(); + /// Capture a heap profile for the given duration. + /// The profile can be analyzed using the `pprof` tool. + /// Warning: This method is blocking and may take a long time to complete. + /// It is recommended to run it in a separate thread. + pub fn capture(&mut self) -> anyhow::Result> { + let mut profiler = jemalloc_pprof::PROF_CTL + .as_ref() + .ok_or_else(|| anyhow::anyhow!("jemalloc profiling is not available"))? 
+ .blocking_lock(); - if !profiler.activated() { - // profiler.deactivate().context("failed to deactivate jemalloc profiling")?; - profiler - .activate() - .context("failed to activate jemalloc profiling")?; - } + if !profiler.activated() { + // profiler.deactivate().context("failed to deactivate jemalloc profiling")?; + profiler.activate().context("failed to activate jemalloc profiling")?; + } - profiler - .dump_pprof() - .context("failed to dump jemalloc pprof profile") - } + profiler.dump_pprof().context("failed to dump jemalloc pprof profile") + } } diff --git a/foundations/src/telementry/pprof/mod.rs b/foundations/src/telementry/pprof/mod.rs index 117a018d..5db456c0 100644 --- a/foundations/src/telementry/pprof/mod.rs +++ b/foundations/src/telementry/pprof/mod.rs @@ -4,8 +4,7 @@ mod heap; #[cfg(feature = "pprof-cpu")] mod cpu; -#[cfg(feature = "pprof-heap")] -pub use heap::Heap; - #[cfg(feature = "pprof-cpu")] pub use cpu::Cpu; +#[cfg(feature = "pprof-heap")] +pub use heap::Heap; diff --git a/foundations/src/telementry/server.rs b/foundations/src/telementry/server.rs index ed4c0d13..088d81ae 100644 --- a/foundations/src/telementry/server.rs +++ b/foundations/src/telementry/server.rs @@ -4,202 +4,284 @@ use anyhow::Context; #[derive(Debug)] pub struct ServerSettings { - pub bind: SocketAddr, - #[cfg(feature = "pprof-cpu")] - pub pprof_cpu_path: Option, - #[cfg(feature = "pprof-heap")] - pub pprof_heap_path: Option, - #[cfg(feature = "metrics")] - pub metrics_path: Option, - #[cfg(feature = "context")] - pub context: Option, + pub bind: SocketAddr, + #[cfg(feature = "pprof-cpu")] + pub pprof_cpu_path: Option, + #[cfg(feature = "pprof-heap")] + pub pprof_heap_path: Option, + #[cfg(feature = "metrics")] + pub metrics_path: Option, + #[cfg(feature = "health-check")] + pub health_path: Option, + #[cfg(feature = "context")] + pub context: Option, } impl Default for ServerSettings { - fn default() -> Self { - Self { - bind: SocketAddr::from(([127, 0, 0, 1], 9090)), - 
#[cfg(feature = "pprof-cpu")] - pprof_cpu_path: Some("/debug/pprof/profile".into()), - #[cfg(feature = "pprof-heap")] - pprof_heap_path: Some("/debug/pprof/heap".into()), - #[cfg(feature = "metrics")] - metrics_path: Some("/metrics".into()), - #[cfg(feature = "context")] - context: Some(crate::context::Context::global()), - } - } + fn default() -> Self { + Self { + bind: SocketAddr::from(([127, 0, 0, 1], 9090)), + #[cfg(feature = "pprof-cpu")] + pprof_cpu_path: Some("/debug/pprof/profile".into()), + #[cfg(feature = "pprof-heap")] + pprof_heap_path: Some("/debug/pprof/heap".into()), + #[cfg(feature = "metrics")] + metrics_path: Some("/metrics".into()), + #[cfg(feature = "health-check")] + health_path: Some("/health".into()), + #[cfg(feature = "context")] + context: Some(crate::context::Context::global()), + } + } } #[derive(serde::Deserialize)] #[serde(default)] struct PprofCpuQuery { - frequency: i32, - blocklist: Vec, - seconds: u32, + frequency: i32, + blocklist: Vec, + seconds: u32, } impl Default for PprofCpuQuery { - fn default() -> Self { - Self { - frequency: 100, - blocklist: Vec::new(), - seconds: 15, - } - } + fn default() -> Self { + Self { + frequency: 100, + blocklist: Vec::new(), + seconds: 15, + } + } } #[cfg(feature = "pprof-cpu")] async fn pprof_cpu( - axum::extract::Query(query): axum::extract::Query, + axum::extract::Query(query): axum::extract::Query, ) -> axum::response::Response { - if query.frequency < 100 { - return axum::response::Response::builder() - .status(axum::http::StatusCode::BAD_REQUEST) - .body("frequency must be greater than or equal to 100".into()) - .unwrap(); - } - - if query.seconds > 60 || query.seconds < 5 { - return axum::response::Response::builder() - .status(axum::http::StatusCode::BAD_REQUEST) - .body("duration must be less than or equal to 60 seconds and greater than or equal to 5 seconds".into()) - .unwrap(); - } - - match tokio::task::spawn_blocking(move || { - crate::telementry::pprof::Cpu::new(query.frequency, 
&query.blocklist) - .capture(std::time::Duration::from_secs(query.seconds as u64)) - }) - .await - { - Ok(Ok(contents)) => axum::response::Response::builder() - .status(axum::http::StatusCode::OK) - .header("content-type", "application/octet-stream") - .header( - "content-disposition", - "attachment; filename=\"profile.pb.gz\"", - ) - .body(contents.into()) - .unwrap(), - Ok(Err(err)) => { - tracing::error!(%err, "failed to capture pprof cpu profile"); - axum::response::Response::builder() - .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to capture pprof cpu profile".into()) - .unwrap() - } - Err(err) => { - tracing::error!(%err, "failed to spawn blocking task"); - axum::response::Response::builder() - .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to spawn blocking task".into()) - .unwrap() - } - } + if query.frequency < 100 { + return axum::response::Response::builder() + .status(axum::http::StatusCode::BAD_REQUEST) + .body("frequency must be greater than or equal to 100".into()) + .unwrap(); + } + + if query.seconds > 60 || query.seconds < 5 { + return axum::response::Response::builder() + .status(axum::http::StatusCode::BAD_REQUEST) + .body("duration must be less than or equal to 60 seconds and greater than or equal to 5 seconds".into()) + .unwrap(); + } + + match tokio::task::spawn_blocking(move || { + crate::telementry::pprof::Cpu::new(query.frequency, &query.blocklist) + .capture(std::time::Duration::from_secs(query.seconds as u64)) + }) + .await + { + Ok(Ok(contents)) => axum::response::Response::builder() + .status(axum::http::StatusCode::OK) + .header("content-type", "application/octet-stream") + .header("content-disposition", "attachment; filename=\"profile.pb.gz\"") + .body(contents.into()) + .unwrap(), + Ok(Err(err)) => { + tracing::error!(%err, "failed to capture pprof cpu profile"); + axum::response::Response::builder() + .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) + .body("failed to capture 
pprof cpu profile".into()) + .unwrap() + } + Err(err) => { + tracing::error!(%err, "failed to spawn blocking task"); + axum::response::Response::builder() + .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) + .body("failed to spawn blocking task".into()) + .unwrap() + } + } } #[cfg(feature = "pprof-heap")] async fn pprof_heap() -> axum::response::Response { - match tokio::task::spawn_blocking(|| crate::telementry::pprof::Heap::new().capture()).await { - Ok(Ok(contents)) => axum::response::Response::builder() - .status(axum::http::StatusCode::OK) - .header("content-type", "application/octet-stream") - .header("content-disposition", "attachment; filename=\"heap.pb.gz\"") - .body(contents.into()) - .unwrap(), - Ok(Err(err)) => { - tracing::error!(%err, "failed to capture pprof heap profile"); - axum::response::Response::builder() - .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to capture pprof heap profile".into()) - .unwrap() - } - Err(err) => { - tracing::error!(%err, "failed to spawn blocking task"); - axum::response::Response::builder() - .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to spawn blocking task".into()) - .unwrap() - } - } + match tokio::task::spawn_blocking(|| crate::telementry::pprof::Heap::new().capture()).await { + Ok(Ok(contents)) => axum::response::Response::builder() + .status(axum::http::StatusCode::OK) + .header("content-type", "application/octet-stream") + .header("content-disposition", "attachment; filename=\"heap.pb.gz\"") + .body(contents.into()) + .unwrap(), + Ok(Err(err)) => { + tracing::error!(%err, "failed to capture pprof heap profile"); + axum::response::Response::builder() + .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) + .body("failed to capture pprof heap profile".into()) + .unwrap() + } + Err(err) => { + tracing::error!(%err, "failed to spawn blocking task"); + axum::response::Response::builder() + .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) + 
.body("failed to spawn blocking task".into()) + .unwrap() + } + } } #[derive(serde::Deserialize, Default)] #[serde(default)] struct MetricsQuery { - optional: bool, + optional: bool, } #[cfg(feature = "metrics")] async fn metrics( - axum::extract::Query(query): axum::extract::Query, + axum::extract::Query(query): axum::extract::Query, ) -> axum::response::Response { - match tokio::task::spawn_blocking(move || crate::telementry::metrics::collect(query.optional)) - .await - { - Ok(Ok(contents)) => axum::response::Response::builder() - .status(axum::http::StatusCode::OK) - .header("content-type", "text/plain") - .body(contents.into()) - .unwrap(), - Ok(Err(err)) => { - tracing::error!(%err, "failed to collect metrics"); - axum::response::Response::builder() - .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to collect metrics".into()) - .unwrap() - } - Err(err) => { - tracing::error!(%err, "failed to spawn blocking task"); - axum::response::Response::builder() - .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to spawn blocking task".into()) - .unwrap() - } - } + match tokio::task::spawn_blocking(move || crate::telementry::metrics::collect(query.optional)).await { + Ok(Ok(contents)) => axum::response::Response::builder() + .status(axum::http::StatusCode::OK) + .header("content-type", "text/plain") + .body(contents.into()) + .unwrap(), + Ok(Err(err)) => { + tracing::error!(%err, "failed to collect metrics"); + axum::response::Response::builder() + .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) + .body("failed to collect metrics".into()) + .unwrap() + } + Err(err) => { + tracing::error!(%err, "failed to spawn blocking task"); + axum::response::Response::builder() + .status(axum::http::StatusCode::INTERNAL_SERVER_ERROR) + .body("failed to spawn blocking task".into()) + .unwrap() + } + } +} + +#[cfg(feature = "health-check")] +pub fn register_health_check(check: F) -> usize +where + F: Fn() -> Fut + Send + Sync + 
'static, + Fut: std::future::Future<Output = bool> + Send + Sync + 'static, +{ + register_health_check_boxed(Box::new(move || Box::pin(check()))) +} + +#[cfg(feature = "health-check")] +pub fn register_health_check_boxed(check: health_check::BoxHealthCheck) -> usize { + health_check::register(check) +} + +#[cfg(feature = "health-check")] +pub fn unregister_health_check(id: usize) { + health_check::unregister(id); +} + +#[cfg(feature = "health-check")] +mod health_check { + use std::pin::Pin; + use std::sync::atomic::AtomicUsize; + + use scc::HashMap; + + type BoxFut = Pin<Box<dyn std::future::Future<Output = bool> + Send + Sync>>; + pub(super) type BoxHealthCheck = Box<dyn Fn() -> BoxFut + Send + Sync>; + + #[derive(Default)] + struct HealthCheck { + id: AtomicUsize, + health_checks: HashMap<usize, BoxHealthCheck>, + } + + static HEALTH_CHECK: once_cell::sync::Lazy<HealthCheck> = + once_cell::sync::Lazy::<HealthCheck>::new(|| HealthCheck::default()); + + pub fn register(check: BoxHealthCheck) -> usize { + let id = HEALTH_CHECK.id.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + HEALTH_CHECK.health_checks.insert(id, check).ok().expect("id already exists"); + id + } + + pub fn unregister(id: usize) { + HEALTH_CHECK.health_checks.remove(&id); + } + + pub async fn is_healthy() -> bool { + let mut o_entry = HEALTH_CHECK.health_checks.first_entry_async().await; + + while let Some(entry) = o_entry { + if !(entry.get())().await { + return false; + } + + o_entry = entry.next_async().await; + } + + true + } +} + +#[cfg(feature = "health-check")] +async fn health() -> axum::response::Response { + if health_check::is_healthy().await { + axum::response::Response::builder() + .status(axum::http::StatusCode::OK) + .body("ok".into()) + .unwrap() + } else { + axum::response::Response::builder() + .status(axum::http::StatusCode::SERVICE_UNAVAILABLE) + .body("unavailable".into()) + .unwrap() + } } async fn not_found() -> &'static str { - "not found" + "not found" } pub async fn init(settings: ServerSettings) -> anyhow::Result<()> { - let mut router 
= axum::routing::Router::new(); + + #[cfg(feature = "pprof-cpu")] + if let Some(path) = &settings.pprof_cpu_path { + router = router.route(path, axum::routing::get(pprof_cpu)); + } - #[cfg(feature = "pprof-cpu")] - if let Some(path) = &settings.pprof_cpu_path { - router = router.route(path, axum::routing::get(pprof_cpu)); - } + #[cfg(feature = "pprof-heap")] + if let Some(path) = &settings.pprof_heap_path { + router = router.route(path, axum::routing::get(pprof_heap)); + } - #[cfg(feature = "pprof-heap")] - if let Some(path) = &settings.pprof_heap_path { - router = router.route(path, axum::routing::get(pprof_heap)); - } + #[cfg(feature = "metrics")] + if let Some(path) = &settings.metrics_path { + router = router.route(path, axum::routing::get(metrics)); + } - #[cfg(feature = "metrics")] - if let Some(path) = &settings.metrics_path { - router = router.route(path, axum::routing::get(metrics)); - } + #[cfg(feature = "health-check")] + if let Some(path) = &settings.health_path { + router = router.route(path, axum::routing::get(health)); + } - router = router.fallback(axum::routing::any(not_found)); + router = router.fallback(axum::routing::any(not_found)); - let tcp_listener = tokio::net::TcpListener::bind(settings.bind) - .await - .context("failed to bind tcp listener")?; + let tcp_listener = tokio::net::TcpListener::bind(settings.bind) + .await + .context("failed to bind tcp listener")?; - tracing::info!("telemetry server listening on {}", settings.bind); + tracing::info!("telemetry server listening on {}", settings.bind); - let server = axum::serve(tcp_listener, router); + let server = axum::serve(tcp_listener, router); - #[cfg(feature = "context")] - let server = server.with_graceful_shutdown(async move { - if let Some(context) = settings.context { - context.done().await; - } else { - std::future::pending::<()>().await; - } - }); + #[cfg(feature = "context")] + let server = server.with_graceful_shutdown(async move { + if let Some(context) = settings.context { + 
context.done().await; + } else { + std::future::pending::<()>().await; + } + }); - server.await.context("failed to serve") + server.await.context("failed to serve") } diff --git a/foundations/src/telementry/settings.rs b/foundations/src/telementry/settings.rs index 871942db..644d2bf2 100644 --- a/foundations/src/telementry/settings.rs +++ b/foundations/src/telementry/settings.rs @@ -3,504 +3,476 @@ use std::collections::HashMap; #[cfg(feature = "telemetry-server")] use std::net::SocketAddr; -#[cfg(feature = "logging")] -use crate::telementry::logging::TimeFormatter; -use crate::telementry::opentelemetry::{ - complex_rate_sampler, BatchExporter, Sampler, SpanObserver, -}; use opentelemetry_otlp::WithExportConfig; use opentelemetry_sdk::Resource; #[cfg(feature = "logging")] use tracing_subscriber::fmt::time::{ChronoLocal, ChronoUtc}; +#[cfg(feature = "logging")] +use crate::telementry::logging::TimeFormatter; +use crate::telementry::opentelemetry::{complex_rate_sampler, BatchExporter, Sampler, SpanObserver}; + #[crate::settings::auto_settings(crate_path = "crate")] pub struct TelementrySettings { - /// Settings for metric exporting. - pub metrics: MetricsSettings, - /// Settings for opentelemetry span exporting. - pub opentelemetry: OpentelemetrySettings, - /// Settings for logging. - pub logging: LoggingSettings, - #[cfg(all( - any(feature = "pprof-cpu", feature = "pprof-heap", feature = "metrics",), - feature = "telemetry-server" - ))] - /// Settings for the http server. - pub server: ServerSettings, + /// Settings for metric exporting. + pub metrics: MetricsSettings, + /// Settings for opentelemetry span exporting. + pub opentelemetry: OpentelemetrySettings, + /// Settings for logging. + pub logging: LoggingSettings, + #[cfg(all( + any(feature = "pprof-cpu", feature = "pprof-heap", feature = "metrics",), + feature = "telemetry-server" + ))] + /// Settings for the http server. 
+ pub server: ServerSettings, } #[cfg(feature = "metrics")] #[crate::settings::auto_settings(crate_path = "crate")] pub struct MetricsSettings { - /// Whether to enable metrics. - #[settings(default = true)] - pub enabled: bool, - /// A map of additional labels to add to metrics. - pub labels: HashMap, + /// Whether to enable metrics. + #[settings(default = true)] + pub enabled: bool, + /// A map of additional labels to add to metrics. + pub labels: HashMap, } #[cfg(feature = "opentelemetry")] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(default)] pub struct OpentelemetrySettings { - /// Whether to enable opentelemetry span exporting. - #[settings(default = true)] - pub enabled: bool, - /// A map of additional labels to add to opentelemetry spans. - pub labels: HashMap, - /// The max number of spans that have not started exporting. - /// This value is a per-thread limit. - #[settings(default = 500)] - pub max_backpressure: usize, - /// The number of spans to export in a batch. - #[settings(default = 10_000)] - pub batch_size: usize, - /// The max number of concurrent batch exports. - #[settings(default = 10)] - pub max_concurrent_exports: usize, - /// The max number of pending batch exports. - #[settings(default = 15)] - pub max_pending_exports: usize, - /// The interval to export spans at. - #[settings(default = std::time::Duration::from_secs(2))] - #[serde(with = "humantime_serde")] - pub interval: std::time::Duration, - /// Sampler to use for picking which spans to export. - #[settings(default = OpentelemetrySettingsSampler::Always)] - pub sampler: OpentelemetrySettingsSampler, - /// Export timeout. - #[settings(default = std::time::Duration::from_secs(15))] - #[serde(with = "humantime_serde")] - pub otlp_timeout: std::time::Duration, - /// The endpoint to export spans to. - #[settings(default = "http://localhost:4317".into())] - pub otlp_endpoint: String, - /// The export method to use. 
- #[settings(default = OpentelemetrySettingsExportMethod::Grpc)] - pub otlp_method: OpentelemetrySettingsExportMethod, - /// Filter to use for filtering spans. - #[cfg_attr( - feature = "env-filter", - doc = "See https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html" - )] - #[cfg_attr( - not(feature = "env-filter"), - doc = "See https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.LevelFilter.html" - )] - #[settings(default = "info".into())] - pub level: String, - /// Export Logging Level - pub logging: OpentelemetrySettingsLogging, - /// Enable metrics for opentelemetry. - #[cfg(feature = "metrics")] - #[settings(default = true)] - pub metrics: bool, + /// Whether to enable opentelemetry span exporting. + #[settings(default = true)] + pub enabled: bool, + /// A map of additional labels to add to opentelemetry spans. + pub labels: HashMap, + /// The max number of spans that have not started exporting. + /// This value is a per-thread limit. + #[settings(default = 500)] + pub max_backpressure: usize, + /// The number of spans to export in a batch. + #[settings(default = 10_000)] + pub batch_size: usize, + /// The max number of concurrent batch exports. + #[settings(default = 10)] + pub max_concurrent_exports: usize, + /// The max number of pending batch exports. + #[settings(default = 15)] + pub max_pending_exports: usize, + /// The interval to export spans at. + #[settings(default = std::time::Duration::from_secs(2))] + #[serde(with = "humantime_serde")] + pub interval: std::time::Duration, + /// Sampler to use for picking which spans to export. + #[settings(default = OpentelemetrySettingsSampler::Always)] + pub sampler: OpentelemetrySettingsSampler, + /// Export timeout. + #[settings(default = std::time::Duration::from_secs(15))] + #[serde(with = "humantime_serde")] + pub otlp_timeout: std::time::Duration, + /// The endpoint to export spans to. 
+ #[settings(default = "http://localhost:4317".into())] + pub otlp_endpoint: String, + /// The export method to use. + #[settings(default = OpentelemetrySettingsExportMethod::Grpc)] + pub otlp_method: OpentelemetrySettingsExportMethod, + /// Filter to use for filtering spans. + #[cfg_attr( + feature = "env-filter", + doc = "See https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html" + )] + #[cfg_attr( + not(feature = "env-filter"), + doc = "See https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.LevelFilter.html" + )] + #[settings(default = "info".into())] + pub level: String, + /// Export Logging Level + pub logging: OpentelemetrySettingsLogging, + /// Enable metrics for opentelemetry. + #[cfg(feature = "metrics")] + #[settings(default = true)] + pub metrics: bool, } #[cfg(feature = "opentelemetry")] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(default)] pub struct OpentelemetrySettingsLogging { - #[settings(default = OpentelemetrySettingsLoggingLevel::Warn)] - pub dropped_spans: OpentelemetrySettingsLoggingLevel, - #[settings(default = OpentelemetrySettingsLoggingLevel::Error)] - pub exporter_errors: OpentelemetrySettingsLoggingLevel, - #[settings(default = OpentelemetrySettingsLoggingLevel::Debug)] - pub exporter_success: OpentelemetrySettingsLoggingLevel, + #[settings(default = OpentelemetrySettingsLoggingLevel::Warn)] + pub dropped_spans: OpentelemetrySettingsLoggingLevel, + #[settings(default = OpentelemetrySettingsLoggingLevel::Error)] + pub exporter_errors: OpentelemetrySettingsLoggingLevel, + #[settings(default = OpentelemetrySettingsLoggingLevel::Debug)] + pub exporter_success: OpentelemetrySettingsLoggingLevel, } #[cfg(feature = "opentelemetry")] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(rename_all = "lowercase")] pub enum OpentelemetrySettingsLoggingLevel { - /// Error level logging. - Error, - /// Warning level logging. 
- Warn, - #[settings(default)] - /// Info level logging. - Info, - /// Debug level logging. - Debug, - /// Trace level logging. - Trace, - /// No logging. - Off, + /// Error level logging. + Error, + /// Warning level logging. + Warn, + #[settings(default)] + /// Info level logging. + Info, + /// Debug level logging. + Debug, + /// Trace level logging. + Trace, + /// No logging. + Off, } #[cfg(feature = "opentelemetry")] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(rename_all = "lowercase")] pub enum OpentelemetrySettingsExportMethod { - #[settings(default)] - /// Export spans over gRPC. - Grpc, - /// Export spans over HTTP. - Http, + #[settings(default)] + /// Export spans over gRPC. + Grpc, + /// Export spans over HTTP. + Http, } #[cfg(feature = "opentelemetry")] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(rename_all = "lowercase")] pub enum OpentelemetrySettingsSampler { - /// Always sample all spans. - #[settings(default)] - Always, - /// Never sample any spans. - Never, - /// Sample spans based on a rate. - RatioSimple(f64), - /// Sample spans based on a rate, with the ability to set a different rate for root spans. - /// This is useful because you can always sample root spans and then on some rate cull the tail. - /// In production, you might want to sample all root spans and then sample tail spans at a lower rate. - RatioComplex { - /// The rate to sample spans at. - head_rate: f64, - /// The rate to sample root spans at. - /// Root spans are spans that are not children of any other span. - /// If `None`, the root rate is the same as the rate. - tail_rate: Option, - /// Error rate to sample spans at. - /// If `None`, the error rate is the same as the rate. - error_rate: Option, - /// Sample all if any span in the tree contains an error. - #[serde(default = "default_true")] - sample_on_error: bool, - }, + /// Always sample all spans. + #[settings(default)] + Always, + /// Never sample any spans. 
+ Never, + /// Sample spans based on a rate. + RatioSimple(f64), + /// Sample spans based on a rate, with the ability to set a different rate + /// for root spans. This is useful because you can always sample root spans + /// and then on some rate cull the tail. In production, you might want to + /// sample all root spans and then sample tail spans at a lower rate. + RatioComplex { + /// The rate to sample spans at. + head_rate: f64, + /// The rate to sample root spans at. + /// Root spans are spans that are not children of any other span. + /// If `None`, the root rate is the same as the rate. + tail_rate: Option, + /// Error rate to sample spans at. + /// If `None`, the error rate is the same as the rate. + error_rate: Option, + /// Sample all if any span in the tree contains an error. + #[serde(default = "default_true")] + sample_on_error: bool, + }, } fn default_true() -> bool { - true + true } #[cfg(feature = "logging")] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(default)] pub struct LoggingSettings { - /// Whether to enable logging. - #[settings(default = true)] - pub enabled: bool, - /// The log level to filter logs by. - #[cfg_attr( - feature = "env-filter", - doc = "See https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html" - )] - #[cfg_attr( - not(feature = "env-filter"), - doc = "See https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.LevelFilter.html" - )] - #[settings(default = "info".into())] - pub level: String, - /// The log format to use. - #[settings(default = LoggingSettingsFormat::Normal)] - pub format: LoggingSettingsFormat, - /// Show spans in logs. - #[settings(default = true)] - pub show_spans: bool, - /// Show the thread id in logs. - #[settings(default = true)] - pub show_thread_id: bool, - /// Show the file info in logs. - #[settings(default = true)] - pub show_file_info: bool, - /// Show timestamps in logs. 
- #[settings(default = LoggingSettingsTimestamps::Local)] - pub timestamps: LoggingSettingsTimestamps, + /// Whether to enable logging. + #[settings(default = true)] + pub enabled: bool, + /// The log level to filter logs by. + #[cfg_attr( + feature = "env-filter", + doc = "See https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html" + )] + #[cfg_attr( + not(feature = "env-filter"), + doc = "See https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.LevelFilter.html" + )] + #[settings(default = "info".into())] + pub level: String, + /// The log format to use. + #[settings(default = LoggingSettingsFormat::Normal)] + pub format: LoggingSettingsFormat, + /// Show spans in logs. + #[settings(default = true)] + pub show_spans: bool, + /// Show the thread id in logs. + #[settings(default = true)] + pub show_thread_id: bool, + /// Show the file info in logs. + #[settings(default = true)] + pub show_file_info: bool, + /// Show timestamps in logs. + #[settings(default = LoggingSettingsTimestamps::Local)] + pub timestamps: LoggingSettingsTimestamps, } #[cfg(feature = "logging")] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(rename_all = "lowercase")] pub enum LoggingSettingsTimestamps { - /// Show timestamps in logs in the local timezone. - #[settings(default)] - Local, - /// Show timestamps in logs in UTC. - Utc, - /// Do not show timestamps in logs. - Off, + /// Show timestamps in logs in the local timezone. + #[settings(default)] + Local, + /// Show timestamps in logs in UTC. + Utc, + /// Do not show timestamps in logs. + Off, } #[cfg(feature = "logging")] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(rename_all = "lowercase")] pub enum LoggingSettingsFormat { - #[settings(default)] - /// The default human-readable log format. - Normal, - /// The JSON log format. - Json, - /// The pretty log format. - Pretty, - /// The compact log format. 
- Compact, + #[settings(default)] + /// The default human-readable log format. + Normal, + /// The JSON log format. + Json, + /// The pretty log format. + Pretty, + /// The compact log format. + Compact, } #[cfg(all( - any(feature = "pprof-cpu", feature = "pprof-heap", feature = "metrics",), - feature = "telemetry-server" + any(feature = "pprof-cpu", feature = "pprof-heap", feature = "metrics",), + feature = "telemetry-server" ))] #[crate::settings::auto_settings(crate_path = "crate")] #[serde(default)] pub struct ServerSettings { - /// Whether to enable the server. - #[settings(default = true)] - pub enabled: bool, - /// The address to bind the server to. - #[settings(default = SocketAddr::from(([127, 0, 0, 1], 9090)))] - pub bind: SocketAddr, - /// The path to the pprof heap endpoint. If `None`, the endpoint is disabled. - #[cfg(feature = "pprof-heap")] - #[settings(default = Some("/debug/pprof/heap".into()))] - pub pprof_heap_path: Option, - /// The path to the pprof CPU endpoint. If `None`, the endpoint is disabled. - #[cfg(feature = "pprof-cpu")] - #[settings(default = Some("/debug/pprof/profile".into()))] - pub pprof_cpu_path: Option, - /// The path to the metrics endpoint. If `None`, the endpoint is disabled. - #[cfg(feature = "metrics")] - #[settings(default = Some("/metrics".into()))] - pub metrics_path: Option, + /// Whether to enable the server. + #[settings(default = true)] + pub enabled: bool, + /// The address to bind the server to. + #[settings(default = SocketAddr::from(([127, 0, 0, 1], 9090)))] + pub bind: SocketAddr, + /// The path to the pprof heap endpoint. If `None`, the endpoint is + /// disabled. + #[cfg(feature = "pprof-heap")] + #[settings(default = Some("/debug/pprof/heap".into()))] + pub pprof_heap_path: Option, + /// The path to the pprof CPU endpoint. If `None`, the endpoint is disabled. 
+ #[cfg(feature = "pprof-cpu")] + #[settings(default = Some("/debug/pprof/profile".into()))] + pub pprof_cpu_path: Option, + /// The path to the metrics endpoint. If `None`, the endpoint is disabled. + #[cfg(feature = "metrics")] + #[settings(default = Some("/metrics".into()))] + pub metrics_path: Option, + /// The path to use for the health check endpoint. If `None`, the endpoint + /// is disabled. + #[cfg(feature = "health-check")] + #[settings(default = Some("/health".into()))] + pub health_path: Option, } pub async fn init(info: crate::ServiceInfo, settings: TelementrySettings) { - #[cfg(feature = "metrics")] - if settings.metrics.enabled { - crate::telementry::metrics::init(info, &settings.metrics.labels); - } + #[cfg(feature = "metrics")] + if settings.metrics.enabled { + crate::telementry::metrics::init(info, &settings.metrics.labels); + } + + #[cfg(any(feature = "opentelemetry", feature = "logging"))] + { + #[cfg(feature = "opentelemetry")] + let opentelemetry = if settings.opentelemetry.enabled { + Some( + crate::telementry::opentelemetry::layer( + SpanObserver { + max_unprocessed_spans_per_thread: settings.opentelemetry.max_backpressure, + sampler: match settings.opentelemetry.sampler { + OpentelemetrySettingsSampler::Always => Sampler::Always, + OpentelemetrySettingsSampler::Never => Sampler::Never, + OpentelemetrySettingsSampler::RatioSimple(rate) => Sampler::TraceIdRatio(rate), + OpentelemetrySettingsSampler::RatioComplex { + tail_rate, + head_rate, + error_rate, + sample_on_error, + } => complex_rate_sampler(head_rate, tail_rate, error_rate, sample_on_error), + }, + }, + BatchExporter { + batch_size: settings.opentelemetry.batch_size, + max_concurrent_exports: settings.opentelemetry.max_concurrent_exports, + max_pending_exports: settings.opentelemetry.max_pending_exports, + interval: settings.opentelemetry.interval, + #[cfg(feature = "metrics")] + metrics: settings.opentelemetry.metrics, + resource: { + let mut kv = vec![]; - #[cfg(any(feature = 
"opentelemetry", feature = "logging"))] - { - #[cfg(feature = "opentelemetry")] - let opentelemetry = if settings.opentelemetry.enabled { - Some( - crate::telementry::opentelemetry::layer( - SpanObserver { - max_unprocessed_spans_per_thread: settings.opentelemetry.max_backpressure, - sampler: match settings.opentelemetry.sampler { - OpentelemetrySettingsSampler::Always => Sampler::Always, - OpentelemetrySettingsSampler::Never => Sampler::Never, - OpentelemetrySettingsSampler::RatioSimple(rate) => { - Sampler::TraceIdRatio(rate) - } - OpentelemetrySettingsSampler::RatioComplex { - tail_rate, - head_rate, - error_rate, - sample_on_error, - } => complex_rate_sampler( - head_rate, - tail_rate, - error_rate, - sample_on_error, - ), - }, - }, - BatchExporter { - batch_size: settings.opentelemetry.batch_size, - max_concurrent_exports: settings.opentelemetry.max_concurrent_exports, - max_pending_exports: settings.opentelemetry.max_pending_exports, - interval: settings.opentelemetry.interval, - #[cfg(feature = "metrics")] - metrics: settings.opentelemetry.metrics, - resource: { - let mut kv = vec![]; + if !settings.opentelemetry.labels.contains_key("service.name") { + kv.push(opentelemetry::KeyValue::new("service.name", info.metric_name)); + } - if !settings.opentelemetry.labels.contains_key("service.name") { - kv.push(opentelemetry::KeyValue::new( - "service.name", - info.metric_name, - )); - } + if !settings.opentelemetry.labels.contains_key("service.version") { + kv.push(opentelemetry::KeyValue::new("service.version", info.version)); + } - if !settings - .opentelemetry - .labels - .contains_key("service.version") - { - kv.push(opentelemetry::KeyValue::new( - "service.version", - info.version, - )); - } + kv.extend( + settings + .opentelemetry + .labels + .iter() + .map(|(k, v)| opentelemetry::KeyValue::new(k.clone(), v.clone())), + ); - kv.extend( - settings.opentelemetry.labels.iter().map(|(k, v)| { - opentelemetry::KeyValue::new(k.clone(), v.clone()) - }), - ); + 
Resource::new(kv) + }, + drop_handler: { + const DROPPED_SPANS_ERROR: &str = "opentelementry exporter dropped spans due to backpressure"; - Resource::new(kv) - }, - drop_handler: { - const DROPPED_SPANS_ERROR: &str = - "opentelementry exporter dropped spans due to backpressure"; + match settings.opentelemetry.logging.dropped_spans { + OpentelemetrySettingsLoggingLevel::Error => Box::new(|count| { + tracing::error!(count, DROPPED_SPANS_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Warn => Box::new(|count| { + tracing::warn!(count, DROPPED_SPANS_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Info => Box::new(|count| { + tracing::info!(count, DROPPED_SPANS_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Debug => Box::new(|count| { + tracing::debug!(count, DROPPED_SPANS_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Trace => Box::new(|count| { + tracing::trace!(count, DROPPED_SPANS_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Off => Box::new(|_| {}), + } + }, + error_handler: { + const EXPORTER_ERROR: &str = "opentelementry exporter failed to export spans"; - match settings.opentelemetry.logging.dropped_spans { - OpentelemetrySettingsLoggingLevel::Error => Box::new(|count| { - tracing::error!(count, DROPPED_SPANS_ERROR); - }), - OpentelemetrySettingsLoggingLevel::Warn => Box::new(|count| { - tracing::warn!(count, DROPPED_SPANS_ERROR); - }), - OpentelemetrySettingsLoggingLevel::Info => Box::new(|count| { - tracing::info!(count, DROPPED_SPANS_ERROR); - }), - OpentelemetrySettingsLoggingLevel::Debug => Box::new(|count| { - tracing::debug!(count, DROPPED_SPANS_ERROR); - }), - OpentelemetrySettingsLoggingLevel::Trace => Box::new(|count| { - tracing::trace!(count, DROPPED_SPANS_ERROR); - }), - OpentelemetrySettingsLoggingLevel::Off => Box::new(|_| {}), - } - }, - error_handler: { - const EXPORTER_ERROR: &str = - "opentelementry exporter failed to export spans"; + match settings.opentelemetry.logging.exporter_errors { + 
OpentelemetrySettingsLoggingLevel::Error => Box::new(|err, count| { + tracing::error!(err = %err, count, EXPORTER_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Warn => Box::new(|err, count| { + tracing::warn!(err = %err, count, EXPORTER_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Info => Box::new(|err, count| { + tracing::info!(err = %err, count, EXPORTER_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Debug => Box::new(|err, count| { + tracing::debug!(err = %err, count, EXPORTER_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Trace => Box::new(|err, count| { + tracing::trace!(err = %err, count, EXPORTER_ERROR); + }), + OpentelemetrySettingsLoggingLevel::Off => Box::new(|_, _| {}), + } + }, + export_handler: { + const EXPORTER_SUCCESS: &str = "opentelementry exporter successfully exported spans"; - match settings.opentelemetry.logging.exporter_errors { - OpentelemetrySettingsLoggingLevel::Error => { - Box::new(|err, count| { - tracing::error!(err = %err, count, EXPORTER_ERROR); - }) - } - OpentelemetrySettingsLoggingLevel::Warn => { - Box::new(|err, count| { - tracing::warn!(err = %err, count, EXPORTER_ERROR); - }) - } - OpentelemetrySettingsLoggingLevel::Info => { - Box::new(|err, count| { - tracing::info!(err = %err, count, EXPORTER_ERROR); - }) - } - OpentelemetrySettingsLoggingLevel::Debug => { - Box::new(|err, count| { - tracing::debug!(err = %err, count, EXPORTER_ERROR); - }) - } - OpentelemetrySettingsLoggingLevel::Trace => { - Box::new(|err, count| { - tracing::trace!(err = %err, count, EXPORTER_ERROR); - }) - } - OpentelemetrySettingsLoggingLevel::Off => Box::new(|_, _| {}), - } - }, - export_handler: { - const EXPORTER_SUCCESS: &str = - "opentelementry exporter successfully exported spans"; + match settings.opentelemetry.logging.exporter_success { + OpentelemetrySettingsLoggingLevel::Error => Box::new(|count| { + tracing::error!(count, EXPORTER_SUCCESS); + }), + OpentelemetrySettingsLoggingLevel::Warn => Box::new(|count| { + 
tracing::warn!(count, EXPORTER_SUCCESS); + }), + OpentelemetrySettingsLoggingLevel::Info => Box::new(|count| { + tracing::info!(count, EXPORTER_SUCCESS); + }), + OpentelemetrySettingsLoggingLevel::Debug => Box::new(|count| { + tracing::debug!(count, EXPORTER_SUCCESS); + }), + OpentelemetrySettingsLoggingLevel::Trace => Box::new(|count| { + tracing::trace!(count, EXPORTER_SUCCESS); + }), + OpentelemetrySettingsLoggingLevel::Off => Box::new(|_| {}), + } + }, + }, + { + match settings.opentelemetry.otlp_method { + OpentelemetrySettingsExportMethod::Grpc => opentelemetry_otlp::new_exporter() + .tonic() + .with_endpoint(settings.opentelemetry.otlp_endpoint.clone()) + .with_timeout(settings.opentelemetry.otlp_timeout) + .build_span_exporter(), + OpentelemetrySettingsExportMethod::Http => opentelemetry_otlp::new_exporter() + .http() + .with_endpoint(settings.opentelemetry.otlp_endpoint.clone()) + .with_timeout(settings.opentelemetry.otlp_timeout) + .build_span_exporter(), + } + .expect("failed to build otlp exporter") + }, + ) + .with_filter(super::LevelFilter::new(&settings.opentelemetry.level).filter()), + ) + } else { + None + }; - match settings.opentelemetry.logging.exporter_success { - OpentelemetrySettingsLoggingLevel::Error => Box::new(|count| { - tracing::error!(count, EXPORTER_SUCCESS); - }), - OpentelemetrySettingsLoggingLevel::Warn => Box::new(|count| { - tracing::warn!(count, EXPORTER_SUCCESS); - }), - OpentelemetrySettingsLoggingLevel::Info => Box::new(|count| { - tracing::info!(count, EXPORTER_SUCCESS); - }), - OpentelemetrySettingsLoggingLevel::Debug => Box::new(|count| { - tracing::debug!(count, EXPORTER_SUCCESS); - }), - OpentelemetrySettingsLoggingLevel::Trace => Box::new(|count| { - tracing::trace!(count, EXPORTER_SUCCESS); - }), - OpentelemetrySettingsLoggingLevel::Off => Box::new(|_| {}), - } - }, - }, - { - match settings.opentelemetry.otlp_method { - OpentelemetrySettingsExportMethod::Grpc => { - opentelemetry_otlp::new_exporter() - .tonic() - 
.with_endpoint(settings.opentelemetry.otlp_endpoint.clone()) - .with_timeout(settings.opentelemetry.otlp_timeout) - .build_span_exporter() - } - OpentelemetrySettingsExportMethod::Http => { - opentelemetry_otlp::new_exporter() - .http() - .with_endpoint(settings.opentelemetry.otlp_endpoint.clone()) - .with_timeout(settings.opentelemetry.otlp_timeout) - .build_span_exporter() - } - } - .expect("failed to build otlp exporter") - }, - ) - .with_filter(super::LevelFilter::new(&settings.opentelemetry.level).filter()), - ) - } else { - None - }; + #[cfg(feature = "logging")] + let logging = if settings.logging.enabled { + let layer = tracing_subscriber::fmt::layer() + .with_file(settings.logging.show_file_info) + .with_line_number(settings.logging.show_file_info) + .with_thread_ids(settings.logging.show_thread_id) + .with_timer(match settings.logging.timestamps { + LoggingSettingsTimestamps::Local => TimeFormatter::Local(ChronoLocal::rfc_3339()), + LoggingSettingsTimestamps::Utc => TimeFormatter::Utc(ChronoUtc::rfc_3339()), + LoggingSettingsTimestamps::Off => TimeFormatter::None, + }); - #[cfg(feature = "logging")] - let logging = if settings.logging.enabled { - let layer = tracing_subscriber::fmt::layer() - .with_file(settings.logging.show_file_info) - .with_line_number(settings.logging.show_file_info) - .with_thread_ids(settings.logging.show_thread_id) - .with_timer(match settings.logging.timestamps { - LoggingSettingsTimestamps::Local => { - TimeFormatter::Local(ChronoLocal::rfc_3339()) - } - LoggingSettingsTimestamps::Utc => TimeFormatter::Utc(ChronoUtc::rfc_3339()), - LoggingSettingsTimestamps::Off => TimeFormatter::None, - }); + let layer = match settings.logging.format { + LoggingSettingsFormat::Normal => layer.boxed(), + LoggingSettingsFormat::Json => layer.json().boxed(), + LoggingSettingsFormat::Pretty => layer.pretty().boxed(), + LoggingSettingsFormat::Compact => layer.compact().boxed(), + }; - let layer = match settings.logging.format { - 
LoggingSettingsFormat::Normal => layer.boxed(), - LoggingSettingsFormat::Json => layer.json().boxed(), - LoggingSettingsFormat::Pretty => layer.pretty().boxed(), - LoggingSettingsFormat::Compact => layer.compact().boxed(), - }; + Some(layer.with_filter(super::LevelFilter::new(&settings.logging.level).filter())) + } else { + None + }; - Some(layer.with_filter(super::LevelFilter::new(&settings.logging.level).filter())) - } else { - None - }; + use tracing_subscriber::prelude::*; - use tracing_subscriber::prelude::*; + tracing_subscriber::registry().with(logging).with(opentelemetry).init(); + } - tracing_subscriber::registry() - .with(logging) - .with(opentelemetry) - .init(); - } + #[cfg(all( + any(feature = "pprof-cpu", feature = "pprof-heap", feature = "metrics",), + feature = "telemetry-server" + ))] + { + #[cfg(not(feature = "runtime"))] + use tokio::spawn; - #[cfg(all( - any(feature = "pprof-cpu", feature = "pprof-heap", feature = "metrics",), - feature = "telemetry-server" - ))] - { - #[cfg(feature = "runtime")] - use crate::runtime::spawn; - #[cfg(not(feature = "runtime"))] - use tokio::spawn; + #[cfg(feature = "runtime")] + use crate::runtime::spawn; - spawn(async move { - match crate::telementry::server::init(super::server::ServerSettings { - bind: settings.server.bind, - #[cfg(feature = "metrics")] - metrics_path: settings.server.metrics_path, - #[cfg(feature = "pprof-cpu")] - pprof_cpu_path: settings.server.pprof_cpu_path, - #[cfg(feature = "pprof-heap")] - pprof_heap_path: settings.server.pprof_heap_path, - #[cfg(feature = "context")] - context: Some(crate::context::Context::global()), - }) - .await - { - Ok(()) => {} - Err(err) => { - tracing::error!(error = %err, "failed to start server"); - } - } - }); - } + spawn(async move { + match crate::telementry::server::init(super::server::ServerSettings { + bind: settings.server.bind, + #[cfg(feature = "metrics")] + metrics_path: settings.server.metrics_path, + #[cfg(feature = "pprof-cpu")] + pprof_cpu_path: 
settings.server.pprof_cpu_path, + #[cfg(feature = "pprof-heap")] + pprof_heap_path: settings.server.pprof_heap_path, + #[cfg(feature = "health-check")] + health_path: settings.server.health_path, + #[cfg(feature = "context")] + context: Some(crate::context::Context::global()), + }) + .await + { + Ok(()) => {} + Err(err) => { + tracing::error!(error = %err, "failed to start server"); + } + } + }); + } }