diff --git a/server-core/src/filter.rs b/server-core/src/filter.rs
index be2d5b98..f0ae0c85 100644
--- a/server-core/src/filter.rs
+++ b/server-core/src/filter.rs
@@ -2,23 +2,18 @@ use hashbrown::{HashMap, HashSet};
 use regex::{self, Regex};
 
-use cli_core::{
-    Allocation,
-    BacktraceId,
-    Data,
-    Timestamp
-};
+use cli_core::{Allocation, BacktraceId, Data, Timestamp};
 
 use crate::protocol;
 
 #[derive(Clone, Debug)]
 pub struct GroupFilter {
-    pub interval_min: Option< Timestamp >,
-    pub interval_max: Option< Timestamp >,
-    pub leaked_allocations_min: Option< protocol::NumberOrPercentage >,
-    pub leaked_allocations_max: Option< protocol::NumberOrPercentage >,
+    pub interval_min: Option<Timestamp>,
+    pub interval_max: Option<Timestamp>,
+    pub leaked_allocations_min: Option<protocol::NumberOrPercentage>,
+    pub leaked_allocations_max: Option<protocol::NumberOrPercentage>,
     pub allocations_min: usize,
-    pub allocations_max: usize
+    pub allocations_max: usize,
 }
 
 #[derive(Clone, Debug)]
@@ -34,45 +29,45 @@ pub struct Filter {
     pub size_max_specified: bool,
     pub size_max: u64,
     pub lifetime_min: protocol::Interval,
-    pub lifetime_max: Option< protocol::Interval >,
+    pub lifetime_max: Option<protocol::Interval>,
     pub lifetime: protocol::LifetimeFilter,
     pub backtrace_depth_min: usize,
     pub backtrace_depth_max: usize,
-    pub mmaped: Option< protocol::MmapedFilter >,
-    pub arena: Option< protocol::ArenaFilter >,
-    pub matched_backtraces: Option< HashSet< BacktraceId > >,
-    pub marker: Option< u32 >,
-    pub group_filter: Option< GroupFilter >
+    pub mmaped: Option<protocol::MmapedFilter>,
+    pub arena: Option<protocol::ArenaFilter>,
+    pub matched_backtraces: Option<HashSet<BacktraceId>>,
+    pub marker: Option<u32>,
+    pub group_filter: Option<GroupFilter>,
 }
 
 impl Filter {
-    pub fn timestamp_start_opt( &self ) -> Option< Timestamp > {
+    pub fn timestamp_start_opt(&self) -> Option<Timestamp> {
         if self.timestamp_start_specified {
-            Some( self.timestamp_start )
+            Some(self.timestamp_start)
         } else {
             None
         }
     }
 
-    pub fn timestamp_end_opt( &self ) -> Option< Timestamp > {
+    pub fn timestamp_end_opt(&self) -> Option<Timestamp> {
         if self.timestamp_end_specified {
-            Some( self.timestamp_end )
+            Some(self.timestamp_end)
         } else {
             None
         }
     }
 
-    pub fn size_min_opt( &self ) -> Option< u64 > {
+    pub fn size_min_opt(&self) -> Option<u64> {
         if self.size_min_specified {
-            Some( self.size_min )
+            Some(self.size_min)
         } else {
             None
         }
     }
 
-    pub fn size_max_opt( &self ) -> Option< u64 > {
+    pub fn size_max_opt(&self) -> Option<u64> {
         if self.size_max_specified {
-            Some( self.size_max )
+            Some(self.size_max)
         } else {
             None
         }
@@ -80,169 +75,244 @@ impl Filter {
 }
 
 pub enum PrepareFilterError {
-    InvalidRegex( &'static str, regex::Error )
+    InvalidRegex(&'static str, regex::Error),
 }
 
-pub fn prepare_filter( data: &Data, filter: &protocol::AllocFilter ) -> Result< Filter, PrepareFilterError > {
+pub fn prepare_filter(
+    data: &Data,
+    filter: &protocol::AllocFilter,
+) -> Result<Filter, PrepareFilterError> {
     let matched_backtraces_1;
     let matched_backtraces_2;
-    if filter.function_regex.is_some() || filter.source_regex.is_some() || filter.negative_function_regex.is_some() || filter.negative_source_regex.is_some() {
-        let function_regex = if let Some( ref pattern ) = filter.function_regex {
-            Some( Regex::new( pattern ).map_err( |err| PrepareFilterError::InvalidRegex( "function_regex", err ) )? )
+    if filter.function_regex.is_some()
+        || filter.source_regex.is_some()
+        || filter.library_regex.is_some()
+        || filter.negative_function_regex.is_some()
+        || filter.negative_source_regex.is_some()
+        || filter.negative_library_regex.is_some()
+    {
+        let function_regex = if let Some(ref pattern) = filter.function_regex {
+            Some(
+                Regex::new(pattern)
+                    .map_err(|err| PrepareFilterError::InvalidRegex("function_regex", err))?,
+            )
         } else {
             None
         };
 
-        let source_regex = if let Some( ref pattern ) = filter.source_regex {
-            Some( Regex::new( pattern ).map_err( |err| PrepareFilterError::InvalidRegex( "source_regex", err ) )? )
+        let source_regex = if let Some(ref pattern) = filter.source_regex {
+            Some(
+                Regex::new(pattern)
+                    .map_err(|err| PrepareFilterError::InvalidRegex("source_regex", err))?,
+            )
         } else {
             None
         };
 
-        let negative_function_regex = if let Some( ref pattern ) = filter.negative_function_regex {
-            Some( Regex::new( pattern ).map_err( |err| PrepareFilterError::InvalidRegex( "negative_function_regex", err ) )? )
+        let library_regex = if let Some(ref pattern) = filter.library_regex {
+            Some(
+                Regex::new(pattern)
+                    .map_err(|err| PrepareFilterError::InvalidRegex("library_regex", err))?,
+            )
         } else {
             None
         };
 
-        let negative_source_regex = if let Some( ref pattern ) = filter.negative_source_regex {
-            Some( Regex::new( pattern ).map_err( |err| PrepareFilterError::InvalidRegex( "negative_source_regex", err ) )? )
-        } else {
-            None
-        };
+        let negative_function_regex =
+            if let Some(ref pattern) = filter.negative_function_regex {
+                Some(Regex::new(pattern).map_err(|err| {
+                    PrepareFilterError::InvalidRegex("negative_function_regex", err)
+                })?)
+            } else {
+                None
+            };
+
+        let negative_source_regex =
+            if let Some(ref pattern) = filter.negative_source_regex {
+                Some(Regex::new(pattern).map_err(|err| {
+                    PrepareFilterError::InvalidRegex("negative_source_regex", err)
+                })?)
+            } else {
+                None
+            };
+
+        let negative_library_regex =
+            if let Some(ref pattern) = filter.negative_library_regex {
+                Some(Regex::new(pattern).map_err(|err| {
+                    PrepareFilterError::InvalidRegex("negative_library_regex", err)
+                })?)
+            } else {
+                None
+            };
 
         let mut matched_backtraces = HashSet::new();
         let mut positive_cache = HashMap::new();
         let mut negative_cache = HashMap::new();
         for (backtrace_id, backtrace) in data.all_backtraces() {
-            let mut positive_matched = function_regex.is_none() && source_regex.is_none();
+            let mut positive_matched =
+                function_regex.is_none() && source_regex.is_none() && library_regex.is_none();
             let mut negative_matched = false;
-            let check_negative = negative_function_regex.is_some() || negative_source_regex.is_some();
+            let check_negative = negative_function_regex.is_some()
+                || negative_source_regex.is_some()
+                || negative_library_regex.is_some();
             for (frame_id, frame) in backtrace {
-                let check_positive =
-                    if positive_matched {
-                        false
-                    } else if let Some( &cached_result ) = positive_cache.get( &frame_id ) {
-                        positive_matched = cached_result;
-                        false
-                    } else {
-                        true
-                    };
+                let check_positive = if positive_matched {
+                    false
+                } else if let Some(&cached_result) = positive_cache.get(&frame_id) {
+                    positive_matched = cached_result;
+                    false
+                } else {
+                    true
+                };
 
                 if positive_matched && !check_negative {
                     break;
                 }
 
                 let mut function = None;
-                if (check_positive && function_regex.is_some()) || negative_function_regex.is_some() {
-                    function = frame.function().or_else( || frame.raw_function() ).map( |id| data.interner().resolve( id ).unwrap() );
+                if (check_positive && function_regex.is_some()) || negative_function_regex.is_some()
+                {
+                    function = frame
+                        .function()
+                        .or_else(|| frame.raw_function())
+                        .map(|id| data.interner().resolve(id).unwrap());
                 }
 
                 let mut source = None;
                 if (check_positive && source_regex.is_some()) || negative_source_regex.is_some() {
-                    source = frame.source().map( |id| data.interner().resolve( id ).unwrap() )
+                    source = frame
+                        .source()
+                        .map(|id| data.interner().resolve(id).unwrap())
+                }
+
+                let mut library = None;
+                if (check_positive && library_regex.is_some()) || negative_library_regex.is_some() {
+                    library = frame
+                        .library()
+                        .map(|id| data.interner().resolve(id).unwrap())
                 }
 
                 if check_positive {
-                    let matched_function =
-                        if let Some( regex ) = function_regex.as_ref() {
-                            if let Some( ref function ) = function {
-                                regex.is_match( function )
-                            } else {
-                                false
-                            }
+                    let matched_function = if let Some(regex) = function_regex.as_ref() {
+                        if let Some(ref function) = function {
+                            regex.is_match(function)
                         } else {
-                            true
-                        };
-
-                    let matched_source =
-                        if let Some( regex ) = source_regex.as_ref() {
-                            if let Some( ref source ) = source {
-                                regex.is_match( source )
-                            } else {
-                                false
-                            }
+                            false
+                        }
+                    } else {
+                        true
+                    };
+
+                    let matched_source = if let Some(regex) = source_regex.as_ref() {
+                        if let Some(ref source) = source {
+                            regex.is_match(source)
+                        } else {
+                            false
+                        }
+                    } else {
+                        true
+                    };
+
+                    let matched_library = if let Some(regex) = library_regex.as_ref() {
+                        if let Some(ref library) = library {
+                            regex.is_match(library)
                         } else {
-                            true
-                        };
+                            false
+                        }
+                    } else {
+                        true
+                    };
 
-                    positive_matched = matched_function && matched_source;
-                    positive_cache.insert( frame_id, positive_matched );
+                    positive_matched = matched_function && matched_source && matched_library;
+                    positive_cache.insert(frame_id, positive_matched);
                 }
 
                 if check_negative {
-                    match negative_cache.get( &frame_id ).cloned() {
-                        Some( true ) => {
+                    match negative_cache.get(&frame_id).cloned() {
+                        Some(true) => {
                             negative_matched = true;
                             break;
-                        },
-                        Some( false ) => {
+                        }
+                        Some(false) => {
                             continue;
-                        },
+                        }
                         None => {}
                     }
 
-                    if let Some( regex ) = negative_function_regex.as_ref() {
-                        if let Some( ref function ) = function {
-                            if regex.is_match( function ) {
-                                negative_cache.insert( frame_id, true );
+                    if let Some(regex) = negative_function_regex.as_ref() {
+                        if let Some(ref function) = function {
+                            if regex.is_match(function) {
+                                negative_cache.insert(frame_id, true);
                                 negative_matched = true;
                                 break;
                             }
                         }
                     }
 
-                    if let Some( regex ) = negative_source_regex.as_ref() {
-                        if let Some( ref source ) = source {
-                            if regex.is_match( source ) {
-                                negative_cache.insert( frame_id, true );
+                    if let Some(regex) = negative_source_regex.as_ref() {
+                        if let Some(ref source) = source {
+                            if regex.is_match(source) {
+                                negative_cache.insert(frame_id, true);
                                 negative_matched = true;
                                 break;
                             }
                         }
                     }
 
-                    negative_cache.insert( frame_id, false );
+                    if let Some(regex) = negative_library_regex.as_ref() {
+                        if let Some(ref library) = library {
+                            if regex.is_match(library) {
+                                negative_cache.insert(frame_id, true);
+                                negative_matched = true;
+                                break;
+                            }
+                        }
+                    }
+
+                    negative_cache.insert(frame_id, false);
                 }
             }
 
             if positive_matched && !negative_matched {
-                matched_backtraces.insert( backtrace_id );
+                matched_backtraces.insert(backtrace_id);
             }
         }
 
-        matched_backtraces_1 = Some( matched_backtraces );
+        matched_backtraces_1 = Some(matched_backtraces);
     } else {
         matched_backtraces_1 = None;
     }
 
-    if let Some( backtrace ) = filter.backtraces {
+    if let Some(backtrace) = filter.backtraces {
         let mut matched_backtraces = HashSet::new();
-        matched_backtraces.insert( BacktraceId::new( backtrace ) );
-        matched_backtraces_2 = Some( matched_backtraces );
+        matched_backtraces.insert(BacktraceId::new(backtrace));
+        matched_backtraces_2 = Some(matched_backtraces);
     } else {
         matched_backtraces_2 = None;
     }
 
     let matched_backtraces = match (matched_backtraces_1, matched_backtraces_2) {
         (None, None) => None,
-        (Some( left ), None) => Some( left ),
-        (None, Some( right )) => Some( right ),
-        (Some( left ), Some( right )) => Some( left.intersection( &right ).cloned().collect() )
+        (Some(left), None) => Some(left),
+        (None, Some(right)) => Some(right),
+        (Some(left), Some(right)) => Some(left.intersection(&right).cloned().collect()),
     };
 
-    let group_interval_min = filter.group_interval_min.map( |ts| ts.to_timestamp( data.initial_timestamp(), data.last_timestamp() ) );
-    let group_interval_max = filter.group_interval_max.map( |ts| ts.to_timestamp( data.initial_timestamp(), data.last_timestamp() ) );
+    let group_interval_min = filter
+        .group_interval_min
+        .map(|ts| ts.to_timestamp(data.initial_timestamp(), data.last_timestamp()));
+    let group_interval_max = filter
+        .group_interval_max
+        .map(|ts| ts.to_timestamp(data.initial_timestamp(), data.last_timestamp()));
 
-    let has_group_filter =
-        group_interval_min.is_some() ||
-        group_interval_max.is_some() ||
-        filter.group_leaked_allocations_min.is_some() ||
-        filter.group_leaked_allocations_max.is_some() ||
-        filter.group_allocations_min.is_some() ||
-        filter.group_allocations_max.is_some();
+    let has_group_filter = group_interval_min.is_some()
+        || group_interval_max.is_some()
+        || filter.group_leaked_allocations_min.is_some()
+        || filter.group_leaked_allocations_max.is_some()
+        || filter.group_allocations_min.is_some()
+        || filter.group_allocations_max.is_some();
 
     let group_filter = if has_group_filter {
         let group_filter = GroupFilter {
@@ -250,42 +320,54 @@ pub fn prepare_filter( data: &Data, filter: &protocol::AllocFilter ) -> Result<
             interval_max: group_interval_max,
             leaked_allocations_min: filter.group_leaked_allocations_min,
             leaked_allocations_max: filter.group_leaked_allocations_max,
-            allocations_min: filter.group_allocations_min.map( |value| value as usize ).unwrap_or( 0 ),
-            allocations_max: filter.group_allocations_max.map( |value| value as usize ).unwrap_or( -1_i32 as _ ),
+            allocations_min: filter
+                .group_allocations_min
+                .map(|value| value as usize)
+                .unwrap_or(0),
+            allocations_max: filter
+                .group_allocations_max
+                .map(|value| value as usize)
+                .unwrap_or(-1_i32 as _),
         };
 
-        Some( group_filter )
+        Some(group_filter)
     } else {
         None
     };
 
     let filter = Filter {
         timestamp_start_specified: filter.from.is_some(),
-        timestamp_start: filter.from.map( |ts| ts.to_timestamp( data.initial_timestamp(), data.last_timestamp() ) ).unwrap_or( Timestamp::min() ),
+        timestamp_start: filter
+            .from
+            .map(|ts| ts.to_timestamp(data.initial_timestamp(), data.last_timestamp()))
+            .unwrap_or(Timestamp::min()),
         timestamp_end_specified: filter.to.is_some(),
-        timestamp_end: filter.to.map( |ts| ts.to_timestamp( data.initial_timestamp(), data.last_timestamp() ) ).unwrap_or( Timestamp::max() ),
-        address_min: filter.address_min.unwrap_or( 0 ),
-        address_max: filter.address_max.unwrap_or( -1_i32 as _ ),
+        timestamp_end: filter
+            .to
+            .map(|ts| ts.to_timestamp(data.initial_timestamp(), data.last_timestamp()))
+            .unwrap_or(Timestamp::max()),
+        address_min: filter.address_min.unwrap_or(0),
+        address_max: filter.address_max.unwrap_or(-1_i32 as _),
         size_min_specified: filter.size_min.is_some(),
-        size_min: filter.size_min.unwrap_or( 0 ),
+        size_min: filter.size_min.unwrap_or(0),
         size_max_specified: filter.size_max.is_some(),
-        size_max: filter.size_max.unwrap_or( -1_i32 as _ ),
-        lifetime_min: filter.lifetime_min.unwrap_or( protocol::Interval::min() ),
+        size_max: filter.size_max.unwrap_or(-1_i32 as _),
+        lifetime_min: filter.lifetime_min.unwrap_or(protocol::Interval::min()),
         lifetime_max: filter.lifetime_max,
-        lifetime: filter.lifetime.unwrap_or( protocol::LifetimeFilter::All ),
-        backtrace_depth_min: filter.backtrace_depth_min.unwrap_or( 0 ) as _,
-        backtrace_depth_max: filter.backtrace_depth_max.unwrap_or( -1_i32 as _ ) as _,
+        lifetime: filter.lifetime.unwrap_or(protocol::LifetimeFilter::All),
+        backtrace_depth_min: filter.backtrace_depth_min.unwrap_or(0) as _,
+        backtrace_depth_max: filter.backtrace_depth_max.unwrap_or(-1_i32 as _) as _,
         mmaped: filter.mmaped,
         arena: filter.arena,
         matched_backtraces,
         marker: filter.marker,
-        group_filter
+        group_filter,
     };
 
-    Ok( filter )
+    Ok(filter)
 }
 
 #[inline]
-pub fn match_allocation( data: &Data, allocation: &Allocation, filter: &Filter ) -> bool {
+pub fn match_allocation(data: &Data, allocation: &Allocation, filter: &Filter) -> bool {
     let timestamp_start = filter.timestamp_start;
     let timestamp_end = filter.timestamp_end;
     let size_min = filter.size_min;
@@ -316,57 +398,57 @@ pub fn match_allocation( data: &Data, allocation: &Allocation, filter: &Filter )
     }
 
     match filter.lifetime {
-        protocol::LifetimeFilter::All => {},
+        protocol::LifetimeFilter::All => {}
         protocol::LifetimeFilter::OnlyLeaked => {
             if allocation.deallocation.is_some() {
                 return false;
             }
-        },
+        }
         protocol::LifetimeFilter::OnlyNotDeallocatedInCurrentRange => {
-            if let Some( ref deallocation ) = allocation.deallocation {
+            if let Some(ref deallocation) = allocation.deallocation {
                 if deallocation.timestamp <= timestamp_end {
                     return false;
                 }
             }
-        },
+        }
         protocol::LifetimeFilter::OnlyDeallocatedInCurrentRange => {
-            if let Some( ref deallocation ) = allocation.deallocation {
+            if let Some(ref deallocation) = allocation.deallocation {
                 if deallocation.timestamp > timestamp_end {
                     return false;
                 }
            } else {
                 return false;
            }
-        },
+        }
         protocol::LifetimeFilter::OnlyTemporary => {
             if allocation.deallocation.is_none() {
                 return false;
             }
-        },
+        }
         protocol::LifetimeFilter::OnlyWholeGroupLeaked => {
             if allocation.deallocation.is_some() {
                 return false;
             }
 
-            let stats = data.get_group_statistics( allocation.backtrace );
+            let stats = data.get_group_statistics(allocation.backtrace);
            if stats.free_count != 0 {
                return false;
            }
        }
    }
 
-    let backtrace = data.get_backtrace( allocation.backtrace );
+    let backtrace = data.get_backtrace(allocation.backtrace);
     if backtrace.len() < backtrace_depth_min || backtrace.len() > backtrace_depth_max {
         return false;
     }
 
-    if let Some( ref deallocation ) = allocation.deallocation {
+    if let Some(ref deallocation) = allocation.deallocation {
         let lifetime = deallocation.timestamp - allocation.timestamp;
         if lifetime < lifetime_min.0 {
             return false;
         }
 
-        if let Some( lifetime_max ) = lifetime_max {
+        if let Some(lifetime_max) = lifetime_max {
             if lifetime > lifetime_max.0 {
                 return false;
             }
@@ -377,40 +459,40 @@ pub fn match_allocation( data: &Data, allocation: &Allocation, filter: &Filter )
         }
     }
 
-    if let Some( mmaped ) = filter.mmaped {
+    if let Some(mmaped) = filter.mmaped {
         let ok = match mmaped {
             protocol::MmapedFilter::Yes => allocation.is_mmaped(),
-            protocol::MmapedFilter::No => !allocation.is_mmaped()
+            protocol::MmapedFilter::No => !allocation.is_mmaped(),
         };
 
         if !ok {
             return false;
         }
     }
 
-    if let Some( arena ) = filter.arena {
+    if let Some(arena) = filter.arena {
         let ok = match arena {
             protocol::ArenaFilter::Main => !allocation.in_non_main_arena(),
-            protocol::ArenaFilter::NonMain => allocation.in_non_main_arena()
+            protocol::ArenaFilter::NonMain => allocation.in_non_main_arena(),
         };
 
         if !ok {
             return false;
         }
     }
 
-    if let Some( marker ) = filter.marker {
+    if let Some(marker) = filter.marker {
         if allocation.marker != marker {
             return false;
         }
     }
 
-    if let Some( ref matched_backtraces ) = filter.matched_backtraces {
-        if !matched_backtraces.contains( &allocation.backtrace ) {
+    if let Some(ref matched_backtraces) = filter.matched_backtraces {
+        if !matched_backtraces.contains(&allocation.backtrace) {
            return false;
        }
    }
 
-    if let Some( ref group_filter ) = filter.group_filter {
-        let group_allocations = data.get_allocation_ids_by_backtrace( allocation.backtrace );
+    if let Some(ref group_filter) = filter.group_filter {
+        let group_allocations = data.get_allocation_ids_by_backtrace(allocation.backtrace);
         if group_allocations.len() < group_filter.allocations_min {
             return false;
         }
@@ -419,24 +501,34 @@ pub fn match_allocation( data: &Data, allocation: &Allocation, filter: &Filter )
             return false;
         }
 
-        let first_timestamp = data.get_allocation( *group_allocations.first().unwrap() ).timestamp;
-        let last_timestamp = data.get_allocation( *group_allocations.last().unwrap() ).timestamp;
+        let first_timestamp = data
+            .get_allocation(*group_allocations.first().unwrap())
+            .timestamp;
+        let last_timestamp = data
+            .get_allocation(*group_allocations.last().unwrap())
+            .timestamp;
         let interval = last_timestamp - first_timestamp;
 
-        if interval < group_filter.interval_min.unwrap_or( Timestamp::min() ) {
+        if interval < group_filter.interval_min.unwrap_or(Timestamp::min()) {
             return false;
         }
 
-        if interval > group_filter.interval_max.unwrap_or( Timestamp::max() ) {
+        if interval > group_filter.interval_max.unwrap_or(Timestamp::max()) {
             return false;
         }
 
-        let stats = data.get_group_statistics( allocation.backtrace );
+        let stats = data.get_group_statistics(allocation.backtrace);
         let total_allocations = stats.alloc_count as u32;
         let leaked = (stats.alloc_count - stats.free_count) as u32;
-        let leaked_min = group_filter.leaked_allocations_min.map( |threshold| threshold.get( total_allocations ) ).unwrap_or( 0 );
-        let leaked_max = group_filter.leaked_allocations_max.map( |threshold| threshold.get( total_allocations ) ).unwrap_or( -1_i32 as _ );
+        let leaked_min = group_filter
+            .leaked_allocations_min
+            .map(|threshold| threshold.get(total_allocations))
+            .unwrap_or(0);
+        let leaked_max = group_filter
+            .leaked_allocations_max
+            .map(|threshold| threshold.get(total_allocations))
+            .unwrap_or(-1_i32 as _);
 
         if leaked < leaked_min {
             return false;
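The matching loop above is the heart of the patch, so here is a minimal, self-contained sketch of the rule it implements. This is not the patch's code: `Frame`, `Matcher`, and the accessor functions are hypothetical stand-ins for the real `cli_core` types; only the semantics are taken from `prepare_filter` (a backtrace matches when some frame satisfies all positive regexes and no frame satisfies any negative regex, with per-frame verdicts cached by frame id).

```rust
use std::collections::HashMap;

use regex::Regex; // assumes the `regex` crate, as in the patch

// Hypothetical stand-in for the real interned frame type.
struct Frame {
    id: u64,
    function: Option<String>,
    source: Option<String>,
    library: Option<String>,
}

// One accessor per regex-filterable attribute.
type Field = fn(&Frame) -> Option<&str>;

fn function_of(frame: &Frame) -> Option<&str> {
    frame.function.as_deref()
}
fn source_of(frame: &Frame) -> Option<&str> {
    frame.source.as_deref()
}
fn library_of(frame: &Frame) -> Option<&str> {
    frame.library.as_deref()
}

struct Matcher {
    positive: Vec<(Regex, Field)>,
    negative: Vec<(Regex, Field)>,
    positive_cache: HashMap<u64, bool>,
    negative_cache: HashMap<u64, bool>,
}

impl Matcher {
    fn backtrace_matches(&mut self, frames: &[Frame]) -> bool {
        let Matcher { positive, negative, positive_cache, negative_cache } = self;
        // With no positive regexes, every backtrace is a positive match.
        let mut positive_matched = positive.is_empty();
        for frame in frames {
            if !positive_matched {
                // A frame is a positive match only if *all* positive regexes
                // match it; the verdict is cached per frame id.
                positive_matched = *positive_cache.entry(frame.id).or_insert_with(|| {
                    positive
                        .iter()
                        .all(|(regex, field)| field(frame).map_or(false, |s| regex.is_match(s)))
                });
            }
            // A single negative hit on any frame rejects the whole backtrace.
            let negative_hit = *negative_cache.entry(frame.id).or_insert_with(|| {
                negative
                    .iter()
                    .any(|(regex, field)| field(frame).map_or(false, |s| regex.is_match(s)))
            });
            if negative_hit {
                return false;
            }
            if positive_matched && negative.is_empty() {
                break; // nothing left to decide for this backtrace
            }
        }
        positive_matched
    }
}

fn main() {
    let mut matcher = Matcher {
        positive: vec![(Regex::new("^alloc").unwrap(), function_of as Field)],
        negative: vec![(Regex::new("libc").unwrap(), library_of as Field)],
        positive_cache: HashMap::new(),
        negative_cache: HashMap::new(),
    };
    let frames = [Frame {
        id: 0,
        function: Some("alloc_buffer".into()),
        source: Some("buffer.rs".into()),
        library: Some("libfoo.so".into()),
    }];
    assert!(matcher.backtrace_matches(&frames));
    let _ = source_of; // source filtering works the same way
}
```

Caching by frame id is what keeps this affordable: frames are deduplicated and shared across backtraces, so each regex runs at most once per distinct frame rather than once per backtrace, which is why the patch keeps `positive_cache` and `negative_cache` alive across the whole `all_backtraces()` loop.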
source: Option< &'a str >, - pub line: Option< u32 >, - pub column: Option< u32 >, - pub is_inline: bool + pub library: Option<&'a str>, + pub function: Option>, + pub raw_function: Option<&'a str>, + pub source: Option<&'a str>, + pub line: Option, + pub column: Option, + pub is_inline: bool, } #[derive(Serialize)] -pub struct ResponseBacktrace< 'a > { - pub frames: Vec< Frame< 'a > > +pub struct ResponseBacktrace<'a> { + pub frames: Vec>, } #[derive(Serialize)] pub struct Deallocation { pub timestamp: Timeval, - pub thread: u32 + pub thread: u32, } #[derive(Serialize)] -pub struct Allocation< 'a > { +pub struct Allocation<'a> { pub address: u64, pub address_s: String, pub timestamp: Timeval, @@ -118,11 +118,11 @@ pub struct Allocation< 'a > { pub thread: u32, pub size: u64, pub backtrace_id: u32, - pub deallocation: Option< Deallocation >, - pub backtrace: Vec< Frame< 'a > >, + pub deallocation: Option, + pub backtrace: Vec>, pub is_mmaped: bool, pub in_main_arena: bool, - pub extra_space: u32 + pub extra_space: u32, } #[derive(Serialize)] @@ -138,31 +138,31 @@ pub struct AllocationGroupData { pub max_timestamp_relative_p: f32, pub interval: Timeval, pub leaked_count: u64, - pub allocated_count: u64 + pub allocated_count: u64, } #[derive(Serialize)] -pub struct AllocationGroup< 'a > { +pub struct AllocationGroup<'a> { pub all: AllocationGroupData, pub only_matched: AllocationGroupData, pub backtrace_id: u32, - pub backtrace: Vec< Frame< 'a > > + pub backtrace: Vec>, } #[derive(Serialize)] -pub struct Mallopt< 'a > { +pub struct Mallopt<'a> { pub timestamp: Timeval, pub thread: u32, pub backtrace_id: u32, - pub backtrace: Vec< Frame< 'a > >, + pub backtrace: Vec>, pub raw_param: i32, - pub param: Option< String >, + pub param: Option, pub value: i32, - pub result: i32 + pub result: i32, } #[derive(Serialize)] -pub enum MmapOperation< 'a > { +pub enum MmapOperation<'a> { #[serde(rename = "mmap")] Mmap { timestamp: Timeval, @@ -170,7 +170,7 @@ pub enum MmapOperation< 'a > { pointer_s: String, length: u64, backtrace_id: u32, - backtrace: Vec< Frame< 'a > >, + backtrace: Vec>, requested_address: u64, requested_address_s: String, is_readable: bool, @@ -186,7 +186,7 @@ pub enum MmapOperation< 'a > { is_uninitialized: bool, offset: u64, file_descriptor: i32, - thread: u32 + thread: u32, }, #[serde(rename = "munmap")] Munmap { @@ -195,35 +195,35 @@ pub enum MmapOperation< 'a > { pointer_s: String, length: u64, backtrace_id: u32, - backtrace: Vec< Frame< 'a > >, - thread: u32 - } + backtrace: Vec>, + thread: u32, + }, } #[derive(Serialize)] -pub struct ResponseAllocations< T: Serialize > { +pub struct ResponseAllocations { pub allocations: T, - pub total_count: u64 + pub total_count: u64, } #[derive(Serialize)] -pub struct ResponseAllocationGroups< T: Serialize > { +pub struct ResponseAllocationGroups { pub allocations: T, - pub total_count: u64 + pub total_count: u64, } #[derive(Serialize)] -pub struct ResponseMmaps< T: Serialize > { - pub operations: T +pub struct ResponseMmaps { + pub operations: T, } #[derive(Serialize)] -pub struct ResponseRegions< T: Serialize > { +pub struct ResponseRegions { pub main_heap_start: u64, pub main_heap_end: u64, pub main_heap_start_s: String, pub main_heap_end_s: String, - pub regions: T + pub regions: T, } #[derive(Copy, Clone, PartialEq, Eq, Deserialize, Debug, Hash)] @@ -239,7 +239,7 @@ pub enum LifetimeFilter { #[serde(rename = "only_temporary")] OnlyTemporary, #[serde(rename = "only_whole_group_leaked")] - OnlyWholeGroupLeaked + OnlyWholeGroupLeaked, } 
#[derive(Copy, Clone, PartialEq, Eq, Deserialize, Debug, Hash)] @@ -247,7 +247,7 @@ pub enum MmapedFilter { #[serde(rename = "yes")] Yes, #[serde(rename = "no")] - No + No, } #[derive(Copy, Clone, PartialEq, Eq, Deserialize, Debug, Hash)] @@ -255,7 +255,7 @@ pub enum ArenaFilter { #[serde(rename = "main")] Main, #[serde(rename = "non_main")] - NonMain + NonMain, } #[derive(Copy, Clone, Deserialize, Debug)] @@ -265,7 +265,7 @@ pub enum AllocSortBy { #[serde(rename = "address")] Address, #[serde(rename = "size")] - Size + Size, } #[derive(Copy, Clone, PartialEq, Eq, Deserialize, Debug, Hash)] @@ -294,7 +294,7 @@ pub enum AllocGroupsSortBy { #[serde(rename = "all.leaked_count")] GlobalLeakedCount, #[serde(rename = "all.size")] - GlobalSize + GlobalSize, } impl Default for AllocSortBy { @@ -308,7 +308,7 @@ pub enum Order { #[serde(rename = "asc")] Asc, #[serde(rename = "dsc")] - Dsc + Dsc, } impl Default for Order { @@ -317,17 +317,17 @@ impl Default for Order { } } -fn get_while< 'a >( p: &mut &'a str, callback: impl Fn( char ) -> bool ) -> &'a str { +fn get_while<'a>(p: &mut &'a str, callback: impl Fn(char) -> bool) -> &'a str { let mut found = None; for (index, ch) in p.char_indices() { - if !callback( ch ) { - found = Some( index ); + if !callback(ch) { + found = Some(index); break; } } - if let Some( index ) = found { - let (before, after) = p.split_at( index ); + if let Some(index) = found { + let (before, after) = p.split_at(index); *p = after; before } else { @@ -341,85 +341,89 @@ fn get_while< 'a >( p: &mut &'a str, callback: impl Fn( char ) -> bool ) -> &'a pub struct IntervalParseError; #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub struct Interval( pub Timestamp ); +pub struct Interval(pub Timestamp); impl Interval { pub fn min() -> Self { - Interval( Timestamp::min() ) + Interval(Timestamp::min()) } } impl FromStr for Interval { type Err = IntervalParseError; - fn from_str( string: &str ) -> Result< Self, Self::Err > { + fn from_str(string: &str) -> Result { let mut timestamp = Timestamp::min(); - let string = string.replace( " ", "" ); + let string = string.replace(" ", ""); let mut string = string.as_str(); while !string.is_empty() { - let number = get_while( &mut string, |ch| ch.is_digit( 10 ) || ch == ' ' ); - let unit = get_while( &mut string, |ch| ch.is_alphabetic() || ch == ' ' ); + let number = get_while(&mut string, |ch| ch.is_digit(10) || ch == ' '); + let unit = get_while(&mut string, |ch| ch.is_alphabetic() || ch == ' '); if number.is_empty() || (unit.is_empty() && !string.is_empty()) { - return Err( IntervalParseError ); + return Err(IntervalParseError); } let unit = match unit { - "h" | "H" => Timestamp::from_secs( 3600 ), - "m" | "M" => Timestamp::from_secs( 60 ), - "s" | "S" | "" => Timestamp::from_secs( 1 ), - "ms" | "MS" | "Ms" | "mS" => Timestamp::from_usecs( 1000 ), - "us" | "US" | "Us" | "uS" => Timestamp::from_usecs( 1 ), - _ => return Err( IntervalParseError ) + "h" | "H" => Timestamp::from_secs(3600), + "m" | "M" => Timestamp::from_secs(60), + "s" | "S" | "" => Timestamp::from_secs(1), + "ms" | "MS" | "Ms" | "mS" => Timestamp::from_usecs(1000), + "us" | "US" | "Us" | "uS" => Timestamp::from_usecs(1), + _ => return Err(IntervalParseError), }; - let number: u64 = number.parse().map_err( |_| IntervalParseError )?; + let number: u64 = number.parse().map_err(|_| IntervalParseError)?; let number = number as f64; timestamp = timestamp + (unit * number); } - Ok( Interval( timestamp ) ) + Ok(Interval(timestamp)) } } #[test] fn test_parse_interval() { - fn 
assert( string: &str, ts: Timestamp ) { + fn assert(string: &str, ts: Timestamp) { let x: Interval = string.parse().unwrap(); - assert_eq!( x.0, ts ); + assert_eq!(x.0, ts); } - assert( "1", Timestamp::from_secs( 1 ) ); - assert( "10", Timestamp::from_secs( 10 ) ); - assert( "10s", Timestamp::from_secs( 10 ) ); - assert( "3m", Timestamp::from_secs( 60 * 3 ) ); - assert( "3h", Timestamp::from_secs( 3600 * 3 ) ); - assert( "4h3m", Timestamp::from_secs( 3600 * 4 + 60 * 3 ) ); - assert( "4h3m2s", Timestamp::from_secs( 3600 * 4 + 60 * 3 + 2 ) ); - assert( "4h2s", Timestamp::from_secs( 3600 * 4 + 2 ) ); - assert( "1000ms", Timestamp::from_secs( 1 ) ); - assert( "100ms", Timestamp::from_usecs( 100_000 ) ); - assert( "100us", Timestamp::from_usecs( 100 ) ); -} - -impl< 'de > serde::Deserialize< 'de > for Interval { - fn deserialize< D >( deserializer: D ) -> Result< Self, D::Error > - where D: serde::Deserializer< 'de > + assert("1", Timestamp::from_secs(1)); + assert("10", Timestamp::from_secs(10)); + assert("10s", Timestamp::from_secs(10)); + assert("3m", Timestamp::from_secs(60 * 3)); + assert("3h", Timestamp::from_secs(3600 * 3)); + assert("4h3m", Timestamp::from_secs(3600 * 4 + 60 * 3)); + assert("4h3m2s", Timestamp::from_secs(3600 * 4 + 60 * 3 + 2)); + assert("4h2s", Timestamp::from_secs(3600 * 4 + 2)); + assert("1000ms", Timestamp::from_secs(1)); + assert("100ms", Timestamp::from_usecs(100_000)); + assert("100us", Timestamp::from_usecs(100)); +} + +impl<'de> serde::Deserialize<'de> for Interval { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, { struct Visitor; - impl< 'de > serde::de::Visitor< 'de > for Visitor { + impl<'de> serde::de::Visitor<'de> for Visitor { type Value = Interval; - fn expecting( &self, formatter: &mut fmt::Formatter ) -> fmt::Result { - write!( formatter, "interval" ) + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "interval") } - fn visit_str< E >( self, value: &str ) -> Result< Self::Value, E > - where E: serde::de::Error + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, { - let interval: Interval = value.parse().map_err( |_| E::custom( "not a valid interval" ) )?; - Ok( interval ) + let interval: Interval = value + .parse() + .map_err(|_| E::custom("not a valid interval"))?; + Ok(interval) } } - deserializer.deserialize_any( Visitor ) + deserializer.deserialize_any(Visitor) } } @@ -434,42 +438,57 @@ pub struct TimestampMin; pub struct TimestampMax; impl TimevalKind for TimestampMin { - fn is_end_of_the_range() -> bool { false } - fn is_interval() -> bool { false } + fn is_end_of_the_range() -> bool { + false + } + fn is_interval() -> bool { + false + } } impl TimevalKind for TimestampMax { - fn is_end_of_the_range() -> bool { true } - fn is_interval() -> bool { false } + fn is_end_of_the_range() -> bool { + true + } + fn is_interval() -> bool { + false + } } impl TimevalKind for Interval { - fn is_end_of_the_range() -> bool { false } - fn is_interval() -> bool { true } + fn is_end_of_the_range() -> bool { + false + } + fn is_interval() -> bool { + true + } } #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] -pub enum TimestampFilter< K: TimevalKind > { - Timestamp( Timestamp ), - Secs( Secs, PhantomData< K > ), - Percent( u32 ) +pub enum TimestampFilter { + Timestamp(Timestamp), + Secs(Secs, PhantomData), + Percent(u32), } -impl< K > TimestampFilter< K > where K: TimevalKind { - pub fn to_timestamp( &self, start_at: Timestamp, end_at: Timestamp ) -> Timestamp { 
+impl TimestampFilter +where + K: TimevalKind, +{ + pub fn to_timestamp(&self, start_at: Timestamp, end_at: Timestamp) -> Timestamp { match *self { - TimestampFilter::Secs( secs, _ ) => { + TimestampFilter::Secs(secs, _) => { let mut timestamp = secs.into(); if K::is_end_of_the_range() { // We need to do this since the filter is specifed // in seconds while we use a higher precision timestamps // internally. if timestamp != Timestamp::max() { - timestamp = timestamp + Timestamp::from_secs( 1 ) - Timestamp::eps(); + timestamp = timestamp + Timestamp::from_secs(1) - Timestamp::eps(); } } timestamp - }, - TimestampFilter::Timestamp( timestamp ) => timestamp, - TimestampFilter::Percent( percentage ) => { + } + TimestampFilter::Timestamp(timestamp) => timestamp, + TimestampFilter::Percent(percentage) => { let range = end_at - start_at; let p = percentage as f64 / 100.0; let shift = range * p; @@ -484,134 +503,153 @@ impl< K > TimestampFilter< K > where K: TimevalKind { } } -impl< 'de, K > serde::Deserialize< 'de > for TimestampFilter< K > where K: TimevalKind { - fn deserialize< D >( deserializer: D ) -> Result< Self, D::Error > - where D: serde::Deserializer< 'de > +impl<'de, K> serde::Deserialize<'de> for TimestampFilter +where + K: TimevalKind, +{ + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, { - struct Visitor< K >( PhantomData< K > ); - impl< 'de, K > serde::de::Visitor< 'de > for Visitor< K > where K: TimevalKind { - type Value = TimestampFilter< K >; - - fn expecting( &self, formatter: &mut fmt::Formatter ) -> fmt::Result { - write!( formatter, "timestamp or percentage" ) + struct Visitor(PhantomData); + impl<'de, K> serde::de::Visitor<'de> for Visitor + where + K: TimevalKind, + { + type Value = TimestampFilter; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "timestamp or percentage") } - fn visit_str< E >( self, value: &str ) -> Result< Self::Value, E > - where E: serde::de::Error + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, { - if value.ends_with( "%" ) { - let value = value[ 0..value.len() - 1 ].parse().map_err( |_| E::custom( "not a valid percentage" ) )?; - Ok( TimestampFilter::Percent( value ) ) + if value.ends_with("%") { + let value = value[0..value.len() - 1] + .parse() + .map_err(|_| E::custom("not a valid percentage"))?; + Ok(TimestampFilter::Percent(value)) } else { if K::is_interval() { - let value: Interval = value.parse().map_err( |_| E::custom( "not a valid interval" ) )?; - Ok( TimestampFilter::Timestamp( value.0 ) ) + let value: Interval = value + .parse() + .map_err(|_| E::custom("not a valid interval"))?; + Ok(TimestampFilter::Timestamp(value.0)) } else { - let value: u64 = value.parse().map_err( |_| E::custom( "not a valid number" ) )?; - Ok( TimestampFilter::Secs( Secs( value ), PhantomData ) ) + let value: u64 = + value.parse().map_err(|_| E::custom("not a valid number"))?; + Ok(TimestampFilter::Secs(Secs(value), PhantomData)) } } } } - deserializer.deserialize_any( Visitor( PhantomData ) ) + deserializer.deserialize_any(Visitor(PhantomData)) } } #[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub enum NumberOrPercentage { - Absolute( u32 ), - Percent( u32 ) + Absolute(u32), + Percent(u32), } impl NumberOrPercentage { - pub fn get( self, maximum: u32 ) -> u32 { + pub fn get(self, maximum: u32) -> u32 { match self { - NumberOrPercentage::Absolute( value ) => value, - NumberOrPercentage::Percent( percent ) => { + NumberOrPercentage::Absolute(value) => 
value, + NumberOrPercentage::Percent(percent) => { ((percent as f32 / 100.0) * maximum as f32) as _ } } } } -impl< 'de > serde::Deserialize< 'de > for NumberOrPercentage { - fn deserialize< D >( deserializer: D ) -> Result< Self, D::Error > - where D: serde::Deserializer< 'de > +impl<'de> serde::Deserialize<'de> for NumberOrPercentage { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, { struct Visitor; - impl< 'de > serde::de::Visitor< 'de > for Visitor { + impl<'de> serde::de::Visitor<'de> for Visitor { type Value = NumberOrPercentage; - fn expecting( &self, formatter: &mut fmt::Formatter ) -> fmt::Result { - write!( formatter, "number or percentage" ) + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + write!(formatter, "number or percentage") } - fn visit_str< E >( self, value: &str ) -> Result< Self::Value, E > - where E: serde::de::Error + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, { - if value.ends_with( "%" ) { - let value = value[ 0..value.len() - 1 ].parse().map_err( |_| E::custom( "not a valid percentage" ) )?; - Ok( NumberOrPercentage::Percent( value ) ) + if value.ends_with("%") { + let value = value[0..value.len() - 1] + .parse() + .map_err(|_| E::custom("not a valid percentage"))?; + Ok(NumberOrPercentage::Percent(value)) } else { - let value = value.parse().map_err( |_| E::custom( "not a valid number" ) )?; - Ok( NumberOrPercentage::Absolute( value ) ) + let value = value.parse().map_err(|_| E::custom("not a valid number"))?; + Ok(NumberOrPercentage::Absolute(value)) } } } - deserializer.deserialize_any( Visitor ) + deserializer.deserialize_any(Visitor) } } #[derive(Clone, PartialEq, Eq, Deserialize, Debug, Hash)] pub struct AllocFilter { - pub from: Option< TimestampFilter< TimestampMin > >, - pub to: Option< TimestampFilter< TimestampMax > >, - pub lifetime: Option< LifetimeFilter >, - pub address_min: Option< u64 >, - pub address_max: Option< u64 >, - pub size_min: Option< u64 >, - pub size_max: Option< u64 >, - pub lifetime_min: Option< Interval >, - pub lifetime_max: Option< Interval >, - pub backtrace_depth_min: Option< u32 >, - pub backtrace_depth_max: Option< u32 >, - pub backtraces: Option< u32 >, // TODO: Support multiple. - pub mmaped: Option< MmapedFilter >, - pub arena: Option< ArenaFilter >, - pub function_regex: Option< String >, - pub source_regex: Option< String >, - pub negative_function_regex: Option< String >, - pub negative_source_regex: Option< String >, - pub marker: Option< u32 >, - pub group_interval_min: Option< TimestampFilter< Interval > >, - pub group_interval_max: Option< TimestampFilter< Interval > >, - pub group_leaked_allocations_min: Option< NumberOrPercentage >, - pub group_leaked_allocations_max: Option< NumberOrPercentage >, - pub group_allocations_min: Option< u32 >, - pub group_allocations_max: Option< u32 > + pub from: Option>, + pub to: Option>, + pub lifetime: Option, + pub address_min: Option, + pub address_max: Option, + pub size_min: Option, + pub size_max: Option, + pub lifetime_min: Option, + pub lifetime_max: Option, + pub backtrace_depth_min: Option, + pub backtrace_depth_max: Option, + pub backtraces: Option, // TODO: Support multiple. 
+ pub mmaped: Option, + pub arena: Option, + pub function_regex: Option, + pub source_regex: Option, + pub library_regex: Option, + pub negative_function_regex: Option, + pub negative_source_regex: Option, + pub negative_library_regex: Option, + pub marker: Option, + pub group_interval_min: Option>, + pub group_interval_max: Option>, + pub group_leaked_allocations_min: Option, + pub group_leaked_allocations_max: Option, + pub group_allocations_min: Option, + pub group_allocations_max: Option, } #[derive(Clone, Deserialize, Debug)] pub struct BacktraceFormat { - pub strip_template_args: Option< bool > + pub strip_template_args: Option, } #[derive(Deserialize, Debug)] pub struct RequestAllocations { - pub skip: Option< u64 >, - pub count: Option< u32 >, + pub skip: Option, + pub count: Option, - pub sort_by: Option< AllocSortBy >, - pub order: Option< Order > + pub sort_by: Option, + pub order: Option, } #[derive(Deserialize, Debug)] pub struct RequestAllocationGroups { - pub skip: Option< u64 >, - pub count: Option< u32 >, + pub skip: Option, + pub count: Option, - pub sort_by: Option< AllocGroupsSortBy >, - pub order: Option< Order > + pub sort_by: Option, + pub order: Option, } diff --git a/webui/src/PageDataAllocations.js b/webui/src/PageDataAllocations.js index 2ca9c552..9fdca7e7 100644 --- a/webui/src/PageDataAllocations.js +++ b/webui/src/PageDataAllocations.js @@ -260,6 +260,16 @@ const FIELDS = { label: "Negative source file regex", badge: value => "Sources NOT matching /" + value + "/" }, + library_regex: { + ...REGEX_FIELD, + label: "Library regex", + badge: value => "Libraries matching /" + value + "/" + }, + negative_library_regex: { + ...REGEX_FIELD, + label: "Negative library regex", + badge: value => "Libraries NOT matching /" + value + "/" + }, backtraces: { label: "Backtrace", badge: value => "Matching backtrace with ID " + value @@ -436,6 +446,10 @@ class FilterEditor extends React.Component {
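The `group_leaked_allocations_min`/`_max` thresholds above accept either a plain count or a percentage with a trailing `%`, resolved against the group's total allocation count via `NumberOrPercentage::get`. The snippet below is a runnable distillation of that convention, not the crate's code: the serde plumbing is dropped, and the visitor's manual slicing is swapped for `strip_suffix`.

```rust
enum NumberOrPercentage {
    Absolute(u32),
    Percent(u32),
}

impl NumberOrPercentage {
    // Mirrors the visitor above: a trailing '%' selects the percentage form.
    fn parse(value: &str) -> Option<Self> {
        if let Some(stripped) = value.strip_suffix('%') {
            stripped.parse().ok().map(NumberOrPercentage::Percent)
        } else {
            value.parse().ok().map(NumberOrPercentage::Absolute)
        }
    }

    // Resolve the threshold against a group's total allocation count.
    fn get(self, maximum: u32) -> u32 {
        match self {
            NumberOrPercentage::Absolute(value) => value,
            NumberOrPercentage::Percent(percent) => {
                ((percent as f32 / 100.0) * maximum as f32) as u32
            }
        }
    }
}

fn main() {
    // "8" is an absolute count; "25%" of a 40-allocation group resolves to 10.
    assert_eq!(NumberOrPercentage::parse("8").unwrap().get(40), 8);
    assert_eq!(NumberOrPercentage::parse("25%").unwrap().get(40), 10);
}
```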
{this.field("negative_source_regex")}
+
+ {this.field("library_regex")} +
+ {this.field("negative_library_regex")}
{this.field("backtrace_depth_min")}