diff --git a/Cargo.lock b/Cargo.lock index bf28939ac87b2..d83bdf767e5c1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3887,11 +3887,13 @@ dependencies = [ "rustc_lexer", "rustc_lint_defs", "rustc_macros", + "rustc_middle", "rustc_parse", "rustc_proc_macro", "rustc_serialize", "rustc_session", "rustc_span", + "scoped-tls", "smallvec", "thin-vec", "tracing", diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 079238b12bfd6..b2b1f997ac545 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -3348,7 +3348,8 @@ impl UseTree { /// Distinguishes between `Attribute`s that decorate items and Attributes that /// are contained as statements within items. These two cases need to be /// distinguished for pretty-printing. -#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy, HashStable_Generic, Walkable)] +#[derive(Clone, PartialEq, Eq, Hash, Debug, Copy)] +#[derive(Encodable, Decodable, HashStable_Generic, Walkable)] pub enum AttrStyle { Outer, Inner, diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index accf4d1816323..453e7443d3247 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -40,13 +40,13 @@ impl DocFragmentKind { } } -#[derive(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)] pub enum CommentKind { Line, Block, } -#[derive(Copy, Clone, PartialEq, Debug, Encodable, Decodable, HashStable_Generic)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable, HashStable_Generic)] pub enum InvisibleOrigin { // From the expansion of a metavariable in a declarative macro. MetaVar(MetaVarKind), @@ -123,7 +123,7 @@ impl fmt::Display for MetaVarKind { /// Describes how a sequence of token trees is delimited. /// Cannot use `proc_macro::Delimiter` directly because this /// structure should implement some additional traits. -#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)] pub enum Delimiter { /// `( ... )` Parenthesis, @@ -186,7 +186,7 @@ impl Delimiter { // type. This means that float literals like `1f32` are classified by this type // as `Int`. Only upon conversion to `ast::LitKind` will such a literal be // given the `Float` kind. -#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)] pub enum LitKind { Bool, // AST only, must never appear in a `Token` Byte, @@ -203,7 +203,7 @@ pub enum LitKind { } /// A literal token. 
-#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)] pub struct Lit { pub kind: LitKind, pub symbol: Symbol, @@ -349,7 +349,7 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool { .contains(&name) } -#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)] +#[derive(PartialEq, Eq, Encodable, Decodable, Hash, Debug, Copy, Clone, HashStable_Generic)] pub enum IdentIsRaw { No, Yes, @@ -376,7 +376,7 @@ impl From for IdentIsRaw { } } -#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)] pub enum TokenKind { /* Expression-operator symbols. */ /// `=` @@ -526,7 +526,7 @@ pub enum TokenKind { Eof, } -#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Encodable, Decodable, Debug, HashStable_Generic)] pub struct Token { pub kind: TokenKind, pub span: Span, diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 4111182c3b7dc..e346a56bcf407 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -5,6 +5,7 @@ //! which are themselves a single [`Token`] or a `Delimited` subsequence of tokens. use std::borrow::Cow; +use std::hash::Hash; use std::ops::Range; use std::sync::Arc; use std::{cmp, fmt, iter, mem}; @@ -22,7 +23,7 @@ use crate::token::{self, Delimiter, Token, TokenKind}; use crate::{AttrVec, Attribute}; /// Part of a `TokenStream`. -#[derive(Debug, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)] +#[derive(Debug, Clone, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)] pub enum TokenTree { /// A single token. Should never be `OpenDelim` or `CloseDelim`, because /// delimiters are implicitly represented by `Delimited`. @@ -538,7 +539,7 @@ pub struct AttrsTarget { /// compound token. Used for conversions to `proc_macro::Spacing`. Also used to /// guide pretty-printing, which is where the `JointHidden` value (which isn't /// part of `proc_macro::Spacing`) comes in useful. -#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)] pub enum Spacing { /// The token cannot join with the following token to form a compound /// token. @@ -595,7 +596,7 @@ pub enum Spacing { } /// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s. 
-#[derive(Clone, Debug, Default, Encodable, Decodable)] +#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Encodable, Decodable)] pub struct TokenStream(pub(crate) Arc>); impl TokenStream { @@ -811,14 +812,6 @@ impl TokenStream { } } -impl PartialEq for TokenStream { - fn eq(&self, other: &TokenStream) -> bool { - self.iter().eq(other.iter()) - } -} - -impl Eq for TokenStream {} - impl FromIterator for TokenStream { fn from_iter>(iter: I) -> Self { TokenStream::new(iter.into_iter().collect::>()) @@ -970,7 +963,8 @@ impl TokenCursor { } } -#[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic, Walkable)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Encodable, Decodable, HashStable_Generic, Walkable)] pub struct DelimSpan { pub open: Span, pub close: Span, @@ -994,7 +988,7 @@ impl DelimSpan { } } -#[derive(Copy, Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Encodable, Decodable, HashStable_Generic)] pub struct DelimSpacing { pub open: Spacing, pub close: Spacing, diff --git a/compiler/rustc_const_eval/src/const_eval/type_info.rs b/compiler/rustc_const_eval/src/const_eval/type_info.rs index 814c81278a104..5d37db06d76ac 100644 --- a/compiler/rustc_const_eval/src/const_eval/type_info.rs +++ b/compiler/rustc_const_eval/src/const_eval/type_info.rs @@ -1,6 +1,6 @@ use rustc_abi::FieldIdx; use rustc_hir::LangItem; -use rustc_middle::mir::interpret::CtfeProvenance; +use rustc_middle::mir::interpret::{CtfeProvenance, Scalar}; use rustc_middle::span_bug; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::ty::{self, Const, ScalarInt, Ty}; @@ -35,6 +35,7 @@ impl<'tcx> InterpCx<'tcx, CompileTimeMachine<'tcx>> { interp_ok((variant_id, self.project_downcast(&field_dest, variant_id)?)) }; + let ptr_bit_width = || self.tcx.data_layout.pointer_size().bits(); match field.name { sym::kind => { let variant_index = match ty.kind() { @@ -64,13 +65,46 @@ impl<'tcx> InterpCx<'tcx, CompileTimeMachine<'tcx>> { variant } - // For now just merge all primitives into one `Leaf` variant with no data - ty::Uint(_) | ty::Int(_) | ty::Float(_) | ty::Char | ty::Bool => { - downcast(sym::Leaf)?.0 + ty::Bool => { + let (variant, _variant_place) = downcast(sym::Bool)?; + variant + } + ty::Char => { + let (variant, _variant_place) = downcast(sym::Char)?; + variant + } + ty::Int(int_ty) => { + let (variant, variant_place) = downcast(sym::Int)?; + let place = self.project_field(&variant_place, FieldIdx::ZERO)?; + self.write_int_type_info( + place, + int_ty.bit_width().unwrap_or_else(/* isize */ ptr_bit_width), + true, + )?; + variant + } + ty::Uint(uint_ty) => { + let (variant, variant_place) = downcast(sym::Int)?; + let place = self.project_field(&variant_place, FieldIdx::ZERO)?; + self.write_int_type_info( + place, + uint_ty.bit_width().unwrap_or_else(/* usize */ ptr_bit_width), + false, + )?; + variant + } + ty::Float(float_ty) => { + let (variant, variant_place) = downcast(sym::Float)?; + let place = self.project_field(&variant_place, FieldIdx::ZERO)?; + self.write_float_type_info(place, float_ty.bit_width())?; + variant + } + ty::Str => { + let (variant, _variant_place) = downcast(sym::Str)?; + variant } ty::Adt(_, _) | ty::Foreign(_) - | ty::Str | ty::Pat(_, _) | ty::Slice(_) | ty::RawPtr(..) 
@@ -203,4 +237,46 @@ impl<'tcx> InterpCx<'tcx, CompileTimeMachine<'tcx>> { interp_ok(()) } + + fn write_int_type_info( + &mut self, + place: impl Writeable<'tcx, CtfeProvenance>, + bit_width: u64, + signed: bool, + ) -> InterpResult<'tcx> { + for (field_idx, field) in + place.layout().ty.ty_adt_def().unwrap().non_enum_variant().fields.iter_enumerated() + { + let field_place = self.project_field(&place, field_idx)?; + match field.name { + sym::bit_width => self.write_scalar( + ScalarInt::try_from_target_usize(bit_width, self.tcx.tcx).unwrap(), + &field_place, + )?, + sym::signed => self.write_scalar(Scalar::from_bool(signed), &field_place)?, + other => span_bug!(self.tcx.def_span(field.did), "unimplemented field {other}"), + } + } + interp_ok(()) + } + + fn write_float_type_info( + &mut self, + place: impl Writeable<'tcx, CtfeProvenance>, + bit_width: u64, + ) -> InterpResult<'tcx> { + for (field_idx, field) in + place.layout().ty.ty_adt_def().unwrap().non_enum_variant().fields.iter_enumerated() + { + let field_place = self.project_field(&place, field_idx)?; + match field.name { + sym::bit_width => self.write_scalar( + ScalarInt::try_from_target_usize(bit_width, self.tcx.tcx).unwrap(), + &field_place, + )?, + other => span_bug!(self.tcx.def_span(field.did), "unimplemented field {other}"), + } + } + interp_ok(()) + } } diff --git a/compiler/rustc_data_structures/src/fx.rs b/compiler/rustc_data_structures/src/fx.rs index c1a5c8ebc7645..026ec5c230ec6 100644 --- a/compiler/rustc_data_structures/src/fx.rs +++ b/compiler/rustc_data_structures/src/fx.rs @@ -28,3 +28,18 @@ macro_rules! define_stable_id_collections { pub type $entry_name<'a, T> = $crate::fx::IndexEntry<'a, $key, T>; }; } + +pub mod default { + use super::{FxBuildHasher, FxHashMap, FxHashSet}; + + // FIXME: These two functions will become unnecessary after + // lands and we start using the corresponding + // `rustc-hash` version. After that we can use `Default::default()` instead. 
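A brief usage sketch for the two `const fn` constructors defined just below (the constant name here is hypothetical, added only for illustration): because they are callable in const contexts, an empty map or set can be built where `Default::default()` is not yet usable, which is the same pattern the `impl const Default` changes for `UnordSet`/`UnordMap` later in this diff rely on.

    // Hypothetical call site: an empty FxHashMap usable in a const item.
    const EMPTY_COUNTS: FxHashMap<u32, u32> = fx_hash_map();
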
+ pub const fn fx_hash_map() -> FxHashMap { + FxHashMap::with_hasher(FxBuildHasher) + } + + pub const fn fx_hash_set() -> FxHashSet { + FxHashSet::with_hasher(FxBuildHasher) + } +} diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs index 41f6292e740bd..ff1dd41c82ccb 100644 --- a/compiler/rustc_data_structures/src/lib.rs +++ b/compiler/rustc_data_structures/src/lib.rs @@ -18,6 +18,8 @@ #![feature(assert_matches)] #![feature(auto_traits)] #![feature(cfg_select)] +#![feature(const_default)] +#![feature(const_trait_impl)] #![feature(core_intrinsics)] #![feature(dropck_eyepatch)] #![feature(extend_one)] diff --git a/compiler/rustc_data_structures/src/unord.rs b/compiler/rustc_data_structures/src/unord.rs index 3d44fb1fd48db..0a9a86d7a43b8 100644 --- a/compiler/rustc_data_structures/src/unord.rs +++ b/compiler/rustc_data_structures/src/unord.rs @@ -8,7 +8,7 @@ use std::hash::Hash; use std::iter::{Product, Sum}; use std::ops::Index; -use rustc_hash::{FxHashMap, FxHashSet}; +use rustc_hash::{FxBuildHasher, FxHashMap, FxHashSet}; use rustc_macros::{Decodable_NoContext, Encodable_NoContext}; use crate::fingerprint::Fingerprint; @@ -241,10 +241,10 @@ pub struct UnordSet { impl UnordCollection for UnordSet {} -impl Default for UnordSet { +impl const Default for UnordSet { #[inline] fn default() -> Self { - Self { inner: FxHashSet::default() } + Self { inner: FxHashSet::with_hasher(FxBuildHasher) } } } @@ -438,10 +438,10 @@ pub struct UnordMap { impl UnordCollection for UnordMap {} -impl Default for UnordMap { +impl const Default for UnordMap { #[inline] fn default() -> Self { - Self { inner: FxHashMap::default() } + Self { inner: FxHashMap::with_hasher(FxBuildHasher) } } } diff --git a/compiler/rustc_expand/Cargo.toml b/compiler/rustc_expand/Cargo.toml index f897833d85c00..a18506c42afcf 100644 --- a/compiler/rustc_expand/Cargo.toml +++ b/compiler/rustc_expand/Cargo.toml @@ -21,6 +21,7 @@ rustc_hir = { path = "../rustc_hir" } rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } rustc_parse = { path = "../rustc_parse" } # We must use the proc_macro version that we will compile proc-macros against, # not the one from our own sysroot. @@ -28,6 +29,7 @@ rustc_proc_macro = { path = "../rustc_proc_macro" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } rustc_span = { path = "../rustc_span" } +scoped-tls = "1.0" smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } thin-vec = "0.2.12" tracing = "0.1" diff --git a/compiler/rustc_expand/src/lib.rs b/compiler/rustc_expand/src/lib.rs index 5eefa4bcdf6be..5ac7ec0cd0353 100644 --- a/compiler/rustc_expand/src/lib.rs +++ b/compiler/rustc_expand/src/lib.rs @@ -29,4 +29,8 @@ pub mod module; #[allow(rustc::untranslatable_diagnostic)] pub mod proc_macro; +pub fn provide(providers: &mut rustc_middle::query::Providers) { + providers.derive_macro_expansion = proc_macro::provide_derive_macro_expansion; +} + rustc_fluent_macro::fluent_messages! 
{ "../messages.ftl" } diff --git a/compiler/rustc_expand/src/proc_macro.rs b/compiler/rustc_expand/src/proc_macro.rs index 9bfda8764f552..5e7a272bcba4f 100644 --- a/compiler/rustc_expand/src/proc_macro.rs +++ b/compiler/rustc_expand/src/proc_macro.rs @@ -1,9 +1,11 @@ use rustc_ast::tokenstream::TokenStream; use rustc_errors::ErrorGuaranteed; +use rustc_middle::ty::{self, TyCtxt}; use rustc_parse::parser::{ForceCollect, Parser}; +use rustc_session::Session; use rustc_session::config::ProcMacroExecutionStrategy; -use rustc_span::Span; use rustc_span::profiling::SpannedEventArgRecorder; +use rustc_span::{LocalExpnId, Span}; use {rustc_ast as ast, rustc_proc_macro as pm}; use crate::base::{self, *}; @@ -30,9 +32,9 @@ impl pm::bridge::server::MessagePipe for MessagePipe { } } -fn exec_strategy(ecx: &ExtCtxt<'_>) -> impl pm::bridge::server::ExecutionStrategy + 'static { +fn exec_strategy(sess: &Session) -> impl pm::bridge::server::ExecutionStrategy + 'static { pm::bridge::server::MaybeCrossThread::>::new( - ecx.sess.opts.unstable_opts.proc_macro_execution_strategy + sess.opts.unstable_opts.proc_macro_execution_strategy == ProcMacroExecutionStrategy::CrossThread, ) } @@ -54,7 +56,7 @@ impl base::BangProcMacro for BangProcMacro { }); let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace; - let strategy = exec_strategy(ecx); + let strategy = exec_strategy(ecx.sess); let server = proc_macro_server::Rustc::new(ecx); self.client.run(&strategy, server, input, proc_macro_backtrace).map_err(|e| { ecx.dcx().emit_err(errors::ProcMacroPanicked { @@ -85,7 +87,7 @@ impl base::AttrProcMacro for AttrProcMacro { }); let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace; - let strategy = exec_strategy(ecx); + let strategy = exec_strategy(ecx.sess); let server = proc_macro_server::Rustc::new(ecx); self.client.run(&strategy, server, annotation, annotated, proc_macro_backtrace).map_err( |e| { @@ -101,7 +103,7 @@ impl base::AttrProcMacro for AttrProcMacro { } pub struct DeriveProcMacro { - pub client: pm::bridge::client::Client, + pub client: DeriveClient, } impl MultiItemModifier for DeriveProcMacro { @@ -113,6 +115,13 @@ impl MultiItemModifier for DeriveProcMacro { item: Annotatable, _is_derive_const: bool, ) -> ExpandResult, Annotatable> { + let _timer = ecx.sess.prof.generic_activity_with_arg_recorder( + "expand_derive_proc_macro_outer", + |recorder| { + recorder.record_arg_with_span(ecx.sess.source_map(), ecx.expansion_descr(), span); + }, + ); + // We need special handling for statement items // (e.g. `fn foo() { #[derive(Debug)] struct Bar; }`) let is_stmt = matches!(item, Annotatable::Stmt(..)); @@ -123,36 +132,31 @@ impl MultiItemModifier for DeriveProcMacro { // altogether. See #73345. 
crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess.psess); let input = item.to_tokens(); - let stream = { - let _timer = - ecx.sess.prof.generic_activity_with_arg_recorder("expand_proc_macro", |recorder| { - recorder.record_arg_with_span( - ecx.sess.source_map(), - ecx.expansion_descr(), - span, - ); - }); - let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace; - let strategy = exec_strategy(ecx); - let server = proc_macro_server::Rustc::new(ecx); - match self.client.run(&strategy, server, input, proc_macro_backtrace) { - Ok(stream) => stream, - Err(e) => { - ecx.dcx().emit_err({ - errors::ProcMacroDerivePanicked { - span, - message: e.as_str().map(|message| { - errors::ProcMacroDerivePanickedHelp { message: message.into() } - }), - } - }); - return ExpandResult::Ready(vec![]); - } - } + + let invoc_id = ecx.current_expansion.id; + + let res = if ecx.sess.opts.incremental.is_some() + && ecx.sess.opts.unstable_opts.cache_proc_macros + { + ty::tls::with(|tcx| { + let input = &*tcx.arena.alloc(input); + let key: (LocalExpnId, &TokenStream) = (invoc_id, input); + + QueryDeriveExpandCtx::enter(ecx, self.client, move || { + tcx.derive_macro_expansion(key).cloned() + }) + }) + } else { + expand_derive_macro(invoc_id, input, ecx, self.client) + }; + + let Ok(output) = res else { + // error will already have been emitted + return ExpandResult::Ready(vec![]); }; let error_count_before = ecx.dcx().err_count(); - let mut parser = Parser::new(&ecx.sess.psess, stream, Some("proc-macro derive")); + let mut parser = Parser::new(&ecx.sess.psess, output, Some("proc-macro derive")); let mut items = vec![]; loop { @@ -180,3 +184,101 @@ impl MultiItemModifier for DeriveProcMacro { ExpandResult::Ready(items) } } + +/// Provide a query for computing the output of a derive macro. +pub(super) fn provide_derive_macro_expansion<'tcx>( + tcx: TyCtxt<'tcx>, + key: (LocalExpnId, &'tcx TokenStream), +) -> Result<&'tcx TokenStream, ()> { + let (invoc_id, input) = key; + + // Make sure that we invalidate the query when the crate defining the proc macro changes + let _ = tcx.crate_hash(invoc_id.expn_data().macro_def_id.unwrap().krate); + + QueryDeriveExpandCtx::with(|ecx, client| { + expand_derive_macro(invoc_id, input.clone(), ecx, client).map(|ts| &*tcx.arena.alloc(ts)) + }) +} + +type DeriveClient = pm::bridge::client::Client; + +fn expand_derive_macro( + invoc_id: LocalExpnId, + input: TokenStream, + ecx: &mut ExtCtxt<'_>, + client: DeriveClient, +) -> Result { + let _timer = + ecx.sess.prof.generic_activity_with_arg_recorder("expand_proc_macro", |recorder| { + let invoc_expn_data = invoc_id.expn_data(); + let span = invoc_expn_data.call_site; + let event_arg = invoc_expn_data.kind.descr(); + recorder.record_arg_with_span(ecx.sess.source_map(), event_arg.clone(), span); + }); + + let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace; + let strategy = exec_strategy(ecx.sess); + let server = proc_macro_server::Rustc::new(ecx); + + match client.run(&strategy, server, input, proc_macro_backtrace) { + Ok(stream) => Ok(stream), + Err(e) => { + let invoc_expn_data = invoc_id.expn_data(); + let span = invoc_expn_data.call_site; + ecx.dcx().emit_err({ + errors::ProcMacroDerivePanicked { + span, + message: e.as_str().map(|message| errors::ProcMacroDerivePanickedHelp { + message: message.into(), + }), + } + }); + Err(()) + } + } +} + +/// Stores the context necessary to expand a derive proc macro via a query. 
+struct QueryDeriveExpandCtx { + /// Type-erased version of `&mut ExtCtxt` + expansion_ctx: *mut (), + client: DeriveClient, +} + +impl QueryDeriveExpandCtx { + /// Store the extension context and the client into the thread local value. + /// It will be accessible via the `with` method while `f` is active. + fn enter(ecx: &mut ExtCtxt<'_>, client: DeriveClient, f: F) -> R + where + F: FnOnce() -> R, + { + // We need erasure to get rid of the lifetime + let ctx = Self { expansion_ctx: ecx as *mut _ as *mut (), client }; + DERIVE_EXPAND_CTX.set(&ctx, || f()) + } + + /// Accesses the thread local value of the derive expansion context. + /// Must be called while the `enter` function is active. + fn with(f: F) -> R + where + F: for<'a, 'b> FnOnce(&'b mut ExtCtxt<'a>, DeriveClient) -> R, + { + DERIVE_EXPAND_CTX.with(|ctx| { + let ectx = { + let casted = ctx.expansion_ctx.cast::>(); + // SAFETY: We can only get the value from `with` while the `enter` function + // is active (on the callstack), and that function's signature ensures that the + // lifetime is valid. + // If `with` is called at some other time, it will panic due to usage of + // `scoped_tls::with`. + unsafe { casted.as_mut().unwrap() } + }; + + f(ectx, ctx.client) + }) + } +} + +// When we invoke a query to expand a derive proc macro, we need to provide it with the expansion +// context and derive Client. We do that using a thread-local. +scoped_tls::scoped_thread_local!(static DERIVE_EXPAND_CTX: QueryDeriveExpandCtx); diff --git a/compiler/rustc_hir/src/definitions.rs b/compiler/rustc_hir/src/definitions.rs index 01f84c90ec7ab..5e361891f6d04 100644 --- a/compiler/rustc_hir/src/definitions.rs +++ b/compiler/rustc_hir/src/definitions.rs @@ -103,7 +103,7 @@ pub struct DisambiguatorState { } impl DisambiguatorState { - pub fn new() -> Self { + pub const fn new() -> Self { Self { next: Default::default() } } diff --git a/compiler/rustc_incremental/src/persist/dirty_clean.rs b/compiler/rustc_incremental/src/persist/dirty_clean.rs index 64166255fa485..71fb188952466 100644 --- a/compiler/rustc_incremental/src/persist/dirty_clean.rs +++ b/compiler/rustc_incremental/src/persist/dirty_clean.rs @@ -26,7 +26,7 @@ use rustc_hir::def_id::LocalDefId; use rustc_hir::{ Attribute, ImplItemKind, ItemKind as HirItem, Node as HirNode, TraitItemKind, intravisit, }; -use rustc_middle::dep_graph::{DepNode, DepNodeExt, label_strs}; +use rustc_middle::dep_graph::{DepNode, DepNodeExt, dep_kind_from_label, label_strs}; use rustc_middle::hir::nested_filter; use rustc_middle::ty::TyCtxt; use rustc_span::{Span, Symbol, sym}; @@ -357,17 +357,6 @@ impl<'tcx> DirtyCleanVisitor<'tcx> { } } - fn assert_loaded_from_disk(&self, item_span: Span, dep_node: DepNode) { - debug!("assert_loaded_from_disk({:?})", dep_node); - - if !self.tcx.dep_graph.debug_was_loaded_from_disk(dep_node) { - let dep_node_str = self.dep_node_str(&dep_node); - self.tcx - .dcx() - .emit_err(errors::NotLoaded { span: item_span, dep_node_str: &dep_node_str }); - } - } - fn check_item(&mut self, item_id: LocalDefId) { let item_span = self.tcx.def_span(item_id.to_def_id()); let def_path_hash = self.tcx.def_path_hash(item_id.to_def_id()); @@ -385,8 +374,27 @@ impl<'tcx> DirtyCleanVisitor<'tcx> { self.assert_dirty(item_span, dep_node); } for label in assertion.loaded_from_disk.items().into_sorted_stable_ord() { - let dep_node = DepNode::from_label_string(self.tcx, label, def_path_hash).unwrap(); - self.assert_loaded_from_disk(item_span, dep_node); + match DepNode::from_label_string(self.tcx, label, 
def_path_hash) { + Ok(dep_node) => { + if !self.tcx.dep_graph.debug_was_loaded_from_disk(dep_node) { + let dep_node_str = self.dep_node_str(&dep_node); + self.tcx.dcx().emit_err(errors::NotLoaded { + span: item_span, + dep_node_str: &dep_node_str, + }); + } + } + // Opaque/unit hash, we only know the dep kind + Err(()) => { + let dep_kind = dep_kind_from_label(label); + if !self.tcx.dep_graph.debug_dep_kind_was_loaded_from_disk(dep_kind) { + self.tcx.dcx().emit_err(errors::NotLoaded { + span: item_span, + dep_node_str: &label, + }); + } + } + } } } } diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs index a9bdf597e128f..184fa409d9605 100644 --- a/compiler/rustc_index/src/bit_set.rs +++ b/compiler/rustc_index/src/bit_set.rs @@ -634,22 +634,12 @@ impl ChunkedBitSet { match *chunk { Zeros => { if chunk_domain_size > 1 { - #[cfg(feature = "nightly")] let mut words = { // We take some effort to avoid copying the words. let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed(); // SAFETY: `words` can safely be all zeroes. unsafe { words.assume_init() } }; - #[cfg(not(feature = "nightly"))] - let mut words = { - // FIXME: unconditionally use `Rc::new_zeroed` once it is stable (#129396). - let words = mem::MaybeUninit::<[Word; CHUNK_WORDS]>::zeroed(); - // SAFETY: `words` can safely be all zeroes. - let words = unsafe { words.assume_init() }; - // Unfortunate possibly-large copy - Rc::new(words) - }; let words_ref = Rc::get_mut(&mut words).unwrap(); let (word_index, mask) = chunk_word_index_and_mask(elem); @@ -695,22 +685,12 @@ impl ChunkedBitSet { Zeros => false, Ones => { if chunk_domain_size > 1 { - #[cfg(feature = "nightly")] let mut words = { // We take some effort to avoid copying the words. let words = Rc::<[Word; CHUNK_WORDS]>::new_zeroed(); // SAFETY: `words` can safely be all zeroes. unsafe { words.assume_init() } }; - #[cfg(not(feature = "nightly"))] - let mut words = { - // FIXME: unconditionally use `Rc::new_zeroed` once it is stable (#129396). - let words = mem::MaybeUninit::<[Word; CHUNK_WORDS]>::zeroed(); - // SAFETY: `words` can safely be all zeroes. - let words = unsafe { words.assume_init() }; - // Unfortunate possibly-large copy - Rc::new(words) - }; let words_ref = Rc::get_mut(&mut words).unwrap(); // Set only the bits in use. diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 35ab202d4c27e..12a6a616d64f9 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -889,6 +889,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock = LazyLock::new(|| { providers.queries.env_var_os = env_var_os; limits::provide(&mut providers.queries); proc_macro_decls::provide(&mut providers.queries); + rustc_expand::provide(&mut providers.queries); rustc_const_eval::provide(providers); rustc_middle::hir::provide(&mut providers.queries); rustc_borrowck::provide(&mut providers.queries); diff --git a/compiler/rustc_macros/src/query.rs b/compiler/rustc_macros/src/query.rs index 5d32950875adb..104e95277a1a2 100644 --- a/compiler/rustc_macros/src/query.rs +++ b/compiler/rustc_macros/src/query.rs @@ -93,7 +93,7 @@ struct QueryModifiers { cache: Option<(Option, Block)>, /// A cycle error for this query aborting the compilation with a fatal error. 
- fatal_cycle: Option, + cycle_fatal: Option, /// A cycle error results in a delay_bug call cycle_delay_bug: Option, @@ -136,7 +136,7 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result { let mut arena_cache = None; let mut cache = None; let mut desc = None; - let mut fatal_cycle = None; + let mut cycle_fatal = None; let mut cycle_delay_bug = None; let mut cycle_stash = None; let mut no_hash = None; @@ -189,8 +189,8 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result { try_insert!(cache = (args, block)); } else if modifier == "arena_cache" { try_insert!(arena_cache = modifier); - } else if modifier == "fatal_cycle" { - try_insert!(fatal_cycle = modifier); + } else if modifier == "cycle_fatal" { + try_insert!(cycle_fatal = modifier); } else if modifier == "cycle_delay_bug" { try_insert!(cycle_delay_bug = modifier); } else if modifier == "cycle_stash" { @@ -220,7 +220,7 @@ fn parse_query_modifiers(input: ParseStream<'_>) -> Result { arena_cache, cache, desc, - fatal_cycle, + cycle_fatal, cycle_delay_bug, cycle_stash, no_hash, @@ -366,8 +366,8 @@ pub(super) fn rustc_queries(input: TokenStream) -> TokenStream { } passthrough!( - fatal_cycle, arena_cache, + cycle_fatal, cycle_delay_bug, cycle_stash, no_hash, diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index 4fa39eb83e9e9..0bdc1bfd45eef 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -119,6 +119,7 @@ macro_rules! arena_types { [decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph, [] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls, [] hir_owner_nodes: rustc_hir::OwnerNodes<'tcx>, + [decode] token_stream: rustc_ast::tokenstream::TokenStream, ]); ) } diff --git a/compiler/rustc_middle/src/dep_graph/dep_node.rs b/compiler/rustc_middle/src/dep_graph/dep_node.rs index 0c757a390ca01..3ee1db67911ff 100644 --- a/compiler/rustc_middle/src/dep_graph/dep_node.rs +++ b/compiler/rustc_middle/src/dep_graph/dep_node.rs @@ -176,6 +176,12 @@ impl DepNodeExt for DepNode { } } +/// Maps a query label to its DepKind. Panics if a query with the given label does not exist. 
+pub fn dep_kind_from_label(label: &str) -> DepKind { + dep_kind_from_label_string(label) + .unwrap_or_else(|_| panic!("Query label {label} does not exist")) +} + impl<'tcx> DepNodeParams> for () { #[inline(always)] fn fingerprint_style() -> FingerprintStyle { diff --git a/compiler/rustc_middle/src/dep_graph/mod.rs b/compiler/rustc_middle/src/dep_graph/mod.rs index 781e3e442e64c..b24ea4acc6b72 100644 --- a/compiler/rustc_middle/src/dep_graph/mod.rs +++ b/compiler/rustc_middle/src/dep_graph/mod.rs @@ -8,7 +8,7 @@ use crate::ty::{self, TyCtxt}; #[macro_use] mod dep_node; -pub use dep_node::{DepKind, DepNode, DepNodeExt, dep_kinds, label_strs}; +pub use dep_node::{DepKind, DepNode, DepNodeExt, dep_kind_from_label, dep_kinds, label_strs}; pub(crate) use dep_node::{make_compile_codegen_unit, make_compile_mono_item, make_metadata}; pub use rustc_query_system::dep_graph::debug::{DepNodeFilter, EdgeFilter}; pub use rustc_query_system::dep_graph::{ diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs index 711a597d46030..940cc30c17e6e 100644 --- a/compiler/rustc_middle/src/query/erase.rs +++ b/compiler/rustc_middle/src/query/erase.rs @@ -2,6 +2,7 @@ use std::ffi::OsStr; use std::intrinsics::transmute_unchecked; use std::mem::MaybeUninit; +use rustc_ast::tokenstream::TokenStream; use rustc_span::ErrorGuaranteed; use rustc_span::source_map::Spanned; @@ -188,6 +189,10 @@ impl EraseType >()]; } +impl EraseType for Result<&'_ TokenStream, ()> { + type Result = [u8; size_of::>()]; +} + impl EraseType for Option<&'_ T> { type Result = [u8; size_of::>()]; } diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs index 4d914c42cfc62..dd9ba43255459 100644 --- a/compiler/rustc_middle/src/query/keys.rs +++ b/compiler/rustc_middle/src/query/keys.rs @@ -2,11 +2,12 @@ use std::ffi::OsStr; +use rustc_ast::tokenstream::TokenStream; use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE, LocalDefId, LocalModDefId, ModDefId}; use rustc_hir::hir_id::{HirId, OwnerId}; use rustc_query_system::dep_graph::DepNodeIndex; use rustc_query_system::query::{DefIdCache, DefaultCache, SingleCache, VecCache}; -use rustc_span::{DUMMY_SP, Ident, Span, Symbol}; +use rustc_span::{DUMMY_SP, Ident, LocalExpnId, Span, Symbol}; use crate::infer::canonical::CanonicalQueryInput; use crate::mir::mono::CollectionMode; @@ -616,6 +617,19 @@ impl Key for (LocalDefId, HirId) { } } +impl<'tcx> Key for (LocalExpnId, &'tcx TokenStream) { + type Cache = DefaultCache; + + fn default_span(&self, _tcx: TyCtxt<'_>) -> Span { + self.0.expn_data().call_site + } + + #[inline(always)] + fn key_as_def_id(&self) -> Option { + None + } +} + impl<'tcx> Key for (ValidityRequirement, ty::PseudoCanonicalInput<'tcx, Ty<'tcx>>) { type Cache = DefaultCache; diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 5f40c36423b33..2f83f3078e89f 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -28,7 +28,7 @@ //! - `desc { ... }`: Sets the human-readable description for diagnostics and profiling. Required for every query. //! - `arena_cache`: Use an arena for in-memory caching of the query result. //! - `cache_on_disk_if { ... }`: Cache the query result to disk if the provided block evaluates to true. -//! - `fatal_cycle`: If a dependency cycle is detected, abort compilation with a fatal error. +//! - `cycle_fatal`: If a dependency cycle is detected, abort compilation with a fatal error. //! 
- `cycle_delay_bug`: If a dependency cycle is detected, emit a delayed bug instead of aborting immediately. //! - `cycle_stash`: If a dependency cycle is detected, stash the error for later handling. //! - `no_hash`: Do not hash the query result for incremental compilation; just mark as dirty if recomputed. @@ -70,6 +70,7 @@ use std::sync::Arc; use rustc_abi::Align; use rustc_arena::TypedArena; use rustc_ast::expand::allocator::AllocatorKind; +use rustc_ast::tokenstream::TokenStream; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::sorted_map::SortedMap; use rustc_data_structures::steal::Steal; @@ -87,7 +88,7 @@ use rustc_index::IndexVec; use rustc_lint_defs::LintId; use rustc_macros::rustc_queries; use rustc_query_system::ich::StableHashingContext; -use rustc_query_system::query::{QueryMode, QueryStackDeferred, QueryState}; +use rustc_query_system::query::{QueryMode, QueryState}; use rustc_session::Limits; use rustc_session::config::{EntryFnType, OptLevel, OutputFilenames, SymbolManglingVersion}; use rustc_session::cstore::{ @@ -96,7 +97,7 @@ use rustc_session::cstore::{ use rustc_session::lint::LintExpectationId; use rustc_span::def_id::LOCAL_CRATE; use rustc_span::source_map::Spanned; -use rustc_span::{DUMMY_SP, Span, Symbol}; +use rustc_span::{DUMMY_SP, LocalExpnId, Span, Symbol}; use rustc_target::spec::PanicStrategy; use {rustc_abi as abi, rustc_ast as ast, rustc_hir as hir}; @@ -160,9 +161,20 @@ pub mod plumbing; // The result type of each query must implement `Clone`, and additionally // `ty::query::values::Value`, which produces an appropriate placeholder // (error) value if the query resulted in a query cycle. -// Queries marked with `fatal_cycle` do not need the latter implementation, +// Queries marked with `cycle_fatal` do not need the latter implementation, // as they will raise an fatal error on query cycles instead. rustc_queries! { + /// Caches the expansion of a derive proc macro, e.g. `#[derive(Serialize)]`. + /// The key is: + /// - A unique key corresponding to the invocation of a macro. + /// - Token stream which serves as an input to the macro. + /// + /// The output is the token stream generated by the proc macro. + query derive_macro_expansion(key: (LocalExpnId, &'tcx TokenStream)) -> Result<&'tcx TokenStream, ()> { + desc { "expanding a derive (proc) macro" } + cache_on_disk_if { true } + } + /// This exists purely for testing the interactions between delayed bugs and incremental. query trigger_delayed_bug(key: DefId) { desc { "triggering a delayed bug for testing incremental" } @@ -584,7 +596,7 @@ rustc_queries! { } query is_panic_runtime(_: CrateNum) -> bool { - fatal_cycle + cycle_fatal desc { "checking if the crate is_panic_runtime" } separate_provide_extern } @@ -1315,7 +1327,7 @@ rustc_queries! { /// Return the set of (transitive) callees that may result in a recursive call to `key`, /// if we were able to walk all callees. query mir_callgraph_cyclic(key: LocalDefId) -> &'tcx Option> { - fatal_cycle + cycle_fatal arena_cache desc { |tcx| "computing (transitive) callees of `{}` that may recurse", @@ -1326,7 +1338,7 @@ rustc_queries! { /// Obtain all the calls into other local functions query mir_inliner_callees(key: ty::InstanceKind<'tcx>) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] { - fatal_cycle + cycle_fatal desc { |tcx| "computing all local function calls in `{}`", tcx.def_path_str(key.def_id()), @@ -1822,31 +1834,31 @@ rustc_queries! 
{ } query is_compiler_builtins(_: CrateNum) -> bool { - fatal_cycle + cycle_fatal desc { "checking if the crate is_compiler_builtins" } separate_provide_extern } query has_global_allocator(_: CrateNum) -> bool { // This query depends on untracked global state in CStore eval_always - fatal_cycle + cycle_fatal desc { "checking if the crate has_global_allocator" } separate_provide_extern } query has_alloc_error_handler(_: CrateNum) -> bool { // This query depends on untracked global state in CStore eval_always - fatal_cycle + cycle_fatal desc { "checking if the crate has_alloc_error_handler" } separate_provide_extern } query has_panic_handler(_: CrateNum) -> bool { - fatal_cycle + cycle_fatal desc { "checking if the crate has_panic_handler" } separate_provide_extern } query is_profiler_runtime(_: CrateNum) -> bool { - fatal_cycle + cycle_fatal desc { "checking if a crate is `#![profiler_runtime]`" } separate_provide_extern } @@ -1855,22 +1867,22 @@ rustc_queries! { cache_on_disk_if { true } } query required_panic_strategy(_: CrateNum) -> Option { - fatal_cycle + cycle_fatal desc { "getting a crate's required panic strategy" } separate_provide_extern } query panic_in_drop_strategy(_: CrateNum) -> PanicStrategy { - fatal_cycle + cycle_fatal desc { "getting a crate's configured panic-in-drop strategy" } separate_provide_extern } query is_no_builtins(_: CrateNum) -> bool { - fatal_cycle + cycle_fatal desc { "getting whether a crate has `#![no_builtins]`" } separate_provide_extern } query symbol_mangling_version(_: CrateNum) -> SymbolManglingVersion { - fatal_cycle + cycle_fatal desc { "getting a crate's symbol mangling version" } separate_provide_extern } diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index 29cceb708850e..5ef0c2500e7a9 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -785,6 +785,13 @@ impl<'a, 'tcx> Decodable> } } +impl<'a, 'tcx> Decodable> for &'tcx rustc_ast::tokenstream::TokenStream { + #[inline] + fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self { + RefDecodable::decode(d) + } +} + macro_rules! impl_ref_decoder { (<$tcx:tt> $($ty:ty,)*) => { $(impl<'a, $tcx> Decodable> for &$tcx [$ty] { diff --git a/compiler/rustc_middle/src/query/plumbing.rs b/compiler/rustc_middle/src/query/plumbing.rs index 8d01d9482ed4b..df333e68add1a 100644 --- a/compiler/rustc_middle/src/query/plumbing.rs +++ b/compiler/rustc_middle/src/query/plumbing.rs @@ -278,7 +278,7 @@ macro_rules! define_callbacks { ($V) ); - /// This function takes `ProvidedValue` and coverts it to an erased `Value` by + /// This function takes `ProvidedValue` and converts it to an erased `Value` by /// allocating it on an arena if the query has the `arena_cache` modifier. The /// value is then erased and returned. This will happen when computing the query /// using a provider or decoding a stored result. @@ -431,7 +431,7 @@ macro_rules! define_callbacks { #[derive(Default)] pub struct QueryStates<'tcx> { $( - pub $name: QueryState<$($K)*, QueryStackDeferred<'tcx>>, + pub $name: QueryState<$($K)*>, )* } @@ -532,7 +532,7 @@ macro_rules! define_feedable { // The result type of each query must implement `Clone`, and additionally // `ty::query::values::Value`, which produces an appropriate placeholder // (error) value if the query resulted in a query cycle. 
-// Queries marked with `fatal_cycle` do not need the latter implementation, +// Queries marked with `cycle_fatal` do not need the latter implementation, // as they will raise an fatal error on query cycles instead. mod sealed { diff --git a/compiler/rustc_middle/src/values.rs b/compiler/rustc_middle/src/values.rs index bc73d36216ef4..8d614a535498f 100644 --- a/compiler/rustc_middle/src/values.rs +++ b/compiler/rustc_middle/src/values.rs @@ -88,7 +88,7 @@ impl<'tcx> Value> for Representability { if info.query.dep_kind == dep_kinds::representability && let Some(field_id) = info.query.def_id && let Some(field_id) = field_id.as_local() - && let Some(DefKind::Field) = info.query.info.def_kind + && let Some(DefKind::Field) = info.query.def_kind { let parent_id = tcx.parent(field_id.to_def_id()); let item_id = match tcx.def_kind(parent_id) { @@ -224,7 +224,7 @@ impl<'tcx, T> Value> for Result> continue; }; let frame_span = - frame.query.info.default_span(cycle[(i + 1) % cycle.len()].span); + frame.query.default_span(cycle[(i + 1) % cycle.len()].span); if frame_span.is_dummy() { continue; } diff --git a/compiler/rustc_query_impl/src/lib.rs b/compiler/rustc_query_impl/src/lib.rs index 6904af771f0cc..f763b707aa234 100644 --- a/compiler/rustc_query_impl/src/lib.rs +++ b/compiler/rustc_query_impl/src/lib.rs @@ -21,8 +21,8 @@ use rustc_middle::ty::TyCtxt; use rustc_query_system::dep_graph::SerializedDepNodeIndex; use rustc_query_system::ich::StableHashingContext; use rustc_query_system::query::{ - CycleError, HashResult, QueryCache, QueryConfig, QueryMap, QueryMode, QueryStackDeferred, - QueryState, get_query_incr, get_query_non_incr, + CycleError, HashResult, QueryCache, QueryConfig, QueryMap, QueryMode, QueryState, + get_query_incr, get_query_non_incr, }; use rustc_query_system::{HandleCycleError, Value}; use rustc_span::{ErrorGuaranteed, Span}; @@ -79,10 +79,7 @@ where } #[inline(always)] - fn query_state<'a>( - self, - qcx: QueryCtxt<'tcx>, - ) -> &'a QueryState> + fn query_state<'a>(self, qcx: QueryCtxt<'tcx>) -> &'a QueryState where QueryCtxt<'tcx>: 'a, { @@ -91,7 +88,7 @@ where unsafe { &*(&qcx.tcx.query_system.states as *const QueryStates<'tcx>) .byte_add(self.dynamic.query_state) - .cast::>>() + .cast::>() } } diff --git a/compiler/rustc_query_impl/src/plumbing.rs b/compiler/rustc_query_impl/src/plumbing.rs index 39b6fac4ebc0b..67ab1114af62b 100644 --- a/compiler/rustc_query_impl/src/plumbing.rs +++ b/compiler/rustc_query_impl/src/plumbing.rs @@ -6,7 +6,6 @@ use std::num::NonZero; use rustc_data_structures::jobserver::Proxy; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; -use rustc_data_structures::sync::{DynSend, DynSync}; use rustc_data_structures::unord::UnordMap; use rustc_hashes::Hash64; use rustc_hir::limit::Limit; @@ -27,8 +26,8 @@ use rustc_middle::ty::{self, TyCtxt}; use rustc_query_system::dep_graph::{DepNodeParams, HasDepContext}; use rustc_query_system::ich::StableHashingContext; use rustc_query_system::query::{ - QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffect, - QueryStackDeferred, QueryStackFrame, QueryStackFrameExtra, force_query, + QueryCache, QueryConfig, QueryContext, QueryJobId, QueryMap, QuerySideEffect, QueryStackFrame, + force_query, }; use rustc_query_system::{QueryOverflow, QueryOverflowNote}; use rustc_serialize::{Decodable, Encodable}; @@ -67,9 +66,7 @@ impl<'tcx> HasDepContext for QueryCtxt<'tcx> { } } -impl<'tcx> QueryContext for QueryCtxt<'tcx> { - type QueryInfo = QueryStackDeferred<'tcx>; - +impl QueryContext 
for QueryCtxt<'_> { #[inline] fn jobserver_proxy(&self) -> &Proxy { &*self.jobserver_proxy @@ -98,10 +95,7 @@ impl<'tcx> QueryContext for QueryCtxt<'tcx> { /// Prefer passing `false` to `require_complete` to avoid potential deadlocks, /// especially when called from within a deadlock handler, unless a /// complete map is needed and no deadlock is possible at this call site. - fn collect_active_jobs( - self, - require_complete: bool, - ) -> Result>, QueryMap>> { + fn collect_active_jobs(self, require_complete: bool) -> Result { let mut jobs = QueryMap::default(); let mut complete = true; @@ -114,13 +108,6 @@ impl<'tcx> QueryContext for QueryCtxt<'tcx> { if complete { Ok(jobs) } else { Err(jobs) } } - fn lift_query_info( - self, - info: &QueryStackDeferred<'tcx>, - ) -> rustc_query_system::query::QueryStackFrameExtra { - info.extract() - } - // Interactions with on_disk_cache fn load_side_effect( self, @@ -181,10 +168,7 @@ impl<'tcx> QueryContext for QueryCtxt<'tcx> { self.sess.dcx().emit_fatal(QueryOverflow { span: info.job.span, - note: QueryOverflowNote { - desc: self.lift_query_info(&info.query.info).description, - depth, - }, + note: QueryOverflowNote { desc: info.query.description, depth }, suggested_limit, crate_name: self.crate_name(LOCAL_CRATE), }); @@ -219,7 +203,7 @@ macro_rules! handle_cycle_error { ([]) => {{ rustc_query_system::HandleCycleError::Error }}; - ([(fatal_cycle) $($rest:tt)*]) => {{ + ([(cycle_fatal) $($rest:tt)*]) => {{ rustc_query_system::HandleCycleError::Fatal }}; ([(cycle_stash) $($rest:tt)*]) => {{ @@ -321,17 +305,16 @@ macro_rules! should_ever_cache_on_disk { }; } -fn create_query_frame_extra<'tcx, K: Key + Copy + 'tcx>( - (tcx, key, kind, name, do_describe): ( - TyCtxt<'tcx>, - K, - DepKind, - &'static str, - fn(TyCtxt<'tcx>, K) -> String, - ), -) -> QueryStackFrameExtra { - let def_id = key.key_as_def_id(); - +pub(crate) fn create_query_frame< + 'tcx, + K: Copy + Key + for<'a> HashStable>, +>( + tcx: TyCtxt<'tcx>, + do_describe: fn(TyCtxt<'tcx>, K) -> String, + key: K, + kind: DepKind, + name: &'static str, +) -> QueryStackFrame { // If reduced queries are requested, we may be printing a query stack due // to a panic. Avoid using `default_span` and `def_kind` in that case. let reduce_queries = with_reduced_queries(); @@ -343,49 +326,36 @@ fn create_query_frame_extra<'tcx, K: Key + Copy + 'tcx>( } else { description }; - let span = if kind == dep_graph::dep_kinds::def_span || reduce_queries { + + let span = if reduce_queries { // The `def_span` query is used to calculate `default_span`, // so exit to avoid infinite recursion. None } else { - Some(key.default_span(tcx)) + Some(tcx.with_reduced_queries(|| key.default_span(tcx))) }; - let def_kind = if kind == dep_graph::dep_kinds::def_kind || reduce_queries { + let def_id = key.key_as_def_id(); + + let def_kind = if reduce_queries { // Try to avoid infinite recursion. 
None } else { - def_id.and_then(|def_id| def_id.as_local()).map(|def_id| tcx.def_kind(def_id)) + def_id + .and_then(|def_id| def_id.as_local()) + .map(|def_id| tcx.with_reduced_queries(|| tcx.def_kind(def_id))) }; - QueryStackFrameExtra::new(description, span, def_kind) -} - -pub(crate) fn create_query_frame< - 'tcx, - K: Copy + DynSend + DynSync + Key + for<'a> HashStable> + 'tcx, ->( - tcx: TyCtxt<'tcx>, - do_describe: fn(TyCtxt<'tcx>, K) -> String, - key: K, - kind: DepKind, - name: &'static str, -) -> QueryStackFrame> { - let def_id = key.key_as_def_id(); - let hash = || { - tcx.with_stable_hashing_context(|mut hcx| { - let mut hasher = StableHasher::new(); - kind.as_usize().hash_stable(&mut hcx, &mut hasher); - key.hash_stable(&mut hcx, &mut hasher); - hasher.finish::() - }) - }; let def_id_for_ty_in_cycle = key.def_id_for_ty_in_cycle(); - let info = - QueryStackDeferred::new((tcx, key, kind, name, do_describe), create_query_frame_extra); + let hash = tcx.with_stable_hashing_context(|mut hcx| { + let mut hasher = StableHasher::new(); + kind.as_usize().hash_stable(&mut hcx, &mut hasher); + key.hash_stable(&mut hcx, &mut hasher); + hasher.finish::() + }); - QueryStackFrame::new(info, kind, hash, def_id, def_id_for_ty_in_cycle) + QueryStackFrame::new(description, span, def_id, def_kind, kind, def_id_for_ty_in_cycle, hash) } pub(crate) fn encode_query_results<'a, 'tcx, Q>( @@ -737,7 +707,7 @@ macro_rules! define_queries { pub(crate) fn collect_active_jobs<'tcx>( tcx: TyCtxt<'tcx>, - qmap: &mut QueryMap>, + qmap: &mut QueryMap, require_complete: bool, ) -> Option<()> { let make_query = |tcx, key| { @@ -821,7 +791,7 @@ macro_rules! define_queries { // These arrays are used for iteration and can't be indexed by `DepKind`. const COLLECT_ACTIVE_JOBS: &[ - for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap>, bool) -> Option<()> + for<'tcx> fn(TyCtxt<'tcx>, &mut QueryMap, bool) -> Option<()> ] = &[$(query_impl::$name::collect_active_jobs),*]; diff --git a/compiler/rustc_query_system/src/dep_graph/graph.rs b/compiler/rustc_query_system/src/dep_graph/graph.rs index 8634274c3a751..0b50d376b5521 100644 --- a/compiler/rustc_query_system/src/dep_graph/graph.rs +++ b/compiler/rustc_query_system/src/dep_graph/graph.rs @@ -795,6 +795,18 @@ impl DepGraph { self.data.as_ref().unwrap().debug_loaded_from_disk.lock().contains(&dep_node) } + pub fn debug_dep_kind_was_loaded_from_disk(&self, dep_kind: DepKind) -> bool { + // We only check if we have a dep node corresponding to the given dep kind. 
+ #[allow(rustc::potential_query_instability)] + self.data + .as_ref() + .unwrap() + .debug_loaded_from_disk + .lock() + .iter() + .any(|node| node.kind == dep_kind) + } + #[cfg(debug_assertions)] #[inline(always)] pub(crate) fn register_dep_node_debug_str(&self, dep_node: DepNode, debug_str_gen: F) diff --git a/compiler/rustc_query_system/src/query/config.rs b/compiler/rustc_query_system/src/query/config.rs index e508eadb73b0b..371b896400a58 100644 --- a/compiler/rustc_query_system/src/query/config.rs +++ b/compiler/rustc_query_system/src/query/config.rs @@ -6,7 +6,6 @@ use std::hash::Hash; use rustc_data_structures::fingerprint::Fingerprint; use rustc_span::ErrorGuaranteed; -use super::QueryStackFrameExtra; use crate::dep_graph::{DepKind, DepNode, DepNodeParams, SerializedDepNodeIndex}; use crate::error::HandleCycleError; use crate::ich::StableHashingContext; @@ -28,7 +27,7 @@ pub trait QueryConfig: Copy { fn format_value(self) -> fn(&Self::Value) -> String; // Don't use this method to access query results, instead use the methods on TyCtxt - fn query_state<'a>(self, tcx: Qcx) -> &'a QueryState + fn query_state<'a>(self, tcx: Qcx) -> &'a QueryState where Qcx: 'a; @@ -58,7 +57,7 @@ pub trait QueryConfig: Copy { fn value_from_cycle_error( self, tcx: Qcx::DepContext, - cycle_error: &CycleError, + cycle_error: &CycleError, guar: ErrorGuaranteed, ) -> Self::Value; diff --git a/compiler/rustc_query_system/src/query/job.rs b/compiler/rustc_query_system/src/query/job.rs index 7d9b594d501ff..4a9b6a0d557cb 100644 --- a/compiler/rustc_query_system/src/query/job.rs +++ b/compiler/rustc_query_system/src/query/job.rs @@ -1,4 +1,3 @@ -use std::fmt::Debug; use std::hash::Hash; use std::io::Write; use std::iter; @@ -12,7 +11,6 @@ use rustc_hir::def::DefKind; use rustc_session::Session; use rustc_span::{DUMMY_SP, Span}; -use super::QueryStackFrameExtra; use crate::dep_graph::DepContext; use crate::error::CycleStack; use crate::query::plumbing::CycleError; @@ -20,54 +18,45 @@ use crate::query::{QueryContext, QueryStackFrame}; /// Represents a span and a query key. #[derive(Clone, Debug)] -pub struct QueryInfo { +pub struct QueryInfo { /// The span corresponding to the reason for which this query was required. pub span: Span, - pub query: QueryStackFrame, + pub query: QueryStackFrame, } -impl QueryInfo { - pub(crate) fn lift>( - &self, - qcx: Qcx, - ) -> QueryInfo { - QueryInfo { span: self.span, query: self.query.lift(qcx) } - } -} - -pub type QueryMap = FxHashMap>; +pub type QueryMap = FxHashMap; /// A value uniquely identifying an active query job. #[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)] pub struct QueryJobId(pub NonZero); impl QueryJobId { - fn query(self, map: &QueryMap) -> QueryStackFrame { + fn query(self, map: &QueryMap) -> QueryStackFrame { map.get(&self).unwrap().query.clone() } - fn span(self, map: &QueryMap) -> Span { + fn span(self, map: &QueryMap) -> Span { map.get(&self).unwrap().job.span } - fn parent(self, map: &QueryMap) -> Option { + fn parent(self, map: &QueryMap) -> Option { map.get(&self).unwrap().job.parent } - fn latch(self, map: &QueryMap) -> Option<&QueryLatch> { + fn latch(self, map: &QueryMap) -> Option<&QueryLatch> { map.get(&self).unwrap().job.latch.as_ref() } } #[derive(Clone, Debug)] -pub struct QueryJobInfo { - pub query: QueryStackFrame, - pub job: QueryJob, +pub struct QueryJobInfo { + pub query: QueryStackFrame, + pub job: QueryJob, } /// Represents an active query job. 
#[derive(Debug)] -pub struct QueryJob { +pub struct QueryJob { pub id: QueryJobId, /// The span corresponding to the reason for which this query was required. @@ -77,23 +66,23 @@ pub struct QueryJob { pub parent: Option, /// The latch that is used to wait on this job. - latch: Option>, + latch: Option, } -impl Clone for QueryJob { +impl Clone for QueryJob { fn clone(&self) -> Self { Self { id: self.id, span: self.span, parent: self.parent, latch: self.latch.clone() } } } -impl QueryJob { +impl QueryJob { /// Creates a new query job. #[inline] pub fn new(id: QueryJobId, span: Span, parent: Option) -> Self { QueryJob { id, span, parent, latch: None } } - pub(super) fn latch(&mut self) -> QueryLatch { + pub(super) fn latch(&mut self) -> QueryLatch { if self.latch.is_none() { self.latch = Some(QueryLatch::new()); } @@ -113,12 +102,12 @@ impl QueryJob { } impl QueryJobId { - pub(super) fn find_cycle_in_stack( + pub(super) fn find_cycle_in_stack( &self, - query_map: QueryMap, + query_map: QueryMap, current_job: &Option, span: Span, - ) -> CycleError { + ) -> CycleError { // Find the waitee amongst `current_job` parents let mut cycle = Vec::new(); let mut current_job = Option::clone(current_job); @@ -152,7 +141,7 @@ impl QueryJobId { #[cold] #[inline(never)] - pub fn find_dep_kind_root(&self, query_map: QueryMap) -> (QueryJobInfo, usize) { + pub fn find_dep_kind_root(&self, query_map: QueryMap) -> (QueryJobInfo, usize) { let mut depth = 1; let info = query_map.get(&self).unwrap(); let dep_kind = info.query.dep_kind; @@ -172,31 +161,31 @@ impl QueryJobId { } #[derive(Debug)] -struct QueryWaiter { +struct QueryWaiter { query: Option, condvar: Condvar, span: Span, - cycle: Mutex>>, + cycle: Mutex>, } #[derive(Debug)] -struct QueryLatchInfo { +struct QueryLatchInfo { complete: bool, - waiters: Vec>>, + waiters: Vec>, } #[derive(Debug)] -pub(super) struct QueryLatch { - info: Arc>>, +pub(super) struct QueryLatch { + info: Arc>, } -impl Clone for QueryLatch { +impl Clone for QueryLatch { fn clone(&self) -> Self { Self { info: Arc::clone(&self.info) } } } -impl QueryLatch { +impl QueryLatch { fn new() -> Self { QueryLatch { info: Arc::new(Mutex::new(QueryLatchInfo { complete: false, waiters: Vec::new() })), @@ -209,7 +198,7 @@ impl QueryLatch { qcx: impl QueryContext, query: Option, span: Span, - ) -> Result<(), CycleError> { + ) -> Result<(), CycleError> { let waiter = Arc::new(QueryWaiter { query, span, cycle: Mutex::new(None), condvar: Condvar::new() }); self.wait_on_inner(qcx, &waiter); @@ -224,7 +213,7 @@ impl QueryLatch { } /// Awaits the caller on this latch by blocking the current thread. - fn wait_on_inner(&self, qcx: impl QueryContext, waiter: &Arc>) { + fn wait_on_inner(&self, qcx: impl QueryContext, waiter: &Arc) { let mut info = self.info.lock(); if !info.complete { // We push the waiter on to the `waiters` list. It can be accessed inside @@ -260,7 +249,7 @@ impl QueryLatch { /// Removes a single waiter from the list of waiters. /// This is used to break query cycles. - fn extract_waiter(&self, waiter: usize) -> Arc> { + fn extract_waiter(&self, waiter: usize) -> Arc { let mut info = self.info.lock(); debug_assert!(!info.complete); // Remove the waiter from the list of waiters @@ -280,11 +269,7 @@ type Waiter = (QueryJobId, usize); /// For visits of resumable waiters it returns Some(Some(Waiter)) which has the /// required information to resume the waiter. /// If all `visit` calls returns None, this function also returns None. 
-fn visit_waiters( - query_map: &QueryMap, - query: QueryJobId, - mut visit: F, -) -> Option> +fn visit_waiters(query_map: &QueryMap, query: QueryJobId, mut visit: F) -> Option> where F: FnMut(Span, QueryJobId) -> Option>, { @@ -314,8 +299,8 @@ where /// `span` is the reason for the `query` to execute. This is initially DUMMY_SP. /// If a cycle is detected, this initial value is replaced with the span causing /// the cycle. -fn cycle_check( - query_map: &QueryMap, +fn cycle_check( + query_map: &QueryMap, query: QueryJobId, span: Span, stack: &mut Vec<(Span, QueryJobId)>, @@ -354,8 +339,8 @@ fn cycle_check( /// Finds out if there's a path to the compiler root (aka. code which isn't in a query) /// from `query` without going through any of the queries in `visited`. /// This is achieved with a depth first search. -fn connected_to_root( - query_map: &QueryMap, +fn connected_to_root( + query_map: &QueryMap, query: QueryJobId, visited: &mut FxHashSet, ) -> bool { @@ -376,7 +361,7 @@ fn connected_to_root( } // Deterministically pick an query from a list -fn pick_query<'a, I: Clone, T, F>(query_map: &QueryMap, queries: &'a [T], f: F) -> &'a T +fn pick_query<'a, T, F>(query_map: &QueryMap, queries: &'a [T], f: F) -> &'a T where F: Fn(&T) -> (Span, QueryJobId), { @@ -401,10 +386,10 @@ where /// the function return true. /// If a cycle was not found, the starting query is removed from `jobs` and /// the function returns false. -fn remove_cycle( - query_map: &QueryMap, +fn remove_cycle( + query_map: &QueryMap, jobs: &mut Vec, - wakelist: &mut Vec>>, + wakelist: &mut Vec>, ) -> bool { let mut visited = FxHashSet::default(); let mut stack = Vec::new(); @@ -505,10 +490,7 @@ fn remove_cycle( /// uses a query latch and then resuming that waiter. /// There may be multiple cycles involved in a deadlock, so this searches /// all active queries for cycles before finally resuming all the waiters at once. 
-pub fn break_query_cycles( - query_map: QueryMap, - registry: &rustc_thread_pool::Registry, -) { +pub fn break_query_cycles(query_map: QueryMap, registry: &rustc_thread_pool::Registry) { let mut wakelist = Vec::new(); // It is OK per the comments: // - https://github.com/rust-lang/rust/pull/131200#issuecomment-2798854932 @@ -559,7 +541,7 @@ pub fn report_cycle<'a>( ) -> Diag<'a> { assert!(!stack.is_empty()); - let span = stack[0].query.info.default_span(stack[1 % stack.len()].span); + let span = stack[0].query.default_span(stack[1 % stack.len()].span); let mut cycle_stack = Vec::new(); @@ -568,31 +550,31 @@ pub fn report_cycle<'a>( for i in 1..stack.len() { let query = &stack[i].query; - let span = query.info.default_span(stack[(i + 1) % stack.len()].span); - cycle_stack.push(CycleStack { span, desc: query.info.description.to_owned() }); + let span = query.default_span(stack[(i + 1) % stack.len()].span); + cycle_stack.push(CycleStack { span, desc: query.description.to_owned() }); } let mut cycle_usage = None; if let Some((span, ref query)) = *usage { cycle_usage = Some(crate::error::CycleUsage { - span: query.info.default_span(span), - usage: query.info.description.to_string(), + span: query.default_span(span), + usage: query.description.to_string(), }); } - let alias = - if stack.iter().all(|entry| matches!(entry.query.info.def_kind, Some(DefKind::TyAlias))) { - Some(crate::error::Alias::Ty) - } else if stack.iter().all(|entry| entry.query.info.def_kind == Some(DefKind::TraitAlias)) { - Some(crate::error::Alias::Trait) - } else { - None - }; + let alias = if stack.iter().all(|entry| matches!(entry.query.def_kind, Some(DefKind::TyAlias))) + { + Some(crate::error::Alias::Ty) + } else if stack.iter().all(|entry| entry.query.def_kind == Some(DefKind::TraitAlias)) { + Some(crate::error::Alias::Trait) + } else { + None + }; let cycle_diag = crate::error::Cycle { span, cycle_stack, - stack_bottom: stack[0].query.info.description.to_owned(), + stack_bottom: stack[0].query.description.to_owned(), alias, cycle_usage, stack_count, @@ -628,7 +610,6 @@ pub fn print_query_stack( let Some(query_info) = query_map.get(&query) else { break; }; - let query_extra = qcx.lift_query_info(&query_info.query.info); if Some(count_printed) < limit_frames || limit_frames.is_none() { // Only print to stderr as many stack frames as `num_frames` when present. 
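The hunks above rework the deadlock handler's cycle search (`visit_waiters`, `cycle_check`, `connected_to_root`, `remove_cycle`, `break_query_cycles`) to drop the extra type parameter, but the underlying algorithm is unchanged: a depth-first walk over the waits-for graph that keeps the current path on a stack and reports a cycle as soon as it reaches a query already on that path. A standalone sketch of that idea, using simplified hypothetical types rather than rustc's own:

```rust
use std::collections::{HashMap, HashSet};

type QueryId = u32;

/// Depth-first search over a "waits-for" graph (query -> queries it waits on).
/// Returns the cycle as the portion of the current DFS path starting at the
/// revisited query, or `None` if no cycle is reachable from `query`.
fn find_cycle(
    edges: &HashMap<QueryId, Vec<QueryId>>,
    query: QueryId,
    path: &mut Vec<QueryId>,
    visited: &mut HashSet<QueryId>,
) -> Option<Vec<QueryId>> {
    if !visited.insert(query) {
        return None; // already fully explored from an earlier call
    }
    path.push(query);
    for &waitee in edges.get(&query).into_iter().flatten() {
        if let Some(pos) = path.iter().position(|&q| q == waitee) {
            // `waitee` is already on the current path: everything from `pos`
            // onwards forms the cycle.
            return Some(path[pos..].to_vec());
        }
        if let Some(cycle) = find_cycle(edges, waitee, path, visited) {
            return Some(cycle);
        }
    }
    path.pop();
    None
}

fn main() {
    let edges = HashMap::from([(1, vec![2]), (2, vec![3]), (3, vec![1])]);
    let cycle = find_cycle(&edges, 1, &mut Vec::new(), &mut HashSet::new());
    assert_eq!(cycle, Some(vec![1, 2, 3])); // 1 -> 2 -> 3 -> 1
}
```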
// FIXME: needs translation @@ -636,7 +617,7 @@ pub fn print_query_stack( #[allow(rustc::untranslatable_diagnostic)] dcx.struct_failure_note(format!( "#{} [{:?}] {}", - count_printed, query_info.query.dep_kind, query_extra.description + count_printed, query_info.query.dep_kind, query_info.query.description )) .with_span(query_info.job.span) .emit(); @@ -649,7 +630,7 @@ pub fn print_query_stack( "#{} [{}] {}", count_total, qcx.dep_context().dep_kind_info(query_info.query.dep_kind).name, - query_extra.description + query_info.query.description ); } diff --git a/compiler/rustc_query_system/src/query/mod.rs b/compiler/rustc_query_system/src/query/mod.rs index ce3456d532e69..b524756d81b61 100644 --- a/compiler/rustc_query_system/src/query/mod.rs +++ b/compiler/rustc_query_system/src/query/mod.rs @@ -1,23 +1,4 @@ -mod plumbing; -use std::fmt::Debug; -use std::marker::PhantomData; -use std::mem::transmute; -use std::sync::Arc; - -pub use self::plumbing::*; - -mod job; -pub use self::job::{ - QueryInfo, QueryJob, QueryJobId, QueryJobInfo, QueryMap, break_query_cycles, print_query_stack, - report_cycle, -}; - -mod caches; -pub use self::caches::{DefIdCache, DefaultCache, QueryCache, SingleCache, VecCache}; - -mod config; use rustc_data_structures::jobserver::Proxy; -use rustc_data_structures::sync::{DynSend, DynSync}; use rustc_errors::DiagInner; use rustc_hashes::Hash64; use rustc_hir::def::DefKind; @@ -25,66 +6,49 @@ use rustc_macros::{Decodable, Encodable}; use rustc_span::Span; use rustc_span::def_id::DefId; +pub use self::caches::{DefIdCache, DefaultCache, QueryCache, SingleCache, VecCache}; pub use self::config::{HashResult, QueryConfig}; +pub use self::job::{ + QueryInfo, QueryJob, QueryJobId, QueryJobInfo, QueryMap, break_query_cycles, print_query_stack, + report_cycle, +}; +pub use self::plumbing::*; use crate::dep_graph::{DepKind, DepNodeIndex, HasDepContext, SerializedDepNodeIndex}; +mod caches; +mod config; +mod job; +mod plumbing; + /// Description of a frame in the query stack. /// /// This is mostly used in case of cycles for error reporting. #[derive(Clone, Debug)] -pub struct QueryStackFrame { - /// This field initially stores a `QueryStackDeferred` during collection, - /// but can later be changed to `QueryStackFrameExtra` containing concrete information - /// by calling `lift`. This is done so that collecting query does not need to invoke - /// queries, instead `lift` will call queries in a more appropriate location. - pub info: I, - +pub struct QueryStackFrame { + pub description: String, + span: Option, + pub def_id: Option, + pub def_kind: Option, + /// A def-id that is extracted from a `Ty` in a query key + pub def_id_for_ty_in_cycle: Option, pub dep_kind: DepKind, /// This hash is used to deterministically pick /// a query to remove cycles in the parallel compiler. 
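The `hash` field documented at the end of the hunk above (its declaration continues on the next line of the diff) exists so that, when several queries could be cancelled to break a cycle, every compilation process picks the same one. A minimal illustration of that kind of deterministic tie-break, again with hypothetical simplified types rather than rustc's `QueryStackFrame` and `Hash64`:

```rust
/// A stand-in for a query stack frame; `hash` plays the role of rustc's
/// stable 64-bit hash of the query key.
struct Frame {
    description: &'static str,
    hash: u64,
}

/// Pick one frame deterministically: the smallest hash wins, so independent
/// compiler processes looking at the same cycle agree on the choice.
fn pick(frames: &[Frame]) -> &Frame {
    frames.iter().min_by_key(|f| f.hash).expect("a cycle is never empty")
}

fn main() {
    // The descriptions and hash values here are made up for illustration.
    let cycle = [
        Frame { description: "type_of(foo::Bar)", hash: 0x9e37_79b9 },
        Frame { description: "mir_built(foo::baz)", hash: 0x1234_5678 },
    ];
    assert_eq!(pick(&cycle).description, "mir_built(foo::baz)");
}
```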
hash: Hash64, - pub def_id: Option, - /// A def-id that is extracted from a `Ty` in a query key - pub def_id_for_ty_in_cycle: Option, } -impl QueryStackFrame { +impl QueryStackFrame { #[inline] pub fn new( - info: I, - dep_kind: DepKind, - hash: impl FnOnce() -> Hash64, + description: String, + span: Option, def_id: Option, + def_kind: Option, + dep_kind: DepKind, def_id_for_ty_in_cycle: Option, + hash: Hash64, ) -> Self { - Self { info, def_id, dep_kind, hash: hash(), def_id_for_ty_in_cycle } - } - - fn lift>( - &self, - qcx: Qcx, - ) -> QueryStackFrame { - QueryStackFrame { - info: qcx.lift_query_info(&self.info), - dep_kind: self.dep_kind, - hash: self.hash, - def_id: self.def_id, - def_id_for_ty_in_cycle: self.def_id_for_ty_in_cycle, - } - } -} - -#[derive(Clone, Debug)] -pub struct QueryStackFrameExtra { - pub description: String, - span: Option, - pub def_kind: Option, -} - -impl QueryStackFrameExtra { - #[inline] - pub fn new(description: String, span: Option, def_kind: Option) -> Self { - Self { description, span, def_kind } + Self { description, span, def_id, def_kind, def_id_for_ty_in_cycle, dep_kind, hash } } // FIXME(eddyb) Get more valid `Span`s on queries. @@ -97,40 +61,6 @@ impl QueryStackFrameExtra { } } -/// Track a 'side effect' for a particular query. -/// This is used to hold a closure which can create `QueryStackFrameExtra`. -#[derive(Clone)] -pub struct QueryStackDeferred<'tcx> { - _dummy: PhantomData<&'tcx ()>, - - // `extract` may contain references to 'tcx, but we can't tell drop checking that it won't - // access it in the destructor. - extract: Arc QueryStackFrameExtra + DynSync + DynSend>, -} - -impl<'tcx> QueryStackDeferred<'tcx> { - pub fn new( - context: C, - extract: fn(C) -> QueryStackFrameExtra, - ) -> Self { - let extract: Arc QueryStackFrameExtra + DynSync + DynSend + 'tcx> = - Arc::new(move || extract(context)); - // SAFETY: The `extract` closure does not access 'tcx in its destructor as the only - // captured variable is `context` which is Copy and cannot have a destructor. - Self { _dummy: PhantomData, extract: unsafe { transmute(extract) } } - } - - pub fn extract(&self) -> QueryStackFrameExtra { - (self.extract)() - } -} - -impl<'tcx> Debug for QueryStackDeferred<'tcx> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str("QueryStackDeferred") - } -} - /// Tracks 'side effects' for a particular query. /// This struct is saved to disk along with the query result, /// and loaded from disk if we mark the query as green. @@ -150,8 +80,6 @@ pub enum QuerySideEffect { } pub trait QueryContext: HasDepContext { - type QueryInfo: Clone; - /// Gets a jobserver reference which is used to release then acquire /// a token while waiting on a query. fn jobserver_proxy(&self) -> &Proxy; @@ -161,12 +89,7 @@ pub trait QueryContext: HasDepContext { /// Get the query information from the TLS context. fn current_query_job(self) -> Option; - fn collect_active_jobs( - self, - require_complete: bool, - ) -> Result, QueryMap>; - - fn lift_query_info(self, info: &Self::QueryInfo) -> QueryStackFrameExtra; + fn collect_active_jobs(self, require_complete: bool) -> Result; /// Load a side effect associated to the node in the previous session. 
fn load_side_effect( diff --git a/compiler/rustc_query_system/src/query/plumbing.rs b/compiler/rustc_query_system/src/query/plumbing.rs index dea47c8fa787e..fa5a94d651885 100644 --- a/compiler/rustc_query_system/src/query/plumbing.rs +++ b/compiler/rustc_query_system/src/query/plumbing.rs @@ -18,7 +18,7 @@ use rustc_errors::{Diag, FatalError, StashKey}; use rustc_span::{DUMMY_SP, Span}; use tracing::instrument; -use super::{QueryConfig, QueryStackFrameExtra}; +use super::QueryConfig; use crate::HandleCycleError; use crate::dep_graph::{DepContext, DepGraphData, DepNode, DepNodeIndex, DepNodeParams}; use crate::ich::StableHashingContext; @@ -31,23 +31,23 @@ fn equivalent_key(k: &K) -> impl Fn(&(K, V)) -> bool + '_ { move |x| x.0 == *k } -pub struct QueryState { - active: Sharded)>>, +pub struct QueryState { + active: Sharded>, } /// Indicates the state of a query for a given key in a query map. -enum QueryResult { +enum QueryResult { /// An already executing query. The query job can be used to await for its completion. - Started(QueryJob), + Started(QueryJob), /// The query panicked. Queries trying to wait on this will raise a fatal error which will /// silently panic. Poisoned, } -impl QueryResult { +impl QueryResult { /// Unwraps the query job expecting that it has started. - fn expect_job(self) -> QueryJob { + fn expect_job(self) -> QueryJob { match self { Self::Started(job) => job, Self::Poisoned => { @@ -57,7 +57,7 @@ impl QueryResult { } } -impl QueryState +impl QueryState where K: Eq + Hash + Copy + Debug, { @@ -68,13 +68,13 @@ where pub fn collect_active_jobs( &self, qcx: Qcx, - make_query: fn(Qcx, K) -> QueryStackFrame, - jobs: &mut QueryMap, + make_query: fn(Qcx, K) -> QueryStackFrame, + jobs: &mut QueryMap, require_complete: bool, ) -> Option<()> { let mut active = Vec::new(); - let mut collect = |iter: LockGuard<'_, HashTable<(K, QueryResult)>>| { + let mut collect = |iter: LockGuard<'_, HashTable<(K, QueryResult)>>| { for (k, v) in iter.iter() { if let QueryResult::Started(ref job) = *v { active.push((*k, job.clone())); @@ -105,19 +105,19 @@ where } } -impl Default for QueryState { - fn default() -> QueryState { +impl Default for QueryState { + fn default() -> QueryState { QueryState { active: Default::default() } } } /// A type representing the responsibility to execute the job in the `job` field. /// This will poison the relevant query if dropped. -struct JobOwner<'tcx, K, I> +struct JobOwner<'tcx, K> where K: Eq + Hash + Copy, { - state: &'tcx QueryState, + state: &'tcx QueryState, key: K, } @@ -159,7 +159,7 @@ where } Stash => { let guar = if let Some(root) = cycle_error.cycle.first() - && let Some(span) = root.query.info.span + && let Some(span) = root.query.span { error.stash(span, StashKey::Cycle).unwrap() } else { @@ -170,7 +170,7 @@ where } } -impl<'tcx, K, I> JobOwner<'tcx, K, I> +impl<'tcx, K> JobOwner<'tcx, K> where K: Eq + Hash + Copy, { @@ -207,7 +207,7 @@ where } } -impl<'tcx, K, I> Drop for JobOwner<'tcx, K, I> +impl<'tcx, K> Drop for JobOwner<'tcx, K> where K: Eq + Hash + Copy, { @@ -235,19 +235,10 @@ where } #[derive(Clone, Debug)] -pub struct CycleError { +pub struct CycleError { /// The query and related span that uses the cycle. 
- pub usage: Option<(Span, QueryStackFrame)>, - pub cycle: Vec>, -} - -impl CycleError { - fn lift>(&self, qcx: Qcx) -> CycleError { - CycleError { - usage: self.usage.as_ref().map(|(span, frame)| (*span, frame.lift(qcx))), - cycle: self.cycle.iter().map(|info| info.lift(qcx)).collect(), - } - } + pub usage: Option<(Span, QueryStackFrame)>, + pub cycle: Vec, } /// Checks whether there is already a value for this key in the in-memory @@ -284,10 +275,10 @@ where { // Ensure there was no errors collecting all active jobs. // We need the complete map to ensure we find a cycle to break. - let query_map = qcx.collect_active_jobs(false).ok().expect("failed to collect active queries"); + let query_map = qcx.collect_active_jobs(false).expect("failed to collect active queries"); let error = try_execute.find_cycle_in_stack(query_map, &qcx.current_query_job(), span); - (mk_cycle(query, qcx, error.lift(qcx)), None) + (mk_cycle(query, qcx, error), None) } #[inline(always)] @@ -296,7 +287,7 @@ fn wait_for_query( qcx: Qcx, span: Span, key: Q::Key, - latch: QueryLatch, + latch: QueryLatch, current: Option, ) -> (Q::Value, Option) where @@ -336,7 +327,7 @@ where (v, Some(index)) } - Err(cycle) => (mk_cycle(query, qcx, cycle.lift(qcx)), None), + Err(cycle) => (mk_cycle(query, qcx, cycle), None), } } @@ -414,7 +405,7 @@ where fn execute_job( query: Q, qcx: Qcx, - state: &QueryState, + state: &QueryState, key: Q::Key, key_hash: u64, id: QueryJobId, diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs index 82655a867b5b7..a16235c5a1e49 100644 --- a/compiler/rustc_resolve/src/lib.rs +++ b/compiler/rustc_resolve/src/lib.rs @@ -13,6 +13,8 @@ #![feature(arbitrary_self_types)] #![feature(assert_matches)] #![feature(box_patterns)] +#![feature(const_default)] +#![feature(const_trait_impl)] #![feature(control_flow_into_value)] #![feature(decl_macro)] #![feature(default_field_values)] @@ -45,7 +47,7 @@ use rustc_ast::{ self as ast, AngleBracketedArg, CRATE_NODE_ID, Crate, Expr, ExprKind, GenericArg, GenericArgs, NodeId, Path, attr, }; -use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet, default}; use rustc_data_structures::intern::Interned; use rustc_data_structures::steal::Steal; use rustc_data_structures::sync::{FreezeReadGuard, FreezeWriteGuard}; @@ -1113,7 +1115,7 @@ pub struct Resolver<'ra, 'tcx> { tcx: TyCtxt<'tcx>, /// Item with a given `LocalDefId` was defined during macro expansion with ID `ExpnId`. - expn_that_defined: UnordMap, + expn_that_defined: UnordMap = Default::default(), graph_root: Module<'ra>, @@ -1124,12 +1126,12 @@ pub struct Resolver<'ra, 'tcx> { extern_prelude: FxIndexMap>, /// N.B., this is used only for better diagnostics, not name resolution itself. - field_names: LocalDefIdMap>, - field_defaults: LocalDefIdMap>, + field_names: LocalDefIdMap> = Default::default(), + field_defaults: LocalDefIdMap> = Default::default(), /// Span of the privacy modifier in fields of an item `DefId` accessible with dot syntax. /// Used for hints during error reporting. - field_visibility_spans: FxHashMap>, + field_visibility_spans: FxHashMap> = default::fx_hash_map(), /// All imports known to succeed or fail. determined_imports: Vec> = Vec::new(), @@ -1139,26 +1141,26 @@ pub struct Resolver<'ra, 'tcx> { // Spans for local variables found during pattern resolution. // Used for suggestions during error reporting. 
- pat_span_map: NodeMap, + pat_span_map: NodeMap = Default::default(), /// Resolutions for nodes that have a single resolution. - partial_res_map: NodeMap, + partial_res_map: NodeMap = Default::default(), /// Resolutions for import nodes, which have multiple resolutions in different namespaces. - import_res_map: NodeMap>>, + import_res_map: NodeMap>> = Default::default(), /// An import will be inserted into this map if it has been used. - import_use_map: FxHashMap, Used>, + import_use_map: FxHashMap, Used> = default::fx_hash_map(), /// Resolutions for labels (node IDs of their corresponding blocks or loops). - label_res_map: NodeMap, + label_res_map: NodeMap = Default::default(), /// Resolutions for lifetimes. - lifetimes_res_map: NodeMap, + lifetimes_res_map: NodeMap = Default::default(), /// Lifetime parameters that lowering will have to introduce. - extra_lifetime_params_map: NodeMap>, + extra_lifetime_params_map: NodeMap> = Default::default(), /// `CrateNum` resolutions of `extern crate` items. - extern_crate_map: UnordMap, - module_children: LocalDefIdMap>, - ambig_module_children: LocalDefIdMap>, - trait_map: NodeMap>, + extern_crate_map: UnordMap = Default::default(), + module_children: LocalDefIdMap> = Default::default(), + ambig_module_children: LocalDefIdMap> = Default::default(), + trait_map: NodeMap> = Default::default(), /// A map from nodes to anonymous modules. /// Anonymous modules are pseudo-modules that are implicitly created around items @@ -1174,7 +1176,7 @@ pub struct Resolver<'ra, 'tcx> { /// /// There will be an anonymous module created around `g` with the ID of the /// entry block for `f`. - block_map: NodeMap>, + block_map: NodeMap> = Default::default(), /// A fake module that contains no definition and no prelude. Used so that /// some AST passes can generate identifiers that only resolve to local or /// lang items. @@ -1190,7 +1192,7 @@ pub struct Resolver<'ra, 'tcx> { glob_map: FxIndexMap>, glob_error: Option = None, visibilities_for_hashing: Vec<(LocalDefId, Visibility)> = Vec::new(), - used_imports: FxHashSet, + used_imports: FxHashSet = default::fx_hash_set(), maybe_unused_trait_imports: FxIndexSet, /// Privacy errors are delayed until the end in order to deduplicate them. @@ -1206,56 +1208,56 @@ pub struct Resolver<'ra, 'tcx> { /// When a type is re-exported that has an inaccessible constructor because it has fields that /// are inaccessible from the import's scope, we mark that as the type won't be able to be built /// through the re-export. We use this information to extend the existing diagnostic. - inaccessible_ctor_reexport: FxHashMap, + inaccessible_ctor_reexport: FxHashMap = default::fx_hash_map(), arenas: &'ra ResolverArenas<'ra>, dummy_decl: Decl<'ra>, builtin_type_decls: FxHashMap>, builtin_attr_decls: FxHashMap>, registered_tool_decls: FxHashMap>, - macro_names: FxHashSet, - builtin_macros: FxHashMap, + macro_names: FxHashSet = default::fx_hash_set(), + builtin_macros: FxHashMap = default::fx_hash_map(), registered_tools: &'tcx RegisteredTools, macro_use_prelude: FxIndexMap>, /// Eagerly populated map of all local macro definitions. - local_macro_map: FxHashMap, + local_macro_map: FxHashMap = default::fx_hash_map(), /// Lazily populated cache of macro definitions loaded from external crates. 
extern_macro_map: CacheRefCell>, dummy_ext_bang: Arc, dummy_ext_derive: Arc, non_macro_attr: &'ra MacroData, - local_macro_def_scopes: FxHashMap>, - ast_transform_scopes: FxHashMap>, + local_macro_def_scopes: FxHashMap> = default::fx_hash_map(), + ast_transform_scopes: FxHashMap> = default::fx_hash_map(), unused_macros: FxIndexMap, /// A map from the macro to all its potentially unused arms. unused_macro_rules: FxIndexMap>, - proc_macro_stubs: FxHashSet, + proc_macro_stubs: FxHashSet = default::fx_hash_set(), /// Traces collected during macro resolution and validated when it's complete. single_segment_macro_resolutions: CmRefCell, Option>, Option)>>, multi_segment_macro_resolutions: CmRefCell, Span, MacroKind, ParentScope<'ra>, Option, Namespace)>>, - builtin_attrs: Vec<(Ident, ParentScope<'ra>)>, + builtin_attrs: Vec<(Ident, ParentScope<'ra>)> = Vec::new(), /// `derive(Copy)` marks items they are applied to so they are treated specially later. /// Derive macros cannot modify the item themselves and have to store the markers in the global /// context, so they attach the markers to derive container IDs using this resolver table. - containers_deriving_copy: FxHashSet, + containers_deriving_copy: FxHashSet = default::fx_hash_set(), /// Parent scopes in which the macros were invoked. /// FIXME: `derives` are missing in these parent scopes and need to be taken from elsewhere. - invocation_parent_scopes: FxHashMap>, + invocation_parent_scopes: FxHashMap> = default::fx_hash_map(), /// `macro_rules` scopes *produced* by expanding the macro invocations, /// include all the `macro_rules` items and other invocations generated by them. - output_macro_rules_scopes: FxHashMap>, + output_macro_rules_scopes: FxHashMap> = default::fx_hash_map(), /// `macro_rules` scopes produced by `macro_rules` item definitions. - macro_rules_scopes: FxHashMap>, + macro_rules_scopes: FxHashMap> = default::fx_hash_map(), /// Helper attributes that are in scope for the given expansion. - helper_attrs: FxHashMap)>>, + helper_attrs: FxHashMap)>> = default::fx_hash_map(), /// Ready or in-progress results of resolving paths inside the `#[derive(...)]` attribute /// with the given `ExpnId`. - derive_data: FxHashMap, + derive_data: FxHashMap = default::fx_hash_map(), /// Avoid duplicated errors for "name already defined". - name_already_seen: FxHashMap, + name_already_seen: FxHashMap = default::fx_hash_map(), potentially_unused_imports: Vec> = Vec::new(), @@ -1264,7 +1266,7 @@ pub struct Resolver<'ra, 'tcx> { /// Table for mapping struct IDs into struct constructor IDs, /// it's not used during normal resolution, only for better error reporting. /// Also includes of list of each fields visibility - struct_constructors: LocalDefIdMap<(Res, Visibility, Vec>)>, + struct_constructors: LocalDefIdMap<(Res, Visibility, Vec>)> = Default::default(), lint_buffer: LintBuffer, @@ -1272,19 +1274,19 @@ pub struct Resolver<'ra, 'tcx> { node_id_to_def_id: NodeMap>, - disambiguator: DisambiguatorState, + disambiguator: DisambiguatorState = DisambiguatorState::new(), /// Indices of unnamed struct or variant fields with unresolved attributes. - placeholder_field_indices: FxHashMap, + placeholder_field_indices: FxHashMap = default::fx_hash_map(), /// When collecting definitions from an AST fragment produced by a macro invocation `ExpnId` /// we know what parent node that fragment should be attached to thanks to this table, /// and how the `impl Trait` fragments were introduced. 
invocation_parents: FxHashMap, /// Amount of lifetime parameters for each item in the crate. - item_generics_num_lifetimes: FxHashMap, - delegation_fn_sigs: LocalDefIdMap, - delegation_infos: LocalDefIdMap, + item_generics_num_lifetimes: FxHashMap = default::fx_hash_map(), + delegation_fn_sigs: LocalDefIdMap = Default::default(), + delegation_infos: LocalDefIdMap = Default::default(), main_def: Option = None, trait_impls: FxIndexMap>, @@ -1293,7 +1295,7 @@ pub struct Resolver<'ra, 'tcx> { proc_macros: Vec = Vec::new(), confused_type_with_std_module: FxIndexMap, /// Whether lifetime elision was successful. - lifetime_elision_allowed: FxHashSet, + lifetime_elision_allowed: FxHashSet = default::fx_hash_set(), /// Names of items that were stripped out via cfg with their corresponding cfg meta item. stripped_cfg_items: Vec> = Vec::new(), @@ -1301,22 +1303,22 @@ pub struct Resolver<'ra, 'tcx> { effective_visibilities: EffectiveVisibilities, doc_link_resolutions: FxIndexMap, doc_link_traits_in_scope: FxIndexMap>, - all_macro_rules: UnordSet, + all_macro_rules: UnordSet = Default::default(), /// Invocation ids of all glob delegations. - glob_delegation_invoc_ids: FxHashSet, + glob_delegation_invoc_ids: FxHashSet = default::fx_hash_set(), /// Analogue of module `unexpanded_invocations` but in trait impls, excluding glob delegations. /// Needed because glob delegations wait for all other neighboring macros to expand. - impl_unexpanded_invocations: FxHashMap>, + impl_unexpanded_invocations: FxHashMap> = default::fx_hash_map(), /// Simplified analogue of module `resolutions` but in trait impls, excluding glob delegations. /// Needed because glob delegations exclude explicitly defined names. - impl_binding_keys: FxHashMap>, + impl_binding_keys: FxHashMap> = default::fx_hash_map(), /// This is the `Span` where an `extern crate foo;` suggestion would be inserted, if `foo` /// could be a crate that wasn't imported. For diagnostics use only. current_crate_outer_attr_insert_span: Span, - mods_with_parse_errors: FxHashSet, + mods_with_parse_errors: FxHashSet = default::fx_hash_set(), /// Whether `Resolver::register_macros_for_all_crates` has been called once already, as we /// don't need to run it more than once. @@ -1325,7 +1327,7 @@ pub struct Resolver<'ra, 'tcx> { // Stores pre-expansion and pre-placeholder-fragment-insertion names for `impl Trait` types // that were encountered during resolution. These names are used to generate item names // for APITs, so we don't want to leak details of resolution into these names. - impl_trait_names: FxHashMap, + impl_trait_names: FxHashMap = default::fx_hash_map(), } /// This provides memory for the rest of the crate. The `'ra` lifetime that is @@ -1587,41 +1589,19 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { let mut resolver = Resolver { tcx, - expn_that_defined: Default::default(), - // The outermost module has def ID 0; this is not reflected in the // AST. 
graph_root, assert_speculative: false, // Only set/cleared in Resolver::resolve_imports for now - prelude: None, extern_prelude, - field_names: Default::default(), - field_defaults: Default::default(), - field_visibility_spans: FxHashMap::default(), - - pat_span_map: Default::default(), - partial_res_map: Default::default(), - import_res_map: Default::default(), - import_use_map: Default::default(), - label_res_map: Default::default(), - lifetimes_res_map: Default::default(), - extra_lifetime_params_map: Default::default(), - extern_crate_map: Default::default(), - module_children: Default::default(), - ambig_module_children: Default::default(), - trait_map: NodeMap::default(), empty_module, local_modules, local_module_map, extern_module_map: Default::default(), - block_map: Default::default(), - ast_transform_scopes: FxHashMap::default(), glob_map: Default::default(), - used_imports: FxHashSet::default(), maybe_unused_trait_imports: Default::default(), - inaccessible_ctor_reexport: Default::default(), arenas, dummy_decl: arenas.new_pub_def_decl(Res::Err, DUMMY_SP, LocalExpnId::ROOT), @@ -1649,53 +1629,27 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> { (*ident, decl) }) .collect(), - macro_names: FxHashSet::default(), - builtin_macros: Default::default(), registered_tools, macro_use_prelude: Default::default(), - local_macro_map: Default::default(), extern_macro_map: Default::default(), dummy_ext_bang: Arc::new(SyntaxExtension::dummy_bang(edition)), dummy_ext_derive: Arc::new(SyntaxExtension::dummy_derive(edition)), non_macro_attr: arenas .alloc_macro(MacroData::new(Arc::new(SyntaxExtension::non_macro_attr(edition)))), - invocation_parent_scopes: Default::default(), - output_macro_rules_scopes: Default::default(), - macro_rules_scopes: Default::default(), - helper_attrs: Default::default(), - derive_data: Default::default(), - local_macro_def_scopes: FxHashMap::default(), - name_already_seen: FxHashMap::default(), - struct_constructors: Default::default(), unused_macros: Default::default(), unused_macro_rules: Default::default(), - proc_macro_stubs: Default::default(), single_segment_macro_resolutions: Default::default(), multi_segment_macro_resolutions: Default::default(), - builtin_attrs: Default::default(), - containers_deriving_copy: Default::default(), lint_buffer: LintBuffer::default(), node_id_to_def_id, - disambiguator: DisambiguatorState::new(), - placeholder_field_indices: Default::default(), invocation_parents, - item_generics_num_lifetimes: Default::default(), trait_impls: Default::default(), confused_type_with_std_module: Default::default(), - lifetime_elision_allowed: Default::default(), stripped_cfg_items: Default::default(), effective_visibilities: Default::default(), doc_link_resolutions: Default::default(), doc_link_traits_in_scope: Default::default(), - all_macro_rules: Default::default(), - delegation_fn_sigs: Default::default(), - glob_delegation_invoc_ids: Default::default(), - impl_unexpanded_invocations: Default::default(), - impl_binding_keys: Default::default(), current_crate_outer_attr_insert_span, - mods_with_parse_errors: Default::default(), - impl_trait_names: Default::default(), - delegation_infos: Default::default(), .. }; diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 99ab134038124..96bdcf8beffb9 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -2278,6 +2278,8 @@ options! 
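The `Resolver` hunks above move most field initialization out of `Resolver::new` and into the struct definition itself, using the unstable `default_field_values` feature (together with the newly enabled `const_default`/`const_trait_impl` features and the `rustc_data_structures::fx::default` helpers, so the defaults are const-evaluable); the constructor then ends in a bare `..` that fills in every declared default. A minimal standalone sketch of the feature, with illustrative names and only const-evaluable defaults:

```rust
// Nightly-only sketch; the names below are made up for illustration.
#![feature(default_field_values)]
#![allow(incomplete_features)] // silence the incomplete-feature warning, if any

struct Counters {
    hits: u64 = 0,
    labels: Vec<&'static str> = Vec::new(), // `Vec::new` is a const fn
    last_seen: Option<u64> = None,
}

fn main() {
    // `..` with no base expression pulls in the defaults declared above,
    // so the constructor only spells out the fields that differ.
    let c = Counters { hits: 1, .. };
    assert_eq!(c.hits, 1);
    assert!(c.labels.is_empty());
    assert_eq!(c.last_seen, None);
}
```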
{ "set options for branch target identification and pointer authentication on AArch64"), build_sdylib_interface: bool = (false, parse_bool, [UNTRACKED], "whether the stable interface is being built"), + cache_proc_macros: bool = (false, parse_bool, [TRACKED], + "cache the results of derive proc macro invocations (potentially unsound!) (default: no"), cf_protection: CFProtection = (CFProtection::None, parse_cfprotection, [TRACKED], "instrument control-flow architecture protection"), check_cfg_all_expected: bool = (false, parse_bool, [UNTRACKED], diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs index 51da538de94d4..d94d82835d650 100644 --- a/compiler/rustc_span/src/hygiene.rs +++ b/compiler/rustc_span/src/hygiene.rs @@ -1571,3 +1571,9 @@ impl HashStable for ExpnId { hash.hash_stable(ctx, hasher); } } + +impl HashStable for LocalExpnId { + fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { + self.to_expn_id().hash_stable(hcx, hasher); + } +} diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 98db0df593bec..51920db8cd79e 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -190,6 +190,7 @@ symbols! { BTreeMap, BTreeSet, BinaryHeap, + Bool, Borrow, BorrowMut, Break, @@ -202,6 +203,7 @@ symbols! { Capture, Cell, Center, + Char, Child, Cleanup, Clone, @@ -238,6 +240,7 @@ symbols! { Error, File, FileType, + Float, FmtArgumentsNew, FmtWrite, Fn, @@ -263,6 +266,7 @@ symbols! { IndexOutput, Input, Instant, + Int, Into, IntoFuture, IntoIterator, @@ -285,7 +289,6 @@ symbols! { IteratorItem, IteratorMap, Layout, - Leaf, Left, LinkedList, LintDiagnostic, @@ -363,6 +366,7 @@ symbols! { Some, SpanCtxt, Stdin, + Str, String, StructuralPartialEq, SubdiagMessage, @@ -585,6 +589,7 @@ symbols! { binaryheap_iter, bind_by_move_pattern_guards, bindings_after_at, + bit_width, bitand, bitand_assign, bitor, @@ -2060,6 +2065,7 @@ symbols! { shr_assign, sig_dfl, sig_ign, + signed, simd, simd_add, simd_and, diff --git a/library/core/src/mem/type_info.rs b/library/core/src/mem/type_info.rs index e4ccc408f1c65..2e3bdb45ce052 100644 --- a/library/core/src/mem/type_info.rs +++ b/library/core/src/mem/type_info.rs @@ -45,9 +45,16 @@ pub enum TypeKind { Tuple(Tuple), /// Arrays. Array(Array), - /// Primitives - /// FIXME(#146922): disambiguate further - Leaf, + /// Primitive boolean type. + Bool(Bool), + /// Primitive character type. + Char(Char), + /// Primitive signed and unsigned integer type. + Int(Int), + /// Primitive floating-point type. + Float(Float), + /// String slice type. + Str(Str), /// FIXME(#146922): add all the common types Other, } @@ -82,3 +89,47 @@ pub struct Array { /// The length of the array. pub len: usize, } + +/// Compile-time type information about `bool`. +#[derive(Debug)] +#[non_exhaustive] +#[unstable(feature = "type_info", issue = "146922")] +pub struct Bool { + // No additional information to provide for now. +} + +/// Compile-time type information about `char`. +#[derive(Debug)] +#[non_exhaustive] +#[unstable(feature = "type_info", issue = "146922")] +pub struct Char { + // No additional information to provide for now. +} + +/// Compile-time type information about signed and unsigned integer types. +#[derive(Debug)] +#[non_exhaustive] +#[unstable(feature = "type_info", issue = "146922")] +pub struct Int { + /// The bit width of the signed integer type. + pub bit_width: usize, + /// Whether the integer type is signed. 
+ pub signed: bool, +} + +/// Compile-time type information about floating-point types. +#[derive(Debug)] +#[non_exhaustive] +#[unstable(feature = "type_info", issue = "146922")] +pub struct Float { + /// The bit width of the floating-point type. + pub bit_width: usize, +} + +/// Compile-time type information about string slice types. +#[derive(Debug)] +#[non_exhaustive] +#[unstable(feature = "type_info", issue = "146922")] +pub struct Str { + // No additional information to provide for now. +} diff --git a/library/coretests/tests/mem/type_info.rs b/library/coretests/tests/mem/type_info.rs index b3b8d96d49b00..fc13637a5574c 100644 --- a/library/coretests/tests/mem/type_info.rs +++ b/library/coretests/tests/mem/type_info.rs @@ -38,7 +38,7 @@ fn test_tuples() { assert_tuple_arity::<(u8, u8), 2>(); const { - match Type::of::<(u8, u8)>().kind { + match Type::of::<(i8, u8)>().kind { TypeKind::Tuple(tup) => { let [a, b] = tup.fields else { unreachable!() }; @@ -46,7 +46,10 @@ fn test_tuples() { assert!(b.offset == 1); match (a.ty.info().kind, b.ty.info().kind) { - (TypeKind::Leaf, TypeKind::Leaf) => {} + (TypeKind::Int(a), TypeKind::Int(b)) => { + assert!(a.bit_width == 8 && a.signed); + assert!(b.bit_width == 8 && !b.signed); + } _ => unreachable!(), } } @@ -54,3 +57,41 @@ fn test_tuples() { } } } + +#[test] +fn test_primitives() { + use TypeKind::*; + + let Type { kind: Bool(_ty), size, .. } = (const { Type::of::() }) else { panic!() }; + assert_eq!(size, Some(1)); + + let Type { kind: Char(_ty), size, .. } = (const { Type::of::() }) else { panic!() }; + assert_eq!(size, Some(4)); + + let Type { kind: Int(ty), size, .. } = (const { Type::of::() }) else { panic!() }; + assert_eq!(size, Some(4)); + assert_eq!(ty.bit_width, 32); + assert!(ty.signed); + + let Type { kind: Int(ty), size, .. } = (const { Type::of::() }) else { panic!() }; + assert_eq!(size, Some(size_of::())); + assert_eq!(ty.bit_width, size_of::() * 8); + assert!(ty.signed); + + let Type { kind: Int(ty), size, .. } = (const { Type::of::() }) else { panic!() }; + assert_eq!(size, Some(4)); + assert_eq!(ty.bit_width, 32); + assert!(!ty.signed); + + let Type { kind: Int(ty), size, .. } = (const { Type::of::() }) else { panic!() }; + assert_eq!(size, Some(size_of::())); + assert_eq!(ty.bit_width, size_of::() * 8); + assert!(!ty.signed); + + let Type { kind: Float(ty), size, .. } = (const { Type::of::() }) else { panic!() }; + assert_eq!(size, Some(4)); + assert_eq!(ty.bit_width, 32); + + let Type { kind: Str(_ty), size, .. } = (const { Type::of::() }) else { panic!() }; + assert_eq!(size, None); +} diff --git a/tests/incremental/derive_macro_expansion/auxiliary/derive_nothing.rs b/tests/incremental/derive_macro_expansion/auxiliary/derive_nothing.rs new file mode 100644 index 0000000000000..24957b052856a --- /dev/null +++ b/tests/incremental/derive_macro_expansion/auxiliary/derive_nothing.rs @@ -0,0 +1,15 @@ +extern crate proc_macro; +use proc_macro::TokenStream; + +#[proc_macro_derive(Nothing)] +pub fn derive(_input: TokenStream) -> TokenStream { + return r#" + pub mod nothing_mod { + pub fn nothing() { + eprintln!("nothing"); + } + } + "# + .parse() + .unwrap(); +} diff --git a/tests/incremental/derive_macro_expansion/proc_macro_unchanged.rs b/tests/incremental/derive_macro_expansion/proc_macro_unchanged.rs new file mode 100644 index 0000000000000..64364e31bc456 --- /dev/null +++ b/tests/incremental/derive_macro_expansion/proc_macro_unchanged.rs @@ -0,0 +1,18 @@ +// This test tests that derive proc macro execution is cached. 
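With `TypeKind::Leaf` split into `Bool`, `Char`, `Int`, `Float`, and `Str` above, callers can branch on primitive kinds instead of an opaque leaf. A hedged sketch of how that might be consumed, based only on the shapes visible in this diff's tests (the `type_info` feature is unstable and nightly-only, so the exact API may still change):

```rust
#![feature(type_info)]

use std::mem::type_info::{Type, TypeKind};

/// Classify a type by the primitive kind reported by `Type::of`.
fn describe<T: ?Sized + 'static>() -> &'static str {
    let info = const { Type::of::<T>() };
    match info.kind {
        TypeKind::Bool(_) => "bool",
        TypeKind::Char(_) => "char",
        TypeKind::Int(int) if int.signed => "signed integer",
        TypeKind::Int(_) => "unsigned integer",
        TypeKind::Float(_) => "floating point",
        TypeKind::Str(_) => "string slice",
        _ => "something else",
    }
}

fn main() {
    assert_eq!(describe::<i64>(), "signed integer");
    assert_eq!(describe::<u8>(), "unsigned integer");
    assert_eq!(describe::<f32>(), "floating point");
    assert_eq!(describe::<str>(), "string slice");
    assert_eq!(describe::<(u8, u8)>(), "something else");
}
```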
+ +//@ proc-macro:derive_nothing.rs +//@ revisions:rpass1 rpass2 +//@ compile-flags: -Zquery-dep-graph -Zcache-proc-macros +//@ ignore-backends: gcc + +#![feature(rustc_attrs)] + +#[macro_use] +extern crate derive_nothing; + +#[cfg(any(rpass1, rpass2))] +#[rustc_clean(cfg = "rpass2", loaded_from_disk = "derive_macro_expansion")] +#[derive(Nothing)] +pub struct Foo; + +fn main() {} diff --git a/tests/mir-opt/const_prop/invalid_constant.main.GVN.diff b/tests/mir-opt/const_prop/invalid_constant.main.GVN.diff index 20923d0352cd5..3dc6fc9e3f65b 100644 --- a/tests/mir-opt/const_prop/invalid_constant.main.GVN.diff +++ b/tests/mir-opt/const_prop/invalid_constant.main.GVN.diff @@ -19,7 +19,7 @@ debug _enum_without_variants => const [ZeroSized: Empty]; let _9: main::Str<"���">; scope 4 { - debug _non_utf8_str => const Str::<"���">; + debug _non_utf8_str => const main::Str::<"���">; } } } diff --git a/tests/mir-opt/const_prop/invalid_constant.main.RemoveZsts.diff b/tests/mir-opt/const_prop/invalid_constant.main.RemoveZsts.diff index 6593b329756c2..c16cbaf4c0f27 100644 --- a/tests/mir-opt/const_prop/invalid_constant.main.RemoveZsts.diff +++ b/tests/mir-opt/const_prop/invalid_constant.main.RemoveZsts.diff @@ -21,7 +21,7 @@ let _9: main::Str<"���">; scope 4 { - debug _non_utf8_str => _9; -+ debug _non_utf8_str => const Str::<"���">; ++ debug _non_utf8_str => const main::Str::<"���">; } } } diff --git a/tests/ui/lint/recommend-literal.rs b/tests/ui/lint/recommend-literal.rs index 45f9ae0a7bdfb..be074c1114532 100644 --- a/tests/ui/lint/recommend-literal.rs +++ b/tests/ui/lint/recommend-literal.rs @@ -1,3 +1,5 @@ +//~vv HELP consider importing this struct + type Real = double; //~^ ERROR cannot find type `double` in this scope //~| HELP perhaps you intended to use this type diff --git a/tests/ui/lint/recommend-literal.stderr b/tests/ui/lint/recommend-literal.stderr index 6b6dd134e1d29..01e993df17a98 100644 --- a/tests/ui/lint/recommend-literal.stderr +++ b/tests/ui/lint/recommend-literal.stderr @@ -1,5 +1,5 @@ error[E0425]: cannot find type `double` in this scope - --> $DIR/recommend-literal.rs:1:13 + --> $DIR/recommend-literal.rs:3:13 | LL | type Real = double; | ^^^^^^ @@ -8,7 +8,7 @@ LL | type Real = double; | help: perhaps you intended to use this type: `f64` error[E0425]: cannot find type `long` in this scope - --> $DIR/recommend-literal.rs:7:12 + --> $DIR/recommend-literal.rs:9:12 | LL | let y: long = 74802374902374923; | ^^^^ @@ -17,7 +17,7 @@ LL | let y: long = 74802374902374923; | help: perhaps you intended to use this type: `i64` error[E0425]: cannot find type `Boolean` in this scope - --> $DIR/recommend-literal.rs:10:13 + --> $DIR/recommend-literal.rs:12:13 | LL | let v1: Boolean = true; | ^^^^^^^ @@ -26,7 +26,7 @@ LL | let v1: Boolean = true; | help: perhaps you intended to use this type: `bool` error[E0425]: cannot find type `Bool` in this scope - --> $DIR/recommend-literal.rs:13:13 + --> $DIR/recommend-literal.rs:15:13 | LL | let v2: Bool = true; | ^^^^ @@ -41,9 +41,13 @@ help: perhaps you intended to use this type LL - let v2: Bool = true; LL + let v2: bool = true; | +help: consider importing this struct + | +LL + use std::mem::type_info::Bool; + | error[E0425]: cannot find type `boolean` in this scope - --> $DIR/recommend-literal.rs:19:9 + --> $DIR/recommend-literal.rs:21:9 | LL | fn z(a: boolean) { | ^^^^^^^ @@ -52,7 +56,7 @@ LL | fn z(a: boolean) { | help: perhaps you intended to use this type: `bool` error[E0425]: cannot find type `byte` in this scope - --> 
$DIR/recommend-literal.rs:24:11 + --> $DIR/recommend-literal.rs:26:11 | LL | fn a() -> byte { | ^^^^ @@ -61,7 +65,7 @@ LL | fn a() -> byte { | help: perhaps you intended to use this type: `u8` error[E0425]: cannot find type `float` in this scope - --> $DIR/recommend-literal.rs:31:12 + --> $DIR/recommend-literal.rs:33:12 | LL | width: float, | ^^^^^ @@ -70,7 +74,7 @@ LL | width: float, | help: perhaps you intended to use this type: `f32` error[E0425]: cannot find type `int` in this scope - --> $DIR/recommend-literal.rs:34:19 + --> $DIR/recommend-literal.rs:36:19 | LL | depth: Option, | ^^^ not found in this scope @@ -86,7 +90,7 @@ LL | struct Data { | +++++ error[E0425]: cannot find type `short` in this scope - --> $DIR/recommend-literal.rs:40:16 + --> $DIR/recommend-literal.rs:42:16 | LL | impl Stuff for short {} | ^^^^^ diff --git a/tests/ui/reflection/dump.bit32.run.stdout b/tests/ui/reflection/dump.bit32.run.stdout new file mode 100644 index 0000000000000..483efdbbd12ab --- /dev/null +++ b/tests/ui/reflection/dump.bit32.run.stdout @@ -0,0 +1,184 @@ +Type { + kind: Tuple( + Tuple { + fields: [ + Field { + ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), + offset: 0, + }, + Field { + ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), + offset: 1, + }, + Field { + ty: TypeId(0x41223169ff28813ba79b7268a2a968d9), + offset: 2, + }, + ], + }, + ), + size: Some( + 2, + ), +} +Type { + kind: Array( + Array { + element_ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), + len: 2, + }, + ), + size: Some( + 2, + ), +} +Type { + kind: Int( + Int { + bit_width: 8, + signed: true, + }, + ), + size: Some( + 1, + ), +} +Type { + kind: Int( + Int { + bit_width: 32, + signed: true, + }, + ), + size: Some( + 4, + ), +} +Type { + kind: Int( + Int { + bit_width: 64, + signed: true, + }, + ), + size: Some( + 8, + ), +} +Type { + kind: Int( + Int { + bit_width: 128, + signed: true, + }, + ), + size: Some( + 16, + ), +} +Type { + kind: Int( + Int { + bit_width: 32, + signed: true, + }, + ), + size: Some( + 4, + ), +} +Type { + kind: Int( + Int { + bit_width: 8, + signed: false, + }, + ), + size: Some( + 1, + ), +} +Type { + kind: Int( + Int { + bit_width: 32, + signed: false, + }, + ), + size: Some( + 4, + ), +} +Type { + kind: Int( + Int { + bit_width: 64, + signed: false, + }, + ), + size: Some( + 8, + ), +} +Type { + kind: Int( + Int { + bit_width: 128, + signed: false, + }, + ), + size: Some( + 16, + ), +} +Type { + kind: Int( + Int { + bit_width: 32, + signed: false, + }, + ), + size: Some( + 4, + ), +} +Type { + kind: Other, + size: Some( + 4, + ), +} +Type { + kind: Other, + size: Some( + 12, + ), +} +Type { + kind: Other, + size: Some( + 8, + ), +} +Type { + kind: Other, + size: Some( + 8, + ), +} +Type { + kind: Other, + size: Some( + 8, + ), +} +Type { + kind: Str( + Str, + ), + size: None, +} +Type { + kind: Other, + size: None, +} diff --git a/tests/ui/reflection/dump.bit64.run.stdout b/tests/ui/reflection/dump.bit64.run.stdout new file mode 100644 index 0000000000000..681e81b71d56b --- /dev/null +++ b/tests/ui/reflection/dump.bit64.run.stdout @@ -0,0 +1,184 @@ +Type { + kind: Tuple( + Tuple { + fields: [ + Field { + ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), + offset: 0, + }, + Field { + ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), + offset: 1, + }, + Field { + ty: TypeId(0x41223169ff28813ba79b7268a2a968d9), + offset: 2, + }, + ], + }, + ), + size: Some( + 2, + ), +} +Type { + kind: Array( + Array { + element_ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), + len: 2, + }, + ), + size: Some( + 2, + 
), +} +Type { + kind: Int( + Int { + bit_width: 8, + signed: true, + }, + ), + size: Some( + 1, + ), +} +Type { + kind: Int( + Int { + bit_width: 32, + signed: true, + }, + ), + size: Some( + 4, + ), +} +Type { + kind: Int( + Int { + bit_width: 64, + signed: true, + }, + ), + size: Some( + 8, + ), +} +Type { + kind: Int( + Int { + bit_width: 128, + signed: true, + }, + ), + size: Some( + 16, + ), +} +Type { + kind: Int( + Int { + bit_width: 64, + signed: true, + }, + ), + size: Some( + 8, + ), +} +Type { + kind: Int( + Int { + bit_width: 8, + signed: false, + }, + ), + size: Some( + 1, + ), +} +Type { + kind: Int( + Int { + bit_width: 32, + signed: false, + }, + ), + size: Some( + 4, + ), +} +Type { + kind: Int( + Int { + bit_width: 64, + signed: false, + }, + ), + size: Some( + 8, + ), +} +Type { + kind: Int( + Int { + bit_width: 128, + signed: false, + }, + ), + size: Some( + 16, + ), +} +Type { + kind: Int( + Int { + bit_width: 64, + signed: false, + }, + ), + size: Some( + 8, + ), +} +Type { + kind: Other, + size: Some( + 4, + ), +} +Type { + kind: Other, + size: Some( + 24, + ), +} +Type { + kind: Other, + size: Some( + 16, + ), +} +Type { + kind: Other, + size: Some( + 16, + ), +} +Type { + kind: Other, + size: Some( + 16, + ), +} +Type { + kind: Str( + Str, + ), + size: None, +} +Type { + kind: Other, + size: None, +} diff --git a/tests/ui/reflection/dump.rs b/tests/ui/reflection/dump.rs index 0adcda481b5a8..584b7c8ed4ae8 100644 --- a/tests/ui/reflection/dump.rs +++ b/tests/ui/reflection/dump.rs @@ -1,6 +1,11 @@ -#![feature(type_info)] +// Some types whose length depends on the target pointer length will be dumped. +//@ revisions: bit32 bit64 +//@[bit32] only-32bit +//@[bit64] only-64bit //@ run-pass //@ check-run-results + +#![feature(type_info)] #![allow(dead_code)] use std::mem::type_info::Type; @@ -20,14 +25,20 @@ struct Unsized { s: str, } +macro_rules! dump_types { + ($($ty:ty),+ $(,)?) => { + $(println!("{:#?}", const { Type::of::<$ty>() });)+ + }; +} + fn main() { - println!("{:#?}", const { Type::of::<(u8, u8, ())>() }.kind); - println!("{:#?}", const { Type::of::<[u8; 2]>() }.kind); - println!("{:#?}", const { Type::of::() }.kind); - println!("{:#?}", const { Type::of::() }.kind); - println!("{:#?}", const { Type::of::<&Unsized>() }.kind); - println!("{:#?}", const { Type::of::<&str>() }.kind); - println!("{:#?}", const { Type::of::<&[u8]>() }.kind); - println!("{:#?}", const { Type::of::() }.kind); - println!("{:#?}", const { Type::of::<[u8]>() }.kind); + dump_types! 
{ + (u8, u8, ()), + [u8; 2], + i8, i32, i64, i128, isize, + u8, u32, u64, u128, usize, + Foo, Bar, + &Unsized, &str, &[u8], + str, [u8], + } } diff --git a/tests/ui/reflection/dump.run.stdout b/tests/ui/reflection/dump.run.stdout deleted file mode 100644 index dfd128664e2d9..0000000000000 --- a/tests/ui/reflection/dump.run.stdout +++ /dev/null @@ -1,31 +0,0 @@ -Tuple( - Tuple { - fields: [ - Field { - ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), - offset: 0, - }, - Field { - ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), - offset: 1, - }, - Field { - ty: TypeId(0x41223169ff28813ba79b7268a2a968d9), - offset: 2, - }, - ], - }, -) -Array( - Array { - element_ty: TypeId(0x0596b48cc04376e64d5c788c2aa46bdb), - len: 2, - }, -) -Other -Other -Other -Other -Other -Other -Other diff --git a/tests/ui/resolve/query-cycle-issue-124901.rs b/tests/ui/resolve/query-cycle-issue-124901.rs new file mode 100644 index 0000000000000..ccaee0e6bc6fd --- /dev/null +++ b/tests/ui/resolve/query-cycle-issue-124901.rs @@ -0,0 +1,24 @@ +//~ ERROR: cycle detected when getting HIR ID of `Default` +trait Default { + type Id; + + fn intu(&self) -> &Self::Id; +} + +impl, U: Copy> Default for U { + default type Id = T; + fn intu(&self) -> &Self::Id { + self + } +} + +fn specialization(t: T) -> U { + *t.intu() +} + +use std::num::NonZero; + +fn main() { + let assert_eq = NonZero::>>(0); + assert_eq!(specialization, None); +} diff --git a/tests/ui/resolve/query-cycle-issue-124901.stderr b/tests/ui/resolve/query-cycle-issue-124901.stderr new file mode 100644 index 0000000000000..3679925c6db42 --- /dev/null +++ b/tests/ui/resolve/query-cycle-issue-124901.stderr @@ -0,0 +1,12 @@ +error[E0391]: cycle detected when getting HIR ID of `Default` + | + = note: ...which requires getting the crate HIR... + = note: ...which requires perform lints prior to AST lowering... + = note: ...which requires looking up span for `Default`... + = note: ...which again requires getting HIR ID of `Default`, completing the cycle + = note: cycle used when getting the resolver for lowering + = note: see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0391`. 
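The reflection dump expectations above also pin down `Type::size`: sized types report `Some(size_of::<T>())`, while unsized types such as `str` and `[u8]` report `None`. A small hedged check in the same spirit as `test_primitives`, relying only on what the diff's own tests exercise (nightly `type_info` feature):

```rust
#![feature(type_info)]

use std::mem::type_info::Type;

fn main() {
    // Sized types know their byte size at compile time...
    assert_eq!(const { Type::of::<u32>() }.size, Some(4));
    assert_eq!(const { Type::of::<[u8; 2]>() }.size, Some(2));
    // ...while unsized types report no statically known size.
    assert_eq!(const { Type::of::<str>() }.size, None);
    assert_eq!(const { Type::of::<[u8]>() }.size, None);
}
```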
diff --git a/tests/ui/suggestions/semi-suggestion-when-stmt-and-expr-span-equal.stderr b/tests/ui/suggestions/semi-suggestion-when-stmt-and-expr-span-equal.stderr index aa96159aacf57..9f34d27478814 100644 --- a/tests/ui/suggestions/semi-suggestion-when-stmt-and-expr-span-equal.stderr +++ b/tests/ui/suggestions/semi-suggestion-when-stmt-and-expr-span-equal.stderr @@ -21,14 +21,14 @@ LL | .collect::(); | = help: the trait `FromIterator<()>` is not implemented for `String` = help: the following other types implement trait `FromIterator`: - `String` implements `FromIterator<&Char>` `String` implements `FromIterator<&char>` + `String` implements `FromIterator<&std::ascii::Char>` `String` implements `FromIterator<&str>` `String` implements `FromIterator>` - `String` implements `FromIterator` `String` implements `FromIterator>` `String` implements `FromIterator` `String` implements `FromIterator` + `String` implements `FromIterator` note: the method call chain might not have had the expected associated types --> $DIR/semi-suggestion-when-stmt-and-expr-span-equal.rs:20:10 | diff --git a/tests/ui/traits/issue-77982.stderr b/tests/ui/traits/issue-77982.stderr index 4bc24e81215a0..b1baabc4394b0 100644 --- a/tests/ui/traits/issue-77982.stderr +++ b/tests/ui/traits/issue-77982.stderr @@ -44,10 +44,10 @@ LL | let ips: Vec<_> = (0..100_000).map(|_| u32::from(0u32.into())).collect( | type must be known at this point | = note: multiple `impl`s satisfying `u32: From<_>` found in the `core` crate: - - impl From for u32; - impl From for u32; - impl From for u32; - impl From for u32; + - impl From for u32; - impl From for u32; - impl From for u32; help: try using a fully qualified path to specify the expected types diff --git a/tests/ui/try-trait/bad-interconversion.stderr b/tests/ui/try-trait/bad-interconversion.stderr index 61fecaf89917c..f8c0deba99ba6 100644 --- a/tests/ui/try-trait/bad-interconversion.stderr +++ b/tests/ui/try-trait/bad-interconversion.stderr @@ -18,7 +18,7 @@ help: the following other types implement trait `From` = note: in this macro invocation --> $SRC_DIR/core/src/ascii/ascii_char.rs:LL:COL | - = note: `u8` implements `From` + = note: `u8` implements `From` ::: $SRC_DIR/core/src/ascii/ascii_char.rs:LL:COL | = note: in this macro invocation diff --git a/triagebot.toml b/triagebot.toml index 456fd696af435..e51622f1e5a93 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -1458,7 +1458,6 @@ compiler = [ "@jieyouxu", "@jdonszelmann", "@JonathanBrouwer", - "@lcnr", "@madsmtm", "@mati865", "@Nadrieril",