diff --git a/compiler/rustc_error_codes/src/error_codes/E0570.md b/compiler/rustc_error_codes/src/error_codes/E0570.md index 355e71ffb4329..e8a7200c1d052 100644 --- a/compiler/rustc_error_codes/src/error_codes/E0570.md +++ b/compiler/rustc_error_codes/src/error_codes/E0570.md @@ -1,7 +1,7 @@ The requested ABI is unsupported by the current target. -The rust compiler maintains for each target a list of unsupported ABIs on -that target. If an ABI is present in such a list this usually means that the +The Rust compiler maintains a list of unsupported ABIs for each target. +If an ABI is present in such a list, this usually means that the target / ABI combination is currently unsupported by llvm. If necessary, you can circumvent this check using custom target specifications. diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index c7d2e273a76fc..a8a305310238e 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -86,8 +86,7 @@ pub fn features(sess: &Session, krate_attrs: &[Attribute], crate_name: Symbol) - if let Some(f) = REMOVED_LANG_FEATURES.iter().find(|f| name == f.feature.name) { let pull_note = if let Some(pull) = f.pull { format!( - "; see for more information", - pull + "; see for more information", ) } else { "".to_owned() @@ -123,7 +122,7 @@ pub fn features(sess: &Session, krate_attrs: &[Attribute], crate_name: Symbol) - // If the enabled feature is unstable, record it. if UNSTABLE_LANG_FEATURES.iter().find(|f| name == f.name).is_some() { - // When the ICE comes a standard library crate, there's a chance that the person + // When the ICE comes from a standard library crate, there's a chance that the person // hitting the ICE may be using -Zbuild-std or similar with an untested target. // The bug is probably in the standard library and not the compiler in that case, // but that doesn't really matter - we want a bug report. diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 36be4a0378060..ec1de5fe56de0 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -64,8 +64,6 @@ pub struct EnabledLibFeature { } impl Features { - /// `since` should be set for stable features that are nevertheless enabled with a `#[feature]` - /// attribute, indicating since when they are stable. pub fn set_enabled_lang_feature(&mut self, lang_feat: EnabledLangFeature) { self.enabled_lang_features.push(lang_feat); self.enabled_features.insert(lang_feat.gate_name); @@ -779,8 +777,9 @@ impl Features { } } -/// Some features are not allowed to be used together at the same time, if -/// the two are present, produce an error. +/// Some features are not allowed to be used together at the same time. +/// +/// If the two are present, produce an error. pub const INCOMPATIBLE_FEATURES: &[(Symbol, Symbol)] = &[ // Experimental match ergonomics rulesets are incompatible with each other, to simplify the // boolean logic required to tell which typing rules to use. diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 9e80ddbd551f9..2bec9273d38f6 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -3659,10 +3659,10 @@ declare_lint! { /// `stdcall`, `fastcall`, and `cdecl` calling conventions (or their unwind /// variants) on targets that cannot meaningfully be supported for the requested target. 
/// - /// For example `stdcall` does not make much sense for a x86_64 or, more apparently, powerpc + /// For example, `stdcall` does not make much sense for a x86_64 or, more apparently, powerpc /// code, because this calling convention was never specified for those targets. /// - /// Historically MSVC toolchains have fallen back to the regular C calling convention for + /// Historically, MSVC toolchains have fallen back to the regular C calling convention for /// targets other than x86, but Rust doesn't really see a similar need to introduce a similar /// hack across many more targets. /// @@ -3689,7 +3689,7 @@ declare_lint! { /// /// ### Explanation /// - /// On most of the targets the behaviour of `stdcall` and similar calling conventions is not + /// On most of the targets, the behaviour of `stdcall` and similar calling conventions is not /// defined at all, but was previously accepted due to a bug in the implementation of the /// compiler. pub UNSUPPORTED_CALLING_CONVENTIONS, diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index 0c06d1be9a3fd..f969bb734d6d6 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -61,6 +61,8 @@ pub fn walk_native_lib_search_dirs( // library directory instead of the self-contained directories. // Sanitizer libraries have the same issue and are also linked by name on Apple targets. // The targets here should be in sync with `copy_third_party_objects` in bootstrap. + // Finally there is shared LLVM library, which unlike compiler libraries, is linked by the name, + // therefore requiring the search path for the linker. // FIXME: implement `-Clink-self-contained=+/-unwind,+/-sanitizers`, move the shipped libunwind // and sanitizers to self-contained directory, and stop adding this search path. // FIXME: On AIX this also has the side-effect of making the list of library search paths @@ -71,6 +73,9 @@ pub fn walk_native_lib_search_dirs( || sess.target.os == Os::Fuchsia || sess.target.is_like_aix || sess.target.is_like_darwin && !sess.sanitizers().is_empty() + || sess.target.os == Os::Windows + && sess.target.env == Env::Gnu + && sess.target.abi == Abi::Llvm { f(&sess.target_tlib_path.dir, false)?; } diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs index 63fd61cb257b2..434c8caae7dc0 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/suggestions.rs @@ -5608,15 +5608,17 @@ pub(super) fn get_explanation_based_on_obligation<'tcx>( None => String::new(), }; if let ty::PredicatePolarity::Positive = trait_predicate.polarity() { + // If the trait in question is unstable, mention that fact in the diagnostic. + // But if we're building with `-Zforce-unstable-if-unmarked` then _any_ trait + // not explicitly marked stable is considered unstable, so the extra text is + // unhelpful noise. See . 
+ let mention_unstable = !tcx.sess.opts.unstable_opts.force_unstable_if_unmarked + && try { tcx.lookup_stability(trait_predicate.def_id())?.level.is_stable() } + == Some(false); + let unstable = if mention_unstable { "nightly-only, unstable " } else { "" }; + format!( - "{pre_message}the {}trait `{}` is not implemented for{desc} `{}`", - if tcx.lookup_stability(trait_predicate.def_id()).map(|s| s.level.is_stable()) - == Some(false) - { - "nightly-only, unstable " - } else { - "" - }, + "{pre_message}the {unstable}trait `{}` is not implemented for{desc} `{}`", trait_predicate.print_modifiers_and_trait_path(), tcx.short_string(trait_predicate.self_ty().skip_binder(), long_ty_path), ) diff --git a/compiler/rustc_type_ir/src/predicate.rs b/compiler/rustc_type_ir/src/predicate.rs index 42d204a8b0941..ab4677fa0a5fb 100644 --- a/compiler/rustc_type_ir/src/predicate.rs +++ b/compiler/rustc_type_ir/src/predicate.rs @@ -42,7 +42,9 @@ where } } -/// A complete reference to a trait. These take numerous guises in syntax, +/// A complete reference to a trait. +/// +/// These take numerous guises in syntax, /// but perhaps the most recognizable form is in a where-clause: /// ```ignore (illustrative) /// T: Foo @@ -241,7 +243,9 @@ impl ImplPolarity { } } -/// Polarity for a trait predicate. May either be negative or positive. +/// Polarity for a trait predicate. +/// +/// May either be negative or positive. /// Distinguished from [`ImplPolarity`] since we never compute goals with /// "reservation" level. #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] @@ -327,6 +331,7 @@ impl ty::Binder> { } /// An existential reference to a trait, where `Self` is erased. +/// /// For example, the trait object `Trait<'a, 'b, X, Y>` is: /// ```ignore (illustrative) /// exists T. T: Trait<'a, 'b, X, Y> @@ -442,6 +447,7 @@ impl ExistentialProjection { } /// Extracts the underlying existential trait reference from this projection. + /// /// For example, if this is a projection of `exists T. ::Item == X`, /// then this function would return an `exists T. T: Iterator` existential trait /// reference. @@ -493,14 +499,17 @@ impl ty::Binder> { #[cfg_attr(feature = "nightly", derive(Encodable, Decodable, HashStable_NoContext))] pub enum AliasTermKind { /// A projection `::AssocType`. + /// /// Can get normalized away if monomorphic enough. ProjectionTy, /// An associated type in an inherent `impl` InherentTy, /// An opaque type (usually from `impl Trait` in type aliases or function return types) + /// /// Can only be normalized away in PostAnalysis mode or its defining scope. OpaqueTy, /// A free type alias that actually checks its trait bounds. + /// /// Currently only used if the type alias references opaque types. /// Can always be normalized away. FreeTy, diff --git a/compiler/rustc_type_ir/src/ty_kind.rs b/compiler/rustc_type_ir/src/ty_kind.rs index 7cb71387e8680..983d8f0820b6b 100644 --- a/compiler/rustc_type_ir/src/ty_kind.rs +++ b/compiler/rustc_type_ir/src/ty_kind.rs @@ -29,14 +29,17 @@ mod closure; )] pub enum AliasTyKind { /// A projection `::AssocType`. + /// /// Can get normalized away if monomorphic enough. Projection, /// An associated type in an inherent `impl` Inherent, /// An opaque type (usually from `impl Trait` in type aliases or function return types) + /// /// Can only be normalized away in PostAnalysis mode or its defining scope. Opaque, /// A type alias that actually checks its trait bounds. + /// /// Currently only used if the type alias references opaque types. 
/// Can always be normalized away. Free, @@ -99,7 +102,9 @@ pub enum TyKind { /// An array with the given length. Written as `[T; N]`. Array(I::Ty, I::Const), - /// A pattern newtype. Takes any type and restricts its valid values to its pattern. + /// A pattern newtype. + /// + /// Takes any type and restricts its valid values to its pattern. /// This will also change the layout to take advantage of this restriction. /// Only `Copy` and `Clone` will automatically get implemented for pattern types. /// Auto-traits treat this as if it were an aggregate with a single nested type. @@ -116,8 +121,9 @@ pub enum TyKind { /// `&'a mut T` or `&'a T`. Ref(I::Region, I::Ty, Mutability), - /// The anonymous type of a function declaration/definition. Each - /// function has a unique type. + /// The anonymous type of a function declaration/definition. + /// + /// Each function has a unique type. /// /// For the function `fn foo() -> i32 { 3 }` this type would be /// shown to the user as `fn() -> i32 {foo}`. @@ -129,7 +135,9 @@ pub enum TyKind { /// ``` FnDef(I::FunctionId, I::GenericArgs), - /// A pointer to a function. Written as `fn() -> i32`. + /// A pointer to a function. + /// + /// Written as `fn() -> i32`. /// /// Note that both functions and closures start out as either /// [FnDef] or [Closure] which can be then be coerced to this variant. @@ -179,6 +187,7 @@ pub enum TyKind { Coroutine(I::CoroutineId, I::GenericArgs), /// A type representing the types stored inside a coroutine. + /// /// This should only appear as part of the `CoroutineArgs`. /// /// Unlike upvars, the witness can reference lifetimes from @@ -210,6 +219,7 @@ pub enum TyKind { Tuple(I::Tys), /// A projection, opaque type, free type alias, or inherent associated type. + /// /// All of these types are represented as pairs of def-id and args, and can /// be normalized, so they are grouped conceptually. Alias(AliasTyKind, AliasTy), @@ -253,8 +263,9 @@ pub enum TyKind { /// inside of the type. Infer(InferTy), - /// A placeholder for a type which could not be computed; this is - /// propagated to avoid useless error messages. + /// A placeholder for a type which could not be computed. + /// + /// This is propagated to avoid useless error messages. Error(I::ErrorGuaranteed), } @@ -282,7 +293,9 @@ impl TyKind { } /// Returns `true` when the outermost type cannot be further normalized, - /// resolved, or instantiated. This includes all primitive types, but also + /// resolved, or instantiated. + /// + /// This includes all primitive types, but also /// things like ADTs and trait objects, since even if their arguments or /// nested types may be further simplified, the outermost [`ty::TyKind`] or /// type constructor remains the same. @@ -481,6 +494,7 @@ impl AliasTy { } /// Extracts the underlying trait reference and own args from this projection. + /// /// For example, if this is a projection of `::Item<'a>`, /// then this function would return a `T: StreamingIterator` trait reference and /// `['a]` as the own args. @@ -490,6 +504,7 @@ impl AliasTy { } /// Extracts the underlying trait reference from this projection. + /// /// For example, if this is a projection of `::Item`, /// then this function would return a `T: Iterator` trait reference. /// @@ -593,8 +608,9 @@ pub enum InferTy { FloatVar(FloatVid), /// A [`FreshTy`][Self::FreshTy] is one that is generated as a replacement - /// for an unbound type variable. This is convenient for caching etc. See - /// `TypeFreshener` for more details. + /// for an unbound type variable. 
+ /// + /// This is convenient for caching etc. See `TypeFreshener` for more details. /// /// Compare with [`TyVar`][Self::TyVar]. FreshTy(u32), diff --git a/rust-bors.toml b/rust-bors.toml index 9b632481c7c3f..0efbb5e8fc61c 100644 --- a/rust-bors.toml +++ b/rust-bors.toml @@ -25,7 +25,7 @@ labels_blocking_approval = [ "S-waiting-on-t-rustdoc-frontend", "S-waiting-on-t-clippy", # PR manually set to blocked - "S-blocked" + "S-blocked", ] # If CI runs quicker than this duration, consider it to be a failure @@ -41,7 +41,7 @@ approved = [ "-S-waiting-on-author", "-S-waiting-on-crater", "-S-waiting-on-review", - "-S-waiting-on-team" + "-S-waiting-on-team", ] unapproved = [ "+S-waiting-on-author", @@ -49,16 +49,16 @@ unapproved = [ "-S-waiting-on-bors", "-S-waiting-on-crater", "-S-waiting-on-review", - "-S-waiting-on-team" + "-S-waiting-on-team", ] try_failed = [ "+S-waiting-on-author", "-S-waiting-on-review", - "-S-waiting-on-crater" + "-S-waiting-on-crater", ] auto_build_succeeded = [ "+merged-by-bors", - "-S-waiting-on-bors" + "-S-waiting-on-bors", ] auto_build_failed = [ "+S-waiting-on-review", @@ -66,5 +66,5 @@ auto_build_failed = [ "-S-waiting-on-bors", "-S-waiting-on-author", "-S-waiting-on-crater", - "-S-waiting-on-team" + "-S-waiting-on-team", ] diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index eee960027a9f9..75deb695a4823 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -2557,7 +2557,7 @@ pub fn maybe_install_llvm_target(builder: &Builder<'_>, target: TargetSelection, ), )] pub fn maybe_install_llvm_runtime(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) { - let dst_libdir = sysroot.join(builder.sysroot_libdir_relative(Compiler::new(1, target))); + let dst_libdir = sysroot.join(builder.libdir_relative(Compiler::new(1, target))); // We do not need to copy LLVM files into the sysroot if it is not // dynamically linked; it is already included into librustc_llvm // statically. 
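As an aside on the `ty_kind.rs` doc changes above: the comments distinguish `FnDef` (the anonymous, zero-sized type unique to each function item) from `FnPtr`, and note that a `FnDef` can be coerced to a function pointer. A minimal illustration of that coercion, using only stable Rust:

```rust
fn foo() -> i32 {
    3
}

fn main() {
    // `foo` by itself has the unique `FnDef` type shown to users as
    // `fn() -> i32 {foo}`; the type annotation forces a coercion to the
    // function-pointer type `fn() -> i32` (the `FnPtr` variant).
    let ptr: fn() -> i32 = foo;
    assert_eq!(ptr(), 3);
}
```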
diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index 2300904f22c10..88b6855784979 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -712,6 +712,7 @@ auto: --target=aarch64-pc-windows-gnullvm,i686-pc-windows-gnullvm --enable-full-tools --enable-profiler + --enable-llvm-link-shared DIST_REQUIRE_ALL_TOOLS: 1 CODEGEN_BACKENDS: llvm,cranelift CC_i686_pc_windows_gnullvm: i686-w64-mingw32-clang @@ -724,6 +725,7 @@ auto: --build=x86_64-pc-windows-gnullvm --enable-full-tools --enable-profiler + --enable-llvm-link-shared DIST_REQUIRE_ALL_TOOLS: 1 CODEGEN_BACKENDS: llvm,cranelift <<: *job-windows diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index 57428b6f481e8..c0f0167aec799 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -59,8 +59,8 @@ use rustc_hir::def_id::{DefId, DefIdSet}; use rustc_hir::{ConstStability, Mutability, RustcVersion, StabilityLevel, StableSince}; use rustc_middle::ty::print::PrintTraitRefExt; use rustc_middle::ty::{self, TyCtxt}; +use rustc_span::DUMMY_SP; use rustc_span::symbol::{Symbol, sym}; -use rustc_span::{BytePos, DUMMY_SP, FileName}; use tracing::{debug, info}; pub(crate) use self::context::*; @@ -2779,46 +2779,12 @@ fn render_call_locations( let needs_expansion = line_max - line_min > NUM_VISIBLE_LINES; let locations_encoded = serde_json::to_string(&line_ranges).unwrap(); - let source_map = tcx.sess.source_map(); - let files = source_map.files(); - let local = tcx.sess.local_crate_source_file().unwrap(); - - let get_file_start_pos = || { - let crate_src = local.clone().into_local_path()?; - let abs_crate_src = crate_src.canonicalize().ok()?; - let crate_root = abs_crate_src.parent()?.parent()?; - let rel_path = path.strip_prefix(crate_root).ok()?; - files - .iter() - .find(|file| match &file.name { - FileName::Real(real) => real.local_path().map_or(false, |p| p == rel_path), - _ => false, - }) - .map(|file| file.start_pos) - }; - - // Look for the example file in the source map if it exists, otherwise - // return a span to the local crate's source file - let Some(file_span) = get_file_start_pos() - .or_else(|| { - files - .iter() - .find(|file| match &file.name { - FileName::Real(file_name) => file_name == &local, - _ => false, - }) - .map(|file| file.start_pos) - }) - .map(|start_pos| { - rustc_span::Span::with_root_ctxt( - start_pos + BytePos(byte_min), - start_pos + BytePos(byte_max), - ) - }) - else { - // if the fallback span can't be built, don't render the code for this example - return false; - }; + // For scraped examples, we don't need a real span from the SourceMap. + // The URL is already provided in ScrapedInfo, and sources::print_src + // will use that directly. We use DUMMY_SP as a placeholder. + // Note: DUMMY_SP is safe here because href_from_span won't be called + // for scraped examples. 
+ let file_span = rustc_span::DUMMY_SP; let mut decoration_info = FxIndexMap::default(); decoration_info.insert("highlight focus", vec![byte_ranges.remove(0)]); diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs index 89fd78979839b..dda9b7c55351c 100644 --- a/src/librustdoc/html/sources.rs +++ b/src/librustdoc/html/sources.rs @@ -344,9 +344,15 @@ pub(crate) fn print_src( lines += line_info.start_line as usize; } let code = fmt::from_fn(move |fmt| { - let current_href = context - .href_from_span(clean::Span::new(file_span), false) - .expect("only local crates should have sources emitted"); + // For scraped examples, use the URL from ScrapedInfo directly. + // For regular sources, derive it from the span. + let current_href = if let SourceContext::Embedded(info) = source_context { + info.url.to_string() + } else { + context + .href_from_span(clean::Span::new(file_span), false) + .expect("only local crates should have sources emitted") + }; highlight::write_code( fmt, s, diff --git a/src/tools/miri/CONTRIBUTING.md b/src/tools/miri/CONTRIBUTING.md index 1995300c5bcbb..f9cb60c66d916 100644 --- a/src/tools/miri/CONTRIBUTING.md +++ b/src/tools/miri/CONTRIBUTING.md @@ -66,6 +66,23 @@ process for such contributions: This process is largely informal, and its primary goal is to more clearly communicate expectations. Please get in touch with us if you have any questions! +## Scope of Miri shims + +Miri has "shims" to implement functionality that is usually implemented in C libraries which are +invoked from Rust code, such as opening files or spawning threads, as well as for +CPU-vendor-provided SIMD intrinsics. However, the set of C functions that Rust code invokes this way +is enormous, and for obvious reasons we have no intention of implementing every C API ever written +in Miri. + +At the moment, the general guideline for "could this function have a shim in Miri" is: we will +generally only add shims for functions that can be implemented in a portable way using just what is +provided by the Rust standard library. The function should also be reasonably widely-used in Rust +code to justify the review and maintenance effort (i.e. the easier the function is to implement, the +lower the barrier). Other than that, we might make exceptions for certain cases if (a) there is a +good case for why Miri should support those APIs, and (b) robust and widely-used portable libraries +exist in the Rust ecosystem. We will generally not add shims to Miri that would require Miri to +directly interact with platform-specific APIs (such as `libc` or `windows-sys`). 
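To make the "implementable portably using just the Rust standard library" criterion from the section above concrete, here is a minimal sketch. This is not actual Miri shim code (real shims go through Miri's interpreter machinery); it only illustrates the kind of libc-style call that qualifies, with `isatty_like` as a hypothetical stand-in:

```rust
use std::io::IsTerminal;

/// Hypothetical illustration: a call like libc's `isatty(fd)` can be modelled
/// portably because the standard library already exposes the same check.
/// Functions in this category are good shim candidates; anything that would
/// require calling into `libc` or `windows-sys` directly is not.
fn isatty_like(fd: i32) -> bool {
    match fd {
        0 => std::io::stdin().is_terminal(),
        1 => std::io::stdout().is_terminal(),
        2 => std::io::stderr().is_terminal(),
        // A real shim would report an unsupported-fd error here instead.
        _ => false,
    }
}

fn main() {
    println!("stdout is a terminal: {}", isatty_like(1));
}
```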
+ ## Preparing the build environment Miri heavily relies on internal and unstable rustc interfaces to execute MIR, diff --git a/src/tools/miri/Cargo.lock b/src/tools/miri/Cargo.lock index b314aaafbdf0c..2d332ae98ddaa 100644 --- a/src/tools/miri/Cargo.lock +++ b/src/tools/miri/Cargo.lock @@ -562,9 +562,9 @@ checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" [[package]] name = "git2" -version = "0.20.2" +version = "0.20.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110" +checksum = "7b88256088d75a56f8ecfa070513a775dd9107f6530ef14919dac831af9cfe2b" dependencies = [ "bitflags", "libc", @@ -804,9 +804,9 @@ dependencies = [ [[package]] name = "libgit2-sys" -version = "0.18.2+1.9.1" +version = "0.18.3+1.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c42fe03df2bd3c53a3a9c7317ad91d80c81cd1fb0caec8d7cc4cd2bfa10c222" +checksum = "c9b3acc4b91781bb0b3386669d325163746af5f6e4f73e6d2d630e09a35f3487" dependencies = [ "cc", "libc", diff --git a/src/tools/miri/Cargo.toml b/src/tools/miri/Cargo.toml index e7c90e45eba59..62387848868d5 100644 --- a/src/tools/miri/Cargo.toml +++ b/src/tools/miri/Cargo.toml @@ -39,7 +39,7 @@ serde = { version = "1.0.219", features = ["derive"], optional = true } [target.'cfg(target_os = "linux")'.dependencies] nix = { version = "0.30.1", features = ["mman", "ptrace", "signal"], optional = true } ipc-channel = { version = "0.20.0", optional = true } -capstone = { version = "0.14", optional = true } +capstone = { version = "0.14", features = ["arch_x86", "full"], default-features = false, optional = true} [target.'cfg(all(target_os = "linux", target_pointer_width = "64", target_endian = "little"))'.dependencies] genmc-sys = { path = "./genmc-sys/", version = "0.1.0", optional = true } diff --git a/src/tools/miri/README.md b/src/tools/miri/README.md index 925b85f58766c..f254eb357a454 100644 --- a/src/tools/miri/README.md +++ b/src/tools/miri/README.md @@ -626,6 +626,8 @@ Definite bugs found: * [`ReentrantLock` not correctly dealing with reuse of addresses for TLS storage of different threads](https://github.com/rust-lang/rust/pull/141248) * [Rare Deadlock in the thread (un)parking example code](https://github.com/rust-lang/rust/issues/145816) * [`winit` registering a global constructor with the wrong ABI on Windows](https://github.com/rust-windowing/winit/issues/4435) +* [`VecDeque::splice` confusing physical and logical indices](https://github.com/rust-lang/rust/issues/151758) +* [Data race in `oneshot` channel](https://github.com/faern/oneshot/issues/69) Violations of [Stacked Borrows] found that are likely bugs (but Stacked Borrows is currently just an experiment): diff --git a/src/tools/miri/cargo-miri/src/phases.rs b/src/tools/miri/cargo-miri/src/phases.rs index 0f04397b72d22..567d51c0b31e7 100644 --- a/src/tools/miri/cargo-miri/src/phases.rs +++ b/src/tools/miri/cargo-miri/src/phases.rs @@ -63,6 +63,9 @@ pub fn phase_cargo_miri(mut args: impl Iterator) { "setup" => MiriCommand::Setup, "test" | "t" | "run" | "r" | "nextest" => MiriCommand::Forward(subcommand), "clean" => MiriCommand::Clean, + // For use by the `./miri test` dependency builder. + "build" if env::var_os("MIRI_BUILD_TEST_DEPS").is_some() => + MiriCommand::Forward("build".into()), _ => { // Check for version and help flags. 
if has_arg_flag("--help") || has_arg_flag("-h") { @@ -309,6 +312,7 @@ pub fn phase_rustc(args: impl Iterator, phase: RustcPhase) { // Ask rustc for the filename (since that is target-dependent). let mut rustc = miri_for_host(); // sysroot doesn't matter for this so we just use the host rustc.arg("--print").arg("file-names"); + rustc.arg("-Zunstable-options"); // needed for JSON targets for flag in ["--crate-name", "--crate-type", "--target"] { for val in get_arg_flag_values(flag) { rustc.arg(flag).arg(val); diff --git a/src/tools/miri/cargo-miri/src/setup.rs b/src/tools/miri/cargo-miri/src/setup.rs index c7682093663ed..76e9e62f52001 100644 --- a/src/tools/miri/cargo-miri/src/setup.rs +++ b/src/tools/miri/cargo-miri/src/setup.rs @@ -88,6 +88,11 @@ pub fn setup( }; let cargo_cmd = { let mut command = cargo(); + // Allow JSON targets since users do not have a good way to set this flag otherwise. + if env::var("RUSTC_STAGE").is_err() { + // ^ is a HACK for bootstrap cargo. FIXME(cfg(bootstrap)) remove the hack. + command.arg("-Zjson-target-spec"); + } // Use Miri as rustc to build a libstd compatible with us (and use the right flags). // We set ourselves (`cargo-miri`) instead of Miri directly to be able to patch the flags // for `libpanic_abort` (usually this is done by bootstrap but we have to do it ourselves). diff --git a/src/tools/miri/ci/ci.sh b/src/tools/miri/ci/ci.sh index c8e359cf23851..6c0bceac7731f 100755 --- a/src/tools/miri/ci/ci.sh +++ b/src/tools/miri/ci/ci.sh @@ -129,7 +129,7 @@ function run_tests_minimal { time ./miri test $TARGET_FLAG "$@" # Ensure that a small smoke test of cargo-miri works. - time cargo miri run --manifest-path test-cargo-miri/no-std-smoke/Cargo.toml $TARGET_FLAG + time cargo miri run --manifest-path test-cargo-miri/no-std-smoke/Cargo.toml -Zjson-target-spec $TARGET_FLAG endgroup } @@ -173,7 +173,9 @@ case $HOST_TARGET in # Host MIR_OPT=1 MANY_SEEDS=64 TEST_BENCH=1 CARGO_MIRI_ENV=1 run_tests # Custom target JSON file - TEST_TARGET=tests/x86_64-unknown-kernel.json MIRI_NO_STD=1 run_tests_minimal no_std + TEST_TARGET=tests/x86_64-unknown-kernel.json MIRI_NO_STD=1 MIRIFLAGS="-Zunstable-options" run_tests_minimal no_std + # Not officially supported tier 2 + MANY_SEEDS=16 TEST_TARGET=x86_64-pc-solaris run_tests ;; aarch64-apple-darwin) # Host @@ -184,7 +186,6 @@ case $HOST_TARGET in # Not officially supported tier 2 MANY_SEEDS=16 TEST_TARGET=mips-unknown-linux-gnu run_tests # a 32bit big-endian target, and also a target without 64bit atomics MANY_SEEDS=16 TEST_TARGET=x86_64-unknown-illumos run_tests - MANY_SEEDS=16 TEST_TARGET=x86_64-pc-solaris run_tests ;; i686-pc-windows-msvc) # Host diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 90ba52120ee87..28c3e88535f61 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -d10ac47c20152feb5e99b1c35a2e6830f77c66dc +7bee525095c0872e87c038c412c781b9bbb3f5dc diff --git a/src/tools/miri/src/alloc/isolated_alloc.rs b/src/tools/miri/src/alloc/isolated_alloc.rs index 1745727b16b40..5c4a7230cf585 100644 --- a/src/tools/miri/src/alloc/isolated_alloc.rs +++ b/src/tools/miri/src/alloc/isolated_alloc.rs @@ -66,10 +66,10 @@ impl IsolatedAlloc { // And make sure the align is at least one page let align = std::cmp::max(layout.align(), self.page_size); // pg_count gives us the # of pages needed to satisfy the size. 
For - // align > page_size where align = n * page_size, a sufficently-aligned + // align > page_size where align = n * page_size, a sufficiently-aligned // address must exist somewhere in the range of // some_page_aligned_address..some_page_aligned_address + (n-1) * page_size - // (since if some_page_aligned_address + n * page_size is sufficently aligned, + // (since if some_page_aligned_address + n * page_size is sufficiently aligned, // then so is some_page_aligned_address itself per the definition of n, so we // can avoid using that 1 extra page). // Thus we allocate n-1 extra pages diff --git a/src/tools/miri/src/bin/log/tracing_chrome_instant.rs b/src/tools/miri/src/bin/log/tracing_chrome_instant.rs index b5f00852b82ff..04705b8846d9c 100644 --- a/src/tools/miri/src/bin/log/tracing_chrome_instant.rs +++ b/src/tools/miri/src/bin/log/tracing_chrome_instant.rs @@ -2,7 +2,7 @@ //! . //! A useful resource is also //! , -//! although this file does not implement TSC synchronization but insteads pins threads to CPUs, +//! although this file does not implement TSC synchronization but instead pins threads to CPUs, //! since the former is not reliable (i.e. it might lead to non-monotonic time measurements). //! Another useful resource for future improvements might be measureme's time measurement utils: //! . @@ -11,7 +11,7 @@ #![cfg(feature = "tracing")] /// This alternative `TracingChromeInstant` implementation was made entirely to suit the needs of -/// [crate::log::tracing_chrome], and shouldn't be used for anything else. It featues two functions: +/// [crate::log::tracing_chrome], and shouldn't be used for anything else. It features two functions: /// - [TracingChromeInstant::setup_for_thread_and_start], which sets up the current thread to do /// proper time tracking and returns a point in time to use as "t=0", and /// - [TracingChromeInstant::with_elapsed_micros_subtracting_tracing], which allows diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs index 064c1cc5b95f7..b62c5f242c374 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/perms.rs @@ -578,7 +578,7 @@ pub mod diagnostics { // - created as Reserved { conflicted: false }, // then Unique -> Disabled is forbidden // A potential `Reserved { conflicted: false } - // -> Reserved { conflicted: true }` is inexistant or irrelevant, + // -> Reserved { conflicted: true }` is inexistent or irrelevant, // and so is the `Reserved { conflicted: false } -> Unique` (Unique, Frozen) => false, (ReservedFrz { conflicted: true }, _) => false, diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs index 2c6be522837cb..54a0e0cefe743 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree.rs @@ -26,7 +26,7 @@ use super::foreign_access_skipping::IdempotentForeignAccess; use super::perms::{PermTransition, Permission}; use super::tree_visitor::{ChildrenVisitMode, ContinueTraversal, NodeAppArgs, TreeVisitor}; use super::unimap::{UniIndex, UniKeyMap, UniValMap}; -use super::wildcard::WildcardState; +use super::wildcard::ExposedCache; use crate::borrow_tracker::{AccessKind, GlobalState, ProtectorKind}; use crate::*; @@ -89,7 +89,7 @@ impl LocationState { &mut self, idx: UniIndex, nodes: &mut UniValMap, - wildcard_accesses: &mut UniValMap, + exposed_cache: &mut ExposedCache, 
access_kind: AccessKind, relatedness: AccessRelatedness, protected: bool, @@ -99,7 +99,7 @@ impl LocationState { // ensures it is only called when `skip_if_known_noop` returns // `Recurse`, due to the contract of `traverse_this_parents_children_other`. self.record_new_access(access_kind, relatedness); - + let old_access_level = self.permission.strongest_allowed_local_access(protected); let transition = self.perform_access(access_kind, relatedness, protected)?; if !transition.is_noop() { let node = nodes.get_mut(idx).unwrap(); @@ -111,8 +111,8 @@ impl LocationState { // We need to update the wildcard state, if the permission // of an exposed pointer changes. if node.is_exposed { - let access_type = self.permission.strongest_allowed_local_access(protected); - WildcardState::update_exposure(idx, access_type, nodes, wildcard_accesses); + let access_level = self.permission.strongest_allowed_local_access(protected); + exposed_cache.update_exposure(nodes, idx, old_access_level, access_level); } } Ok(()) @@ -226,7 +226,7 @@ impl LocationState { /// Records a new access, so that future access can potentially be skipped /// by `skip_if_known_noop`. This must be called on child accesses, and otherwise - /// shoud be called on foreign accesses for increased performance. It should not be called + /// should be called on foreign accesses for increased performance. It should not be called /// when `skip_if_known_noop` indicated skipping, since it then is a no-op. /// See `foreign_access_skipping.rs` fn record_new_access(&mut self, access_kind: AccessKind, rel_pos: AccessRelatedness) { @@ -261,14 +261,8 @@ pub struct LocationTree { /// /// We do uphold the fact that `keys(perms)` is a subset of `keys(nodes)` pub perms: UniValMap, - /// Maps a tag and a location to its wildcard access tracking information, - /// with possible lazy initialization. - /// - /// If this allocation doesn't have any exposed nodes, then this map doesn't get - /// initialized. This way we only need to allocate the map if we need it. - /// - /// NOTE: same guarantees on entry initialization as for `perms`. - pub wildcard_accesses: UniValMap, + /// Caches information about the relatedness of nodes for a wildcard access. + pub exposed_cache: ExposedCache, } /// Tree structure with both parents and children since we want to be /// able to traverse the tree efficiently in both directions. @@ -276,7 +270,7 @@ pub struct LocationTree { pub struct Tree { /// Mapping from tags to keys. The key obtained can then be used in /// any of the `UniValMap` relative to this allocation, i.e. - /// `nodes`, `LocationTree::perms` and `LocationTree::wildcard_accesses` + /// `nodes`, `LocationTree::perms` and `LocationTree::exposed_cache` /// of the same `Tree`. /// The parent-child relationship in `Node` is encoded in terms of these same /// keys, so traversing the entire tree needs exactly one access to @@ -372,8 +366,8 @@ impl Tree { IdempotentForeignAccess::None, ), ); - let wildcard_accesses = UniValMap::default(); - DedupRangeMap::new(size, LocationTree { perms, wildcard_accesses }) + let exposed_cache = ExposedCache::default(); + DedupRangeMap::new(size, LocationTree { perms, exposed_cache }) }; Self { roots: SmallVec::from_slice(&[root_idx]), nodes, locations, tag_mapping } } @@ -451,19 +445,9 @@ impl<'tcx> Tree { } } - // We need to ensure the consistency of the wildcard access tracking data structure. - // For this, we insert the correct entry for this tag based on its parent, if it exists. 
- // If we are inserting a new wildcard root (with Wildcard as parent_prov) then we insert - // the special wildcard root initial state instead. - for (_range, loc) in self.locations.iter_mut_all() { - if let Some(parent_idx) = parent_idx { - if let Some(parent_access) = loc.wildcard_accesses.get(parent_idx) { - loc.wildcard_accesses.insert(idx, parent_access.for_new_child()); - } - } else { - loc.wildcard_accesses.insert(idx, WildcardState::for_wildcard_root()); - } - } + // We don't have to update `exposed_cache` as the new node is not exposed and + // has no children so the default counts of 0 are correct. + // If the parent is a wildcard pointer, then it doesn't track SIFA and doesn't need to be updated. if let Some(parent_idx) = parent_idx { // Inserting the new perms might have broken the SIFA invariant (see @@ -807,7 +791,7 @@ impl Tree { let node = self.nodes.remove(this).unwrap(); for (_range, loc) in self.locations.iter_mut_all() { loc.perms.remove(this); - loc.wildcard_accesses.remove(this); + loc.exposed_cache.remove(this); } self.tag_mapping.remove(&node.tag); } @@ -943,7 +927,7 @@ impl<'tcx> LocationTree { }; let accessed_root_tag = accessed_root.map(|idx| nodes.get(idx).unwrap().tag); - for root in roots { + for (i, root) in roots.enumerate() { let tag = nodes.get(root).unwrap().tag; // On a protector release access we have to skip the children of the accessed tag. // However, if the tag has exposed children then some of the wildcard subtrees could @@ -981,6 +965,7 @@ impl<'tcx> LocationTree { access_kind, global, diagnostics, + /*is_wildcard_tree*/ i != 0, )?; } interp_ok(()) @@ -1029,7 +1014,7 @@ impl<'tcx> LocationTree { .perform_transition( args.idx, args.nodes, - &mut args.data.wildcard_accesses, + &mut args.data.exposed_cache, access_kind, args.rel_pos, protected, @@ -1074,12 +1059,18 @@ impl<'tcx> LocationTree { access_kind: AccessKind, global: &GlobalState, diagnostics: &DiagnosticInfo, + is_wildcard_tree: bool, ) -> InterpResult<'tcx> { let get_relatedness = |idx: UniIndex, node: &Node, loc: &LocationTree| { - let wildcard_state = loc.wildcard_accesses.get(idx).cloned().unwrap_or_default(); // If the tag is larger than `max_local_tag` then the access can only be foreign. let only_foreign = max_local_tag.is_some_and(|max_local_tag| max_local_tag < node.tag); - wildcard_state.access_relatedness(access_kind, only_foreign) + loc.exposed_cache.access_relatedness( + root, + idx, + access_kind, + is_wildcard_tree, + only_foreign, + ) }; // Whether there is an exposed node in this tree that allows this access. @@ -1156,7 +1147,7 @@ impl<'tcx> LocationTree { perm.perform_transition( args.idx, args.nodes, - &mut args.data.wildcard_accesses, + &mut args.data.exposed_cache, access_kind, relatedness, protected, @@ -1175,19 +1166,11 @@ impl<'tcx> LocationTree { }) }, )?; - // If there is no exposed node in this tree that allows this access, then the - // access *must* be foreign. So we check if the root of this tree would allow this - // as a foreign access, and if not, then we can error. - // In practice, all wildcard trees accept foreign accesses, but the main tree does - // not, so this catches UB when none of the nodes in the main tree allows this access. - if !has_valid_exposed - && self - .wildcard_accesses - .get(root) - .unwrap() - .access_relatedness(access_kind, /* only_foreign */ true) - .is_none() - { + // If there is no exposed node in this tree that allows this access, then the access *must* + // be foreign to the entire subtree. 
Foreign accesses are only possible on wildcard subtrees + // as there are no ancestors to the main root. So if we do not find a valid exposed node in + // the main tree then this access is UB. if !has_valid_exposed && !is_wildcard_tree { return Err(no_valid_exposed_references_error(diagnostics)).into(); } interp_ok(()) diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs index 0b83de2cedc09..52fe1c08a3092 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree/tests.rs @@ -741,7 +741,7 @@ mod spurious_read { ); eprintln!(" (arbitrary code instanciated with '{opaque}')"); err += 1; - // We found an instanciation of the opaque code that makes this Pattern + // We found an instantiation of the opaque code that makes this Pattern // fail, we don't really need to check the rest. break; } diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/tree_visitor.rs index b1ceeecf577de..ab3a19ad9e17c 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/tree_visitor.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/tree_visitor.rs @@ -99,7 +99,7 @@ where assert!(self.stack.is_empty()); // First, handle accessed node. A bunch of things need to // be handled differently here compared to the further parents - // of `accesssed_node`. + // of `accessed_node`. { self.propagate_at(this, accessed_node, AccessRelatedness::LocalAccess)?; if matches!(visit_children, ChildrenVisitMode::VisitChildrenOfAccessed) { diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/wildcard.rs index b5ae0ee4c7d31..b03635de70ae7 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/wildcard.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/wildcard.rs @@ -1,4 +1,3 @@ -use std::cmp::max; use std::fmt::Debug; use super::Tree; @@ -51,374 +50,142 @@ impl WildcardAccessRelatedness { } } +/// Caches information about where in the tree exposed nodes with permission to do reads/writes are +/// located. [`ExposedCache`] stores this information for a single location (or rather, a range of +/// homogeneous locations) for all nodes in an allocation. +/// +/// Nodes not in this map have a default [`ExposedCacheNode`], i.e. they have no exposed children. +/// In particular, this map remains empty (and thus consumes no memory) until the first +/// node in the tree gets exposed. +#[derive(Clone, Debug, Default, PartialEq, Eq)] +pub struct ExposedCache(UniValMap); /// State per location per node keeping track of where relative to this /// node exposed nodes are and what access permissions they have. -/// -/// Designed to be completely determined by its parent, siblings and -/// direct children's max_local_access/max_foreign_access. -#[derive(Clone, Default, PartialEq, Eq)] -pub struct WildcardState { - /// How many of this node's direct children have `max_local_access()==Write`. - child_writes: u16, - /// How many of this node's direct children have `max_local_access()>=Read`. - child_reads: u16, - /// The maximum access level that could happen from an exposed node - /// that is foreign to this node. - /// - /// This is calculated as the `max()` of the parent's `max_foreign_access`, - /// `exposed_as` and the siblings' `max_local_access()`.
- max_foreign_access: WildcardAccessLevel, - /// At what access level this node itself is exposed. - exposed_as: WildcardAccessLevel, -} -impl Debug for WildcardState { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("WildcardState") - .field("child_r/w", &(self.child_reads, self.child_writes)) - .field("foreign", &self.max_foreign_access) - .field("exposed_as", &self.exposed_as) - .finish() - } +#[derive(Clone, Default, Debug, PartialEq, Eq)] +struct ExposedCacheNode { + /// How many local nodes (in this subtree) are exposed with write permissions. + local_writes: u16, + /// How many local nodes (in this subtree) are exposed with read permissions. + local_reads: u16, } -impl WildcardState { - /// The maximum access level that could happen from an exposed - /// node that is local to this node. - fn max_local_access(&self) -> WildcardAccessLevel { - use WildcardAccessLevel::*; - max( - self.exposed_as, - if self.child_writes > 0 { - Write - } else if self.child_reads > 0 { - Read - } else { - None - }, - ) - } - /// From where relative to the node with this wildcard info a read or write access could happen. - /// If `only_foreign` is true then we treat `LocalAccess` as impossible. This means we return - /// `None` if only a `LocalAccess` is possible, and we treat `EitherAccess` as a - /// `ForeignAccess`. +impl ExposedCache { + /// Returns the relatedness of a wildcard access to a node. + /// + /// This function only considers a single subtree. If the current subtree does not contain + /// any valid exposed nodes then the function return `None`. + /// + /// * `root`: The root of the subtree the node belongs to. + /// * `id`: The id of the node. + /// * `kind`: The kind of the wildcard access. + /// * `is_wildcard_tree`: This nodes belongs to a wildcard subtree. + /// This means we always treat foreign accesses as possible. + /// * `only_foreign`: Assume the access cannot come from a local node. pub fn access_relatedness( &self, + root: UniIndex, + id: UniIndex, kind: AccessKind, + is_wildcard_tree: bool, only_foreign: bool, ) -> Option { - let rel = match kind { - AccessKind::Read => self.read_access_relatedness(), - AccessKind::Write => self.write_access_relatedness(), + // All nodes in the tree are local to the root, so we can use the root to get the total + // number of valid exposed nodes in the tree. + let root = self.0.get(root).cloned().unwrap_or_default(); + let node = self.0.get(id).cloned().unwrap_or_default(); + + let (total_num, local_num) = match kind { + AccessKind::Read => (root.local_reads, node.local_reads), + AccessKind::Write => (root.local_writes, node.local_writes), }; - if only_foreign { - use WildcardAccessRelatedness as E; - match rel { - Some(E::EitherAccess | E::ForeignAccess) => Some(E::ForeignAccess), - Some(E::LocalAccess) | None => None, - } - } else { - rel - } - } - /// From where relative to the node with this wildcard info a read access could happen. - fn read_access_relatedness(&self) -> Option { - let has_foreign = self.max_foreign_access >= WildcardAccessLevel::Read; - let has_local = self.max_local_access() >= WildcardAccessLevel::Read; - use WildcardAccessRelatedness as E; - match (has_foreign, has_local) { - (true, true) => Some(E::EitherAccess), - (true, false) => Some(E::ForeignAccess), - (false, true) => Some(E::LocalAccess), - (false, false) => None, - } - } - - /// From where relative to the node with this wildcard info a write access could happen. 
- fn write_access_relatedness(&self) -> Option { - let has_foreign = self.max_foreign_access == WildcardAccessLevel::Write; - let has_local = self.max_local_access() == WildcardAccessLevel::Write; - use WildcardAccessRelatedness as E; - match (has_foreign, has_local) { - (true, true) => Some(E::EitherAccess), - (true, false) => Some(E::ForeignAccess), - (false, true) => Some(E::LocalAccess), - (false, false) => None, - } - } - - /// Gets the access tracking information for a new child node of a parent with this - /// wildcard info. - /// The new node doesn't have any child reads/writes, but calculates `max_foreign_access` - /// from its parent. - pub fn for_new_child(&self) -> Self { - Self { - max_foreign_access: max(self.max_foreign_access, self.max_local_access()), - ..Default::default() - } - } - /// Crates the initial `WildcardState` for a wildcard root. - /// This has `max_foreign_access==Write` as it actually is the child of *some* exposed node - /// through which we can receive foreign accesses. - /// - /// This is different from the main root which has `max_foreign_access==None`, since there - /// cannot be a foreign access to the root of the allocation. - pub fn for_wildcard_root() -> Self { - Self { max_foreign_access: WildcardAccessLevel::Write, ..Default::default() } - } - - /// Pushes the nodes of `children` onto the stack who's `max_foreign_access` - /// needs to be updated. - /// - /// * `children`: A list of nodes with the same parent. `children` doesn't - /// necessarily have to contain all children of parent, but can just be - /// a subset. - /// - /// * `child_reads`, `child_writes`: How many of `children` have `max_local_access()` - /// of at least `read`/`write` - /// - /// * `new_foreign_access`, `old_foreign_access`: - /// The max possible access level that is foreign to all `children` - /// (i.e., it is not local to *any* of them). - /// This can be calculated as the max of the parent's `exposed_as()`, `max_foreign_access` - /// and of all `max_local_access()` of any nodes with the same parent that are - /// not listed in `children`. - /// - /// This access level changed from `old` to `new`, which is why we need to - /// update `children`. - fn push_relevant_children( - stack: &mut Vec<(UniIndex, WildcardAccessLevel)>, - new_foreign_access: WildcardAccessLevel, - old_foreign_access: WildcardAccessLevel, - child_reads: u16, - child_writes: u16, - children: impl Iterator, - - wildcard_accesses: &UniValMap, - ) { - use WildcardAccessLevel::*; - - // Nothing changed so we don't need to update anything. - if new_foreign_access == old_foreign_access { - return; - } - - // We need to consider that the children's `max_local_access()` affect each - // other's `max_foreign_access`, but do not affect their own `max_foreign_access`. - - // The new `max_foreign_acces` for children with `max_local_access()==Write`. - let write_foreign_access = max( - new_foreign_access, - if child_writes > 1 { - // There exists at least one more child with exposed write access. - // This means that a foreign write through that node is possible. - Write - } else if child_reads > 1 { - // There exists at least one more child with exposed read access, - // but no other with write access. - // This means that a foreign read but no write through that node - // is possible. - Read - } else { - // There are no other nodes with read or write access. - // This means no foreign writes through other children are possible. 
- None - }, - ); - - // The new `max_foreign_acces` for children with `max_local_access()==Read`. - let read_foreign_access = max( - new_foreign_access, - if child_writes > 0 { - // There exists at least one child with write access (and it's not this one). - Write - } else if child_reads > 1 { - // There exists at least one more child with exposed read access, - // but no other with write access. - Read - } else { - // There are no other nodes with read or write access, - None - }, - ); - - // The new `max_foreign_acces` for children with `max_local_access()==None`. - let none_foreign_access = max( - new_foreign_access, - if child_writes > 0 { - // There exists at least one child with write access (and it's not this one). - Write - } else if child_reads > 0 { - // There exists at least one child with read access (and it's not this one), - // but none with write access. - Read + // If this is a wildcard tree then an access can always be foreign as + // it could come from another tree. + // We can represent this by adding 1 to the total which means there + // always exists a foreign exposed node. + // (We cannot bake this into the root's count as then if `node == root` it would + // affect both `total` and `local`.) + let total_num = total_num + u16::from(is_wildcard_tree); + + use WildcardAccessRelatedness::*; + let relatedness = if total_num == 0 { + // we return None if the tree does not contain any valid exposed nodes. + None + } else { + Some(if total_num == local_num { + // If all valid exposed nodes are local to this node then the access is local. + LocalAccess + } else if local_num == 0 { + // If the node does not have any exposed nodes as children then the access is foreign. + ForeignAccess } else { - // No children are exposed as read or write. - None - }, - ); - - stack.extend(children.filter_map(|child| { - let state = wildcard_accesses.get(child).cloned().unwrap_or_default(); - - let new_foreign_access = match state.max_local_access() { - Write => write_foreign_access, - Read => read_foreign_access, - None => none_foreign_access, - }; + // If some but not all of the valid exposed nodes are local then we cannot determine the correct relatedness. + EitherAccess + }) + }; - if new_foreign_access != state.max_foreign_access { - Some((child, new_foreign_access)) - } else { - Option::None + if only_foreign { + // This is definitely not a local access; clamp the result accordingly. + match relatedness { + Some(LocalAccess) => None, + Some(ForeignAccess) => Some(ForeignAccess), + Some(EitherAccess) => Some(ForeignAccess), + None => None, } - })); + } else { + relatedness + } } - /// Update the tracking information of a tree, to reflect that the node specified by `id` is - /// now exposed with `new_exposed_as`. + /// now exposed with `new_exposed_as` permission. /// /// Propagates the Willard access information over the tree. This needs to be called every /// time the access level of an exposed node changes, to keep the state in sync with /// the rest of the tree. + /// + /// * `from`: The previous access level of the exposed node. + /// Set to `None` if the node was not exposed before. + /// * `to`: The new access level. 
pub fn update_exposure( - id: UniIndex, - new_exposed_as: WildcardAccessLevel, + &mut self, nodes: &UniValMap, - wildcard_accesses: &mut UniValMap, + id: UniIndex, + from: WildcardAccessLevel, + to: WildcardAccessLevel, ) { - let mut entry = wildcard_accesses.entry(id); - let src_state = entry.or_insert(Default::default()); - let old_exposed_as = src_state.exposed_as; - // If the exposure doesn't change, then we don't need to update anything. - if old_exposed_as == new_exposed_as { + if from == to { return; } - let src_old_local_access = src_state.max_local_access(); - - src_state.exposed_as = new_exposed_as; - - let src_new_local_access = src_state.max_local_access(); - - // Stack of nodes for which the max_foreign_access field needs to be updated. - // Will be filled with the children of this node and its parents children before - // we begin downwards traversal. - let mut stack: Vec<(UniIndex, WildcardAccessLevel)> = Vec::new(); - - // Add the direct children of this node to the stack. - { + // Update the counts of this node and all its ancestors. + let mut next_id = Some(id); + while let Some(id) = next_id { let node = nodes.get(id).unwrap(); - Self::push_relevant_children( - &mut stack, - // new_foreign_access - max(src_state.max_foreign_access, new_exposed_as), - // old_foreign_access - max(src_state.max_foreign_access, old_exposed_as), - // Consider all children. - src_state.child_reads, - src_state.child_writes, - node.children.iter().copied(), - wildcard_accesses, - ); - } - // We need to propagate the tracking info up the tree, for this we traverse - // up the parents. - // We can skip propagating info to the parent and siblings of a node if its - // access didn't change. - { - // The child from which we came. - let mut child = id; - // This is the `max_local_access()` of the child we came from, before - // this update... - let mut old_child_access = src_old_local_access; - // and after this update. - let mut new_child_access = src_new_local_access; - while let Some(parent_id) = nodes.get(child).unwrap().parent { - let parent_node = nodes.get(parent_id).unwrap(); - let mut entry = wildcard_accesses.entry(parent_id); - let parent_state = entry.or_insert(Default::default()); - - let old_parent_local_access = parent_state.max_local_access(); - use WildcardAccessLevel::*; - // Updating this node's tracking state for its children. - match (old_child_access, new_child_access) { - (None | Read, Write) => parent_state.child_writes += 1, - (Write, None | Read) => parent_state.child_writes -= 1, - _ => {} - } - match (old_child_access, new_child_access) { - (None, Read | Write) => parent_state.child_reads += 1, - (Read | Write, None) => parent_state.child_reads -= 1, - _ => {} - } - - let new_parent_local_access = parent_state.max_local_access(); - - { - // We need to update the `max_foreign_access` of `child`'s - // siblings. For this we can reuse the `push_relevant_children` - // function. - // - // We pass it just the siblings without child itself. Since - // `child`'s `max_local_access()` is foreign to all of its - // siblings we can pass it as part of the foreign access. - - let parent_access = - max(parent_state.exposed_as, parent_state.max_foreign_access); - // This is how many of `child`'s siblings have read/write local access. - // If `child` itself has access, then we need to subtract its access from the count. 
- let sibling_reads = - parent_state.child_reads - if new_child_access >= Read { 1 } else { 0 }; - let sibling_writes = - parent_state.child_writes - if new_child_access >= Write { 1 } else { 0 }; - Self::push_relevant_children( - &mut stack, - // new_foreign_access - max(parent_access, new_child_access), - // old_foreign_access - max(parent_access, old_child_access), - // Consider only siblings of child. - sibling_reads, - sibling_writes, - parent_node.children.iter().copied().filter(|id| child != *id), - wildcard_accesses, - ); - } - if old_parent_local_access == new_parent_local_access { - // We didn't change `max_local_access()` for parent, so we don't need to propagate further upwards. - break; - } - - old_child_access = old_parent_local_access; - new_child_access = new_parent_local_access; - child = parent_id; + let mut state = self.0.entry(id); + let state = state.or_insert(Default::default()); + + use WildcardAccessLevel::*; + match (from, to) { + (None | Read, Write) => state.local_writes += 1, + (Write, None | Read) => state.local_writes -= 1, + _ => {} } - } - // Traverses down the tree to update max_foreign_access fields of children and cousins who need to be updated. - while let Some((id, new_access)) = stack.pop() { - let node = nodes.get(id).unwrap(); - let mut entry = wildcard_accesses.entry(id); - let state = entry.or_insert(Default::default()); - - let old_access = state.max_foreign_access; - state.max_foreign_access = new_access; - - Self::push_relevant_children( - &mut stack, - // new_foreign_access - max(state.exposed_as, new_access), - // old_foreign_access - max(state.exposed_as, old_access), - // Consider all children. - state.child_reads, - state.child_writes, - node.children.iter().copied(), - wildcard_accesses, - ); + match (from, to) { + (None, Read | Write) => state.local_reads += 1, + (Read | Write, None) => state.local_reads -= 1, + _ => {} + } + next_id = node.parent; } } + /// Removes a node from the datastructure. + /// + /// The caller needs to ensure that the node does not have any children. + pub fn remove(&mut self, idx: UniIndex) { + self.0.remove(idx); + } } impl Tree { @@ -428,25 +195,28 @@ impl Tree { pub fn expose_tag(&mut self, tag: BorTag, protected: bool) { let id = self.tag_mapping.get(&tag).unwrap(); let node = self.nodes.get_mut(id).unwrap(); - node.is_exposed = true; - let node = self.nodes.get(id).unwrap(); - - // When the first tag gets exposed then we initialize the - // wildcard state for every node and location in the tree. - for (_, loc) in self.locations.iter_mut_all() { - let perm = loc - .perms - .get(id) - .map(|p| p.permission()) - .unwrap_or_else(|| node.default_location_state().permission()); - - let access_type = perm.strongest_allowed_local_access(protected); - WildcardState::update_exposure( - id, - access_type, - &self.nodes, - &mut loc.wildcard_accesses, - ); + if !node.is_exposed { + node.is_exposed = true; + let node = self.nodes.get(id).unwrap(); + + for (_, loc) in self.locations.iter_mut_all() { + let perm = loc + .perms + .get(id) + .map(|p| p.permission()) + .unwrap_or_else(|| node.default_location_state().permission()); + + let access_level = perm.strongest_allowed_local_access(protected); + // An unexposed node gets treated as access level `None`. Therefore, + // the initial exposure transitions from `None` to the node's actual + // `access_level`. 
+ loc.exposed_cache.update_exposure( + &self.nodes, + id, + WildcardAccessLevel::None, + access_level, + ); + } } } @@ -457,10 +227,19 @@ impl Tree { // We check if the node is already exposed, as we don't want to expose any // nodes which aren't already exposed. - - if self.nodes.get(idx).unwrap().is_exposed { - // Updates the exposure to the new permission on every location. - self.expose_tag(tag, /* protected */ false); + let node = self.nodes.get(idx).unwrap(); + if node.is_exposed { + for (_, loc) in self.locations.iter_mut_all() { + let perm = loc + .perms + .get(idx) + .map(|p| p.permission()) + .unwrap_or_else(|| node.default_location_state().permission()); + // We are transitioning from protected to unprotected. + let old_access_type = perm.strongest_allowed_local_access(/*protected*/ true); + let access_type = perm.strongest_allowed_local_access(/*protected*/ false); + loc.exposed_cache.update_exposure(&self.nodes, idx, old_access_type, access_type); + } } } } @@ -472,20 +251,15 @@ impl Tree { pub fn verify_wildcard_consistency(&self, global: &GlobalState) { // We rely on the fact that `roots` is ordered according to tag from low to high. assert!(self.roots.is_sorted_by_key(|idx| self.nodes.get(*idx).unwrap().tag)); - let main_root_idx = self.roots[0]; let protected_tags = &global.borrow().protected_tags; for (_, loc) in self.locations.iter_all() { - let wildcard_accesses = &loc.wildcard_accesses; + let exposed_cache = &loc.exposed_cache; let perms = &loc.perms; - // Checks if accesses is empty. - if wildcard_accesses.is_empty() { - return; - } for (id, node) in self.nodes.iter() { - let state = wildcard_accesses.get(id).unwrap(); + let state = exposed_cache.0.get(id).cloned().unwrap_or_default(); - let expected_exposed_as = if node.is_exposed { + let exposed_as = if node.is_exposed { let perm = perms.get(id).copied().unwrap_or_else(|| node.default_location_state()); @@ -495,72 +269,25 @@ impl Tree { WildcardAccessLevel::None }; - // The foreign wildcard accesses possible at a node are determined by which - // accesses can originate from their siblings, their parent, and from above - // their parent. - let expected_max_foreign_access = if let Some(parent) = node.parent { - let parent_node = self.nodes.get(parent).unwrap(); - let parent_state = wildcard_accesses.get(parent).unwrap(); - - let max_sibling_access = parent_node - .children - .iter() - .copied() - .filter(|child| *child != id) - .map(|child| { - let state = wildcard_accesses.get(child).unwrap(); - state.max_local_access() - }) - .fold(WildcardAccessLevel::None, max); - - max_sibling_access - .max(parent_state.max_foreign_access) - .max(parent_state.exposed_as) - } else { - if main_root_idx == id { - // There can never be a foreign access to the root of the allocation. - // So its foreign access level is always `None`. - WildcardAccessLevel::None - } else { - // For wildcard roots any access on a different subtree can be foreign - // to it. So a wildcard root has the maximum possible foreign access - // level. - WildcardAccessLevel::Write - } - }; - - // Count how many children can be the source of wildcard reads or writes - // (either directly, or via their children). 
- let child_accesses = node.children.iter().copied().map(|child| { - let state = wildcard_accesses.get(child).unwrap(); - state.max_local_access() - }); - let expected_child_reads = - child_accesses.clone().filter(|a| *a >= WildcardAccessLevel::Read).count(); - let expected_child_writes = - child_accesses.filter(|a| *a >= WildcardAccessLevel::Write).count(); - - assert_eq!( - expected_exposed_as, state.exposed_as, - "tag {:?} (id:{id:?}) should be exposed as {expected_exposed_as:?} but is exposed as {:?}", - node.tag, state.exposed_as - ); - assert_eq!( - expected_max_foreign_access, state.max_foreign_access, - "expected {:?}'s (id:{id:?}) max_foreign_access to be {:?} instead of {:?}", - node.tag, expected_max_foreign_access, state.max_foreign_access - ); - let child_reads: usize = state.child_reads.into(); + let (child_reads, child_writes) = node + .children + .iter() + .copied() + .map(|id| exposed_cache.0.get(id).cloned().unwrap_or_default()) + .fold((0, 0), |acc, wc| (acc.0 + wc.local_reads, acc.1 + wc.local_writes)); + let expected_reads = + child_reads + u16::from(exposed_as >= WildcardAccessLevel::Read); + let expected_writes = + child_writes + u16::from(exposed_as >= WildcardAccessLevel::Write); assert_eq!( - expected_child_reads, child_reads, - "expected {:?}'s (id:{id:?}) child_reads to be {} instead of {}", - node.tag, expected_child_reads, child_reads + state.local_reads, expected_reads, + "expected {:?}'s (id:{id:?}) local_reads to be {expected_reads:?} instead of {:?} (child_reads: {child_reads:?}, exposed_as: {exposed_as:?})", + node.tag, state.local_reads ); - let child_writes: usize = state.child_writes.into(); assert_eq!( - expected_child_writes, child_writes, - "expected {:?}'s (id:{id:?}) child_writes to be {} instead of {}", - node.tag, expected_child_writes, child_writes + state.local_writes, expected_writes, + "expected {:?}'s (id:{id:?}) local_writes to be {expected_writes:?} instead of {:?} (child_writes: {child_writes:?}, exposed_as: {exposed_as:?})", + node.tag, state.local_writes ); } } diff --git a/src/tools/miri/src/concurrency/data_race.rs b/src/tools/miri/src/concurrency/data_race.rs index c18b780998606..336abff6c52d6 100644 --- a/src/tools/miri/src/concurrency/data_race.rs +++ b/src/tools/miri/src/concurrency/data_race.rs @@ -371,7 +371,7 @@ impl AccessType { if let Some(size) = size { if size == Size::ZERO { - // In this case there were multiple read accesss with different sizes and then a write. + // In this case there were multiple read accesses with different sizes and then a write. // We will be reporting *one* of the other reads, but we don't have enough information // to determine which one had which size. assert!(self == AccessType::AtomicLoad); diff --git a/src/tools/miri/src/concurrency/genmc/global_allocations.rs b/src/tools/miri/src/concurrency/genmc/global_allocations.rs index 7f34c60dcdaff..76be8c3a4c9a8 100644 --- a/src/tools/miri/src/concurrency/genmc/global_allocations.rs +++ b/src/tools/miri/src/concurrency/genmc/global_allocations.rs @@ -62,7 +62,7 @@ impl GlobalStateInner { let entry = match self.base_addr.entry(alloc_id) { Entry::Occupied(occupied_entry) => { // Looks like some other thread allocated this for us - // between when we released the read lock and aquired the write lock, + // between when we released the read lock and acquired the write lock, // so we just return that value. 
return interp_ok(*occupied_entry.get()); } diff --git a/src/tools/miri/src/concurrency/genmc/mod.rs b/src/tools/miri/src/concurrency/genmc/mod.rs index 740553ab85d64..092fc7294d15d 100644 --- a/src/tools/miri/src/concurrency/genmc/mod.rs +++ b/src/tools/miri/src/concurrency/genmc/mod.rs @@ -252,7 +252,7 @@ impl GenmcCtx { /// Inform GenMC about an atomic load. /// Returns that value that the load should read. /// - /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitalized. + /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitialized. pub(crate) fn atomic_load<'tcx>( &self, ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, @@ -275,7 +275,7 @@ impl GenmcCtx { /// Inform GenMC about an atomic store. /// Returns `true` if the stored value should be reflected in Miri's memory. /// - /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitalized. + /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitialized. pub(crate) fn atomic_store<'tcx>( &self, ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, @@ -320,7 +320,7 @@ impl GenmcCtx { /// /// Returns `(old_val, Option)`. `new_val` might not be the latest write in coherence order, which is indicated by `None`. /// - /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitalized. + /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitialized. pub(crate) fn atomic_rmw_op<'tcx>( &self, ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, @@ -345,7 +345,7 @@ impl GenmcCtx { /// Returns `(old_val, Option)`. `new_val` might not be the latest write in coherence order, which is indicated by `None`. /// - /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitalized. + /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitialized. pub(crate) fn atomic_exchange<'tcx>( &self, ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, @@ -370,7 +370,7 @@ impl GenmcCtx { /// /// Returns the old value read by the compare exchange, optionally the value that Miri should write back to its memory, and whether the compare-exchange was a success or not. /// - /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitalized. + /// `old_value` is the value that a non-atomic load would read here, or `None` if the memory is uninitialized. pub(crate) fn atomic_compare_exchange<'tcx>( &self, ecx: &InterpCx<'tcx, MiriMachine<'tcx>>, diff --git a/src/tools/miri/src/concurrency/genmc/run.rs b/src/tools/miri/src/concurrency/genmc/run.rs index 6ff8e0656f36e..de6d87373ec02 100644 --- a/src/tools/miri/src/concurrency/genmc/run.rs +++ b/src/tools/miri/src/concurrency/genmc/run.rs @@ -30,7 +30,7 @@ pub fn run_genmc_mode<'tcx>( config: &MiriConfig, eval_entry: impl Fn(Rc) -> Result<(), NonZeroI32>, ) -> Result<(), NonZeroI32> { - // Check for supported target: endianess and pointer size must match the host. + // Check for supported target: endianness and pointer size must match the host. 
if tcx.data_layout.endian != Endian::Little || tcx.data_layout.pointer_size().bits() != 64 { tcx.dcx().fatal("GenMC only supports 64bit little-endian targets"); } diff --git a/src/tools/miri/src/concurrency/weak_memory.rs b/src/tools/miri/src/concurrency/weak_memory.rs index 6fe73fec0f57f..3aded9a1454a7 100644 --- a/src/tools/miri/src/concurrency/weak_memory.rs +++ b/src/tools/miri/src/concurrency/weak_memory.rs @@ -389,7 +389,7 @@ impl<'tcx> StoreBuffer { }) .filter(|&store_elem| { if is_seqcst && store_elem.is_seqcst { - // An SC load needs to ignore all but last store maked SC (stores not marked SC are not + // An SC load needs to ignore all but last store marked SC (stores not marked SC are not // affected) let include = !found_sc; found_sc = true; diff --git a/src/tools/miri/src/eval.rs b/src/tools/miri/src/eval.rs index 0423b0ea5abdf..1e75df7d278fb 100644 --- a/src/tools/miri/src/eval.rs +++ b/src/tools/miri/src/eval.rs @@ -113,7 +113,7 @@ pub struct MiriConfig { pub float_nondet: bool, /// Whether floating-point operations can have a non-deterministic rounding error. pub float_rounding_error: FloatRoundingErrorMode, - /// Whether Miri artifically introduces short reads/writes on file descriptors. + /// Whether Miri artificially introduces short reads/writes on file descriptors. pub short_fd_operations: bool, /// A list of crates that are considered user-relevant. pub user_relevant_crates: Vec, diff --git a/src/tools/miri/src/helpers.rs b/src/tools/miri/src/helpers.rs index f4fc478481a7a..5dcd2d9ec2084 100644 --- a/src/tools/miri/src/helpers.rs +++ b/src/tools/miri/src/helpers.rs @@ -138,7 +138,7 @@ pub fn iter_exported_symbols<'tcx>( } // Next, all our dependencies. - // `dependency_formats` includes all the transitive informations needed to link a crate, + // `dependency_formats` includes all the transitive information needed to link a crate, // which is what we need here since we need to dig out `exported_symbols` from all transitive // dependencies. let dependency_formats = tcx.dependency_formats(()); @@ -1148,7 +1148,7 @@ impl ToUsize for u32 { } /// Similarly, a maximum address size of `u64` is assumed widely here, so let's have ergonomic -/// converion from `usize` to `u64`. +/// conversion from `usize` to `u64`. pub trait ToU64 { fn to_u64(self) -> u64; } diff --git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index f17bd5ac4319c..d50475c748747 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -654,7 +654,7 @@ pub struct MiriMachine<'tcx> { /// Whether floating-point operations can have a non-deterministic rounding error. pub float_rounding_error: FloatRoundingErrorMode, - /// Whether Miri artifically introduces short reads/writes on file descriptors. + /// Whether Miri artificially introduces short reads/writes on file descriptors. pub short_fd_operations: bool, } @@ -1802,7 +1802,7 @@ impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> { // We have to skip the frame that is just being popped. ecx.active_thread_mut().recompute_top_user_relevant_frame(/* skip */ 1); } - // tracing-tree can autoamtically annotate scope changes, but it gets very confused by our + // tracing-tree can automatically annotate scope changes, but it gets very confused by our // concurrency and what it prints is just plain wrong. So we print our own information // instead. 
(Cc https://github.com/rust-lang/miri/issues/2266) info!("Leaving {}", ecx.frame().instance()); diff --git a/src/tools/miri/src/shims/aarch64.rs b/src/tools/miri/src/shims/aarch64.rs index 595a6595b531d..d06b02a41334f 100644 --- a/src/tools/miri/src/shims/aarch64.rs +++ b/src/tools/miri/src/shims/aarch64.rs @@ -58,7 +58,33 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { this.write_immediate(*res_lane, &dest)?; } } + // Vector table lookup: each index selects a byte from the 16-byte table, out-of-range -> 0. + // Used to implement vtbl1_u8 function. + // LLVM does not have a portable shuffle that takes non-const indices + // so we need to implement this ourselves. + // https://developer.arm.com/architectures/instruction-sets/intrinsics/vtbl1_u8 + "neon.tbl1.v16i8" => { + let [table, indices] = + this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; + + let (table, table_len) = this.project_to_simd(table)?; + let (indices, idx_len) = this.project_to_simd(indices)?; + let (dest, dest_len) = this.project_to_simd(dest)?; + assert_eq!(table_len, 16); + assert_eq!(idx_len, dest_len); + for i in 0..dest_len { + let idx = this.read_immediate(&this.project_index(&indices, i)?)?; + let idx_u = idx.to_scalar().to_u8()?; + let val = if u64::from(idx_u) < table_len { + let t = this.read_immediate(&this.project_index(&table, idx_u.into())?)?; + t.to_scalar() + } else { + Scalar::from_u8(0) + }; + this.write_scalar(val, &this.project_index(&dest, i)?)?; + } + } _ => return interp_ok(EmulateItemResult::NotSupported), } interp_ok(EmulateItemResult::NeedsReturn) diff --git a/src/tools/miri/src/shims/native_lib/trace/child.rs b/src/tools/miri/src/shims/native_lib/trace/child.rs index 021ec2e9aeb3b..d7d9a591911a0 100644 --- a/src/tools/miri/src/shims/native_lib/trace/child.rs +++ b/src/tools/miri/src/shims/native_lib/trace/child.rs @@ -31,7 +31,7 @@ pub struct Supervisor { /// Used for synchronisation, allowing us to receive confirmation that the /// parent process has handled the request from `message_tx`. confirm_rx: ipc::IpcReceiver, - /// Receiver for memory acceses that ocurred during the FFI call. + /// Receiver for memory accesses that occurred during the FFI call. event_rx: ipc::IpcReceiver, } diff --git a/src/tools/miri/src/shims/native_lib/trace/parent.rs b/src/tools/miri/src/shims/native_lib/trace/parent.rs index 5476cccc02e3b..f73b1359cef74 100644 --- a/src/tools/miri/src/shims/native_lib/trace/parent.rs +++ b/src/tools/miri/src/shims/native_lib/trace/parent.rs @@ -395,8 +395,6 @@ fn capstone_find_events( _ => (), } } - // FIXME: arm64 - _ => unimplemented!(), } false diff --git a/src/tools/miri/src/shims/os_str.rs b/src/tools/miri/src/shims/os_str.rs index 28b03ffb88c61..db9cb3a7a32bd 100644 --- a/src/tools/miri/src/shims/os_str.rs +++ b/src/tools/miri/src/shims/os_str.rs @@ -316,7 +316,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // The new path is still absolute on Windows. path.remove(0); } - // If this starts withs a `\` but not a `\\`, then this was absolute on Unix but is + // If this starts with a `\` but not a `\\`, then this was absolute on Unix but is // relative on Windows (relative to "the root of the current directory", e.g. the // drive letter). 
else if path.first() == Some(&sep) && path.get(1) != Some(&sep) { diff --git a/src/tools/miri/src/shims/time.rs b/src/tools/miri/src/shims/time.rs index 865a80251e314..2c2b029a1232f 100644 --- a/src/tools/miri/src/shims/time.rs +++ b/src/tools/miri/src/shims/time.rs @@ -401,11 +401,11 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { }; let timeout_anchor = if flags == 0 { - // No flags set, the timespec should be interperted as a duration + // No flags set, the timespec should be interpreted as a duration // to sleep for TimeoutAnchor::Relative } else if flags == this.eval_libc_i32("TIMER_ABSTIME") { - // Only flag TIMER_ABSTIME set, the timespec should be interperted as + // Only flag TIMER_ABSTIME set, the timespec should be interpreted as // an absolute time. TimeoutAnchor::Absolute } else { diff --git a/src/tools/miri/src/shims/unix/foreign_items.rs b/src/tools/miri/src/shims/unix/foreign_items.rs index 87a307c989484..48e2ebd0f13ea 100644 --- a/src/tools/miri/src/shims/unix/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/foreign_items.rs @@ -1045,7 +1045,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { &[Os::Linux, Os::FreeBsd, Os::Illumos, Os::Solaris, Os::Android, Os::MacOs], link_name, )?; - // This function looks and behaves excatly like miri_start_unwind. + // This function looks and behaves exactly like miri_start_unwind. let [payload] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; this.handle_miri_start_unwind(payload)?; return interp_ok(EmulateItemResult::NeedsUnwind); diff --git a/src/tools/miri/src/shims/unix/freebsd/sync.rs b/src/tools/miri/src/shims/unix/freebsd/sync.rs index ae8a167080b90..8cf4464389631 100644 --- a/src/tools/miri/src/shims/unix/freebsd/sync.rs +++ b/src/tools/miri/src/shims/unix/freebsd/sync.rs @@ -169,7 +169,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let Some(futex_ref) = this.get_sync_or_init(obj, |_| FreeBsdFutex { futex: Default::default() }) else { - // From Linux implemenation: + // From Linux implementation: // No AllocId, or no live allocation at that AllocId. // Return an error code. (That seems nicer than silently doing something non-intuitive.) // This means that if an address gets reused by a new allocation, diff --git a/src/tools/miri/src/shims/unix/linux/foreign_items.rs b/src/tools/miri/src/shims/unix/linux/foreign_items.rs index 12cee0d162a0c..b92732de73ca0 100644 --- a/src/tools/miri/src/shims/unix/linux/foreign_items.rs +++ b/src/tools/miri/src/shims/unix/linux/foreign_items.rs @@ -165,7 +165,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { )? { ThreadNameResult::Ok => Scalar::from_u32(0), ThreadNameResult::NameTooLong => this.eval_libc("ERANGE"), - // Act like we faild to open `/proc/self/task/$tid/comm`. + // Act like we failed to open `/proc/self/task/$tid/comm`. ThreadNameResult::ThreadNotFound => this.eval_libc("ENOENT"), }; this.write_scalar(res, dest)?; @@ -186,7 +186,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { )? { ThreadNameResult::Ok => Scalar::from_u32(0), ThreadNameResult::NameTooLong => unreachable!(), - // Act like we faild to open `/proc/self/task/$tid/comm`. + // Act like we failed to open `/proc/self/task/$tid/comm`. 
ThreadNameResult::ThreadNotFound => this.eval_libc("ENOENT"), } } else { diff --git a/src/tools/miri/src/shims/unix/linux_like/epoll.rs b/src/tools/miri/src/shims/unix/linux_like/epoll.rs index ff5367ea87ab9..7480db00d6ed3 100644 --- a/src/tools/miri/src/shims/unix/linux_like/epoll.rs +++ b/src/tools/miri/src/shims/unix/linux_like/epoll.rs @@ -176,7 +176,7 @@ impl EpollInterestTable { if let Some(epolls) = self.0.remove(&id) { for epoll in epolls.iter().filter_map(|(_id, epoll)| epoll.upgrade()) { // This is a still-live epoll with interest in this FD. Remove all - // relevent interests (including from the ready set). + // relevant interests (including from the ready set). epoll .interest_list .borrow_mut() diff --git a/src/tools/miri/src/shims/unix/macos/sync.rs b/src/tools/miri/src/shims/unix/macos/sync.rs index be32ca9abd597..6ee9ffaf37762 100644 --- a/src/tools/miri/src/shims/unix/macos/sync.rs +++ b/src/tools/miri/src/shims/unix/macos/sync.rs @@ -169,7 +169,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let is_shared = flags == shared; let timeout = clock_timeout.map(|(_, anchor, timeout)| { - // The only clock that is currenlty supported is the monotonic clock. + // The only clock that is currently supported is the monotonic clock. // While the deadline argument of `os_sync_wait_on_address_with_deadline` // is actually not in nanoseconds but in the units of `mach_current_time`, // the two are equivalent in miri. diff --git a/src/tools/miri/src/shims/windows/foreign_items.rs b/src/tools/miri/src/shims/windows/foreign_items.rs index 0bdf6bb785056..2d1a153d9262e 100644 --- a/src/tools/miri/src/shims/windows/foreign_items.rs +++ b/src/tools/miri/src/shims/windows/foreign_items.rs @@ -1199,7 +1199,7 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { "`_Unwind_RaiseException` is not supported on non-MinGW Windows", ); } - // This function looks and behaves excatly like miri_start_unwind. + // This function looks and behaves exactly like miri_start_unwind. let [payload] = this.check_shim_sig( shim_sig!(extern "C" fn(*mut _) -> unwind::libunwind::_Unwind_Reason_Code), link_name, diff --git a/src/tools/miri/src/shims/windows/sync.rs b/src/tools/miri/src/shims/windows/sync.rs index db1860bdfd309..14562450e6e07 100644 --- a/src/tools/miri/src/shims/windows/sync.rs +++ b/src/tools/miri/src/shims/windows/sync.rs @@ -67,7 +67,7 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> { ) } - /// Returns `true` if we were succssful, `false` if we would block. + /// Returns `true` if we were successful, `false` if we would block. fn init_once_try_begin( &mut self, init_once_ref: &InitOnceRef, diff --git a/src/tools/miri/src/shims/x86/avx2.rs b/src/tools/miri/src/shims/x86/avx2.rs index 7d8e52db73d64..bddb9d47457ca 100644 --- a/src/tools/miri/src/shims/x86/avx2.rs +++ b/src/tools/miri/src/shims/x86/avx2.rs @@ -194,7 +194,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm256_sign_epi{8,16,32} functions. // Negates elements from `left` when the corresponding element in // `right` is negative. If an element from `right` is zero, zero - // is writen to the corresponding output element. + // is written to the corresponding output element. // Basically, we multiply `left` with `right.signum()`. 
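            // For example, with illustrative lane values:
            //     left  = [ 3, -5,  7,  9]
            //     right = [-1,  0,  2, -4]
            //     out   = [-3,  0,  7, -9]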
"psign.b" | "psign.w" | "psign.d" => { let [left, right] = diff --git a/src/tools/miri/src/shims/x86/bmi.rs b/src/tools/miri/src/shims/x86/bmi.rs index 814823d2acb17..877ecf319ca40 100644 --- a/src/tools/miri/src/shims/x86/bmi.rs +++ b/src/tools/miri/src/shims/x86/bmi.rs @@ -44,7 +44,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let right = if is_64_bit { right.to_u64()? } else { u64::from(right.to_u32()?) }; let result = match unprefixed_name { - // Extract a contigous range of bits from an unsigned integer. + // Extract a contiguous range of bits from an unsigned integer. // https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_bextr_u32 "bextr" => { let start = u32::try_from(right & 0xff).unwrap(); diff --git a/src/tools/miri/src/shims/x86/sse.rs b/src/tools/miri/src/shims/x86/sse.rs index 309fbb61de5a7..7da7a0b57c907 100644 --- a/src/tools/miri/src/shims/x86/sse.rs +++ b/src/tools/miri/src/shims/x86/sse.rs @@ -24,8 +24,8 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Prefix should have already been checked. let unprefixed_name = link_name.as_str().strip_prefix("llvm.x86.sse.").unwrap(); // All these intrinsics operate on 128-bit (f32x4) SIMD vectors unless stated otherwise. - // Many intrinsic names are sufixed with "ps" (packed single) or "ss" (scalar single), - // where single means single precision floating point (f32). "ps" means thet the operation + // Many intrinsic names are suffixed with "ps" (packed single) or "ss" (scalar single), + // where single means single precision floating point (f32). "ps" means that the operation // is performed on each element of the vector, while "ss" means that the operation is // performed only on the first element, copying the remaining elements from the input // vector (for binary operations, from the left-hand side). diff --git a/src/tools/miri/src/shims/x86/sse2.rs b/src/tools/miri/src/shims/x86/sse2.rs index f712814a5eda8..1a33f4c70fd28 100644 --- a/src/tools/miri/src/shims/x86/sse2.rs +++ b/src/tools/miri/src/shims/x86/sse2.rs @@ -26,14 +26,14 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // These intrinsics operate on 128-bit (f32x4, f64x2, i8x16, i16x8, i32x4, i64x2) SIMD // vectors unless stated otherwise. - // Many intrinsic names are sufixed with "ps" (packed single), "ss" (scalar signle), + // Many intrinsic names are suffixed with "ps" (packed single), "ss" (scalar single), // "pd" (packed double) or "sd" (scalar double), where single means single precision // floating point (f32) and double means double precision floating point (f64). "ps" - // and "pd" means thet the operation is performed on each element of the vector, while + // and "pd" means that the operation is performed on each element of the vector, while // "ss" and "sd" means that the operation is performed only on the first element, copying // the remaining elements from the input vector (for binary operations, from the left-hand // side). - // Intrinsincs sufixed with "epiX" or "epuX" operate with X-bit signed or unsigned + // Intrinsics suffixed with "epiX" or "epuX" operate with X-bit signed or unsigned // vectors. match unprefixed_name { // Used to implement the _mm_sad_epu8 function. 
diff --git a/src/tools/miri/src/shims/x86/sse41.rs b/src/tools/miri/src/shims/x86/sse41.rs index 1e8b0f34428d1..c5a4a98ba881b 100644 --- a/src/tools/miri/src/shims/x86/sse41.rs +++ b/src/tools/miri/src/shims/x86/sse41.rs @@ -115,7 +115,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { round_all::(this, op, rounding, dest)?; } // Used to implement the _mm_minpos_epu16 function. - // Find the minimum unsinged 16-bit integer in `op` and + // Find the minimum unsigned 16-bit integer in `op` and // returns its value and position. "phminposuw" => { let [op] = this.check_shim_sig_lenient(abi, CanonAbi::C, link_name, args)?; diff --git a/src/tools/miri/src/shims/x86/sse42.rs b/src/tools/miri/src/shims/x86/sse42.rs index aa8aea3558832..7c0f9c570e2ef 100644 --- a/src/tools/miri/src/shims/x86/sse42.rs +++ b/src/tools/miri/src/shims/x86/sse42.rs @@ -213,7 +213,7 @@ fn deconstruct_args<'tcx>( }; // The fourth letter of each string comparison intrinsic is either 'e' for "explicit" or 'i' for "implicit". - // The distinction will correspond to the intrinsics type signature. In this constext, "explicit" and "implicit" + // The distinction will correspond to the intrinsics type signature. In this context, "explicit" and "implicit" // refer to the way the string length is determined. The length is either passed explicitly in the "explicit" // case or determined by a null terminator in the "implicit" case. let is_explicit = match unprefixed_name.as_bytes().get(4) { @@ -297,7 +297,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { deconstruct_args(unprefixed_name, this, link_name, abi, args)?; let mask = compare_strings(this, &str1, &str2, len, imm)?; - // The sixth bit inside the immediate byte distiguishes + // The sixth bit inside the immediate byte distinguishes // between a bit mask or a byte mask when generating a mask. if imm & 0b100_0000 != 0 { let (array_layout, size) = if imm & USE_WORDS != 0 { @@ -347,7 +347,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { let mask = compare_strings(this, &str1, &str2, len, imm)?; let len = default_len::(imm); - // The sixth bit inside the immediate byte distiguishes between the least + // The sixth bit inside the immediate byte distinguishes between the least // significant bit and the most significant bit when generating an index. let result = if imm & 0b100_0000 != 0 { // most significant bit diff --git a/src/tools/miri/src/shims/x86/ssse3.rs b/src/tools/miri/src/shims/x86/ssse3.rs index b01a8795b4d13..7c1d15ff1265d 100644 --- a/src/tools/miri/src/shims/x86/ssse3.rs +++ b/src/tools/miri/src/shims/x86/ssse3.rs @@ -68,7 +68,7 @@ pub(super) trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> { // Used to implement the _mm_sign_epi{8,16,32} functions. // Negates elements from `left` when the corresponding element in // `right` is negative. If an element from `right` is zero, zero - // is writen to the corresponding output element. + // is written to the corresponding output element. // Basically, we multiply `left` with `right.signum()`. 
"psign.b.128" | "psign.w.128" | "psign.d.128" => { let [left, right] = diff --git a/src/tools/miri/tests/deps/Cargo.lock b/src/tools/miri/tests/deps/Cargo.lock index 2549396251672..f54628e810425 100644 --- a/src/tools/miri/tests/deps/Cargo.lock +++ b/src/tools/miri/tests/deps/Cargo.lock @@ -46,9 +46,9 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "bytes" -version = "1.10.1" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" [[package]] name = "cfg-if" diff --git a/src/tools/miri/tests/deps/src/main.rs b/src/tools/miri/tests/deps/src/main.rs index f328e4d9d04c3..363c9744fa486 100644 --- a/src/tools/miri/tests/deps/src/main.rs +++ b/src/tools/miri/tests/deps/src/main.rs @@ -1 +1,3 @@ -fn main() {} +fn main() { + unreachable!() +} diff --git a/src/tools/miri/tests/fail-dep/libc/prctl-get-name-buffer-too-small.stderr b/src/tools/miri/tests/fail-dep/libc/prctl-get-name-buffer-too-small.stderr index cc50564a43f5a..09f1a3682da5d 100644 --- a/src/tools/miri/tests/fail-dep/libc/prctl-get-name-buffer-too-small.stderr +++ b/src/tools/miri/tests/fail-dep/libc/prctl-get-name-buffer-too-small.stderr @@ -11,7 +11,6 @@ help: ALLOC was allocated here: | LL | let mut buf = vec![0u8; 15]; | ^^^^^^^^^^^^^ - = note: this error originates in the macro `vec` (in Nightly builds, run with -Z macro-backtrace for more info) note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/match/all_variants_uninhabited.rs b/src/tools/miri/tests/fail/match/all_variants_uninhabited.rs index e7ca45579d84d..b073d339f10cd 100644 --- a/src/tools/miri/tests/fail/match/all_variants_uninhabited.rs +++ b/src/tools/miri/tests/fail/match/all_variants_uninhabited.rs @@ -5,7 +5,7 @@ enum Never {} fn main() { unsafe { match *std::ptr::null::>() { - //~^ ERROR: read discriminant of an uninhabited enum variant + //~^ ERROR: read discriminant of an uninhabited enum variant Ok(_) => { lol(); } diff --git a/src/tools/miri/tests/fail/match/closures/uninhabited-variant2.rs b/src/tools/miri/tests/fail/match/closures/uninhabited-variant2.rs index ed68e357fbd59..050e83884d1e2 100644 --- a/src/tools/miri/tests/fail/match/closures/uninhabited-variant2.rs +++ b/src/tools/miri/tests/fail/match/closures/uninhabited-variant2.rs @@ -22,7 +22,8 @@ fn main() { // After rust-lang/rust#138961, constructing the closure performs a reborrow of r. // Nevertheless, the discriminant is only actually inspected when the closure // is called. 
- match r { //~ ERROR: read discriminant of an uninhabited enum variant + match r { + //~^ ERROR: read discriminant of an uninhabited enum variant E::V0 => {} E::V1(_) => {} } diff --git a/src/tools/miri/tests/fail/match/only_inhabited_variant.rs b/src/tools/miri/tests/fail/match/only_inhabited_variant.rs index 30a7350d2b667..2be5e8083aa7f 100644 --- a/src/tools/miri/tests/fail/match/only_inhabited_variant.rs +++ b/src/tools/miri/tests/fail/match/only_inhabited_variant.rs @@ -4,8 +4,8 @@ #[repr(C)] #[allow(dead_code)] enum E { - V0, // discriminant: 0 - V1(!), // 1 + V0, // discriminant: 0 + V1(!), // 1 } fn main() { @@ -14,7 +14,8 @@ fn main() { let val = 1u32; let ptr = (&raw const val).cast::(); let r = unsafe { &*ptr }; - match r { //~ ERROR: read discriminant of an uninhabited enum variant + match r { + //~^ ERROR: read discriminant of an uninhabited enum variant E::V0 => {} E::V1(_) => {} } diff --git a/src/tools/miri/tests/fail/match/single_variant.rs b/src/tools/miri/tests/fail/match/single_variant.rs index dcef6d461a2c3..35bb63620ea23 100644 --- a/src/tools/miri/tests/fail/match/single_variant.rs +++ b/src/tools/miri/tests/fail/match/single_variant.rs @@ -20,12 +20,13 @@ fn main() { let x: &[u8; 2] = &[21, 37]; let y: &Exhaustive = std::mem::transmute(x); match y { - Exhaustive::A(_) => {}, + Exhaustive::A(_) => {} } let y: &NonExhaustive = std::mem::transmute(x); - match y { //~ ERROR: enum value has invalid tag - NonExhaustive::A(_) => {}, + match y { + //~^ ERROR: enum value has invalid tag + NonExhaustive::A(_) => {} } } } diff --git a/src/tools/miri/tests/fail/match/single_variant_uninit.rs b/src/tools/miri/tests/fail/match/single_variant_uninit.rs index 51e8bc57c837a..e04947f996288 100644 --- a/src/tools/miri/tests/fail/match/single_variant_uninit.rs +++ b/src/tools/miri/tests/fail/match/single_variant_uninit.rs @@ -28,7 +28,8 @@ fn main() { _ => {} } - match *nexh { //~ ERROR: memory is uninitialized + match *nexh { + //~^ ERROR: memory is uninitialized NonExhaustive::A(ref _val) => {} _ => {} } diff --git a/src/tools/miri/tests/fail/validity/uninhabited_variant.rs b/src/tools/miri/tests/fail/validity/uninhabited_variant.rs new file mode 100644 index 0000000000000..303584423fdcf --- /dev/null +++ b/src/tools/miri/tests/fail/validity/uninhabited_variant.rs @@ -0,0 +1,17 @@ +// NOTE: this is essentially a smoke-test, with more comprehensive tests living in the rustc +// repository at tests/ui/consts/const-eval/ub-enum.rs +#![feature(never_type)] + +#[repr(C)] +#[allow(dead_code)] +enum E { + V1, // discriminant: 0 + V2(!), // 1 +} + +fn main() { + unsafe { + std::mem::transmute::(1); + //~^ ERROR: encountered an uninhabited enum variant + } +} diff --git a/src/tools/miri/tests/fail/validity/uninhabited_variant.stderr b/src/tools/miri/tests/fail/validity/uninhabited_variant.stderr new file mode 100644 index 0000000000000..76ee25009b6ec --- /dev/null +++ b/src/tools/miri/tests/fail/validity/uninhabited_variant.stderr @@ -0,0 +1,13 @@ +error: Undefined Behavior: constructing invalid value at .: encountered an uninhabited enum variant + --> tests/fail/validity/uninhabited_variant.rs:LL:CC + | +LL | std::mem::transmute::(1); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Undefined Behavior occurred here + | + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information + +note: some details are omitted, run with 
`MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error + diff --git a/src/tools/miri/tests/pass/align_strange_enum_discriminant_offset.rs b/src/tools/miri/tests/pass/align_strange_enum_discriminant_offset.rs index e0d05e0b65abf..d8d28bc9a768d 100644 --- a/src/tools/miri/tests/pass/align_strange_enum_discriminant_offset.rs +++ b/src/tools/miri/tests/pass/align_strange_enum_discriminant_offset.rs @@ -1,5 +1,4 @@ -#![allow(unused)] - +#[allow(unused)] #[repr(u16)] enum DeviceKind { Nil = 0, @@ -19,5 +18,5 @@ fn main() { let x = None::<(DeviceInfo, u8)>; let y = None::<(DeviceInfo, u16)>; let z = None::<(DeviceInfo, u64)>; - format!("{} {} {}", x.is_some(), y.is_some(), y.is_some()); + let _out = format!("{} {} {}", x.is_some(), y.is_some(), z.is_some()); } diff --git a/src/tools/miri/tests/pass/async-closure.rs b/src/tools/miri/tests/pass/async-closure.rs index 1b38f06eb7cd9..5067f1d2d8e78 100644 --- a/src/tools/miri/tests/pass/async-closure.rs +++ b/src/tools/miri/tests/pass/async-closure.rs @@ -1,5 +1,4 @@ #![feature(async_fn_traits)] -#![allow(unused)] use std::future::Future; use std::ops::{AsyncFn, AsyncFnMut, AsyncFnOnce}; diff --git a/src/tools/miri/tests/pass/shims/aarch64/intrinsics-aarch64-neon.rs b/src/tools/miri/tests/pass/shims/aarch64/intrinsics-aarch64-neon.rs index 84485dbad8c9e..6d3f153e194f3 100644 --- a/src/tools/miri/tests/pass/shims/aarch64/intrinsics-aarch64-neon.rs +++ b/src/tools/miri/tests/pass/shims/aarch64/intrinsics-aarch64-neon.rs @@ -4,17 +4,19 @@ use std::arch::aarch64::*; use std::arch::is_aarch64_feature_detected; +use std::mem::transmute; fn main() { assert!(is_aarch64_feature_detected!("neon")); unsafe { - test_neon(); + test_vpmaxq_u8(); + test_tbl1_v16i8_basic(); } } #[target_feature(enable = "neon")] -unsafe fn test_neon() { +unsafe fn test_vpmaxq_u8() { // Adapted from library/stdarch/crates/core_arch/src/aarch64/neon/mod.rs unsafe fn test_vpmaxq_u8() { let a = vld1q_u8([1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8].as_ptr()); @@ -38,3 +40,28 @@ unsafe fn test_neon() { } test_vpmaxq_u8_is_unsigned(); } + +#[target_feature(enable = "neon")] +fn test_tbl1_v16i8_basic() { + unsafe { + // table = 0..15 + let table: uint8x16_t = + transmute::<[u8; 16], _>([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); + // indices + let idx: uint8x16_t = + transmute::<[u8; 16], _>([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); + let got = vqtbl1q_u8(table, idx); + let got_arr: [u8; 16] = transmute(got); + assert_eq!(got_arr, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); + + // Also try different order and out-of-range indices (16, 255). + let idx2: uint8x16_t = + transmute::<[u8; 16], _>([15, 16, 255, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]); + let got2 = vqtbl1q_u8(table, idx2); + let got2_arr: [u8; 16] = transmute(got2); + assert_eq!(got2_arr[0], 15); + assert_eq!(got2_arr[1], 0); // out-of-range + assert_eq!(got2_arr[2], 0); // out-of-range + assert_eq!(&got2_arr[3..16], &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12][..]); + } +} diff --git a/src/tools/miri/tests/ui.rs b/src/tools/miri/tests/ui.rs index 70739eef28838..047cdeb357c20 100644 --- a/src/tools/miri/tests/ui.rs +++ b/src/tools/miri/tests/ui.rs @@ -132,14 +132,19 @@ fn miri_config( // (It's a separate crate, so we don't get an env var from cargo.) program: miri_path() .with_file_name(format!("cargo-miri{}", env::consts::EXE_SUFFIX)), - // There is no `cargo miri build` so we just use `cargo miri run`. 
// Add `-Zbinary-dep-depinfo` since it is needed for bootstrap builds (and doesn't harm otherwise). - args: ["miri", "run", "--quiet", "-Zbinary-dep-depinfo"] + args: ["miri", "build", "-Zbinary-dep-depinfo"] .into_iter() .map(Into::into) .collect(), - // Reset `RUSTFLAGS` to work around . - envs: vec![("RUSTFLAGS".into(), None)], + envs: vec![ + // Reset `RUSTFLAGS` to work around . + ("RUSTFLAGS".into(), None), + // Reset `MIRIFLAGS` because it caused trouble in the past and should not be needed. + ("MIRIFLAGS".into(), None), + // Allow `cargo miri build`. + ("MIRI_BUILD_TEST_DEPS".into(), Some("1".into())), + ], ..CommandBuilder::cargo() }, crate_manifest_path: Path::new("tests/deps").join("Cargo.toml"), @@ -361,15 +366,16 @@ fn run_dep_mode(target: String, args: impl Iterator) -> Result< miri_config(&target, "", Mode::RunDep, Some(WithDependencies { bless: false })); config.comment_defaults.base().custom.remove("edition"); // `./miri` adds an `--edition` in `args`, so don't set it twice config.fill_host_and_target()?; + let dep_builder = BuildManager::one_off(config.clone()); + // Only set these for the actual run, not the dep builder, so invalid flags do not fail + // the dependency build. config.program.args = args.collect(); + let test_config = TestConfig::one_off_runner(config, PathBuf::new()); - let test_config = TestConfig::one_off_runner(config.clone(), PathBuf::new()); - - let build_manager = BuildManager::one_off(config); let mut cmd = test_config.config.program.build(&test_config.config.out_dir); cmd.arg("--target").arg(test_config.config.target.as_ref().unwrap()); // Build dependencies - test_config.apply_custom(&mut cmd, &build_manager).unwrap(); + test_config.apply_custom(&mut cmd, &dep_builder).expect("failed to build dependencies"); if cmd.spawn()?.wait()?.success() { Ok(()) } else { std::process::exit(1) } } diff --git a/src/tools/rust-analyzer/.github/workflows/ci.yaml b/src/tools/rust-analyzer/.github/workflows/ci.yaml index 1a0deee564aeb..ca7d3058d8f0b 100644 --- a/src/tools/rust-analyzer/.github/workflows/ci.yaml +++ b/src/tools/rust-analyzer/.github/workflows/ci.yaml @@ -96,7 +96,7 @@ jobs: run: | rustup update --no-self-update stable rustup default stable - rustup component add --toolchain stable rust-src clippy rustfmt + rustup component add --toolchain stable rust-src rustfmt # We also install a nightly rustfmt, because we use `--file-lines` in # a test. 
rustup toolchain install nightly --profile minimal --component rustfmt @@ -128,10 +128,6 @@ jobs: - name: Run cargo-machete run: cargo machete - - name: Run Clippy - if: matrix.os == 'macos-latest' - run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr - analysis-stats: if: github.repository == 'rust-lang/rust-analyzer' runs-on: ubuntu-latest @@ -178,6 +174,28 @@ jobs: - run: cargo fmt -- --check + clippy: + if: github.repository == 'rust-lang/rust-analyzer' + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Note that clippy output is currently dependent on whether rust-src is installed, + # https://github.com/rust-lang/rust-clippy/issues/14625 + - name: Install Rust toolchain + run: | + rustup update --no-self-update stable + rustup default stable + rustup component add --toolchain stable rust-src clippy + + # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json + - name: Install Rust Problem Matcher + run: echo "::add-matcher::.github/rust.json" + + - run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr + miri: if: github.repository == 'rust-lang/rust-analyzer' runs-on: ubuntu-latest @@ -188,8 +206,11 @@ jobs: - name: Install Rust toolchain run: | - rustup update --no-self-update nightly - rustup default nightly + # FIXME: Pin nightly due to a regression in miri on nightly-2026-02-12. + # See https://github.com/rust-lang/miri/issues/4855. + # Revert to plain `nightly` once this is fixed upstream. + rustup toolchain install nightly-2026-02-10 + rustup default nightly-2026-02-10 rustup component add miri # - name: Cache Dependencies @@ -309,7 +330,7 @@ jobs: run: typos conclusion: - needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv, miri, rustfmt, analysis-stats] + needs: [rust, rust-cross, typescript, typo-check, proc-macro-srv, miri, rustfmt, clippy, analysis-stats] # We need to ensure this job does *not* get skipped if its dependencies fail, # because a skipped job is considered a success by GitHub. So we have to # overwrite `if:`. 
We use `!cancelled()` to ensure the job does still not get run diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 755ae55eea462..9db4dd7cb1a3a 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -1845,6 +1845,7 @@ dependencies = [ "paths", "postcard", "proc-macro-srv", + "rayon", "rustc-hash 2.1.1", "semver", "serde", @@ -2453,9 +2454,9 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "salsa" -version = "0.26.0" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f77debccd43ba198e9cee23efd7f10330ff445e46a98a2b107fed9094a1ee676" +checksum = "e2e2aa2fca57727371eeafc975acc8e6f4c52f8166a78035543f6ee1c74c2dcc" dependencies = [ "boxcar", "crossbeam-queue", @@ -2478,15 +2479,15 @@ dependencies = [ [[package]] name = "salsa-macro-rules" -version = "0.26.0" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea07adbf42d91cc076b7daf3b38bc8168c19eb362c665964118a89bc55ef19a5" +checksum = "1bfc2a1e7bf06964105515451d728f2422dedc3a112383324a00b191a5c397a3" [[package]] name = "salsa-macros" -version = "0.26.0" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d16d4d8b66451b9c75ddf740b7fc8399bc7b8ba33e854a5d7526d18708f67b05" +checksum = "3d844c1aa34946da46af683b5c27ec1088a3d9d84a2b837a108223fd830220e1" dependencies = [ "proc-macro2", "quote", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 04559f15eda41..9f31e1903af59 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -135,13 +135,13 @@ rayon = "1.10.0" rowan = "=0.15.17" # Ideally we'd not enable the macros feature but unfortunately the `tracked` attribute does not work # on impls without it -salsa = { version = "0.26", default-features = false, features = [ +salsa = { version = "0.25.2", default-features = false, features = [ "rayon", "salsa_unstable", "macros", "inventory", ] } -salsa-macros = "0.26" +salsa-macros = "0.25.2" semver = "1.0.26" serde = { version = "1.0.219" } serde_derive = { version = "1.0.219" } diff --git a/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs b/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs index dd419f48fc7ec..13fb05d565479 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/editioned_file_id.rs @@ -60,7 +60,7 @@ const _: () = { } } - impl zalsa_::HashEqLike for EditionedFileIdData { + impl zalsa_struct_::HashEqLike for EditionedFileIdData { #[inline] fn hash(&self, state: &mut H) { Hash::hash(self, state); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs index 701586c258386..1cecd1976b409 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expr_store/lower.rs @@ -32,8 +32,8 @@ use triomphe::Arc; use tt::TextRange; use crate::{ - AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, MacroId, - ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro, + AdtId, BlockId, BlockLoc, DefWithBodyId, FunctionId, GenericDefId, ImplId, ItemContainerId, + MacroId, ModuleDefId, ModuleId, TraitId, TypeAliasId, UnresolvedMacro, attrs::AttrFlags, db::DefDatabase, expr_store::{ @@ -141,9 +141,19 @@ 
pub(super) fn lower_body( source_map_self_param = Some(collector.expander.in_file(AstPtr::new(&self_param_syn))); } + let is_extern = matches!( + owner, + DefWithBodyId::FunctionId(id) + if matches!(id.loc(db).container, ItemContainerId::ExternBlockId(_)), + ); + for param in param_list.params() { if collector.check_cfg(¶m) { - let param_pat = collector.collect_pat_top(param.pat()); + let param_pat = if is_extern { + collector.collect_extern_fn_param(param.pat()) + } else { + collector.collect_pat_top(param.pat()) + }; params.push(param_pat); } } @@ -2248,6 +2258,32 @@ impl<'db> ExprCollector<'db> { } } + fn collect_extern_fn_param(&mut self, pat: Option) -> PatId { + // `extern` functions cannot have pattern-matched parameters, and furthermore, the identifiers + // in their parameters are always interpreted as bindings, even if in a normal function they + // won't be, because they would refer to a path pattern. + let Some(pat) = pat else { return self.missing_pat() }; + + match &pat { + ast::Pat::IdentPat(bp) => { + // FIXME: Emit an error if `!bp.is_simple_ident()`. + + let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); + let hygiene = bp + .name() + .map(|name| self.hygiene_id_for(name.syntax().text_range())) + .unwrap_or(HygieneId::ROOT); + let binding = self.alloc_binding(name, BindingAnnotation::Unannotated, hygiene); + let pat = + self.alloc_pat(Pat::Bind { id: binding, subpat: None }, AstPtr::new(&pat)); + self.add_definition_to_binding(binding, pat); + pat + } + // FIXME: Emit an error. + _ => self.missing_pat(), + } + } + // region: patterns fn collect_pat_top(&mut self, pat: Option) -> PatId { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index 1303773b59d43..b11a8bcd9097d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -483,6 +483,11 @@ impl ItemScope { self.declarations.push(def) } + pub(crate) fn remove_from_value_ns(&mut self, name: &Name, def: ModuleDefId) { + let entry = self.values.shift_remove(name); + assert!(entry.is_some_and(|entry| entry.def == def)) + } + pub(crate) fn get_legacy_macro(&self, name: &Name) -> Option<&[MacroId]> { self.legacy_macros.get(name).map(|it| &**it) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 1e3ea50c5a0f3..5fda1beab4130 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -211,6 +211,7 @@ struct DefMapCrateData { /// Side table for resolving derive helpers. exported_derives: FxHashMap>, fn_proc_macro_mapping: FxHashMap, + fn_proc_macro_mapping_back: FxHashMap, /// Custom tool modules registered with `#![register_tool]`. 
registered_tools: Vec, @@ -230,6 +231,7 @@ impl DefMapCrateData { Self { exported_derives: FxHashMap::default(), fn_proc_macro_mapping: FxHashMap::default(), + fn_proc_macro_mapping_back: FxHashMap::default(), registered_tools: PREDEFINED_TOOLS.iter().map(|it| Symbol::intern(it)).collect(), unstable_features: FxHashSet::default(), rustc_coherence_is_core: false, @@ -244,6 +246,7 @@ impl DefMapCrateData { let Self { exported_derives, fn_proc_macro_mapping, + fn_proc_macro_mapping_back, registered_tools, unstable_features, rustc_coherence_is_core: _, @@ -254,6 +257,7 @@ impl DefMapCrateData { } = self; exported_derives.shrink_to_fit(); fn_proc_macro_mapping.shrink_to_fit(); + fn_proc_macro_mapping_back.shrink_to_fit(); registered_tools.shrink_to_fit(); unstable_features.shrink_to_fit(); } @@ -570,6 +574,10 @@ impl DefMap { self.data.fn_proc_macro_mapping.get(&id).copied() } + pub fn proc_macro_as_fn(&self, id: ProcMacroId) -> Option { + self.data.fn_proc_macro_mapping_back.get(&id).copied() + } + pub fn krate(&self) -> Crate { self.krate } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index f51524c1b5511..e672e83f0194f 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -634,6 +634,7 @@ impl<'db> DefCollector<'db> { crate_data.exported_derives.insert(proc_macro_id.into(), helpers); } crate_data.fn_proc_macro_mapping.insert(fn_id, proc_macro_id); + crate_data.fn_proc_macro_mapping_back.insert(proc_macro_id, fn_id); } /// Define a macro with `macro_rules`. @@ -2095,6 +2096,8 @@ impl ModCollector<'_, '_> { let vis = resolve_vis(def_map, local_def_map, &self.item_tree[it.visibility]); + update_def(self.def_collector, fn_id.into(), &it.name, vis, false); + if self.def_collector.def_map.block.is_none() && self.def_collector.is_proc_macro && self.module_id == self.def_collector.def_map.root @@ -2105,9 +2108,14 @@ impl ModCollector<'_, '_> { InFile::new(self.file_id(), id), fn_id, ); - } - update_def(self.def_collector, fn_id.into(), &it.name, vis, false); + // A proc macro is implemented as a function, but it's treated as a macro, not a function. + // You cannot call it like a function, for example, except in its defining crate. + // So we keep the function definition, but remove it from the scope, leaving only the macro. + self.def_collector.def_map[module_id] + .scope + .remove_from_value_ns(&it.name, fn_id.into()); + } } ModItemId::Struct(id) => { let it = &self.item_tree[id]; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs index a943f6f0ac01e..a013f8b2bc1a4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/tests/macros.rs @@ -1068,10 +1068,8 @@ pub fn derive_macro_2(_item: TokenStream) -> TokenStream { - AnotherTrait : macro# - DummyTrait : macro# - TokenStream : type value - - attribute_macro : value macro# - - derive_macro : value - - derive_macro_2 : value - - function_like_macro : value macro! + - attribute_macro : macro# + - function_like_macro : macro! 
"#]], ); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 2ac0f90fb2090..d32e53fc6bee2 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -32,7 +32,7 @@ use crate::{ BindingId, ExprId, LabelId, generics::{GenericParams, TypeOrConstParamData}, }, - item_scope::{BUILTIN_SCOPE, BuiltinShadowMode, ImportOrExternCrate, ImportOrGlob, ItemScope}, + item_scope::{BUILTIN_SCOPE, BuiltinShadowMode, ImportOrExternCrate, ItemScope}, lang_item::LangItemTarget, nameres::{DefMap, LocalDefMap, MacroSubNs, ResolvePathResultPrefixInfo, block_def_map}, per_ns::PerNs, @@ -111,8 +111,8 @@ pub enum TypeNs { #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum ResolveValueResult { - ValueNs(ValueNs, Option), - Partial(TypeNs, usize, Option), + ValueNs(ValueNs), + Partial(TypeNs, usize), } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] @@ -332,20 +332,17 @@ impl<'db> Resolver<'db> { Path::Normal(it) => &it.mod_path, Path::LangItem(l, None) => { return Some(( - ResolveValueResult::ValueNs( - match *l { - LangItemTarget::FunctionId(it) => ValueNs::FunctionId(it), - LangItemTarget::StaticId(it) => ValueNs::StaticId(it), - LangItemTarget::StructId(it) => ValueNs::StructId(it), - LangItemTarget::EnumVariantId(it) => ValueNs::EnumVariantId(it), - LangItemTarget::UnionId(_) - | LangItemTarget::ImplId(_) - | LangItemTarget::TypeAliasId(_) - | LangItemTarget::TraitId(_) - | LangItemTarget::EnumId(_) => return None, - }, - None, - ), + ResolveValueResult::ValueNs(match *l { + LangItemTarget::FunctionId(it) => ValueNs::FunctionId(it), + LangItemTarget::StaticId(it) => ValueNs::StaticId(it), + LangItemTarget::StructId(it) => ValueNs::StructId(it), + LangItemTarget::EnumVariantId(it) => ValueNs::EnumVariantId(it), + LangItemTarget::UnionId(_) + | LangItemTarget::ImplId(_) + | LangItemTarget::TypeAliasId(_) + | LangItemTarget::TraitId(_) + | LangItemTarget::EnumId(_) => return None, + }), ResolvePathResultPrefixInfo::default(), )); } @@ -363,7 +360,7 @@ impl<'db> Resolver<'db> { }; // Remaining segments start from 0 because lang paths have no segments other than the remaining. 
return Some(( - ResolveValueResult::Partial(type_ns, 0, None), + ResolveValueResult::Partial(type_ns, 0), ResolvePathResultPrefixInfo::default(), )); } @@ -388,10 +385,7 @@ impl<'db> Resolver<'db> { if let Some(e) = entry { return Some(( - ResolveValueResult::ValueNs( - ValueNs::LocalBinding(e.binding()), - None, - ), + ResolveValueResult::ValueNs(ValueNs::LocalBinding(e.binding())), ResolvePathResultPrefixInfo::default(), )); } @@ -404,14 +398,14 @@ impl<'db> Resolver<'db> { && *first_name == sym::Self_ { return Some(( - ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_), None), + ResolveValueResult::ValueNs(ValueNs::ImplSelf(impl_)), ResolvePathResultPrefixInfo::default(), )); } if let Some(id) = params.find_const_by_name(first_name, *def) { let val = ValueNs::GenericParam(id); return Some(( - ResolveValueResult::ValueNs(val, None), + ResolveValueResult::ValueNs(val), ResolvePathResultPrefixInfo::default(), )); } @@ -431,7 +425,7 @@ impl<'db> Resolver<'db> { if let &GenericDefId::ImplId(impl_) = def { if *first_name == sym::Self_ { return Some(( - ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1, None), + ResolveValueResult::Partial(TypeNs::SelfType(impl_), 1), ResolvePathResultPrefixInfo::default(), )); } @@ -440,14 +434,14 @@ impl<'db> Resolver<'db> { { let ty = TypeNs::AdtSelfType(adt); return Some(( - ResolveValueResult::Partial(ty, 1, None), + ResolveValueResult::Partial(ty, 1), ResolvePathResultPrefixInfo::default(), )); } if let Some(id) = params.find_type_by_name(first_name, *def) { let ty = TypeNs::GenericParam(id); return Some(( - ResolveValueResult::Partial(ty, 1, None), + ResolveValueResult::Partial(ty, 1), ResolvePathResultPrefixInfo::default(), )); } @@ -473,7 +467,7 @@ impl<'db> Resolver<'db> { && let Some(builtin) = BuiltinType::by_name(first_name) { return Some(( - ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1, None), + ResolveValueResult::Partial(TypeNs::BuiltinType(builtin), 1), ResolvePathResultPrefixInfo::default(), )); } @@ -488,7 +482,7 @@ impl<'db> Resolver<'db> { hygiene: HygieneId, ) -> Option { match self.resolve_path_in_value_ns(db, path, hygiene)? { - ResolveValueResult::ValueNs(it, _) => Some(it), + ResolveValueResult::ValueNs(it) => Some(it), ResolveValueResult::Partial(..) 
=> None, } } @@ -1153,12 +1147,12 @@ impl<'db> ModuleItemMap<'db> { ); match unresolved_idx { None => { - let (value, import) = to_value_ns(module_def)?; - Some((ResolveValueResult::ValueNs(value, import), prefix_info)) + let value = to_value_ns(module_def, self.def_map)?; + Some((ResolveValueResult::ValueNs(value), prefix_info)) } Some(unresolved_idx) => { - let def = module_def.take_types_full()?; - let ty = match def.def { + let def = module_def.take_types()?; + let ty = match def { ModuleDefId::AdtId(it) => TypeNs::AdtId(it), ModuleDefId::TraitId(it) => TypeNs::TraitId(it), ModuleDefId::TypeAliasId(it) => TypeNs::TypeAliasId(it), @@ -1171,7 +1165,7 @@ impl<'db> ModuleItemMap<'db> { | ModuleDefId::MacroId(_) | ModuleDefId::StaticId(_) => return None, }; - Some((ResolveValueResult::Partial(ty, unresolved_idx, def.import), prefix_info)) + Some((ResolveValueResult::Partial(ty, unresolved_idx), prefix_info)) } } } @@ -1194,8 +1188,13 @@ impl<'db> ModuleItemMap<'db> { } } -fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option)> { - let (def, import) = per_ns.take_values_import()?; +fn to_value_ns(per_ns: PerNs, def_map: &DefMap) -> Option { + let def = per_ns.take_values().or_else(|| { + let Some(MacroId::ProcMacroId(proc_macro)) = per_ns.take_macros() else { return None }; + // If we cannot resolve to value ns, but we can resolve to a proc macro, and this is the crate + // defining that proc macro, then inside this crate we should treat the macro as a function. + def_map.proc_macro_as_fn(proc_macro).map(ModuleDefId::FunctionId) + })?; let res = match def { ModuleDefId::FunctionId(it) => ValueNs::FunctionId(it), ModuleDefId::AdtId(AdtId::StructId(it)) => ValueNs::StructId(it), @@ -1210,7 +1209,7 @@ fn to_value_ns(per_ns: PerNs) -> Option<(ValueNs, Option)> { | ModuleDefId::MacroId(_) | ModuleDefId::ModuleId(_) => return None, }; - Some((res, import)) + Some(res) } fn to_type_ns(per_ns: PerNs) -> Option<(TypeNs, Option)> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs index 50d4517d01254..21f263723bb17 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/unsafe_check.rs @@ -430,7 +430,7 @@ impl<'db> UnsafeVisitor<'db> { fn mark_unsafe_path(&mut self, node: ExprOrPatId, path: &Path) { let hygiene = self.body.expr_or_pat_path_hygiene(node); let value_or_partial = self.resolver.resolve_path_in_value_ns(self.db, path, hygiene); - if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial { + if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id))) = value_or_partial { let static_data = self.db.static_signature(id); if static_data.flags.contains(StaticFlags::MUTABLE) { self.on_unsafe_op(node, UnsafetyReason::MutableStatic); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 35d744e7d16b4..991acda14bc70 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -1584,7 +1584,7 @@ impl<'body, 'db> InferenceContext<'body, 'db> { return (self.err_ty(), None); }; match res { - ResolveValueResult::ValueNs(value, _) => match value { + ResolveValueResult::ValueNs(value) => match value { ValueNs::EnumVariantId(var) => { let args = path_ctx.substs_from_path(var.into(), true, false); drop(ctx); @@ -1608,7 +1608,7 @@ impl<'body, 'db>
InferenceContext<'body, 'db> { return (self.err_ty(), None); } }, - ResolveValueResult::Partial(typens, unresolved, _) => (typens, Some(unresolved)), + ResolveValueResult::Partial(typens, unresolved) => (typens, Some(unresolved)), } } else { match path_ctx.resolve_path_in_type_ns() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 9f2d9d25b9572..16e7d51e8734f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -751,7 +751,7 @@ impl<'db> InferenceContext<'_, 'db> { if let Some(lhs_ty) = lhs_ty { self.write_pat_ty(target, lhs_ty); - self.infer_expr_coerce(value, &Expectation::has_type(lhs_ty), ExprIsRead::No); + self.infer_expr_coerce(value, &Expectation::has_type(lhs_ty), ExprIsRead::Yes); } else { let rhs_ty = self.infer_expr(value, &Expectation::none(), ExprIsRead::Yes); let resolver_guard = diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 1b8ce5ceaf860..87fd0dace38f9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -673,7 +673,7 @@ impl<'db> InferenceContext<'_, 'db> { pub(super) fn contains_explicit_ref_binding(body: &Body, pat_id: PatId) -> bool { let mut res = false; body.walk_pats(pat_id, &mut |pat| { - res |= matches!(body[pat], Pat::Bind { id, .. } if body[id].mode == BindingAnnotation::Ref); + res |= matches!(body[pat], Pat::Bind { id, .. } if matches!(body[id].mode, BindingAnnotation::Ref | BindingAnnotation::RefMut)); }); res } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs index ef1a610a323d8..40c6fdf3cc880 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/path.rs @@ -164,11 +164,11 @@ impl<'db> InferenceContext<'_, 'db> { let value_or_partial = path_ctx.resolve_path_in_value_ns(hygiene)?; match value_or_partial { - ResolveValueResult::ValueNs(it, _) => { + ResolveValueResult::ValueNs(it) => { drop_ctx(ctx, no_diagnostics); (it, None) } - ResolveValueResult::Partial(def, remaining_index, _) => { + ResolveValueResult::Partial(def, remaining_index) => { // there may be more intermediate segments between the resolved one and // the end. Only the last segment needs to be resolved to a value; from // the segments before that, we need to get either a type or a trait ref. diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs index f3d0de12275ed..517f67b828a07 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower/path.rs @@ -396,12 +396,10 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { } let (mod_segments, enum_segment, resolved_segment_idx) = match res { - ResolveValueResult::Partial(_, unresolved_segment, _) => { + ResolveValueResult::Partial(_, unresolved_segment) => { (segments.take(unresolved_segment - 1), None, unresolved_segment - 1) } - ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_), _) - if prefix_info.enum_variant => - { + ResolveValueResult::ValueNs(ValueNs::EnumVariantId(_)) if prefix_info.enum_variant => { (segments.strip_last_two(), segments.len().checked_sub(2), segments.len() - 1) } ResolveValueResult::ValueNs(..) 
=> (segments.strip_last(), None, segments.len() - 1), @@ -431,7 +429,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { } match &res { - ResolveValueResult::ValueNs(resolution, _) => { + ResolveValueResult::ValueNs(resolution) => { let resolved_segment_idx = self.current_segment_u32(); let resolved_segment = self.current_or_prev_segment; @@ -469,7 +467,7 @@ impl<'a, 'b, 'db> PathLoweringContext<'a, 'b, 'db> { | ValueNs::ConstId(_) => {} } } - ResolveValueResult::Partial(resolution, _, _) => { + ResolveValueResult::Partial(resolution, _) => { if !self.handle_type_ns_resolution(resolution) { return None; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 5de08313f418d..ec0723c3f8c3f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -1625,7 +1625,7 @@ impl<'db> Evaluator<'db> { }; match target_ty { rustc_type_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()), - rustc_type_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()), + rustc_type_ir::FloatTy::F64 => Owned(value.to_le_bytes().to_vec()), rustc_type_ir::FloatTy::F16 | rustc_type_ir::FloatTy::F128 => { not_supported!("unstable floating point type f16 and f128"); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 199db7a3e7187..8d5e5c2e6e74b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -1429,7 +1429,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { .resolve_path_in_value_ns(self.db, c, HygieneId::ROOT) .ok_or_else(unresolved_name)?; match pr { - ResolveValueResult::ValueNs(v, _) => { + ResolveValueResult::ValueNs(v) => { if let ValueNs::ConstId(c) = v { self.lower_const_to_operand( GenericArgs::empty(self.interner()), @@ -1439,7 +1439,7 @@ impl<'a, 'db> MirLowerCtx<'a, 'db> { not_supported!("bad path in range pattern"); } } - ResolveValueResult::Partial(_, _, _) => { + ResolveValueResult::Partial(_, _) => { not_supported!("associated constants in range pattern") } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index a8aacbff16fa8..83139821e3b8b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -373,7 +373,7 @@ impl<'db> MirLowerCtx<'_, 'db> { if let ( MatchingMode::Assign, - ResolveValueResult::ValueNs(ValueNs::LocalBinding(binding), _), + ResolveValueResult::ValueNs(ValueNs::LocalBinding(binding)), ) = (mode, &pr) { let local = self.binding_local(*binding)?; @@ -398,7 +398,7 @@ impl<'db> MirLowerCtx<'_, 'db> { { break 'b (c, x.1); } - if let ResolveValueResult::ValueNs(ValueNs::ConstId(c), _) = pr { + if let ResolveValueResult::ValueNs(ValueNs::ConstId(c)) = pr { break 'b (c, GenericArgs::empty(self.interner())); } not_supported!("path in pattern position that is not const or variant") diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs index 6f4fae7073178..8658d03a9e3e8 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/next_solver/predicate.rs @@ -714,9 +714,9 @@ impl<'db> 
rustc_type_ir::inherent::Predicate> for Predicate<'db> fn allow_normalization(self) -> bool { // TODO: this should probably live in rustc_type_ir match self.inner().as_ref().skip_binder() { - PredicateKind::Clause(ClauseKind::WellFormed(_)) - | PredicateKind::AliasRelate(..) - | PredicateKind::NormalizesTo(..) => false, + PredicateKind::Clause(ClauseKind::WellFormed(_)) | PredicateKind::AliasRelate(..) => { + false + } PredicateKind::Clause(ClauseKind::Trait(_)) | PredicateKind::Clause(ClauseKind::RegionOutlives(_)) | PredicateKind::Clause(ClauseKind::TypeOutlives(_)) @@ -729,6 +729,7 @@ impl<'db> rustc_type_ir::inherent::Predicate> for Predicate<'db> | PredicateKind::Coerce(_) | PredicateKind::Clause(ClauseKind::ConstEvaluatable(_)) | PredicateKind::ConstEquate(_, _) + | PredicateKind::NormalizesTo(..) | PredicateKind::Ambiguous => true, } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs index 4d68179a88b82..e4e7b4cc38859 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/never_type.rs @@ -760,6 +760,48 @@ fn coerce_ref_binding() -> ! { ) } +#[test] +fn diverging_place_match_ref_mut() { + check_infer_with_mismatches( + r#" +//- minicore: sized +fn coerce_ref_mut_binding() -> ! { + unsafe { + let x: *mut ! = 0 as _; + let ref mut _x: () = *x; + } +} +"#, + expect![[r#" + 33..120 '{ ... } }': ! + 39..118 'unsafe... }': ! + 60..61 'x': *mut ! + 72..73 '0': i32 + 72..78 '0 as _': *mut ! + 92..102 'ref mut _x': &'? mut () + 109..111 '*x': ! + 110..111 'x': *mut ! + 109..111: expected (), got ! + "#]], + ) +} + +#[test] +fn assign_never_place_no_mismatch() { + check_no_mismatches( + r#" +//- minicore: sized +fn foo() { + unsafe { + let p: *mut ! = 0 as _; + let mut x: () = (); + x = *p; + } +} +"#, + ); +} + #[test] fn never_place_isnt_diverging() { check_infer_with_mismatches( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index 3b5b4e4fa5404..5291bf80e8219 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -2363,7 +2363,6 @@ fn test() { } "#, expect![[r#" - 46..49 'Foo': Foo 93..97 'self': Foo 108..125 '{ ... }': usize 118..119 'N': usize @@ -2688,3 +2687,86 @@ pub trait FilterT = Self> { "#, ); } + +#[test] +fn regression_21605() { + check_infer( + r#" +//- minicore: fn, coerce_unsized, dispatch_from_dyn, iterator, iterators +pub struct Filter<'a, 'b, T> +where + T: 'b, + 'a: 'b, +{ + filter_fn: dyn Fn(&'a T) -> bool, + t: Option, + b: &'b (), +} + +impl<'a, 'b, T> Filter<'a, 'b, T> +where + T: 'b, + 'a: 'b, +{ + pub fn new(filter_fn: dyn Fn(&T) -> bool) -> Self { + Self { + filter_fn: filter_fn, + t: None, + b: &(), + } + } +} + +pub trait FilterExt { + type Output; + fn filter(&self, filter: &Filter) -> Self::Output; +} + +impl FilterExt for [T; N] +where + T: IntoIterator, +{ + type Output = T; + fn filter(&self, filter: &Filter) -> Self::Output { + let _ = self.into_iter().filter(filter.filter_fn); + loop {} + } +} +"#, + expect![[r#" + 214..223 'filter_fn': dyn Fn(&'? T) -> bool + 'static + 253..360 '{ ... }': Filter<'a, 'b, T> + 263..354 'Self {... }': Filter<'a, 'b, T> + 293..302 'filter_fn': dyn Fn(&'? T) -> bool + 'static + 319..323 'None': Option + 340..343 '&()': &'? () + 341..343 '()': () + 421..425 'self': &'? 
Self + 427..433 'filter': &'? Filter<'?, '?, T> + 580..584 'self': &'? [T; N] + 586..592 'filter': &'? Filter<'?, '?, T> + 622..704 '{ ... }': T + 636..637 '_': Filter, dyn Fn(&'? T) -> bool + '?> + 640..644 'self': &'? [T; N] + 640..656 'self.i...iter()': Iter<'?, T> + 640..681 'self.i...er_fn)': Filter, dyn Fn(&'? T) -> bool + '?> + 664..670 'filter': &'? Filter<'?, '?, T> + 664..680 'filter...ter_fn': dyn Fn(&'? T) -> bool + 'static + 691..698 'loop {}': ! + 696..698 '{}': () + "#]], + ); +} + +#[test] +fn extern_fns_cannot_have_param_patterns() { + check_no_mismatches( + r#" +pub(crate) struct Builder<'a>(&'a ()); + +unsafe extern "C" { + pub(crate) fn foo<'a>(Builder: &Builder<'a>); +} + "#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs index 7d4f04268af9f..dab8bdb54b691 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/simple.rs @@ -4074,3 +4074,38 @@ static S: &[u8; 158] = include_bytes!("/foo/bar/baz.txt"); "#, ); } + +#[test] +fn proc_macros_are_functions_inside_defining_crate_and_macros_outside() { + check_types( + r#" +//- /pm.rs crate:pm +#![crate_type = "proc-macro"] + +#[proc_macro_attribute] +pub fn proc_macro() {} + +fn foo() { + proc_macro; + // ^^^^^^^^^^ fn proc_macro() +} + +mod bar { + use super::proc_macro; + + fn baz() { + super::proc_macro; + // ^^^^^^^^^^^^^^^^^ fn proc_macro() + proc_macro; + // ^^^^^^^^^^ fn proc_macro() + } +} + +//- /lib.rs crate:lib deps:pm +fn foo() { + pm::proc_macro; + // ^^^^^^^^^^^^^^ {unknown} +} + "#, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index b4440dfa1826c..91fdcb8e6369b 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -172,8 +172,13 @@ fn write_function<'db>(f: &mut HirFormatter<'_, 'db>, func_id: FunctionId) -> Re write_generic_params(GenericDefId::FunctionId(func_id), f)?; + let too_long_param = data.params.len() > 4; f.write_char('(')?; + if too_long_param { + f.write_str("\n ")?; + } + let mut first = true; let mut skip_self = 0; if let Some(self_param) = func.self_param(db) { @@ -182,11 +187,12 @@ fn write_function<'db>(f: &mut HirFormatter<'_, 'db>, func_id: FunctionId) -> Re skip_self = 1; } + let comma = if too_long_param { ",\n " } else { ", " }; // FIXME: Use resolved `param.ty` once we no longer discard lifetimes let body = db.body(func_id.into()); for (type_ref, param) in data.params.iter().zip(func.assoc_fn_params(db)).skip(skip_self) { if !first { - f.write_str(", ")?; + f.write_str(comma)?; } else { first = false; } @@ -201,11 +207,14 @@ fn write_function<'db>(f: &mut HirFormatter<'_, 'db>, func_id: FunctionId) -> Re if data.is_varargs() { if !first { - f.write_str(", ")?; + f.write_str(comma)?; } f.write_str("...")?; } + if too_long_param { + f.write_char('\n')?; + } f.write_char(')')?; // `FunctionData::ret_type` will be `::core::future::Future` for async fns. diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 4b615665167c0..5820a6714b023 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -2244,6 +2244,39 @@ impl DefWithBody { acc.push(diag.into()) } } + + /// Returns an iterator over the inferred types of all expressions in this body. 
+ pub fn expression_types<'db>( + self, + db: &'db dyn HirDatabase, + ) -> impl Iterator> { + self.id().into_iter().flat_map(move |def_id| { + let infer = InferenceResult::for_body(db, def_id); + let resolver = def_id.resolver(db); + + infer.expression_types().map(move |(_, ty)| Type::new_with_resolver(db, &resolver, ty)) + }) + } + + /// Returns an iterator over the inferred types of all patterns in this body. + pub fn pattern_types<'db>(self, db: &'db dyn HirDatabase) -> impl Iterator> { + self.id().into_iter().flat_map(move |def_id| { + let infer = InferenceResult::for_body(db, def_id); + let resolver = def_id.resolver(db); + + infer.pattern_types().map(move |(_, ty)| Type::new_with_resolver(db, &resolver, ty)) + }) + } + + /// Returns an iterator over the inferred types of all bindings in this body. + pub fn binding_types<'db>(self, db: &'db dyn HirDatabase) -> impl Iterator> { + self.id().into_iter().flat_map(move |def_id| { + let infer = InferenceResult::for_body(db, def_id); + let resolver = def_id.resolver(db); + + infer.binding_types().map(move |(_, ty)| Type::new_with_resolver(db, &resolver, ty)) + }) + } } fn expr_store_diagnostics<'db>( @@ -6068,11 +6101,7 @@ impl<'db> Type<'db> { match name { Some(name) => { - match ctx.probe_for_name( - method_resolution::Mode::MethodCall, - name.clone(), - self_ty, - ) { + match ctx.probe_for_name(method_resolution::Mode::Path, name.clone(), self_ty) { Ok(candidate) | Err(method_resolution::MethodError::PrivateMatch(candidate)) => { let id = candidate.item.into(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs index 2694910aa6098..da5c123957715 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/auto_import.rs @@ -352,7 +352,7 @@ mod tests { let config = TEST_CONFIG; let ctx = AssistContext::new(sema, &config, frange); let mut acc = Assists::new(&ctx, AssistResolveStrategy::All); - auto_import(&mut acc, &ctx); + hir::attach_db(&db, || auto_import(&mut acc, &ctx)); let assists = acc.finish(); let labels = assists.iter().map(|assist| assist.label.to_string()).collect::>(); @@ -1897,4 +1897,35 @@ fn foo(_: S) {} "#, ); } + + #[test] + fn with_after_segments() { + let before = r#" +mod foo { + pub mod wanted { + pub fn abc() {} + } +} + +mod bar { + pub mod wanted {} +} + +mod baz { + pub fn wanted() {} +} + +mod quux { + pub struct wanted; +} +impl quux::wanted { + fn abc() {} +} + +fn f() { + wanted$0::abc; +} + "#; + check_auto_import_order(before, &["Import `foo::wanted`", "Import `quux::wanted`"]); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs index 771e80bb92312..0e85a77822bed 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/bind_unused_param.rs @@ -2,7 +2,7 @@ use crate::assist_context::{AssistContext, Assists}; use ide_db::{LineIndexDatabase, assists::AssistId, defs::Definition}; use syntax::{ AstNode, - ast::{self, HasName, edit_in_place::Indent}, + ast::{self, HasName, edit::AstNodeEdit}, }; // Assist: bind_unused_param diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs index 
1ae5f6491744a..434fbbae05c43 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_bool_to_enum.rs @@ -11,9 +11,10 @@ use ide_db::{ source_change::SourceChangeBuilder, }; use itertools::Itertools; +use syntax::ast::edit::AstNodeEdit; use syntax::{ AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T, - ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, make}, + ast::{self, HasName, edit::IndentLevel, make}, }; use crate::{ @@ -479,10 +480,9 @@ fn add_enum_def( ctx.sema.scope(name.syntax()).map(|scope| scope.module()) }) .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); - let enum_def = make_bool_enum(make_enum_pub); let indent = IndentLevel::from_node(&insert_before); - enum_def.reindent_to(indent); + let enum_def = make_bool_enum(make_enum_pub).reset_indent().indent(indent); edit.insert( insert_before.text_range().start(), diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs index ebfed9f9ca991..d2336a4a5d8d0 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_let_else_to_match.rs @@ -1,7 +1,6 @@ use syntax::T; use syntax::ast::RangeItem; -use syntax::ast::edit::IndentLevel; -use syntax::ast::edit_in_place::Indent; +use syntax::ast::edit::AstNodeEdit; use syntax::ast::syntax_factory::SyntaxFactory; use syntax::ast::{self, AstNode, HasName, LetStmt, Pat}; @@ -93,7 +92,8 @@ pub(crate) fn convert_let_else_to_match(acc: &mut Assists, ctx: &AssistContext<' ); let else_arm = make.match_arm(make.wildcard_pat().into(), None, else_expr); let match_ = make.expr_match(init, make.match_arm_list([binding_arm, else_arm])); - match_.reindent_to(IndentLevel::from_node(let_stmt.syntax())); + let match_ = match_.reset_indent(); + let match_ = match_.indent(let_stmt.indent_level()); if bindings.is_empty() { editor.replace(let_stmt.syntax(), match_.syntax()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs index 0e5e6185d0540..1740cd024a89c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/convert_tuple_return_type_to_struct.rs @@ -5,15 +5,20 @@ use ide_db::{ assists::AssistId, defs::Definition, helpers::mod_path_to_ast, - imports::insert_use::{ImportScope, insert_use}, + imports::insert_use::{ImportScope, insert_use_with_editor}, search::{FileReference, UsageSearchResult}, source_change::SourceChangeBuilder, syntax_helpers::node_ext::{for_each_tail_expr, walk_expr}, }; use syntax::{ AstNode, SyntaxNode, - ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, make}, - match_ast, ted, + ast::{ + self, HasName, + edit::{AstNodeEdit, IndentLevel}, + syntax_factory::SyntaxFactory, + }, + match_ast, + syntax_editor::SyntaxEditor, }; use crate::assist_context::{AssistContext, Assists}; @@ -67,14 +72,15 @@ pub(crate) fn convert_tuple_return_type_to_struct( "Convert tuple return type to tuple struct", target, move |edit| { - let ret_type = edit.make_mut(ret_type); - let fn_ = edit.make_mut(fn_); + let mut syntax_editor = 
edit.make_editor(ret_type.syntax()); + let syntax_factory = SyntaxFactory::with_mappings(); let usages = Definition::Function(fn_def).usages(&ctx.sema).all(); let struct_name = format!("{}Result", stdx::to_camel_case(&fn_name.to_string())); let parent = fn_.syntax().ancestors().find_map(>::cast); add_tuple_struct_def( edit, + &syntax_factory, ctx, &usages, parent.as_ref().map(|it| it.syntax()).unwrap_or(fn_.syntax()), @@ -83,15 +89,23 @@ pub(crate) fn convert_tuple_return_type_to_struct( &target_module, ); - ted::replace( + syntax_editor.replace( ret_type.syntax(), - make::ret_type(make::ty(&struct_name)).syntax().clone_for_update(), + syntax_factory.ret_type(syntax_factory.ty(&struct_name)).syntax(), ); if let Some(fn_body) = fn_.body() { - replace_body_return_values(ast::Expr::BlockExpr(fn_body), &struct_name); + replace_body_return_values( + &mut syntax_editor, + &syntax_factory, + ast::Expr::BlockExpr(fn_body), + &struct_name, + ); } + syntax_editor.add_mappings(syntax_factory.finish_with_mappings()); + edit.add_file_edits(ctx.vfs_file_id(), syntax_editor); + replace_usages(edit, ctx, &usages, &struct_name, &target_module); }, ) @@ -106,24 +120,37 @@ fn replace_usages( target_module: &hir::Module, ) { for (file_id, references) in usages.iter() { - edit.edit_file(file_id.file_id(ctx.db())); + let Some(first_ref) = references.first() else { continue }; + + let mut editor = edit.make_editor(first_ref.name.syntax().as_node().unwrap()); + let syntax_factory = SyntaxFactory::with_mappings(); - let refs_with_imports = - augment_references_with_imports(edit, ctx, references, struct_name, target_module); + let refs_with_imports = augment_references_with_imports( + &syntax_factory, + ctx, + references, + struct_name, + target_module, + ); refs_with_imports.into_iter().rev().for_each(|(name, import_data)| { if let Some(fn_) = name.syntax().parent().and_then(ast::Fn::cast) { cov_mark::hit!(replace_trait_impl_fns); if let Some(ret_type) = fn_.ret_type() { - ted::replace( + editor.replace( ret_type.syntax(), - make::ret_type(make::ty(struct_name)).syntax().clone_for_update(), + syntax_factory.ret_type(syntax_factory.ty(struct_name)).syntax(), ); } if let Some(fn_body) = fn_.body() { - replace_body_return_values(ast::Expr::BlockExpr(fn_body), struct_name); + replace_body_return_values( + &mut editor, + &syntax_factory, + ast::Expr::BlockExpr(fn_body), + struct_name, + ); } } else { // replace tuple patterns @@ -143,22 +170,30 @@ fn replace_usages( _ => None, }); for tuple_pat in tuple_pats { - ted::replace( + editor.replace( tuple_pat.syntax(), - make::tuple_struct_pat( - make::path_from_text(struct_name), - tuple_pat.fields(), - ) - .clone_for_update() - .syntax(), + syntax_factory + .tuple_struct_pat( + syntax_factory.path_from_text(struct_name), + tuple_pat.fields(), + ) + .syntax(), ); } } - // add imports across modules where needed if let Some((import_scope, path)) = import_data { - insert_use(&import_scope, path, &ctx.config.insert_use); + insert_use_with_editor( + &import_scope, + path, + &ctx.config.insert_use, + &mut editor, + &syntax_factory, + ); } - }) + }); + + editor.add_mappings(syntax_factory.finish_with_mappings()); + edit.add_file_edits(file_id.file_id(ctx.db()), editor); } } @@ -176,7 +211,7 @@ fn node_to_pats(node: SyntaxNode) -> Option> { } fn augment_references_with_imports( - edit: &mut SourceChangeBuilder, + syntax_factory: &SyntaxFactory, ctx: &AssistContext<'_>, references: &[FileReference], struct_name: &str, @@ -191,8 +226,6 @@ fn augment_references_with_imports( 
ctx.sema.scope(name.syntax()).map(|scope| (name, scope.module())) }) .map(|(name, ref_module)| { - let new_name = edit.make_mut(name); - // if the referenced module is not the same as the target one and has not been seen before, add an import let import_data = if ref_module.nearest_non_block_module(ctx.db()) != *target_module && !visited_modules.contains(&ref_module) @@ -201,8 +234,7 @@ fn augment_references_with_imports( let cfg = ctx.config.find_path_config(ctx.sema.is_nightly(ref_module.krate(ctx.sema.db))); - let import_scope = - ImportScope::find_insert_use_container(new_name.syntax(), &ctx.sema); + let import_scope = ImportScope::find_insert_use_container(name.syntax(), &ctx.sema); let path = ref_module .find_use_path( ctx.sema.db, @@ -211,12 +243,12 @@ fn augment_references_with_imports( cfg, ) .map(|mod_path| { - make::path_concat( + syntax_factory.path_concat( mod_path_to_ast( &mod_path, target_module.krate(ctx.db()).edition(ctx.db()), ), - make::path_from_text(struct_name), + syntax_factory.path_from_text(struct_name), ) }); @@ -225,7 +257,7 @@ fn augment_references_with_imports( None }; - (new_name, import_data) + (name, import_data) }) .collect() } @@ -233,6 +265,7 @@ fn augment_references_with_imports( // Adds the definition of the tuple struct before the parent function. fn add_tuple_struct_def( edit: &mut SourceChangeBuilder, + syntax_factory: &SyntaxFactory, ctx: &AssistContext<'_>, usages: &UsageSearchResult, parent: &SyntaxNode, @@ -248,22 +281,27 @@ fn add_tuple_struct_def( ctx.sema.scope(name.syntax()).map(|scope| scope.module()) }) .any(|module| module.nearest_non_block_module(ctx.db()) != *target_module); - let visibility = if make_struct_pub { Some(make::visibility_pub()) } else { None }; + let visibility = if make_struct_pub { Some(syntax_factory.visibility_pub()) } else { None }; - let field_list = ast::FieldList::TupleFieldList(make::tuple_field_list( - tuple_ty.fields().map(|ty| make::tuple_field(visibility.clone(), ty)), + let field_list = ast::FieldList::TupleFieldList(syntax_factory.tuple_field_list( + tuple_ty.fields().map(|ty| syntax_factory.tuple_field(visibility.clone(), ty)), )); - let struct_name = make::name(struct_name); - let struct_def = make::struct_(visibility, struct_name, None, field_list).clone_for_update(); + let struct_name = syntax_factory.name(struct_name); + let struct_def = syntax_factory.struct_(visibility, struct_name, None, field_list); let indent = IndentLevel::from_node(parent); - struct_def.reindent_to(indent); + let struct_def = struct_def.indent(indent); edit.insert(parent.text_range().start(), format!("{struct_def}\n\n{indent}")); } /// Replaces each returned tuple in `body` with the constructor of the tuple struct named `struct_name`. 
-fn replace_body_return_values(body: ast::Expr, struct_name: &str) { +fn replace_body_return_values( + syntax_editor: &mut SyntaxEditor, + syntax_factory: &SyntaxFactory, + body: ast::Expr, + struct_name: &str, +) { let mut exprs_to_wrap = Vec::new(); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e); @@ -278,12 +316,11 @@ fn replace_body_return_values(body: ast::Expr, struct_name: &str) { for ret_expr in exprs_to_wrap { if let ast::Expr::TupleExpr(tuple_expr) = &ret_expr { - let struct_constructor = make::expr_call( - make::expr_path(make::ext::ident_path(struct_name)), - make::arg_list(tuple_expr.fields()), - ) - .clone_for_update(); - ted::replace(ret_expr.syntax(), struct_constructor.syntax()); + let struct_constructor = syntax_factory.expr_call( + syntax_factory.expr_path(syntax_factory.ident_path(struct_name)), + syntax_factory.arg_list(tuple_expr.fields()), + ); + syntax_editor.replace(ret_expr.syntax(), struct_constructor.syntax()); } } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs index e2afc0bf130ee..b8dc59f87dee7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/destructure_tuple_binding.rs @@ -8,8 +8,8 @@ use ide_db::{ use itertools::Itertools; use syntax::{ T, - ast::{self, AstNode, FieldExpr, HasName, IdentPat, make}, - ted, + ast::{self, AstNode, FieldExpr, HasName, IdentPat, syntax_factory::SyntaxFactory}, + syntax_editor::{Position, SyntaxEditor}, }; use crate::{ @@ -89,13 +89,20 @@ fn destructure_tuple_edit_impl( data: &TupleData, in_sub_pattern: bool, ) { - let assignment_edit = edit_tuple_assignment(ctx, edit, data, in_sub_pattern); - let current_file_usages_edit = edit_tuple_usages(data, edit, ctx, in_sub_pattern); + let mut syntax_editor = edit.make_editor(data.ident_pat.syntax()); + let syntax_factory = SyntaxFactory::with_mappings(); - assignment_edit.apply(); + let assignment_edit = + edit_tuple_assignment(ctx, edit, &mut syntax_editor, &syntax_factory, data, in_sub_pattern); + let current_file_usages_edit = edit_tuple_usages(data, ctx, &syntax_factory, in_sub_pattern); + + assignment_edit.apply(&mut syntax_editor, &syntax_factory); if let Some(usages_edit) = current_file_usages_edit { - usages_edit.into_iter().for_each(|usage_edit| usage_edit.apply(edit)) + usages_edit.into_iter().for_each(|usage_edit| usage_edit.apply(edit, &mut syntax_editor)) } + + syntax_editor.add_mappings(syntax_factory.finish_with_mappings()); + edit.add_file_edits(ctx.vfs_file_id(), syntax_editor); } fn collect_data(ident_pat: IdentPat, ctx: &AssistContext<'_>) -> Option { @@ -165,11 +172,11 @@ struct TupleData { fn edit_tuple_assignment( ctx: &AssistContext<'_>, edit: &mut SourceChangeBuilder, + editor: &mut SyntaxEditor, + make: &SyntaxFactory, data: &TupleData, in_sub_pattern: bool, ) -> AssignmentEdit { - let ident_pat = edit.make_mut(data.ident_pat.clone()); - let tuple_pat = { let original = &data.ident_pat; let is_ref = original.ref_token().is_some(); @@ -177,10 +184,11 @@ fn edit_tuple_assignment( let fields = data .field_names .iter() - .map(|name| ast::Pat::from(make::ident_pat(is_ref, is_mut, make::name(name)))); - make::tuple_pat(fields).clone_for_update() + .map(|name| ast::Pat::from(make.ident_pat(is_ref, is_mut, make.name(name)))); + make.tuple_pat(fields) }; - let is_shorthand_field = ident_pat + let 
is_shorthand_field = data + .ident_pat .name() .as_ref() .and_then(ast::RecordPatField::for_field_name) @@ -189,14 +197,20 @@ fn edit_tuple_assignment( if let Some(cap) = ctx.config.snippet_cap { // place cursor on first tuple name if let Some(ast::Pat::IdentPat(first_pat)) = tuple_pat.fields().next() { - edit.add_tabstop_before( - cap, - first_pat.name().expect("first ident pattern should have a name"), - ) + let annotation = edit.make_tabstop_before(cap); + editor.add_annotation( + first_pat.name().expect("first ident pattern should have a name").syntax(), + annotation, + ); } } - AssignmentEdit { ident_pat, tuple_pat, in_sub_pattern, is_shorthand_field } + AssignmentEdit { + ident_pat: data.ident_pat.clone(), + tuple_pat, + in_sub_pattern, + is_shorthand_field, + } } struct AssignmentEdit { ident_pat: ast::IdentPat, @@ -206,23 +220,30 @@ struct AssignmentEdit { } impl AssignmentEdit { - fn apply(self) { + fn apply(self, syntax_editor: &mut SyntaxEditor, syntax_mapping: &SyntaxFactory) { // with sub_pattern: keep original tuple and add subpattern: `tup @ (_0, _1)` if self.in_sub_pattern { - self.ident_pat.set_pat(Some(self.tuple_pat.into())) + self.ident_pat.set_pat_with_editor( + Some(self.tuple_pat.into()), + syntax_editor, + syntax_mapping, + ) } else if self.is_shorthand_field { - ted::insert(ted::Position::after(self.ident_pat.syntax()), self.tuple_pat.syntax()); - ted::insert_raw(ted::Position::after(self.ident_pat.syntax()), make::token(T![:])); + syntax_editor.insert(Position::after(self.ident_pat.syntax()), self.tuple_pat.syntax()); + syntax_editor + .insert(Position::after(self.ident_pat.syntax()), syntax_mapping.whitespace(" ")); + syntax_editor + .insert(Position::after(self.ident_pat.syntax()), syntax_mapping.token(T![:])); } else { - ted::replace(self.ident_pat.syntax(), self.tuple_pat.syntax()) + syntax_editor.replace(self.ident_pat.syntax(), self.tuple_pat.syntax()) } } } fn edit_tuple_usages( data: &TupleData, - edit: &mut SourceChangeBuilder, ctx: &AssistContext<'_>, + make: &SyntaxFactory, in_sub_pattern: bool, ) -> Option> { // We need to collect edits first before actually applying them @@ -238,20 +259,20 @@ fn edit_tuple_usages( .as_ref()? 
.as_slice() .iter() - .filter_map(|r| edit_tuple_usage(ctx, edit, r, data, in_sub_pattern)) + .filter_map(|r| edit_tuple_usage(ctx, make, r, data, in_sub_pattern)) .collect_vec(); Some(edits) } fn edit_tuple_usage( ctx: &AssistContext<'_>, - builder: &mut SourceChangeBuilder, + make: &SyntaxFactory, usage: &FileReference, data: &TupleData, in_sub_pattern: bool, ) -> Option { match detect_tuple_index(usage, data) { - Some(index) => Some(edit_tuple_field_usage(ctx, builder, data, index)), + Some(index) => Some(edit_tuple_field_usage(ctx, make, data, index)), None if in_sub_pattern => { cov_mark::hit!(destructure_tuple_call_with_subpattern); None @@ -262,20 +283,18 @@ fn edit_tuple_usage( fn edit_tuple_field_usage( ctx: &AssistContext<'_>, - builder: &mut SourceChangeBuilder, + make: &SyntaxFactory, data: &TupleData, index: TupleIndex, ) -> EditTupleUsage { let field_name = &data.field_names[index.index]; - let field_name = make::expr_path(make::ext::ident_path(field_name)); + let field_name = make.expr_path(make.ident_path(field_name)); if data.ref_type.is_some() { let (replace_expr, ref_data) = determine_ref_and_parens(ctx, &index.field_expr); - let replace_expr = builder.make_mut(replace_expr); - EditTupleUsage::ReplaceExpr(replace_expr, ref_data.wrap_expr(field_name)) + EditTupleUsage::ReplaceExpr(replace_expr, ref_data.wrap_expr_with_factory(field_name, make)) } else { - let field_expr = builder.make_mut(index.field_expr); - EditTupleUsage::ReplaceExpr(field_expr.into(), field_name) + EditTupleUsage::ReplaceExpr(index.field_expr.into(), field_name) } } enum EditTupleUsage { @@ -291,14 +310,14 @@ enum EditTupleUsage { } impl EditTupleUsage { - fn apply(self, edit: &mut SourceChangeBuilder) { + fn apply(self, edit: &mut SourceChangeBuilder, syntax_editor: &mut SyntaxEditor) { match self { EditTupleUsage::NoIndex(range) => { edit.insert(range.start(), "/*"); edit.insert(range.end(), "*/"); } EditTupleUsage::ReplaceExpr(target_expr, replace_with) => { - ted::replace(target_expr.syntax(), replace_with.clone_for_update().syntax()) + syntax_editor.replace(target_expr.syntax(), replace_with.syntax()) } } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index 73e93a3fbf526..51b967437b568 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -2,13 +2,16 @@ use ide_db::{famous_defs::FamousDefs, source_change::SourceChangeBuilder}; use stdx::{format_to, to_lower_snake_case}; use syntax::{ TextRange, - ast::{self, AstNode, HasName, HasVisibility, edit_in_place::Indent, make}, - ted, + ast::{ + self, AstNode, HasGenericParams, HasName, HasVisibility, edit::AstNodeEdit, + syntax_factory::SyntaxFactory, + }, + syntax_editor::Position, }; use crate::{ AssistContext, AssistId, Assists, GroupLabel, - utils::{convert_reference_type, find_struct_impl, generate_impl}, + utils::{convert_reference_type, find_struct_impl}, }; // Assist: generate_setter @@ -215,12 +218,14 @@ fn generate_getter_from_info( ctx: &AssistContext<'_>, info: &AssistInfo, record_field_info: &RecordFieldInfo, + syntax_factory: &SyntaxFactory, ) -> ast::Fn { let (ty, body) = if matches!(info.assist_type, AssistType::MutGet) { + let self_expr = syntax_factory.expr_path(syntax_factory.ident_path("self")); ( - make::ty_ref(record_field_info.field_ty.clone(), true), - 
make::expr_ref( - make::expr_field(make::ext::expr_self(), &record_field_info.field_name.text()), + syntax_factory.ty_ref(record_field_info.field_ty.clone(), true), + syntax_factory.expr_ref( + syntax_factory.expr_field(self_expr, &record_field_info.field_name.text()).into(), true, ), ) @@ -241,9 +246,14 @@ fn generate_getter_from_info( })() .unwrap_or_else(|| { ( - make::ty_ref(record_field_info.field_ty.clone(), false), - make::expr_ref( - make::expr_field(make::ext::expr_self(), &record_field_info.field_name.text()), + syntax_factory.ty_ref(record_field_info.field_ty.clone(), false), + syntax_factory.expr_ref( + syntax_factory + .expr_field( + syntax_factory.expr_path(syntax_factory.ident_path("self")), + &record_field_info.field_name.text(), + ) + .into(), false, ), ) @@ -251,18 +261,18 @@ fn generate_getter_from_info( }; let self_param = if matches!(info.assist_type, AssistType::MutGet) { - make::mut_self_param() + syntax_factory.mut_self_param() } else { - make::self_param() + syntax_factory.self_param() }; let strukt = &info.strukt; - let fn_name = make::name(&record_field_info.fn_name); - let params = make::param_list(Some(self_param), []); - let ret_type = Some(make::ret_type(ty)); - let body = make::block_expr([], Some(body)); + let fn_name = syntax_factory.name(&record_field_info.fn_name); + let params = syntax_factory.param_list(Some(self_param), []); + let ret_type = Some(syntax_factory.ret_type(ty)); + let body = syntax_factory.block_expr([], Some(body)); - make::fn_( + syntax_factory.fn_( None, strukt.visibility(), fn_name, @@ -278,28 +288,35 @@ fn generate_getter_from_info( ) } -fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> ast::Fn { +fn generate_setter_from_info( + info: &AssistInfo, + record_field_info: &RecordFieldInfo, + syntax_factory: &SyntaxFactory, +) -> ast::Fn { let strukt = &info.strukt; let field_name = &record_field_info.fn_name; - let fn_name = make::name(&format!("set_{field_name}")); + let fn_name = syntax_factory.name(&format!("set_{field_name}")); let field_ty = &record_field_info.field_ty; // Make the param list // `(&mut self, $field_name: $field_ty)` - let field_param = - make::param(make::ident_pat(false, false, make::name(field_name)).into(), field_ty.clone()); - let params = make::param_list(Some(make::mut_self_param()), [field_param]); + let field_param = syntax_factory.param( + syntax_factory.ident_pat(false, false, syntax_factory.name(field_name)).into(), + field_ty.clone(), + ); + let params = syntax_factory.param_list(Some(syntax_factory.mut_self_param()), [field_param]); // Make the assignment body // `self.$field_name = $field_name` - let self_expr = make::ext::expr_self(); - let lhs = make::expr_field(self_expr, field_name); - let rhs = make::expr_path(make::ext::ident_path(field_name)); - let assign_stmt = make::expr_stmt(make::expr_assignment(lhs, rhs).into()); - let body = make::block_expr([assign_stmt.into()], None); + let self_expr = syntax_factory.expr_path(syntax_factory.ident_path("self")); + let lhs = syntax_factory.expr_field(self_expr, field_name); + let rhs = syntax_factory.expr_path(syntax_factory.ident_path(field_name)); + let assign_stmt = + syntax_factory.expr_stmt(syntax_factory.expr_assignment(lhs.into(), rhs).into()); + let body = syntax_factory.block_expr([assign_stmt.into()], None); // Make the setter fn - make::fn_( + syntax_factory.fn_( None, strukt.visibility(), fn_name, @@ -403,47 +420,69 @@ fn build_source_change( info_of_record_fields: Vec, assist_info: AssistInfo, ) { - let 
record_fields_count = info_of_record_fields.len(); - - let impl_def = if let Some(impl_def) = &assist_info.impl_def { - // We have an existing impl to add to - builder.make_mut(impl_def.clone()) - } else { - // Generate a new impl to add the methods to - let impl_def = generate_impl(&ast::Adt::Struct(assist_info.strukt.clone())); - - // Insert it after the adt - let strukt = builder.make_mut(assist_info.strukt.clone()); - - ted::insert_all_raw( - ted::Position::after(strukt.syntax()), - vec![make::tokens::blank_line().into(), impl_def.syntax().clone().into()], - ); - - impl_def - }; + let syntax_factory = SyntaxFactory::without_mappings(); - let assoc_item_list = impl_def.get_or_create_assoc_item_list(); + let items: Vec = info_of_record_fields + .iter() + .map(|record_field_info| { + let method = match assist_info.assist_type { + AssistType::Set => { + generate_setter_from_info(&assist_info, record_field_info, &syntax_factory) + } + _ => { + generate_getter_from_info(ctx, &assist_info, record_field_info, &syntax_factory) + } + }; + let new_fn = method.clone_for_update(); + let new_fn = new_fn.indent(1.into()); + new_fn.into() + }) + .collect(); - for (i, record_field_info) in info_of_record_fields.iter().enumerate() { - // Make the new getter or setter fn - let new_fn = match assist_info.assist_type { - AssistType::Set => generate_setter_from_info(&assist_info, record_field_info), - _ => generate_getter_from_info(ctx, &assist_info, record_field_info), - } - .clone_for_update(); - new_fn.indent(1.into()); + if let Some(impl_def) = &assist_info.impl_def { + // We have an existing impl to add to + let mut editor = builder.make_editor(impl_def.syntax()); + impl_def.assoc_item_list().unwrap().add_items(&mut editor, items.clone()); - // Insert a tabstop only for last method we generate - if i == record_fields_count - 1 - && let Some(cap) = ctx.config.snippet_cap - && let Some(name) = new_fn.name() + if let Some(cap) = ctx.config.snippet_cap + && let Some(ast::AssocItem::Fn(fn_)) = items.last() + && let Some(name) = fn_.name() { - builder.add_tabstop_before(cap, name); + let tabstop = builder.make_tabstop_before(cap); + editor.add_annotation(name.syntax(), tabstop); } - assoc_item_list.add_item(new_fn.clone().into()); + builder.add_file_edits(ctx.vfs_file_id(), editor); + return; } + let ty_params = assist_info.strukt.generic_param_list(); + let ty_args = ty_params.as_ref().map(|it| it.to_generic_args()); + let impl_def = syntax_factory.impl_( + None, + ty_params, + ty_args, + syntax_factory + .ty_path(syntax_factory.ident_path(&assist_info.strukt.name().unwrap().to_string())) + .into(), + None, + Some(syntax_factory.assoc_item_list(items)), + ); + let mut editor = builder.make_editor(assist_info.strukt.syntax()); + editor.insert_all( + Position::after(assist_info.strukt.syntax()), + vec![syntax_factory.whitespace("\n\n").into(), impl_def.syntax().clone().into()], + ); + + if let Some(cap) = ctx.config.snippet_cap + && let Some(assoc_list) = impl_def.assoc_item_list() + && let Some(ast::AssocItem::Fn(fn_)) = assoc_list.assoc_items().last() + && let Some(name) = fn_.name() + { + let tabstop = builder.make_tabstop_before(cap); + editor.add_annotation(name.syntax().clone(), tabstop); + } + + builder.add_file_edits(ctx.vfs_file_id(), editor); } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs index 77eb8efc6f6af..bbd42481ef0fa 100644 --- 
a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_impl.rs @@ -1,5 +1,5 @@ use syntax::{ - ast::{self, AstNode, HasGenericParams, HasName, edit_in_place::Indent, make}, + ast::{self, AstNode, HasGenericParams, HasName, edit::AstNodeEdit, make}, syntax_editor::{Position, SyntaxEditor}, }; @@ -8,10 +8,14 @@ use crate::{ utils::{self, DefaultMethods, IgnoreAssocItems}, }; -fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &impl Indent) { +fn insert_impl( + editor: &mut SyntaxEditor, + impl_: &ast::Impl, + nominal: &impl AstNodeEdit, +) -> ast::Impl { let indent = nominal.indent_level(); - impl_.indent(indent); + let impl_ = impl_.indent(indent); editor.insert_all( Position::after(nominal.syntax()), vec![ @@ -20,6 +24,8 @@ fn insert_impl(editor: &mut SyntaxEditor, impl_: &ast::Impl, nominal: &impl Inde impl_.syntax().clone().into(), ], ); + + impl_ } // Assist: generate_impl @@ -57,6 +63,8 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio let impl_ = utils::generate_impl(&nominal); let mut editor = edit.make_editor(nominal.syntax()); + + let impl_ = insert_impl(&mut editor, &impl_, &nominal); // Add a tabstop after the left curly brace if let Some(cap) = ctx.config.snippet_cap && let Some(l_curly) = impl_.assoc_item_list().and_then(|it| it.l_curly_token()) @@ -65,7 +73,6 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio editor.add_annotation(l_curly, tabstop); } - insert_impl(&mut editor, &impl_, &nominal); edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) @@ -106,6 +113,8 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> let impl_ = utils::generate_trait_impl_intransitive(&nominal, make::ty_placeholder()); let mut editor = edit.make_editor(nominal.syntax()); + + let impl_ = insert_impl(&mut editor, &impl_, &nominal); // Make the trait type a placeholder snippet if let Some(cap) = ctx.config.snippet_cap { if let Some(trait_) = impl_.trait_() { @@ -119,7 +128,6 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> } } - insert_impl(&mut editor, &impl_, &nominal); edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) @@ -206,6 +214,8 @@ pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> make_impl_(Some(assoc_item_list)) }; + let impl_ = insert_impl(&mut editor, &impl_, &trait_); + if let Some(cap) = ctx.config.snippet_cap { if let Some(generics) = impl_.trait_().and_then(|it| it.generic_arg_list()) { for generic in generics.generic_args() { @@ -232,7 +242,6 @@ pub(crate) fn generate_impl_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> } } - insert_impl(&mut editor, &impl_, &trait_); edit.add_file_edits(ctx.vfs_file_id(), editor); }, ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs index 53f6f4883f4dc..3a62a8853e3af 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_mut_trait_impl.rs @@ -1,7 +1,7 @@ use ide_db::{famous_defs::FamousDefs, traits::resolve_target_trait}; use syntax::{ AstNode, SyntaxElement, SyntaxNode, T, - ast::{self, edit::AstNodeEdit, edit_in_place::Indent, syntax_factory::SyntaxFactory}, + ast::{self, edit::AstNodeEdit, syntax_factory::SyntaxFactory}, 
syntax_editor::{Element, Position, SyntaxEditor}, }; @@ -46,7 +46,7 @@ use crate::{AssistContext, AssistId, Assists}; // ``` pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let impl_def = ctx.find_node_at_offset::()?; - let indent = Indent::indent_level(&impl_def); + let indent = impl_def.indent_level(); let ast::Type::PathType(path) = impl_def.trait_()? else { return None; @@ -78,7 +78,7 @@ pub(crate) fn generate_mut_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_> let new_impl = ast::Impl::cast(new_root.clone()).unwrap(); - Indent::indent(&new_impl, indent); + let new_impl = new_impl.indent(indent); let mut editor = edit.make_editor(impl_def.syntax()); editor.insert_all( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs index 4b923ab556b8c..793211a27b478 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_new.rs @@ -3,7 +3,7 @@ use ide_db::{ use_trivial_constructor::use_trivial_constructor, }; use syntax::{ - ast::{self, AstNode, HasName, HasVisibility, StructKind, edit_in_place::Indent, make}, + ast::{self, AstNode, HasName, HasVisibility, StructKind, edit::AstNodeEdit, make}, syntax_editor::Position, }; @@ -150,14 +150,14 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option false, false, ) - .clone_for_update(); - fn_.indent(1.into()); + .clone_for_update() + .indent(1.into()); let mut editor = builder.make_editor(strukt.syntax()); // Get the node for set annotation let contain_fn = if let Some(impl_def) = impl_def { - fn_.indent(impl_def.indent_level()); + let fn_ = fn_.indent(impl_def.indent_level()); if let Some(l_curly) = impl_def.assoc_item_list().and_then(|list| list.l_curly_token()) { @@ -182,9 +182,8 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option let indent_level = strukt.indent_level(); let body = vec![ast::AssocItem::Fn(fn_)]; let list = make::assoc_item_list(Some(body)); - let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list)); - - impl_def.indent(strukt.indent_level()); + let impl_def = generate_impl_with_item(&ast::Adt::Struct(strukt.clone()), Some(list)) + .indent(strukt.indent_level()); // Insert it after the adt editor.insert_all( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 56500cf068024..8bc4d50cf6af2 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -2,7 +2,7 @@ use crate::assist_context::{AssistContext, Assists}; use ide_db::assists::AssistId; use syntax::{ AstNode, SyntaxKind, T, - ast::{self, HasGenericParams, HasName, HasVisibility, edit_in_place::Indent, make}, + ast::{self, HasGenericParams, HasName, HasVisibility, edit::AstNodeEdit, make}, syntax_editor::{Position, SyntaxEditor}, }; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index fa4f2a78c8b7f..21f2249a19c9d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -403,6 +403,12 @@ fn inline( .find(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW) { let replace_with = t.clone_subtree().syntax().clone_for_update(); + if !is_in_type_path(&self_tok) + && let Some(ty) = ast::Type::cast(replace_with.clone()) + && let Some(generic_arg_list) = ty.generic_arg_list() + { + ted::remove(generic_arg_list.syntax()); + } ted::replace(self_tok, replace_with); } } @@ -588,6 +594,17 @@ fn inline( } } +fn is_in_type_path(self_tok: &syntax::SyntaxToken) -> bool { + self_tok + .parent_ancestors() + .skip_while(|it| !ast::Path::can_cast(it.kind())) + .map_while(ast::Path::cast) + .last() + .and_then(|it| it.syntax().parent()) + .and_then(ast::PathType::cast) + .is_some() +} + fn path_expr_as_record_field(usage: &PathExpr) -> Option { let path = usage.path()?; let name_ref = path.as_single_name_ref()?; @@ -1694,6 +1711,41 @@ fn main() { ) } + #[test] + fn inline_trait_method_call_with_lifetimes() { + check_assist( + inline_call, + r#" +trait Trait { + fn f() -> Self; +} +struct Foo<'a>(&'a ()); +impl<'a> Trait for Foo<'a> { + fn f() -> Self { Self(&()) } +} +impl Foo<'_> { + fn new() -> Self { + Self::$0f() + } +} +"#, + r#" +trait Trait { + fn f() -> Self; +} +struct Foo<'a>(&'a ()); +impl<'a> Trait for Foo<'a> { + fn f() -> Self { Self(&()) } +} +impl Foo<'_> { + fn new() -> Self { + Foo(&()) + } +} +"#, + ) + } + #[test] fn method_by_reborrow() { check_assist( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs index 264e3767a2324..854e9561d29a4 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/introduce_named_lifetime.rs @@ -1,11 +1,12 @@ -use ide_db::FxHashSet; +use ide_db::{FileId, FxHashSet}; use syntax::{ - AstNode, TextRange, - ast::{self, HasGenericParams, edit_in_place::GenericParamsOwnerEdit, make}, - ted::{self, Position}, + AstNode, SmolStr, T, TextRange, ToSmolStr, + ast::{self, HasGenericParams, HasName, syntax_factory::SyntaxFactory}, + format_smolstr, + syntax_editor::{Element, Position, SyntaxEditor}, }; -use crate::{AssistContext, AssistId, Assists, assist_context::SourceChangeBuilder}; +use crate::{AssistContext, AssistId, Assists}; static ASSIST_NAME: &str = "introduce_named_lifetime"; static ASSIST_LABEL: &str = "Introduce named lifetime"; @@ -38,100 +39,108 @@ pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext<'_ // FIXME: should also add support for the case fun(f: &Foo) -> &$0Foo let lifetime = ctx.find_node_at_offset::().filter(|lifetime| lifetime.text() == "'_")?; + let file_id = ctx.vfs_file_id(); let lifetime_loc = lifetime.lifetime_ident_token()?.text_range(); if let Some(fn_def) = lifetime.syntax().ancestors().find_map(ast::Fn::cast) { - generate_fn_def_assist(acc, fn_def, lifetime_loc, lifetime) + generate_fn_def_assist(acc, fn_def, lifetime_loc, lifetime, file_id) } else if let Some(impl_def) = lifetime.syntax().ancestors().find_map(ast::Impl::cast) { - generate_impl_def_assist(acc, impl_def, lifetime_loc, lifetime) + generate_impl_def_assist(acc, impl_def, lifetime_loc, lifetime, file_id) } else { None } } -/// Generate the assist for the fn def case +/// Given a type parameter list, generate a unique lifetime parameter name +/// which is not in the list +fn generate_unique_lifetime_param_name( + 
existing_params: Option, +) -> Option { + let used_lifetime_param: FxHashSet = existing_params + .iter() + .flat_map(|params| params.lifetime_params()) + .map(|p| p.syntax().text().to_smolstr()) + .collect(); + ('a'..='z').map(|c| format_smolstr!("'{c}")).find(|lt| !used_lifetime_param.contains(lt)) +} + fn generate_fn_def_assist( acc: &mut Assists, fn_def: ast::Fn, lifetime_loc: TextRange, lifetime: ast::Lifetime, + file_id: FileId, ) -> Option<()> { - let param_list: ast::ParamList = fn_def.param_list()?; - let new_lifetime_param = generate_unique_lifetime_param_name(fn_def.generic_param_list())?; + let param_list = fn_def.param_list()?; + let new_lifetime_name = generate_unique_lifetime_param_name(fn_def.generic_param_list())?; let self_param = - // use the self if it's a reference and has no explicit lifetime param_list.self_param().filter(|p| p.lifetime().is_none() && p.amp_token().is_some()); - // compute the location which implicitly has the same lifetime as the anonymous lifetime + let loc_needing_lifetime = if let Some(self_param) = self_param { - // if we have a self reference, use that Some(NeedsLifetime::SelfParam(self_param)) } else { - // otherwise, if there's a single reference parameter without a named lifetime, use that - let fn_params_without_lifetime: Vec<_> = param_list + let unnamed_refs: Vec<_> = param_list .params() .filter_map(|param| match param.ty() { - Some(ast::Type::RefType(ascribed_type)) if ascribed_type.lifetime().is_none() => { - Some(NeedsLifetime::RefType(ascribed_type)) + Some(ast::Type::RefType(ref_type)) if ref_type.lifetime().is_none() => { + Some(NeedsLifetime::RefType(ref_type)) } _ => None, }) .collect(); - match fn_params_without_lifetime.len() { - 1 => Some(fn_params_without_lifetime.into_iter().next()?), + + match unnamed_refs.len() { + 1 => Some(unnamed_refs.into_iter().next()?), 0 => None, - // multiple unnamed is invalid. 
assist is not applicable _ => return None, } }; - acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| { - let fn_def = builder.make_mut(fn_def); - let lifetime = builder.make_mut(lifetime); - let loc_needing_lifetime = - loc_needing_lifetime.and_then(|it| it.make_mut(builder).to_position()); - - fn_def.get_or_create_generic_param_list().add_generic_param( - make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(), - ); - ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax()); - if let Some(position) = loc_needing_lifetime { - ted::insert(position, new_lifetime_param.clone_for_update().syntax()); + + acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |edit| { + let root = fn_def.syntax().ancestors().last().unwrap().clone(); + let mut editor = SyntaxEditor::new(root); + let factory = SyntaxFactory::with_mappings(); + + if let Some(generic_list) = fn_def.generic_param_list() { + insert_lifetime_param(&mut editor, &factory, &generic_list, &new_lifetime_name); + } else { + insert_new_generic_param_list_fn(&mut editor, &factory, &fn_def, &new_lifetime_name); } + + editor.replace(lifetime.syntax(), factory.lifetime(&new_lifetime_name).syntax()); + + if let Some(pos) = loc_needing_lifetime.and_then(|l| l.to_position()) { + editor.insert_all( + pos, + vec![ + factory.lifetime(&new_lifetime_name).syntax().clone().into(), + factory.whitespace(" ").into(), + ], + ); + } + + edit.add_file_edits(file_id, editor); }) } -/// Generate the assist for the impl def case -fn generate_impl_def_assist( - acc: &mut Assists, - impl_def: ast::Impl, - lifetime_loc: TextRange, - lifetime: ast::Lifetime, +fn insert_new_generic_param_list_fn( + editor: &mut SyntaxEditor, + factory: &SyntaxFactory, + fn_def: &ast::Fn, + lifetime_name: &str, ) -> Option<()> { - let new_lifetime_param = generate_unique_lifetime_param_name(impl_def.generic_param_list())?; - acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |builder| { - let impl_def = builder.make_mut(impl_def); - let lifetime = builder.make_mut(lifetime); + let name = fn_def.name()?; - impl_def.get_or_create_generic_param_list().add_generic_param( - make::lifetime_param(new_lifetime_param.clone()).clone_for_update().into(), - ); - ted::replace(lifetime.syntax(), new_lifetime_param.clone_for_update().syntax()); - }) -} + editor.insert_all( + Position::after(name.syntax()), + vec![ + factory.token(T![<]).syntax_element(), + factory.lifetime(lifetime_name).syntax().syntax_element(), + factory.token(T![>]).syntax_element(), + ], + ); -/// Given a type parameter list, generate a unique lifetime parameter name -/// which is not in the list -fn generate_unique_lifetime_param_name( - existing_type_param_list: Option, -) -> Option { - match existing_type_param_list { - Some(type_params) => { - let used_lifetime_params: FxHashSet<_> = - type_params.lifetime_params().map(|p| p.syntax().text().to_string()).collect(); - ('a'..='z').map(|it| format!("'{it}")).find(|it| !used_lifetime_params.contains(it)) - } - None => Some("'a".to_owned()), - } - .map(|it| make::lifetime(&it)) + Some(()) } enum NeedsLifetime { @@ -140,13 +149,6 @@ enum NeedsLifetime { } impl NeedsLifetime { - fn make_mut(self, builder: &mut SourceChangeBuilder) -> Self { - match self { - Self::SelfParam(it) => Self::SelfParam(builder.make_mut(it)), - Self::RefType(it) => Self::RefType(builder.make_mut(it)), - } - } - fn to_position(self) -> Option { match self { Self::SelfParam(it) => 
Some(Position::after(it.amp_token()?)), @@ -155,6 +157,75 @@ impl NeedsLifetime { } } +fn generate_impl_def_assist( + acc: &mut Assists, + impl_def: ast::Impl, + lifetime_loc: TextRange, + lifetime: ast::Lifetime, + file_id: FileId, +) -> Option<()> { + let new_lifetime_name = generate_unique_lifetime_param_name(impl_def.generic_param_list())?; + + acc.add(AssistId::refactor(ASSIST_NAME), ASSIST_LABEL, lifetime_loc, |edit| { + let root = impl_def.syntax().ancestors().last().unwrap().clone(); + let mut editor = SyntaxEditor::new(root); + let factory = SyntaxFactory::without_mappings(); + + if let Some(generic_list) = impl_def.generic_param_list() { + insert_lifetime_param(&mut editor, &factory, &generic_list, &new_lifetime_name); + } else { + insert_new_generic_param_list_imp(&mut editor, &factory, &impl_def, &new_lifetime_name); + } + + editor.replace(lifetime.syntax(), factory.lifetime(&new_lifetime_name).syntax()); + + edit.add_file_edits(file_id, editor); + }) +} + +fn insert_new_generic_param_list_imp( + editor: &mut SyntaxEditor, + factory: &SyntaxFactory, + impl_: &ast::Impl, + lifetime_name: &str, +) -> Option<()> { + let impl_kw = impl_.impl_token()?; + + editor.insert_all( + Position::after(impl_kw), + vec![ + factory.token(T![<]).syntax_element(), + factory.lifetime(lifetime_name).syntax().syntax_element(), + factory.token(T![>]).syntax_element(), + ], + ); + + Some(()) +} + +fn insert_lifetime_param( + editor: &mut SyntaxEditor, + factory: &SyntaxFactory, + generic_list: &ast::GenericParamList, + lifetime_name: &str, +) -> Option<()> { + let r_angle = generic_list.r_angle_token()?; + let needs_comma = generic_list.generic_params().next().is_some(); + + let mut elements = Vec::new(); + + if needs_comma { + elements.push(factory.token(T![,]).syntax_element()); + elements.push(factory.whitespace(" ").syntax_element()); + } + + let lifetime = factory.lifetime(lifetime_name); + elements.push(lifetime.syntax().clone().into()); + + editor.insert_all(Position::before(r_angle), elements); + Some(()) +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs index 102d7e6d532cb..b3e79e4663e9d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -2,7 +2,10 @@ use hir::{AsAssocItem, AssocItemContainer, FileRange, HasSource}; use ide_db::{assists::AssistId, defs::Definition, search::SearchScope}; use syntax::{ SyntaxKind, - ast::{self, AstNode, edit::IndentLevel, edit_in_place::Indent}, + ast::{ + self, AstNode, + edit::{AstNodeEdit, IndentLevel}, + }, }; use crate::assist_context::{AssistContext, Assists}; @@ -136,7 +139,8 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> let indent = IndentLevel::from_node(parent_fn.syntax()); let const_ = const_.clone_for_update(); - const_.reindent_to(indent); + let const_ = const_.reset_indent(); + let const_ = const_.indent(indent); builder.insert(insert_offset, format!("\n{indent}{const_}{fixup}")); }, ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 11b3fd22faa37..7c024263b4a80 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ 
b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -1307,6 +1307,29 @@ impl Clone for Foo { ) } + #[test] + fn add_custom_impl_clone_generic_tuple_struct_with_associated() { + check_assist( + replace_derive_with_manual_impl, + r#" +//- minicore: clone, derive, deref +#[derive(Clo$0ne)] +struct Foo(T::Target); +"#, + r#" +struct Foo(T::Target); + +impl Clone for Foo +where T::Target: Clone +{ + $0fn clone(&self) -> Self { + Self(self.0.clone()) + } +} +"#, + ) + } + #[test] fn test_ignore_derive_macro_without_input() { check_assist_not_applicable( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs index b7e5344712eb4..915dd3ffcaf0c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_if_let_with_match.rs @@ -3,7 +3,11 @@ use std::iter::successors; use ide_db::{RootDatabase, defs::NameClass, ty_filter::TryEnum}; use syntax::{ AstNode, Edition, SyntaxKind, T, TextRange, - ast::{self, HasName, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory}, + ast::{ + self, HasName, + edit::{AstNodeEdit, IndentLevel}, + syntax_factory::SyntaxFactory, + }, syntax_editor::SyntaxEditor, }; @@ -54,8 +58,7 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' ast::ElseBranch::IfExpr(expr) => Some(expr), ast::ElseBranch::Block(block) => { let block = unwrap_trivial_block(block).clone_for_update(); - block.reindent_to(IndentLevel(1)); - else_block = Some(block); + else_block = Some(block.reset_indent().indent(IndentLevel(1))); None } }); @@ -82,12 +85,13 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' (Some(pat), guard) } }; - if let Some(guard) = &guard { - guard.dedent(indent); - guard.indent(IndentLevel(1)); - } - let body = if_expr.then_branch()?.clone_for_update(); - body.indent(IndentLevel(1)); + let guard = if let Some(guard) = &guard { + Some(guard.dedent(indent).indent(IndentLevel(1))) + } else { + guard + }; + + let body = if_expr.then_branch()?.clone_for_update().indent(IndentLevel(1)); cond_bodies.push((cond, guard, body)); } @@ -109,7 +113,8 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' let else_arm = make_else_arm(ctx, &make, else_block, &cond_bodies); let make_match_arm = |(pat, guard, body): (_, Option, ast::BlockExpr)| { - body.reindent_to(IndentLevel::single()); + // Dedent from original position, then indent for match arm + let body = body.dedent(indent).indent(IndentLevel::single()); let body = unwrap_trivial_block(body); match (pat, guard.map(|it| make.match_guard(it))) { (Some(pat), guard) => make.match_arm(pat, guard, body), @@ -122,8 +127,8 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' } }; let arms = cond_bodies.into_iter().map(make_match_arm).chain([else_arm]); - let match_expr = make.expr_match(scrutinee_to_be_expr, make.match_arm_list(arms)); - match_expr.indent(indent); + let match_expr = + make.expr_match(scrutinee_to_be_expr, make.match_arm_list(arms)).indent(indent); match_expr.into() }; @@ -131,10 +136,9 @@ pub(crate) fn replace_if_let_with_match(acc: &mut Assists, ctx: &AssistContext<' if_expr.syntax().parent().is_some_and(|it| ast::IfExpr::can_cast(it.kind())); let expr = if has_preceding_if_expr { // make sure we replace the 
`else if let ...` with a block so we don't end up with `else expr` - match_expr.dedent(indent); - match_expr.indent(IndentLevel(1)); - let block_expr = make.block_expr([], Some(match_expr)); - block_expr.indent(indent); + let block_expr = make + .block_expr([], Some(match_expr.dedent(indent).indent(IndentLevel(1)))) + .indent(indent); block_expr.into() } else { match_expr @@ -267,10 +271,7 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<' // wrap them in another BlockExpr. match expr { ast::Expr::BlockExpr(block) if block.modifier().is_none() => block, - expr => { - expr.indent(IndentLevel(1)); - make.block_expr([], Some(expr)) - } + expr => make.block_expr([], Some(expr.indent(IndentLevel(1)))), } }; @@ -292,18 +293,19 @@ pub(crate) fn replace_match_with_if_let(acc: &mut Assists, ctx: &AssistContext<' } else { condition }; - let then_expr = then_expr.clone_for_update(); - let else_expr = else_expr.clone_for_update(); - then_expr.reindent_to(IndentLevel::single()); - else_expr.reindent_to(IndentLevel::single()); + let then_expr = + then_expr.clone_for_update().reset_indent().indent(IndentLevel::single()); + let else_expr = + else_expr.clone_for_update().reset_indent().indent(IndentLevel::single()); let then_block = make_block_expr(then_expr); let else_expr = if is_empty_expr(&else_expr) { None } else { Some(else_expr) }; - let if_let_expr = make.expr_if( - condition, - then_block, - else_expr.map(make_block_expr).map(ast::ElseBranch::Block), - ); - if_let_expr.indent(IndentLevel::from_node(match_expr.syntax())); + let if_let_expr = make + .expr_if( + condition, + then_block, + else_expr.map(make_block_expr).map(ast::ElseBranch::Block), + ) + .indent(IndentLevel::from_node(match_expr.syntax())); let mut editor = builder.make_editor(match_expr.syntax()); editor.replace(match_expr.syntax(), if_let_expr.syntax()); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs index b95e9b52b0538..15977c420e642 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_let_with_if_let.rs @@ -1,7 +1,11 @@ use ide_db::ty_filter::TryEnum; use syntax::{ AstNode, T, - ast::{self, edit::IndentLevel, edit_in_place::Indent, syntax_factory::SyntaxFactory}, + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + syntax_factory::SyntaxFactory, + }, }; use crate::{AssistContext, AssistId, Assists}; @@ -64,7 +68,7 @@ pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext<'_> if let_expr_needs_paren(&init) { make.expr_paren(init).into() } else { init }; let block = make.block_expr([], None); - block.indent(IndentLevel::from_node(let_stmt.syntax())); + let block = block.indent(IndentLevel::from_node(let_stmt.syntax())); let if_expr = make.expr_if( make.expr_let(pat, init_expr).into(), block, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs index 5e08cba8e293c..b4055e77ccf8a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils.rs @@ -14,6 +14,7 @@ use ide_db::{ path_transform::PathTransform, syntax_helpers::{node_ext::preorder_expr, prettify_macro_expansion}, }; +use itertools::Itertools; use stdx::format_to; use syntax::{ AstNode, AstToken, Direction, NodeOrToken, SourceFile, @@ 
-765,6 +766,11 @@ fn generate_impl_inner( }); let generic_args = generic_params.as_ref().map(|params| params.to_generic_args().clone_for_update()); + let adt_assoc_bounds = trait_ + .as_ref() + .zip(generic_params.as_ref()) + .and_then(|(trait_, params)| generic_param_associated_bounds(adt, trait_, params)); + let ty = make::ty_path(make::ext::ident_path(&adt.name().unwrap().text())); let cfg_attrs = @@ -780,7 +786,7 @@ fn generate_impl_inner( false, trait_, ty, - None, + adt_assoc_bounds, adt.where_clause(), body, ), @@ -789,6 +795,51 @@ fn generate_impl_inner( .clone_for_update() } +fn generic_param_associated_bounds( + adt: &ast::Adt, + trait_: &ast::Type, + generic_params: &ast::GenericParamList, +) -> Option { + let in_type_params = |name: &ast::NameRef| { + generic_params + .generic_params() + .filter_map(|param| match param { + ast::GenericParam::TypeParam(type_param) => type_param.name(), + _ => None, + }) + .any(|param| param.text() == name.text()) + }; + let adt_body = match adt { + ast::Adt::Enum(e) => e.variant_list().map(|it| it.syntax().clone()), + ast::Adt::Struct(s) => s.field_list().map(|it| it.syntax().clone()), + ast::Adt::Union(u) => u.record_field_list().map(|it| it.syntax().clone()), + }; + let mut trait_where_clause = adt_body + .into_iter() + .flat_map(|it| it.descendants()) + .filter_map(ast::Path::cast) + .filter_map(|path| { + let qualifier = path.qualifier()?.as_single_segment()?; + let qualifier = qualifier + .name_ref() + .or_else(|| match qualifier.type_anchor()?.ty()? { + ast::Type::PathType(path_type) => path_type.path()?.as_single_name_ref(), + _ => None, + }) + .filter(in_type_params)?; + Some((qualifier, path.segment()?.name_ref()?)) + }) + .map(|(qualifier, assoc_name)| { + let segments = [qualifier, assoc_name].map(make::path_segment); + let path = make::path_from_segments(segments, false); + let bounds = Some(make::type_bound(trait_.clone())); + make::where_pred(either::Either::Right(make::ty_path(path)), bounds) + }) + .unique_by(|it| it.syntax().to_string()) + .peekable(); + trait_where_clause.peek().is_some().then(|| make::where_clause(trait_where_clause)) +} + pub(crate) fn add_method_to_adt( builder: &mut SourceChangeBuilder, adt: &ast::Adt, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs b/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs index 840b26a7ad58b..df8ad4111234e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/utils/ref_field_expr.rs @@ -5,7 +5,7 @@ //! based on the parent of the existing expression. 
use syntax::{ AstNode, T, - ast::{self, FieldExpr, MethodCallExpr, make}, + ast::{self, FieldExpr, MethodCallExpr, make, syntax_factory::SyntaxFactory}, }; use crate::AssistContext; @@ -130,4 +130,20 @@ impl RefData { expr } + + pub(crate) fn wrap_expr_with_factory( + &self, + mut expr: ast::Expr, + syntax_factory: &SyntaxFactory, + ) -> ast::Expr { + if self.needs_deref { + expr = syntax_factory.expr_prefix(T![*], expr).into(); + } + + if self.needs_parentheses { + expr = syntax_factory.expr_paren(expr).into(); + } + + expr + } } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index cab8bced88df1..97afd07b00863 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -821,7 +821,10 @@ impl<'db> CompletionContext<'db> { CompleteSemicolon::DoNotComplete } else if let Some(term_node) = sema.token_ancestors_with_macros(token.clone()).find(|node| { - matches!(node.kind(), BLOCK_EXPR | MATCH_ARM | CLOSURE_EXPR | ARG_LIST | PAREN_EXPR) + matches!( + node.kind(), + BLOCK_EXPR | MATCH_ARM | CLOSURE_EXPR | ARG_LIST | PAREN_EXPR | ARRAY_EXPR + ) }) { let next_token = iter::successors(token.next_token(), |it| it.next_token()) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs index 4713b1f1afa77..475e00dfcf29b 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs @@ -905,6 +905,25 @@ fn baz(_: impl FnOnce()) {} fn bar() { baz(foo()$0); } +"#, + ); + } + + #[test] + fn no_semicolon_in_array() { + check_edit( + r#"foo"#, + r#" +fn foo() {} +fn bar() { + let _ = [fo$0]; +} +"#, + r#" +fn foo() {} +fn bar() { + let _ = [foo()$0]; +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 35579eb2590dc..1c485270272e3 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -9,7 +9,7 @@ use hir::{ }; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; -use smallvec::SmallVec; +use smallvec::{SmallVec, smallvec}; use syntax::{ AstNode, SyntaxNode, ast::{self, HasName, make}, @@ -68,6 +68,8 @@ pub struct PathImportCandidate { pub qualifier: Vec, /// The name the item (struct, trait, enum, etc.) should have. pub name: NameToImport, + /// Potentially more segments that should resolve in the candidate. + pub after: Vec, } /// A name that will be used during item lookups. @@ -376,7 +378,7 @@ fn path_applicable_imports( ) -> FxIndexSet { let _p = tracing::info_span!("ImportAssets::path_applicable_imports").entered(); - match &*path_candidate.qualifier { + let mut result = match &*path_candidate.qualifier { [] => { items_locator::items_with_name( db, @@ -433,6 +435,75 @@ fn path_applicable_imports( }) .take(DEFAULT_QUERY_SEARCH_LIMIT) .collect(), + }; + + filter_candidates_by_after_path(db, scope, path_candidate, &mut result); + + result +} + +fn filter_candidates_by_after_path( + db: &RootDatabase, + scope: &SemanticsScope<'_>, + path_candidate: &PathImportCandidate, + imports: &mut FxIndexSet, +) { + if imports.len() <= 1 { + // Short-circuit, as even if it doesn't match fully we want it. 
+ return; + } + + let Some((last_after, after_except_last)) = path_candidate.after.split_last() else { + return; + }; + + let original_imports = imports.clone(); + + let traits_in_scope = scope.visible_traits(); + imports.retain(|import| { + let items = if after_except_last.is_empty() { + smallvec![import.original_item] + } else { + let ItemInNs::Types(ModuleDef::Module(item)) = import.original_item else { + return false; + }; + // FIXME: This doesn't consider visibilities. + item.resolve_mod_path(db, after_except_last.iter().cloned()) + .into_iter() + .flatten() + .collect::>() + }; + items.into_iter().any(|item| { + let has_last_method = |ty: hir::Type<'_>| { + ty.iterate_path_candidates(db, scope, &traits_in_scope, Some(last_after), |_| { + Some(()) + }) + .is_some() + }; + // FIXME: A trait can have an assoc type that has a function/const, that's two segments before last. + match item { + // A module? Can we resolve one more segment? + ItemInNs::Types(ModuleDef::Module(module)) => module + .resolve_mod_path(db, [last_after.clone()]) + .is_some_and(|mut it| it.any(|_| true)), + // And ADT/Type Alias? That might be a method. + ItemInNs::Types(ModuleDef::Adt(it)) => has_last_method(it.ty(db)), + ItemInNs::Types(ModuleDef::BuiltinType(it)) => has_last_method(it.ty(db)), + ItemInNs::Types(ModuleDef::TypeAlias(it)) => has_last_method(it.ty(db)), + // A trait? Might have an associated item. + ItemInNs::Types(ModuleDef::Trait(it)) => it + .items(db) + .into_iter() + .any(|assoc_item| assoc_item.name(db) == Some(last_after.clone())), + // Other items? can't resolve one more segment. + _ => false, + } + }) + }); + + if imports.is_empty() { + // Better one half-match than zero full matches. + *imports = original_imports; } } @@ -759,10 +830,14 @@ impl<'db> ImportCandidate<'db> { if sema.resolve_path(path).is_some() { return None; } + let after = std::iter::successors(path.parent_path(), |it| it.parent_path()) + .map(|seg| seg.segment()?.name_ref().map(|name| Name::new_root(&name.text()))) + .collect::>()?; path_import_candidate( sema, path.qualifier(), NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string()), + after, ) } @@ -777,6 +852,7 @@ impl<'db> ImportCandidate<'db> { Some(ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name: NameToImport::exact_case_sensitive(name.to_string()), + after: vec![], })) } @@ -785,7 +861,8 @@ impl<'db> ImportCandidate<'db> { fuzzy_name: String, sema: &Semantics<'db, RootDatabase>, ) -> Option { - path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name)) + // Assume a fuzzy match does not want the segments after. Because... I guess why not? 
+ path_import_candidate(sema, qualifier, NameToImport::fuzzy(fuzzy_name), Vec::new()) } } @@ -793,6 +870,7 @@ fn path_import_candidate<'db>( sema: &Semantics<'db, RootDatabase>, qualifier: Option, name: NameToImport, + after: Vec, ) -> Option> { Some(match qualifier { Some(qualifier) => match sema.resolve_path(&qualifier) { @@ -802,7 +880,7 @@ fn path_import_candidate<'db>( .segments() .map(|seg| seg.name_ref().map(|name| Name::new_root(&name.text()))) .collect::>>()?; - ImportCandidate::Path(PathImportCandidate { qualifier, name }) + ImportCandidate::Path(PathImportCandidate { qualifier, name, after }) } else { return None; } @@ -826,7 +904,7 @@ fn path_import_candidate<'db>( } Some(_) => return None, }, - None => ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name }), + None => ImportCandidate::Path(PathImportCandidate { qualifier: vec![], name, after }), }) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs index db1d599d550d7..f26952fa1535d 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/insert_use.rs @@ -9,8 +9,9 @@ use syntax::{ Direction, NodeOrToken, SyntaxKind, SyntaxNode, algo, ast::{ self, AstNode, HasAttrs, HasModuleItem, HasVisibility, PathSegmentKind, - edit_in_place::Removable, make, + edit_in_place::Removable, make, syntax_factory::SyntaxFactory, }, + syntax_editor::{Position, SyntaxEditor}, ted, }; @@ -146,6 +147,17 @@ pub fn insert_use(scope: &ImportScope, path: ast::Path, cfg: &InsertUseConfig) { insert_use_with_alias_option(scope, path, cfg, None); } +/// Insert an import path into the given file/node. A `merge` value of none indicates that no import merging is allowed to occur. +pub fn insert_use_with_editor( + scope: &ImportScope, + path: ast::Path, + cfg: &InsertUseConfig, + syntax_editor: &mut SyntaxEditor, + syntax_factory: &SyntaxFactory, +) { + insert_use_with_alias_option_with_editor(scope, path, cfg, None, syntax_editor, syntax_factory); +} + pub fn insert_use_as_alias( scope: &ImportScope, path: ast::Path, @@ -229,6 +241,71 @@ fn insert_use_with_alias_option( insert_use_(scope, use_item, cfg.group); } +fn insert_use_with_alias_option_with_editor( + scope: &ImportScope, + path: ast::Path, + cfg: &InsertUseConfig, + alias: Option, + syntax_editor: &mut SyntaxEditor, + syntax_factory: &SyntaxFactory, +) { + let _p = tracing::info_span!("insert_use_with_alias_option").entered(); + let mut mb = match cfg.granularity { + ImportGranularity::Crate => Some(MergeBehavior::Crate), + ImportGranularity::Module => Some(MergeBehavior::Module), + ImportGranularity::One => Some(MergeBehavior::One), + ImportGranularity::Item => None, + }; + if !cfg.enforce_granularity { + let file_granularity = guess_granularity_from_scope(scope); + mb = match file_granularity { + ImportGranularityGuess::Unknown => mb, + ImportGranularityGuess::Item => None, + ImportGranularityGuess::Module => Some(MergeBehavior::Module), + // We use the user's setting to infer if this is module or item. + ImportGranularityGuess::ModuleOrItem => match mb { + Some(MergeBehavior::Module) | None => mb, + // There isn't really a way to decide between module or item here, so we just pick one. + // FIXME: Maybe it is possible to infer based on semantic analysis? 
+ Some(MergeBehavior::One | MergeBehavior::Crate) => Some(MergeBehavior::Module), + }, + ImportGranularityGuess::Crate => Some(MergeBehavior::Crate), + ImportGranularityGuess::CrateOrModule => match mb { + Some(MergeBehavior::Crate | MergeBehavior::Module) => mb, + Some(MergeBehavior::One) | None => Some(MergeBehavior::Crate), + }, + ImportGranularityGuess::One => Some(MergeBehavior::One), + }; + } + + let use_tree = syntax_factory.use_tree(path, None, alias, false); + if mb == Some(MergeBehavior::One) && use_tree.path().is_some() { + use_tree.wrap_in_tree_list(); + } + let use_item = make::use_(None, None, use_tree).clone_for_update(); + for attr in + scope.required_cfgs.iter().map(|attr| attr.syntax().clone_subtree().clone_for_update()) + { + syntax_editor.insert(Position::first_child_of(use_item.syntax()), attr); + } + + // merge into existing imports if possible + if let Some(mb) = mb { + let filter = |it: &_| !(cfg.skip_glob_imports && ast::Use::is_simple_glob(it)); + for existing_use in + scope.as_syntax_node().children().filter_map(ast::Use::cast).filter(filter) + { + if let Some(merged) = try_merge_imports(&existing_use, &use_item, mb) { + syntax_editor.replace(existing_use.syntax(), merged.syntax()); + return; + } + } + } + // either we weren't allowed to merge or there is no import that fits the merge conditions + // so look for the place we have to insert to + insert_use_with_editor_(scope, use_item, cfg.group, syntax_editor, syntax_factory); +} + pub fn ast_to_remove_for_path_in_use_stmt(path: &ast::Path) -> Option> { // FIXME: improve this if path.parent_path().is_some() { @@ -500,6 +577,127 @@ fn insert_use_(scope: &ImportScope, use_item: ast::Use, group_imports: bool) { } } +fn insert_use_with_editor_( + scope: &ImportScope, + use_item: ast::Use, + group_imports: bool, + syntax_editor: &mut SyntaxEditor, + syntax_factory: &SyntaxFactory, +) { + let scope_syntax = scope.as_syntax_node(); + let insert_use_tree = + use_item.use_tree().expect("`use_item` should have a use tree for `insert_path`"); + let group = ImportGroup::new(&insert_use_tree); + let path_node_iter = scope_syntax + .children() + .filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node))) + .flat_map(|(use_, node)| { + let tree = use_.use_tree()?; + Some((tree, node)) + }); + + if group_imports { + // Iterator that discards anything that's not in the required grouping + // This implementation allows the user to rearrange their import groups as this only takes the first group that fits + let group_iter = path_node_iter + .clone() + .skip_while(|(use_tree, ..)| ImportGroup::new(use_tree) != group) + .take_while(|(use_tree, ..)| ImportGroup::new(use_tree) == group); + + // track the last element we iterated over, if this is still None after the iteration then that means we never iterated in the first place + let mut last = None; + // find the element that would come directly after our new import + let post_insert: Option<(_, SyntaxNode)> = group_iter + .inspect(|(.., node)| last = Some(node.clone())) + .find(|(use_tree, _)| use_tree_cmp(&insert_use_tree, use_tree) != Ordering::Greater); + + if let Some((.., node)) = post_insert { + cov_mark::hit!(insert_group); + // insert our import before that element + return syntax_editor.insert(Position::before(node), use_item.syntax()); + } + if let Some(node) = last { + cov_mark::hit!(insert_group_last); + // there is no element after our new import, so append it to the end of the group + return syntax_editor.insert(Position::after(node), use_item.syntax()); + } + + // 
the group we were looking for actually doesn't exist, so insert + + let mut last = None; + // find the group that comes after where we want to insert + let post_group = path_node_iter + .inspect(|(.., node)| last = Some(node.clone())) + .find(|(use_tree, ..)| ImportGroup::new(use_tree) > group); + if let Some((.., node)) = post_group { + cov_mark::hit!(insert_group_new_group); + syntax_editor.insert(Position::before(&node), use_item.syntax()); + if let Some(node) = algo::non_trivia_sibling(node.into(), Direction::Prev) { + syntax_editor.insert(Position::after(node), syntax_factory.whitespace("\n")); + } + return; + } + // there is no such group, so append after the last one + if let Some(node) = last { + cov_mark::hit!(insert_group_no_group); + syntax_editor.insert(Position::after(&node), use_item.syntax()); + syntax_editor.insert(Position::after(node), syntax_factory.whitespace("\n")); + return; + } + } else { + // There exists a group, so append to the end of it + if let Some((_, node)) = path_node_iter.last() { + cov_mark::hit!(insert_no_grouping_last); + syntax_editor.insert(Position::after(node), use_item.syntax()); + return; + } + } + + let l_curly = match &scope.kind { + ImportScopeKind::File(_) => None, + // don't insert the imports before the item list/block expr's opening curly brace + ImportScopeKind::Module(item_list) => item_list.l_curly_token(), + // don't insert the imports before the item list's opening curly brace + ImportScopeKind::Block(block) => block.l_curly_token(), + }; + // there are no imports in this file at all + // so put the import after all inner module attributes and possible license header comments + if let Some(last_inner_element) = scope_syntax + .children_with_tokens() + // skip the curly brace + .skip(l_curly.is_some() as usize) + .take_while(|child| match child { + NodeOrToken::Node(node) => is_inner_attribute(node.clone()), + NodeOrToken::Token(token) => { + [SyntaxKind::WHITESPACE, SyntaxKind::COMMENT, SyntaxKind::SHEBANG] + .contains(&token.kind()) + } + }) + .filter(|child| child.as_token().is_none_or(|t| t.kind() != SyntaxKind::WHITESPACE)) + .last() + { + cov_mark::hit!(insert_empty_inner_attr); + syntax_editor.insert(Position::after(&last_inner_element), use_item.syntax()); + syntax_editor.insert(Position::after(last_inner_element), syntax_factory.whitespace("\n")); + } else { + match l_curly { + Some(b) => { + cov_mark::hit!(insert_empty_module); + syntax_editor.insert(Position::after(&b), syntax_factory.whitespace("\n")); + syntax_editor.insert(Position::after(&b), use_item.syntax()); + } + None => { + cov_mark::hit!(insert_empty_file); + syntax_editor.insert( + Position::first_child_of(scope_syntax), + syntax_factory.whitespace("\n\n"), + ); + syntax_editor.insert(Position::first_child_of(scope_syntax), use_item.syntax()); + } + } + } +} + fn is_inner_attribute(node: SyntaxNode) -> bool { ast::Attr::cast(node).map(|attr| attr.kind()) == Some(ast::AttrKind::Inner) } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index 7900a0dc9991c..7fbbc576dd36a 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -9719,6 +9719,99 @@ fn test_hover_function_with_pat_param() { ); } +#[test] +fn test_hover_function_with_too_long_param() { + check( + r#" +fn fn_$0( + attrs: impl IntoIterator, + visibility: Option, + fn_name: ast::Name, + type_params: Option, + where_clause: Option, + params: ast::ParamList, + 
body: ast::BlockExpr, + ret_type: Option, + is_async: bool, + is_const: bool, + is_unsafe: bool, + is_gen: bool, +) -> ast::Fn {} + "#, + expect![[r#" + *fn_* + + ```rust + ra_test_fixture + ``` + + ```rust + fn fn_( + attrs: impl IntoIterator, + visibility: Option, + fn_name: ast::Name, + type_params: Option, + where_clause: Option, + params: ast::ParamList, + body: ast::BlockExpr, + ret_type: Option, + is_async: bool, + is_const: bool, + is_unsafe: bool, + is_gen: bool + ) -> ast::Fn + ``` + "#]], + ); + + check( + r#" +fn fn_$0( + &self, + attrs: impl IntoIterator, + visibility: Option, + fn_name: ast::Name, + type_params: Option, + where_clause: Option, + params: ast::ParamList, + body: ast::BlockExpr, + ret_type: Option, + is_async: bool, + is_const: bool, + is_unsafe: bool, + is_gen: bool, + ... +) -> ast::Fn {} + "#, + expect![[r#" + *fn_* + + ```rust + ra_test_fixture + ``` + + ```rust + fn fn_( + &self, + attrs: impl IntoIterator, + visibility: Option, + fn_name: ast::Name, + type_params: Option, + where_clause: Option, + params: ast::ParamList, + body: ast::BlockExpr, + ret_type: Option, + is_async: bool, + is_const: bool, + is_unsafe: bool, + is_gen: bool, + ... + ) -> ast::Fn + ``` + "#]], + ); +} + #[test] fn hover_path_inside_block_scope() { check( diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 2e618550f92fb..930eaf2262d93 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -67,7 +67,7 @@ use ide_db::{ FxHashMap, FxIndexSet, LineIndexDatabase, base_db::{ CrateOrigin, CrateWorkspaceData, Env, FileSet, RootQueryDb, SourceDatabase, VfsPath, - salsa::{CancellationToken, Cancelled, Database}, + salsa::{Cancelled, Database}, }, prime_caches, symbol_index, }; @@ -947,10 +947,6 @@ impl Analysis { // We use `attach_db_allow_change()` and not `attach_db()` because fixture injection can change the database. hir::attach_db_allow_change(&self.db, || Cancelled::catch(|| f(&self.db))) } - - pub fn cancellation_token(&self) -> CancellationToken { - self.db.cancellation_token() - } } #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/references.rs b/src/tools/rust-analyzer/crates/ide/src/references.rs index 5443021988d45..38ee09703398b 100644 --- a/src/tools/rust-analyzer/crates/ide/src/references.rs +++ b/src/tools/rust-analyzer/crates/ide/src/references.rs @@ -2073,6 +2073,7 @@ fn func() {} expect![[r#" identity Attribute FileId(1) 1..107 32..40 + FileId(0) 17..25 import FileId(0) 43..51 "#]], ); @@ -2103,6 +2104,7 @@ mirror$0! {} expect![[r#" mirror ProcMacro FileId(1) 1..77 22..28 + FileId(0) 17..23 import FileId(0) 26..32 "#]], ) diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html index 59612634fda38..740a6272a79a7 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_macros.html @@ -41,7 +41,8 @@ .invalid_escape_sequence { color: #FC5555; text-decoration: wavy underline; } .unresolved_reference { color: #FC5555; text-decoration: wavy underline; } -
use proc_macros::{mirror, identity, DeriveIdentity};
+
use proc_macros::{mirror, identity, DeriveIdentity};
+use pm::proc_macro;
 
 mirror! {
     {
diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
index 8b529cf10f7f9..c6aebd0b0cd11 100644
--- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
+++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
@@ -55,8 +55,9 @@ fn macros() {
         r#"
 //- proc_macros: mirror, identity, derive_identity
 //- minicore: fmt, include, concat
-//- /lib.rs crate:lib
+//- /lib.rs crate:lib deps:pm
 use proc_macros::{mirror, identity, DeriveIdentity};
+use pm::proc_macro;
 
 mirror! {
     {
@@ -126,6 +127,11 @@ fn main() {
 //- /foo/foo.rs crate:foo
 mod foo {}
 use self::foo as bar;
+//- /pm.rs crate:pm
+#![crate_type = "proc-macro"]
+
+#[proc_macro_attribute]
+pub fn proc_macro() {}
 "#,
         expect_file!["./test_data/highlight_macros.html"],
         false,
diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
index c2935d94a8a71..70a00cf825162 100644
--- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs
@@ -26,10 +26,7 @@ use ide_db::{
 use itertools::Itertools;
 use proc_macro_api::{
     MacroDylib, ProcMacroClient,
-    bidirectional_protocol::{
-        msg::{SubRequest, SubResponse},
-        reject_subrequests,
-    },
+    bidirectional_protocol::msg::{SubRequest, SubResponse},
 };
 use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
 use span::{Span, SpanAnchor, SyntaxContext};
@@ -446,7 +443,7 @@ pub fn load_proc_macro(
 ) -> ProcMacroLoadResult {
     let res: Result, _> = (|| {
         let dylib = MacroDylib::new(path.to_path_buf());
-        let vec = server.load_dylib(dylib, Some(&reject_subrequests)).map_err(|e| {
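+        // `load_dylib` no longer takes a sub-request callback; the client itself now answers
+        // any sub-request that arrives during loading with a cancellation.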
+        let vec = server.load_dylib(dylib).map_err(|e| {
             ProcMacroLoadingError::ProcMacroSrvError(format!("{e}").into_boxed_str())
         })?;
         if vec.is_empty() {
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
index 4de1a3e5dd7d5..a135a469e87e4 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml
@@ -31,6 +31,7 @@ span = { path = "../span", version = "0.0.0", default-features = false}
 intern.workspace = true
 postcard.workspace = true
 semver.workspace = true
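+# rayon is used to load proc-macro dylibs into the worker pool in parallel.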
+rayon.workspace = true
 
 [features]
 sysroot-abi = ["proc-macro-srv", "proc-macro-srv/sysroot-abi"]
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
index e4b121b033d34..68b3afc3e8414 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/lib.rs
@@ -198,12 +198,8 @@ impl ProcMacroClient {
     }
 
     /// Loads a proc-macro dylib into the server process, returning a list of the `ProcMacro`s that were loaded.
-    pub fn load_dylib(
-        &self,
-        dylib: MacroDylib,
-        callback: Option>,
-    ) -> Result, ServerError> {
-        self.pool.load_dylib(&dylib, callback)
+    pub fn load_dylib(&self, dylib: MacroDylib) -> Result, ServerError> {
+        self.pool.load_dylib(&dylib)
     }
 
     /// Checks if the proc-macro server has exited.
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
index a637bc0e480a4..e6541823da583 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/pool.rs
@@ -1,10 +1,9 @@
 //! A pool of proc-macro server processes
 use std::sync::Arc;
 
-use crate::{
-    MacroDylib, ProcMacro, ServerError, bidirectional_protocol::SubCallback,
-    process::ProcMacroServerProcess,
-};
+use rayon::iter::{IntoParallelIterator, ParallelIterator};
+
+use crate::{MacroDylib, ProcMacro, ServerError, process::ProcMacroServerProcess};
 
 #[derive(Debug, Clone)]
 pub(crate) struct ProcMacroServerPool {
@@ -50,11 +49,7 @@ impl ProcMacroServerPool {
         })
     }
 
-    pub(crate) fn load_dylib(
-        &self,
-        dylib: &MacroDylib,
-        callback: Option>,
-    ) -> Result, ServerError> {
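+    /// Loads the dylib into every worker in the pool; the returned macros come from the first worker.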
+    pub(crate) fn load_dylib(&self, dylib: &MacroDylib) -> Result, ServerError> {
         let _span = tracing::info_span!("ProcMacroServer::load_dylib").entered();
 
         let dylib_path = Arc::new(dylib.path.clone());
@@ -64,14 +59,17 @@ impl ProcMacroServerPool {
         let (first, rest) = self.workers.split_first().expect("worker pool must not be empty");
 
         let macros = first
-            .find_proc_macros(&dylib.path, callback)?
+            .find_proc_macros(&dylib.path)?
             .map_err(|e| ServerError { message: e, io: None })?;
 
-        for worker in rest {
-            worker
-                .find_proc_macros(&dylib.path, callback)?
-                .map_err(|e| ServerError { message: e, io: None })?;
-        }
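+        // Load the dylib into the remaining workers in parallel; any worker failure aborts the whole load.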
+        rest.into_par_iter()
+            .map(|worker| {
+                worker
+                    .find_proc_macros(&dylib.path)?
+                    .map(|_| ())
+                    .map_err(|e| ServerError { message: e, io: None })
+            })
+            .collect::>()?;
 
         Ok(macros
             .into_iter()
diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
index 9f80880965b83..80e4ed05c36d8 100644
--- a/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
+++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/process.rs
@@ -18,7 +18,11 @@ use stdx::JodChild;
 
 use crate::{
     ProcMacro, ProcMacroKind, ProtocolFormat, ServerError,
-    bidirectional_protocol::{self, SubCallback, msg::BidirectionalMessage, reject_subrequests},
+    bidirectional_protocol::{
+        self, SubCallback,
+        msg::{BidirectionalMessage, SubResponse},
+        reject_subrequests,
+    },
     legacy_protocol::{self, SpanMode},
     version,
 };
@@ -207,14 +211,18 @@ impl ProcMacroServerProcess {
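+    /// Asks the server process to enumerate the proc macros exported by the given dylib.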
     pub(crate) fn find_proc_macros(
         &self,
         dylib_path: &AbsPath,
-        callback: Option>,
     ) -> Result, String>, ServerError> {
         match self.protocol {
             Protocol::LegacyJson { .. } => legacy_protocol::find_proc_macros(self, dylib_path),
 
             Protocol::BidirectionalPostcardPrototype { .. } => {
-                let cb = callback.expect("callback required for bidirectional protocol");
-                bidirectional_protocol::find_proc_macros(self, dylib_path, cb)
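+                // Loading a dylib should never trigger a sub-request, so answer any that arrive with a cancellation.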
+                bidirectional_protocol::find_proc_macros(self, dylib_path, &|_| {
+                    Ok(SubResponse::Cancel {
+                        reason: String::from(
+                            "Server should not do a sub request when loading proc-macros",
+                        ),
+                    })
+                })
             }
         }
     }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
index c74f4550fd890..47f7a57f72eca 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/flycheck.rs
@@ -741,70 +741,42 @@ impl FlycheckActor {
                             flycheck_id = self.id,
                             message = diagnostic.message,
                             package_id = package_id.as_ref().map(|it| it.as_str()),
-                            scope = ?self.scope,
                             "diagnostic received"
                         );
-
-                        match &self.scope {
-                            FlycheckScope::Workspace => {
-                                if self.diagnostics_received == DiagnosticsReceived::NotYet {
-                                    self.send(FlycheckMessage::ClearDiagnostics {
-                                        id: self.id,
-                                        kind: ClearDiagnosticsKind::All(ClearScope::Workspace),
-                                    });
-
-                                    self.diagnostics_received =
-                                        DiagnosticsReceived::AtLeastOneAndClearedWorkspace;
-                                }
-
-                                if let Some(package_id) = package_id {
-                                    tracing::warn!(
-                                        "Ignoring package label {:?} and applying diagnostics to the whole workspace",
-                                        package_id
-                                    );
-                                }
-
-                                self.send(FlycheckMessage::AddDiagnostic {
-                                    id: self.id,
-                                    generation: self.generation,
-                                    package_id: None,
-                                    workspace_root: self.root.clone(),
-                                    diagnostic,
-                                });
-                            }
-                            FlycheckScope::Package { package: flycheck_package, .. } => {
-                                if self.diagnostics_received == DiagnosticsReceived::NotYet {
-                                    self.diagnostics_received = DiagnosticsReceived::AtLeastOne;
-                                }
-
-                                // If the package has been set in the diagnostic JSON, respect that. Otherwise, use the
-                                // package that the current flycheck is scoped to. This is useful when a project is
-                                // directly using rustc for its checks (e.g. custom check commands in rust-project.json).
-                                let package_id = package_id.unwrap_or(flycheck_package.clone());
-
-                                if self.diagnostics_cleared_for.insert(package_id.clone()) {
-                                    tracing::trace!(
-                                        flycheck_id = self.id,
-                                        package_id = package_id.as_str(),
-                                        "clearing diagnostics"
-                                    );
-                                    self.send(FlycheckMessage::ClearDiagnostics {
-                                        id: self.id,
-                                        kind: ClearDiagnosticsKind::All(ClearScope::Package(
-                                            package_id.clone(),
-                                        )),
-                                    });
-                                }
-
-                                self.send(FlycheckMessage::AddDiagnostic {
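+                        // Record that this run has produced at least one diagnostic.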
+                        if self.diagnostics_received == DiagnosticsReceived::NotYet {
+                            self.diagnostics_received = DiagnosticsReceived::AtLeastOne;
+                        }
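+                        // Diagnostics labelled with a package id: clear that package's stale diagnostics the first time it is seen.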
+                        if let Some(package_id) = &package_id {
+                            if self.diagnostics_cleared_for.insert(package_id.clone()) {
+                                tracing::trace!(
+                                    flycheck_id = self.id,
+                                    package_id = package_id.as_str(),
+                                    "clearing diagnostics"
+                                );
+                                self.send(FlycheckMessage::ClearDiagnostics {
                                     id: self.id,
-                                    generation: self.generation,
-                                    package_id: Some(package_id),
-                                    workspace_root: self.root.clone(),
-                                    diagnostic,
+                                    kind: ClearDiagnosticsKind::All(ClearScope::Package(
+                                        package_id.clone(),
+                                    )),
                                 });
                             }
+                        } else if self.diagnostics_received
+                            != DiagnosticsReceived::AtLeastOneAndClearedWorkspace
+                        {
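+                            // Unlabelled diagnostics apply to the whole workspace: clear it, but only once per run.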
+                            self.diagnostics_received =
+                                DiagnosticsReceived::AtLeastOneAndClearedWorkspace;
+                            self.send(FlycheckMessage::ClearDiagnostics {
+                                id: self.id,
+                                kind: ClearDiagnosticsKind::All(ClearScope::Workspace),
+                            });
                         }
+                        self.send(FlycheckMessage::AddDiagnostic {
+                            id: self.id,
+                            generation: self.generation,
+                            package_id,
+                            workspace_root: self.root.clone(),
+                            diagnostic,
+                        });
                     }
                 },
             }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
index 1462727df4e18..afd4162de6227 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
@@ -14,7 +14,7 @@ use hir::ChangeWithProcMacros;
 use ide::{Analysis, AnalysisHost, Cancellable, FileId, SourceRootId};
 use ide_db::{
     MiniCore,
-    base_db::{Crate, ProcMacroPaths, SourceDatabase, salsa::CancellationToken, salsa::Revision},
+    base_db::{Crate, ProcMacroPaths, SourceDatabase, salsa::Revision},
 };
 use itertools::Itertools;
 use load_cargo::SourceRootConfig;
@@ -88,7 +88,6 @@ pub(crate) struct GlobalState {
     pub(crate) task_pool: Handle, Receiver>,
     pub(crate) fmt_pool: Handle, Receiver>,
     pub(crate) cancellation_pool: thread::Pool,
-    pub(crate) cancellation_tokens: FxHashMap,
 
     pub(crate) config: Arc,
     pub(crate) config_errors: Option,
@@ -266,7 +265,6 @@ impl GlobalState {
             task_pool,
             fmt_pool,
             cancellation_pool,
-            cancellation_tokens: Default::default(),
             loader,
             config: Arc::new(config.clone()),
             analysis_host,
@@ -619,7 +617,6 @@ impl GlobalState {
     }
 
     pub(crate) fn respond(&mut self, response: lsp_server::Response) {
-        self.cancellation_tokens.remove(&response.id);
         if let Some((method, start)) = self.req_queue.incoming.complete(&response.id) {
             if let Some(err) = &response.error
                 && err.message.starts_with("server panicked")
@@ -634,9 +631,6 @@ impl GlobalState {
     }
 
     pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) {
-        if let Some(token) = self.cancellation_tokens.remove(&request_id) {
-            token.cancel();
-        }
         if let Some(response) = self.req_queue.incoming.cancel(request_id) {
             self.send(response.into());
         }
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
index 63b4e6430c2d9..90deae2d902e5 100644
--- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
+++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/dispatch.rs
@@ -253,9 +253,6 @@ impl RequestDispatcher<'_> {
         tracing::debug!(?params);
 
         let world = self.global_state.snapshot();
-        self.global_state
-            .cancellation_tokens
-            .insert(req.id.clone(), world.analysis.cancellation_token());
         if RUSTFMT {
             &mut self.global_state.fmt_pool.handle
         } else {
@@ -268,19 +265,7 @@ impl RequestDispatcher<'_> {
             });
             match thread_result_to_response::(req.id.clone(), result) {
                 Ok(response) => Task::Response(response),
-                Err(HandlerCancelledError::Inner(
-                    Cancelled::PendingWrite | Cancelled::PropagatedPanic,
-                )) if ALLOW_RETRYING => Task::Retry(req),
-                // Note: Technically the return value here does not matter as we have already responded to the client with this error.
-                Err(HandlerCancelledError::Inner(Cancelled::Local)) => Task::Response(Response {
-                    id: req.id,
-                    result: None,
-                    error: Some(ResponseError {
-                        code: lsp_server::ErrorCode::RequestCanceled as i32,
-                        message: "canceled by client".to_owned(),
-                        data: None,
-                    }),
-                }),
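+                // Cancellations are now handled uniformly: retry when the request allows it, otherwise fall through to `on_cancelled` below.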
+                Err(_cancelled) if ALLOW_RETRYING => Task::Retry(req),
                 Err(_cancelled) => {
                     let error = on_cancelled();
                     Task::Response(Response { id: req.id, result: None, error: Some(error) })
diff --git a/src/tools/rust-analyzer/crates/span/src/ast_id.rs b/src/tools/rust-analyzer/crates/span/src/ast_id.rs
index 599b3c7175228..f52604e139177 100644
--- a/src/tools/rust-analyzer/crates/span/src/ast_id.rs
+++ b/src/tools/rust-analyzer/crates/span/src/ast_id.rs
@@ -88,7 +88,6 @@ impl fmt::Debug for ErasedFileAstId {
             Module,
             Static,
             Trait,
-            TraitAlias,
             Variant,
             Const,
             Fn,
@@ -129,7 +128,6 @@ enum ErasedFileAstIdKind {
     Module,
     Static,
     Trait,
-    TraitAlias,
     // Variants up to this point are associated with `ErasedHasNameFileAstId`.
     // The following are associated with `ErasedAssocItemFileAstId`.
     Variant,
diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs
index 0a81cef52ec5a..fe05ef9465181 100644
--- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs
+++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs
@@ -81,24 +81,25 @@ const _: () = {
     #[derive(Hash)]
     struct StructKey<'db, T0, T1, T2, T3>(T0, T1, T2, T3, std::marker::PhantomData<&'db ()>);
 
-    impl<'db, T0, T1, T2, T3> zalsa_::HashEqLike<StructKey<'db, T0, T1, T2, T3>> for SyntaxContextData
+    impl<'db, T0, T1, T2, T3> zalsa_::interned::HashEqLike<StructKey<'db, T0, T1, T2, T3>>
+        for SyntaxContextData
     where
-        Option: zalsa_::HashEqLike,
-        Transparency: zalsa_::HashEqLike,
-        Edition: zalsa_::HashEqLike,
-        SyntaxContext: zalsa_::HashEqLike,
+        Option: zalsa_::interned::HashEqLike,
+        Transparency: zalsa_::interned::HashEqLike,
+        Edition: zalsa_::interned::HashEqLike,
+        SyntaxContext: zalsa_::interned::HashEqLike,
     {
         fn hash(&self, h: &mut H) {
-            zalsa_::HashEqLike::<T0>::hash(&self.outer_expn, &mut *h);
-            zalsa_::HashEqLike::<T1>::hash(&self.outer_transparency, &mut *h);
-            zalsa_::HashEqLike::<T2>::hash(&self.edition, &mut *h);
-            zalsa_::HashEqLike::<T3>::hash(&self.parent, &mut *h);
+            zalsa_::interned::HashEqLike::<T0>::hash(&self.outer_expn, &mut *h);
+            zalsa_::interned::HashEqLike::<T1>::hash(&self.outer_transparency, &mut *h);
+            zalsa_::interned::HashEqLike::<T2>::hash(&self.edition, &mut *h);
+            zalsa_::interned::HashEqLike::<T3>::hash(&self.parent, &mut *h);
         }
         fn eq(&self, data: &StructKey<'db, T0, T1, T2, T3>) -> bool {
-            zalsa_::HashEqLike::<T0>::eq(&self.outer_expn, &data.0)
-                && zalsa_::HashEqLike::<T1>::eq(&self.outer_transparency, &data.1)
-                && zalsa_::HashEqLike::<T2>::eq(&self.edition, &data.2)
-                && zalsa_::HashEqLike::<T3>::eq(&self.parent, &data.3)
+            zalsa_::interned::HashEqLike::<T0>::eq(&self.outer_expn, &data.0)
+                && zalsa_::interned::HashEqLike::<T1>::eq(&self.outer_transparency, &data.1)
+                && zalsa_::interned::HashEqLike::<T2>::eq(&self.edition, &data.2)
+                && zalsa_::interned::HashEqLike::<T3>::eq(&self.parent, &data.3)
         }
     }
     impl zalsa_struct_::Configuration for SyntaxContext {
@@ -202,10 +203,10 @@ const _: () = {
     impl<'db> SyntaxContext {
         pub fn new<
             Db,
-            T0: zalsa_::Lookup> + std::hash::Hash,
-            T1: zalsa_::Lookup + std::hash::Hash,
-            T2: zalsa_::Lookup + std::hash::Hash,
-            T3: zalsa_::Lookup + std::hash::Hash,
+            T0: zalsa_::interned::Lookup> + std::hash::Hash,
+            T1: zalsa_::interned::Lookup + std::hash::Hash,
+            T2: zalsa_::interned::Lookup + std::hash::Hash,
+            T3: zalsa_::interned::Lookup + std::hash::Hash,
         >(
             db: &'db Db,
             outer_expn: T0,
@@ -217,10 +218,10 @@ const _: () = {
         ) -> Self
         where
             Db: ?Sized + salsa::Database,
-            Option: zalsa_::HashEqLike,
-            Transparency: zalsa_::HashEqLike,
-            Edition: zalsa_::HashEqLike,
-            SyntaxContext: zalsa_::HashEqLike,
+            Option: zalsa_::interned::HashEqLike,
+            Transparency: zalsa_::interned::HashEqLike,
+            Edition: zalsa_::interned::HashEqLike,
+            SyntaxContext: zalsa_::interned::HashEqLike,
         {
             let (zalsa, zalsa_local) = db.zalsas();
 
@@ -235,10 +236,10 @@ const _: () = {
                     std::marker::PhantomData,
                 ),
                 |id, data| SyntaxContextData {
-                    outer_expn: zalsa_::Lookup::into_owned(data.0),
-                    outer_transparency: zalsa_::Lookup::into_owned(data.1),
-                    edition: zalsa_::Lookup::into_owned(data.2),
-                    parent: zalsa_::Lookup::into_owned(data.3),
+                    outer_expn: zalsa_::interned::Lookup::into_owned(data.0),
+                    outer_transparency: zalsa_::interned::Lookup::into_owned(data.1),
+                    edition: zalsa_::interned::Lookup::into_owned(data.2),
+                    parent: zalsa_::interned::Lookup::into_owned(data.3),
                     opaque: opaque(zalsa_::FromId::from_id(id)),
                     opaque_and_semiopaque: opaque_and_semiopaque(zalsa_::FromId::from_id(id)),
                 },
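
The hygiene change above is mechanical: the hand-expanded salsa boilerplate now reaches `HashEqLike` and `Lookup` through salsa's `interned` module path. For review context only, a hedged, self-contained sketch of what a "hash-eq-like" lookup trait does; the names here are illustrative and the real trait lives in salsa:

```rust
use std::hash::{Hash, Hasher};

// A borrowed lookup key can be hashed and compared against the owned,
// interned value without first converting it to the owned form.
trait HashEqLike<Key: ?Sized> {
    fn hash<H: Hasher>(&self, h: &mut H);
    fn eq(&self, key: &Key) -> bool;
}

// An owned String can be looked up by a plain &str.
impl HashEqLike<&str> for String {
    fn hash<H: Hasher>(&self, h: &mut H) {
        Hash::hash(self.as_str(), h);
    }
    fn eq(&self, key: &&str) -> bool {
        self == *key
    }
}

fn main() {
    let owned = String::from("hygiene");
    assert!(HashEqLike::eq(&owned, &"hygiene"));

    let mut hasher = std::collections::hash_map::DefaultHasher::new();
    HashEqLike::<&str>::hash(&owned, &mut hasher);
}
```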
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
index 1cd8146f68630..2b7dc5cd76ab5 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs
@@ -9,8 +9,9 @@ use crate::{
     SyntaxKind::{ATTR, COMMENT, WHITESPACE},
     SyntaxNode, SyntaxToken,
     algo::{self, neighbor},
-    ast::{self, HasGenericParams, edit::IndentLevel, make},
-    ted::{self, Position},
+    ast::{self, HasGenericParams, edit::IndentLevel, make, syntax_factory::SyntaxFactory},
+    syntax_editor::{Position, SyntaxEditor},
+    ted,
 };
 
 use super::{GenericParam, HasName};
@@ -26,13 +27,13 @@ impl GenericParamsOwnerEdit for ast::Fn {
             Some(it) => it,
             None => {
                 let position = if let Some(name) = self.name() {
-                    Position::after(name.syntax)
+                    ted::Position::after(name.syntax)
                 } else if let Some(fn_token) = self.fn_token() {
-                    Position::after(fn_token)
+                    ted::Position::after(fn_token)
                 } else if let Some(param_list) = self.param_list() {
-                    Position::before(param_list.syntax)
+                    ted::Position::before(param_list.syntax)
                 } else {
-                    Position::last_child_of(self.syntax())
+                    ted::Position::last_child_of(self.syntax())
                 };
                 create_generic_param_list(position)
             }
@@ -42,11 +43,11 @@ impl GenericParamsOwnerEdit for ast::Fn {
     fn get_or_create_where_clause(&self) -> ast::WhereClause {
         if self.where_clause().is_none() {
             let position = if let Some(ty) = self.ret_type() {
-                Position::after(ty.syntax())
+                ted::Position::after(ty.syntax())
             } else if let Some(param_list) = self.param_list() {
-                Position::after(param_list.syntax())
+                ted::Position::after(param_list.syntax())
             } else {
-                Position::last_child_of(self.syntax())
+                ted::Position::last_child_of(self.syntax())
             };
             create_where_clause(position);
         }
@@ -60,8 +61,8 @@ impl GenericParamsOwnerEdit for ast::Impl {
             Some(it) => it,
             None => {
                 let position = match self.impl_token() {
-                    Some(imp_token) => Position::after(imp_token),
-                    None => Position::last_child_of(self.syntax()),
+                    Some(imp_token) => ted::Position::after(imp_token),
+                    None => ted::Position::last_child_of(self.syntax()),
                 };
                 create_generic_param_list(position)
             }
@@ -71,8 +72,8 @@ impl GenericParamsOwnerEdit for ast::Impl {
     fn get_or_create_where_clause(&self) -> ast::WhereClause {
         if self.where_clause().is_none() {
             let position = match self.assoc_item_list() {
-                Some(items) => Position::before(items.syntax()),
-                None => Position::last_child_of(self.syntax()),
+                Some(items) => ted::Position::before(items.syntax()),
+                None => ted::Position::last_child_of(self.syntax()),
             };
             create_where_clause(position);
         }
@@ -86,11 +87,11 @@ impl GenericParamsOwnerEdit for ast::Trait {
             Some(it) => it,
             None => {
                 let position = if let Some(name) = self.name() {
-                    Position::after(name.syntax)
+                    ted::Position::after(name.syntax)
                 } else if let Some(trait_token) = self.trait_token() {
-                    Position::after(trait_token)
+                    ted::Position::after(trait_token)
                 } else {
-                    Position::last_child_of(self.syntax())
+                    ted::Position::last_child_of(self.syntax())
                 };
                 create_generic_param_list(position)
             }
@@ -100,9 +101,9 @@ impl GenericParamsOwnerEdit for ast::Trait {
     fn get_or_create_where_clause(&self) -> ast::WhereClause {
         if self.where_clause().is_none() {
             let position = match (self.assoc_item_list(), self.semicolon_token()) {
-                (Some(items), _) => Position::before(items.syntax()),
-                (_, Some(tok)) => Position::before(tok),
-                (None, None) => Position::last_child_of(self.syntax()),
+                (Some(items), _) => ted::Position::before(items.syntax()),
+                (_, Some(tok)) => ted::Position::before(tok),
+                (None, None) => ted::Position::last_child_of(self.syntax()),
             };
             create_where_clause(position);
         }
@@ -116,11 +117,11 @@ impl GenericParamsOwnerEdit for ast::TypeAlias {
             Some(it) => it,
             None => {
                 let position = if let Some(name) = self.name() {
-                    Position::after(name.syntax)
+                    ted::Position::after(name.syntax)
                 } else if let Some(trait_token) = self.type_token() {
-                    Position::after(trait_token)
+                    ted::Position::after(trait_token)
                 } else {
-                    Position::last_child_of(self.syntax())
+                    ted::Position::last_child_of(self.syntax())
                 };
                 create_generic_param_list(position)
             }
@@ -130,10 +131,10 @@ impl GenericParamsOwnerEdit for ast::TypeAlias {
     fn get_or_create_where_clause(&self) -> ast::WhereClause {
         if self.where_clause().is_none() {
             let position = match self.eq_token() {
-                Some(tok) => Position::before(tok),
+                Some(tok) => ted::Position::before(tok),
                 None => match self.semicolon_token() {
-                    Some(tok) => Position::before(tok),
-                    None => Position::last_child_of(self.syntax()),
+                    Some(tok) => ted::Position::before(tok),
+                    None => ted::Position::last_child_of(self.syntax()),
                 },
             };
             create_where_clause(position);
@@ -148,11 +149,11 @@ impl GenericParamsOwnerEdit for ast::Struct {
             Some(it) => it,
             None => {
                 let position = if let Some(name) = self.name() {
-                    Position::after(name.syntax)
+                    ted::Position::after(name.syntax)
                 } else if let Some(struct_token) = self.struct_token() {
-                    Position::after(struct_token)
+                    ted::Position::after(struct_token)
                 } else {
-                    Position::last_child_of(self.syntax())
+                    ted::Position::last_child_of(self.syntax())
                 };
                 create_generic_param_list(position)
             }
@@ -166,13 +167,13 @@ impl GenericParamsOwnerEdit for ast::Struct {
                 ast::FieldList::TupleFieldList(it) => Some(it),
             });
             let position = if let Some(tfl) = tfl {
-                Position::after(tfl.syntax())
+                ted::Position::after(tfl.syntax())
             } else if let Some(gpl) = self.generic_param_list() {
-                Position::after(gpl.syntax())
+                ted::Position::after(gpl.syntax())
             } else if let Some(name) = self.name() {
-                Position::after(name.syntax())
+                ted::Position::after(name.syntax())
             } else {
-                Position::last_child_of(self.syntax())
+                ted::Position::last_child_of(self.syntax())
             };
             create_where_clause(position);
         }
@@ -186,11 +187,11 @@ impl GenericParamsOwnerEdit for ast::Enum {
             Some(it) => it,
             None => {
                 let position = if let Some(name) = self.name() {
-                    Position::after(name.syntax)
+                    ted::Position::after(name.syntax)
                 } else if let Some(enum_token) = self.enum_token() {
-                    Position::after(enum_token)
+                    ted::Position::after(enum_token)
                 } else {
-                    Position::last_child_of(self.syntax())
+                    ted::Position::last_child_of(self.syntax())
                 };
                 create_generic_param_list(position)
             }
@@ -200,11 +201,11 @@ impl GenericParamsOwnerEdit for ast::Enum {
     fn get_or_create_where_clause(&self) -> ast::WhereClause {
         if self.where_clause().is_none() {
             let position = if let Some(gpl) = self.generic_param_list() {
-                Position::after(gpl.syntax())
+                ted::Position::after(gpl.syntax())
             } else if let Some(name) = self.name() {
-                Position::after(name.syntax())
+                ted::Position::after(name.syntax())
             } else {
-                Position::last_child_of(self.syntax())
+                ted::Position::last_child_of(self.syntax())
             };
             create_where_clause(position);
         }
@@ -212,12 +213,12 @@ impl GenericParamsOwnerEdit for ast::Enum {
     }
 }
 
-fn create_where_clause(position: Position) {
+fn create_where_clause(position: ted::Position) {
     let where_clause = make::where_clause(empty()).clone_for_update();
     ted::insert(position, where_clause.syntax());
 }
 
-fn create_generic_param_list(position: Position) -> ast::GenericParamList {
+fn create_generic_param_list(position: ted::Position) -> ast::GenericParamList {
     let gpl = make::generic_param_list(empty()).clone_for_update();
     ted::insert_raw(position, gpl.syntax());
     gpl
@@ -253,7 +254,7 @@ impl ast::GenericParamList {
     pub fn add_generic_param(&self, generic_param: ast::GenericParam) {
         match self.generic_params().last() {
             Some(last_param) => {
-                let position = Position::after(last_param.syntax());
+                let position = ted::Position::after(last_param.syntax());
                 let elements = vec![
                     make::token(T![,]).into(),
                     make::tokens::single_space().into(),
@@ -262,7 +263,7 @@ impl ast::GenericParamList {
                 ted::insert_all(position, elements);
             }
             None => {
-                let after_l_angle = Position::after(self.l_angle_token().unwrap());
+                let after_l_angle = ted::Position::after(self.l_angle_token().unwrap());
                 ted::insert(after_l_angle, generic_param.syntax());
             }
         }
@@ -412,7 +413,7 @@ impl ast::UseTree {
         match self.use_tree_list() {
             Some(it) => it,
             None => {
-                let position = Position::last_child_of(self.syntax());
+                let position = ted::Position::last_child_of(self.syntax());
                 let use_tree_list = make::use_tree_list(empty()).clone_for_update();
                 let mut elements = Vec::with_capacity(2);
                 if self.coloncolon_token().is_none() {
@@ -458,7 +459,7 @@ impl ast::UseTree {
         // Next, transform 'suffix' use tree into 'prefix::{suffix}'
         let subtree = self.clone_subtree().clone_for_update();
         ted::remove_all_iter(self.syntax().children_with_tokens());
-        ted::insert(Position::first_child_of(self.syntax()), prefix.syntax());
+        ted::insert(ted::Position::first_child_of(self.syntax()), prefix.syntax());
         self.get_or_create_use_tree_list().add_use_tree(subtree);
 
         fn split_path_prefix(prefix: &ast::Path) -> Option<()> {
@@ -507,7 +508,7 @@ impl ast::UseTreeList {
     pub fn add_use_tree(&self, use_tree: ast::UseTree) {
         let (position, elements) = match self.use_trees().last() {
             Some(last_tree) => (
-                Position::after(last_tree.syntax()),
+                ted::Position::after(last_tree.syntax()),
                 vec![
                     make::token(T![,]).into(),
                     make::tokens::single_space().into(),
@@ -516,8 +517,8 @@ impl ast::UseTreeList {
             ),
             None => {
                 let position = match self.l_curly_token() {
-                    Some(l_curly) => Position::after(l_curly),
-                    None => Position::last_child_of(self.syntax()),
+                    Some(l_curly) => ted::Position::after(l_curly),
+                    None => ted::Position::last_child_of(self.syntax()),
                 };
                 (position, vec![use_tree.syntax.into()])
             }
@@ -582,15 +583,15 @@ impl ast::AssocItemList {
         let (indent, position, whitespace) = match self.assoc_items().last() {
             Some(last_item) => (
                 IndentLevel::from_node(last_item.syntax()),
-                Position::after(last_item.syntax()),
+                ted::Position::after(last_item.syntax()),
                 "\n\n",
             ),
             None => match self.l_curly_token() {
                 Some(l_curly) => {
                     normalize_ws_between_braces(self.syntax());
-                    (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
+                    (IndentLevel::from_token(&l_curly) + 1, ted::Position::after(&l_curly), "\n")
                 }
-                None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
+                None => (IndentLevel::single(), ted::Position::last_child_of(self.syntax()), "\n"),
             },
         };
         let elements: Vec<SyntaxElement> = vec![
@@ -618,17 +619,17 @@ impl ast::RecordExprFieldList {
         let position = match self.fields().last() {
             Some(last_field) => {
                 let comma = get_or_insert_comma_after(last_field.syntax());
-                Position::after(comma)
+                ted::Position::after(comma)
             }
             None => match self.l_curly_token() {
-                Some(it) => Position::after(it),
-                None => Position::last_child_of(self.syntax()),
+                Some(it) => ted::Position::after(it),
+                None => ted::Position::last_child_of(self.syntax()),
             },
         };
 
         ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
         if is_multiline {
-            ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+            ted::insert(ted::Position::after(field.syntax()), ast::make::token(T![,]));
         }
     }
 }
@@ -656,7 +657,7 @@ impl ast::RecordExprField {
                 ast::make::tokens::single_space().into(),
                 expr.syntax().clone().into(),
             ];
-            ted::insert_all_raw(Position::last_child_of(self.syntax()), children);
+            ted::insert_all_raw(ted::Position::last_child_of(self.syntax()), children);
         }
     }
 }
@@ -679,17 +680,17 @@ impl ast::RecordPatFieldList {
             Some(last_field) => {
                 let syntax = last_field.syntax();
                 let comma = get_or_insert_comma_after(syntax);
-                Position::after(comma)
+                ted::Position::after(comma)
             }
             None => match self.l_curly_token() {
-                Some(it) => Position::after(it),
-                None => Position::last_child_of(self.syntax()),
+                Some(it) => ted::Position::after(it),
+                None => ted::Position::last_child_of(self.syntax()),
             },
         };
 
         ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
         if is_multiline {
-            ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+            ted::insert(ted::Position::after(field.syntax()), ast::make::token(T![,]));
         }
     }
 }
@@ -703,7 +704,7 @@ fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
         Some(it) => it,
         None => {
             let comma = ast::make::token(T![,]);
-            ted::insert(Position::after(syntax), &comma);
+            ted::insert(ted::Position::after(syntax), &comma);
             comma
         }
     }
@@ -728,7 +729,7 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
             }
         }
         Some(ws) if ws.kind() == T!['}'] => {
-            ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{indent}")));
+            ted::insert(ted::Position::after(l), make::tokens::whitespace(&format!("\n{indent}")));
         }
         _ => (),
     }
@@ -780,6 +781,56 @@ impl ast::IdentPat {
             }
         }
     }
+
+    pub fn set_pat_with_editor(
+        &self,
+        pat: Option<ast::Pat>,
+        syntax_editor: &mut SyntaxEditor,
+        syntax_factory: &SyntaxFactory,
+    ) {
+        match pat {
+            None => {
+                if let Some(at_token) = self.at_token() {
+                    // Remove `@ Pat`
+                    let start = at_token.clone().into();
+                    let end = self
+                        .pat()
+                        .map(|it| it.syntax().clone().into())
+                        .unwrap_or_else(|| at_token.into());
+                    syntax_editor.delete_all(start..=end);
+
+                    // Remove any trailing ws
+                    if let Some(last) =
+                        self.syntax().last_token().filter(|it| it.kind() == WHITESPACE)
+                    {
+                        last.detach();
+                    }
+                }
+            }
+            Some(pat) => {
+                if let Some(old_pat) = self.pat() {
+                    // Replace existing pattern
+                    syntax_editor.replace(old_pat.syntax(), pat.syntax())
+                } else if let Some(at_token) = self.at_token() {
+                    // Have an `@` token but not a pattern yet
+                    syntax_editor.insert(Position::after(at_token), pat.syntax());
+                } else {
+                    // Don't have an `@`, should have a name
+                    let name = self.name().unwrap();
+
+                    syntax_editor.insert_all(
+                        Position::after(name.syntax()),
+                        vec![
+                            syntax_factory.whitespace(" ").into(),
+                            syntax_factory.token(T![@]).into(),
+                            syntax_factory.whitespace(" ").into(),
+                            pat.syntax().clone().into(),
+                        ],
+                    )
+                }
+            }
+        }
+    }
 }
 
 pub trait HasVisibilityEdit: ast::HasVisibility {
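
`set_pat_with_editor` is the `SyntaxEditor`-based counterpart of the existing in-place `set_pat`: clear an `ident @ pat`, replace the sub-pattern, or attach `@ pat` to a bare binding. A tiny stand-in model of those three cases, deliberately using plain data instead of real syntax trees:

```rust
struct IdentPat {
    name: String,
    sub_pat: Option<String>, // stand-in for `ast::Pat`
}

// Mirrors the branches of `set_pat_with_editor` on the model type.
fn set_pat(ident: &mut IdentPat, pat: Option<String>) {
    match pat {
        None => ident.sub_pat = None,       // `x @ Some(_)` -> `x`
        Some(p) => ident.sub_pat = Some(p), // `x` or `x @ Old` -> `x @ New`
    }
}

fn main() {
    let mut ident = IdentPat { name: "x".into(), sub_pat: Some("Some(_)".into()) };
    set_pat(&mut ident, None);
    assert_eq!(ident.sub_pat, None);
    set_pat(&mut ident, Some("Ok(_)".into()));
    assert_eq!(ident.sub_pat.as_deref(), Some("Ok(_)"));
    assert_eq!(ident.name, "x");
}
```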
diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs
index 5fe419ad4eb79..6e17d262a79dd 100644
--- a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs
+++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs
@@ -75,6 +75,24 @@ impl SyntaxFactory {
         make::path_from_text(text).clone_for_update()
     }
 
+    pub fn path_concat(&self, first: ast::Path, second: ast::Path) -> ast::Path {
+        make::path_concat(first, second).clone_for_update()
+    }
+
+    pub fn visibility_pub(&self) -> ast::Visibility {
+        make::visibility_pub()
+    }
+
+    pub fn struct_(
+        &self,
+        visibility: Option<ast::Visibility>,
+        strukt_name: ast::Name,
+        generic_param_list: Option<ast::GenericParamList>,
+        field_list: ast::FieldList,
+    ) -> ast::Struct {
+        make::struct_(visibility, strukt_name, generic_param_list, field_list).clone_for_update()
+    }
+
     pub fn expr_field(&self, receiver: ast::Expr, field: &str) -> ast::FieldExpr {
         let ast::Expr::FieldExpr(ast) =
             make::expr_field(receiver.clone(), field).clone_for_update()
@@ -1590,6 +1608,65 @@ impl SyntaxFactory {
         ast
     }
 
+    pub fn self_param(&self) -> ast::SelfParam {
+        let ast = make::self_param().clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
+    pub fn impl_(
+        &self,
+        attrs: impl IntoIterator<Item = ast::Attr>,
+        generic_params: Option<ast::GenericParamList>,
+        generic_args: Option<ast::GenericArgList>,
+        path_type: ast::Type,
+        where_clause: Option<ast::WhereClause>,
+        body: Option<ast::AssocItemList>,
+    ) -> ast::Impl {
+        let (attrs, attrs_input) = iterator_input(attrs);
+        let ast = make::impl_(
+            attrs,
+            generic_params.clone(),
+            generic_args.clone(),
+            path_type.clone(),
+            where_clause.clone(),
+            body.clone(),
+        )
+        .clone_for_update();
+
+        if let Some(mut mapping) = self.mappings() {
+            let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone());
+            builder.map_children(attrs_input, ast.attrs().map(|attr| attr.syntax().clone()));
+            if let Some(generic_params) = generic_params {
+                builder.map_node(
+                    generic_params.syntax().clone(),
+                    ast.generic_param_list().unwrap().syntax().clone(),
+                );
+            }
+            builder.map_node(path_type.syntax().clone(), ast.self_ty().unwrap().syntax().clone());
+            if let Some(where_clause) = where_clause {
+                builder.map_node(
+                    where_clause.syntax().clone(),
+                    ast.where_clause().unwrap().syntax().clone(),
+                );
+            }
+            if let Some(body) = body {
+                builder.map_node(
+                    body.syntax().clone(),
+                    ast.assoc_item_list().unwrap().syntax().clone(),
+                );
+            }
+            builder.finish(&mut mapping);
+        }
+
+        ast
+    }
+
     pub fn ret_type(&self, ty: ast::Type) -> ast::RetType {
         let ast = make::ret_type(ty.clone()).clone_for_update();
 
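
Most of the new `impl_` constructor is bookkeeping: each input node handed to the factory is recorded against the node it becomes in the freshly built tree, so assists can trace edits back to their sources. A hedged stand-in sketch of that idea, not part of the patch, using strings in place of syntax nodes:

```rust
use std::collections::HashMap;

#[derive(Default)]
struct MappingBuilder {
    // input node -> node in the newly constructed tree
    map: HashMap<String, String>,
}

impl MappingBuilder {
    fn map_node(&mut self, input: &str, output: &str) {
        self.map.insert(input.to_owned(), output.to_owned());
    }
}

fn main() {
    let mut builder = MappingBuilder::default();
    // Rough analogue of `builder.map_node(path_type.syntax().clone(), ast.self_ty()...)`.
    builder.map_node("MyType", "impl MyType {}");
    assert!(builder.map.contains_key("MyType"));
}
```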
diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
index 7d95043867ee6..c34475bbdf012 100644
--- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
+++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs
@@ -1689,6 +1689,21 @@ pub mod iter {
             }
         }
 
+        pub struct Filter<I, P> {
+            iter: I,
+            predicate: P,
+        }
+        impl<I: Iterator, P> Iterator for Filter<I, P>
+        where
+            P: FnMut(&I::Item) -> bool,
+        {
+            type Item = I::Item;
+
+            fn next(&mut self) -> Option<I::Item> {
+                loop {}
+            }
+        }
+
         pub struct FilterMap<I, F> {
             iter: I,
             f: F,
@@ -1705,7 +1720,7 @@ pub mod iter {
             }
         }
     }
-    pub use self::adapters::{FilterMap, Take};
+    pub use self::adapters::{Filter, FilterMap, Take};
 
     mod sources {
         mod repeat {
@@ -1756,6 +1771,13 @@ pub mod iter {
                 {
                     loop {}
                 }
+                fn filter<P>(self, predicate: P) -> crate::iter::Filter<Self, P>
+                where
+                    Self: Sized,
+                    P: FnMut(&Self::Item) -> bool,
+                {
+                    loop {}
+                }
                 fn filter_map<B, F>(self, _f: F) -> crate::iter::FilterMap<Self, F>
                 where
                     Self: Sized,
diff --git a/src/tools/rust-analyzer/crates/tt/src/storage.rs b/src/tools/rust-analyzer/crates/tt/src/storage.rs
index 4dd02d875a29e..50a1106175ab3 100644
--- a/src/tools/rust-analyzer/crates/tt/src/storage.rs
+++ b/src/tools/rust-analyzer/crates/tt/src/storage.rs
@@ -488,7 +488,7 @@ impl TopSubtree {
                 unreachable!()
             };
             *open_span = S::new(span.open.range, 0);
-            *close_span = S::new(span.close.range, 0);
+            *close_span = S::new(span.close.range, 1);
         }
         dispatch! {
             match &mut self.repr => tt => do_it(tt, span)
diff --git a/src/tools/rust-analyzer/editors/code/package-lock.json b/src/tools/rust-analyzer/editors/code/package-lock.json
index 57f6bf69beb08..84be0a666f9ca 100644
--- a/src/tools/rust-analyzer/editors/code/package-lock.json
+++ b/src/tools/rust-analyzer/editors/code/package-lock.json
@@ -1486,7 +1486,6 @@
       "integrity": "sha512-4gbs64bnbSzu4FpgMiQ1A+D+urxkoJk/kqlDJ2W//5SygaEiAP2B4GoS7TEdxgwol2el03gckFV9lJ4QOMiiHg==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@typescript-eslint/scope-manager": "8.25.0",
         "@typescript-eslint/types": "8.25.0",
@@ -1870,7 +1869,6 @@
       "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "bin": {
         "acorn": "bin/acorn"
       },
@@ -2840,7 +2838,6 @@
       "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz",
       "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==",
       "license": "ISC",
-      "peer": true,
       "engines": {
         "node": ">=12"
       }
@@ -3322,7 +3319,6 @@
       "integrity": "sha512-KjeihdFqTPhOMXTt7StsDxriV4n66ueuF/jfPNC3j/lduHwr/ijDwJMsF+wyMJethgiKi5wniIE243vi07d3pg==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@eslint-community/eslint-utils": "^4.2.0",
         "@eslint-community/regexpp": "^4.12.1",
@@ -4410,7 +4406,6 @@
       "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz",
       "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==",
       "license": "MIT",
-      "peer": true,
       "bin": {
         "jiti": "lib/jiti-cli.mjs"
       }
@@ -5584,9 +5579,9 @@
       }
     },
     "node_modules/qs": {
-      "version": "6.14.1",
-      "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
-      "integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
+      "version": "6.14.2",
+      "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.2.tgz",
+      "integrity": "sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -6678,7 +6673,6 @@
       "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==",
       "dev": true,
       "license": "Apache-2.0",
-      "peer": true,
       "bin": {
         "tsc": "bin/tsc",
         "tsserver": "bin/tsserver"
diff --git a/src/tools/rust-analyzer/lib/smol_str/src/borsh.rs b/src/tools/rust-analyzer/lib/smol_str/src/borsh.rs
index 527ce85a1746a..b684a4910c963 100644
--- a/src/tools/rust-analyzer/lib/smol_str/src/borsh.rs
+++ b/src/tools/rust-analyzer/lib/smol_str/src/borsh.rs
@@ -29,8 +29,9 @@ impl BorshDeserialize for SmolStr {
             }))
         } else {
             // u8::vec_from_reader always returns Some on success in current implementation
-            let vec = u8::vec_from_reader(len, reader)?
-                .ok_or_else(|| Error::other("u8::vec_from_reader unexpectedly returned None"))?;
+            let vec = u8::vec_from_reader(len, reader)?.ok_or_else(|| {
+                Error::new(ErrorKind::Other, "u8::vec_from_reader unexpectedly returned None")
+            })?;
             Ok(SmolStr::from(String::from_utf8(vec).map_err(|err| {
                 let msg = err.to_string();
                 Error::new(ErrorKind::InvalidData, msg)
diff --git a/src/tools/rust-analyzer/lib/smol_str/tests/test.rs b/src/tools/rust-analyzer/lib/smol_str/tests/test.rs
index 640e7df681c9d..00fab2ee1c7fd 100644
--- a/src/tools/rust-analyzer/lib/smol_str/tests/test.rs
+++ b/src/tools/rust-analyzer/lib/smol_str/tests/test.rs
@@ -393,7 +393,7 @@ mod test_str_ext {
     }
 }
 
-#[cfg(feature = "borsh")]
+#[cfg(all(feature = "borsh", feature = "std"))]
 mod borsh_tests {
     use borsh::BorshDeserialize;
     use smol_str::{SmolStr, ToSmolStr};
diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version
index a1011c4a0acf0..b22c6c3869c62 100644
--- a/src/tools/rust-analyzer/rust-version
+++ b/src/tools/rust-analyzer/rust-version
@@ -1 +1 @@
-ba284f468cd2cda48420251efc991758ec13d450
+139651428df86cf88443295542c12ea617cbb587
diff --git a/tests/ui/traits/auxiliary/force_unstable.rs b/tests/ui/traits/auxiliary/force_unstable.rs
new file mode 100644
index 0000000000000..ce71e1241f9cb
--- /dev/null
+++ b/tests/ui/traits/auxiliary/force_unstable.rs
@@ -0,0 +1,7 @@
+//@ edition: 2024
+//@ compile-flags: -Zforce-unstable-if-unmarked
+
+// Auxiliary crate that uses `-Zforce-unstable-if-unmarked` to export an
+// "unstable" trait.
+
+pub trait ForeignTrait {}
diff --git a/tests/ui/traits/nightly-only-unstable.force.stderr b/tests/ui/traits/nightly-only-unstable.force.stderr
new file mode 100644
index 0000000000000..c1b5a45dc827e
--- /dev/null
+++ b/tests/ui/traits/nightly-only-unstable.force.stderr
@@ -0,0 +1,36 @@
+error[E0277]: the trait bound `(): LocalTrait` is not satisfied
+  --> $DIR/nightly-only-unstable.rs:25:21
+   |
+LL |     use_local_trait(());
+   |     --------------- ^^ the trait `LocalTrait` is not implemented for `()`
+   |     |
+   |     required by a bound introduced by this call
+   |
+help: this trait has no implementations, consider adding one
+  --> $DIR/nightly-only-unstable.rs:14:1
+   |
+LL | trait LocalTrait {}
+   | ^^^^^^^^^^^^^^^^
+note: required by a bound in `use_local_trait`
+  --> $DIR/nightly-only-unstable.rs:16:28
+   |
+LL | fn use_local_trait(_: impl LocalTrait) {}
+   |                            ^^^^^^^^^^ required by this bound in `use_local_trait`
+
+error[E0277]: the trait bound `(): ForeignTrait` is not satisfied
+  --> $DIR/nightly-only-unstable.rs:31:23
+   |
+LL |     use_foreign_trait(());
+   |     ----------------- ^^ the trait `ForeignTrait` is not implemented for `()`
+   |     |
+   |     required by a bound introduced by this call
+   |
+note: required by a bound in `use_foreign_trait`
+  --> $DIR/nightly-only-unstable.rs:20:30
+   |
+LL | fn use_foreign_trait(_: impl force_unstable::ForeignTrait) {}
+   |                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `use_foreign_trait`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/tests/ui/traits/nightly-only-unstable.normal.stderr b/tests/ui/traits/nightly-only-unstable.normal.stderr
new file mode 100644
index 0000000000000..51b896cfefdf2
--- /dev/null
+++ b/tests/ui/traits/nightly-only-unstable.normal.stderr
@@ -0,0 +1,36 @@
+error[E0277]: the trait bound `(): LocalTrait` is not satisfied
+  --> $DIR/nightly-only-unstable.rs:25:21
+   |
+LL |     use_local_trait(());
+   |     --------------- ^^ the trait `LocalTrait` is not implemented for `()`
+   |     |
+   |     required by a bound introduced by this call
+   |
+help: this trait has no implementations, consider adding one
+  --> $DIR/nightly-only-unstable.rs:14:1
+   |
+LL | trait LocalTrait {}
+   | ^^^^^^^^^^^^^^^^
+note: required by a bound in `use_local_trait`
+  --> $DIR/nightly-only-unstable.rs:16:28
+   |
+LL | fn use_local_trait(_: impl LocalTrait) {}
+   |                            ^^^^^^^^^^ required by this bound in `use_local_trait`
+
+error[E0277]: the trait bound `(): ForeignTrait` is not satisfied
+  --> $DIR/nightly-only-unstable.rs:31:23
+   |
+LL |     use_foreign_trait(());
+   |     ----------------- ^^ the nightly-only, unstable trait `ForeignTrait` is not implemented for `()`
+   |     |
+   |     required by a bound introduced by this call
+   |
+note: required by a bound in `use_foreign_trait`
+  --> $DIR/nightly-only-unstable.rs:20:30
+   |
+LL | fn use_foreign_trait(_: impl force_unstable::ForeignTrait) {}
+   |                              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `use_foreign_trait`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
diff --git a/tests/ui/traits/nightly-only-unstable.rs b/tests/ui/traits/nightly-only-unstable.rs
new file mode 100644
index 0000000000000..94f3000743900
--- /dev/null
+++ b/tests/ui/traits/nightly-only-unstable.rs
@@ -0,0 +1,36 @@
+//@ revisions: normal force
+//@ edition: 2024
+//@ aux-crate: force_unstable=force_unstable.rs
+//@[force] compile-flags: -Zforce-unstable-if-unmarked
+
+#![feature(rustc_private)]
+
+// Regression test for .
+//
+// When building a crate with `-Zforce-unstable-if-unmarked` (e.g. the compiler or stdlib),
+// it's unhelpful to mention that a not-implemented trait is unstable, because that will
+// be true of every local and foreign trait that isn't explicitly marked stable.
+
+trait LocalTrait {}
+
+fn use_local_trait(_: impl LocalTrait) {}
+//~^ NOTE required by a bound in `use_local_trait`
+//~| NOTE required by this bound in `use_local_trait`
+
+fn use_foreign_trait(_: impl force_unstable::ForeignTrait) {}
+//~^ NOTE required by a bound in `use_foreign_trait`
+//~| NOTE required by this bound in `use_foreign_trait`
+
+fn main() {
+    use_local_trait(());
+    //~^ ERROR the trait bound `(): LocalTrait` is not satisfied
+    //[normal]~| NOTE the trait `LocalTrait` is not implemented for `()`
+    //[force]~| NOTE the trait `LocalTrait` is not implemented for `()`
+    //~| NOTE required by a bound introduced by this call
+
+    use_foreign_trait(());
+    //~^ ERROR the trait bound `(): ForeignTrait` is not satisfied
+    //[normal]~| NOTE the nightly-only, unstable trait `ForeignTrait` is not implemented for `()`
+    //[force]~| NOTE the trait `ForeignTrait` is not implemented for `()`
+    //~| NOTE required by a bound introduced by this call
+}
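
One more piece of review context, not part of the patch: the minicore hunks earlier in this diff add a `Filter` adapter and an `Iterator::filter` stub that mirror the shape of the real std items. The same adapter written outside minicore, with a working `next` in place of the `loop {}` stub:

```rust
struct Filter<I, P> {
    iter: I,
    predicate: P,
}

impl<I: Iterator, P> Iterator for Filter<I, P>
where
    P: FnMut(&I::Item) -> bool,
{
    type Item = I::Item;

    fn next(&mut self) -> Option<I::Item> {
        // Drive the inner iterator until an item passes the predicate.
        while let Some(item) = self.iter.next() {
            if (self.predicate)(&item) {
                return Some(item);
            }
        }
        None
    }
}

fn main() {
    let evens = Filter { iter: 0..10, predicate: |n: &i32| n % 2 == 0 };
    assert_eq!(evens.collect::<Vec<_>>(), vec![0, 2, 4, 6, 8]);
}
```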