178 files changed, 3503 insertions, 1238 deletions
diff --git a/.mailmap b/.mailmap index 9ac7f1a9b49..0f58762e023 100644 --- a/.mailmap +++ b/.mailmap @@ -74,6 +74,7 @@ Ben Striegel <ben.striegel@gmail.com> Benjamin Jackman <ben@jackman.biz> Benoît Cortier <benoit.cortier@fried-world.eu> Bheesham Persaud <bheesham123@hotmail.com> Bheesham Persaud <bheesham.persaud@live.ca> +bjorn3 <17426603+bjorn3@users.noreply.github.com> <bjorn3@users.noreply.github.com> Björn Steinbrink <bsteinbr@gmail.com> <B.Steinbrink@gmx.de> blake2-ppc <ulrik.sverdrup@gmail.com> <blake2-ppc> blyxyas <blyxyas@gmail.com> Alejandra González <blyxyas@gmail.com> diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index cb60d9f286b..21f9bf028d5 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ b/compiler/rustc_infer/src/infer/mod.rs @@ -18,7 +18,7 @@ pub use relate::combine::PredicateEmittingRelation; use rustc_data_structures::captures::Captures; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_data_structures::sync::Lrc; -use rustc_data_structures::undo_log::Rollback; +use rustc_data_structures::undo_log::{Rollback, UndoLogs}; use rustc_data_structures::unify as ut; use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed}; use rustc_hir as hir; @@ -50,6 +50,7 @@ use snapshot::undo_log::InferCtxtUndoLogs; use tracing::{debug, instrument}; use type_variable::TypeVariableOrigin; +use crate::infer::region_constraints::UndoLog; use crate::traits::{self, ObligationCause, ObligationInspector, PredicateObligation, TraitEngine}; pub mod at; @@ -67,6 +68,13 @@ pub mod resolve; pub(crate) mod snapshot; mod type_variable; +/// `InferOk<'tcx, ()>` is used a lot. It may seem like a useless wrapper +/// around `Vec<PredicateObligation<'tcx>>`, but it has one important property: +/// because `InferOk` is marked with `#[must_use]`, if you have a method +/// `InferCtxt::f` that returns `InferResult<'tcx, ()>` and you call it with +/// `infcx.f()?;` you'll get a warning about the obligations being discarded +/// without use, which is probably unintentional and has been a source of bugs +/// in the past. #[must_use] #[derive(Debug)] pub struct InferOk<'tcx, T> { @@ -163,12 +171,12 @@ impl<'tcx> InferCtxtInner<'tcx> { undo_log: InferCtxtUndoLogs::default(), projection_cache: Default::default(), - type_variable_storage: type_variable::TypeVariableStorage::new(), - const_unification_storage: ut::UnificationTableStorage::new(), - int_unification_storage: ut::UnificationTableStorage::new(), - float_unification_storage: ut::UnificationTableStorage::new(), - effect_unification_storage: ut::UnificationTableStorage::new(), - region_constraint_storage: Some(RegionConstraintStorage::new()), + type_variable_storage: Default::default(), + const_unification_storage: Default::default(), + int_unification_storage: Default::default(), + float_unification_storage: Default::default(), + effect_unification_storage: Default::default(), + region_constraint_storage: Some(Default::default()), region_obligations: vec![], opaque_type_storage: Default::default(), } @@ -1004,8 +1012,8 @@ impl<'tcx> InferCtxt<'tcx> { ty::Const::new_infer(self.tcx, ty::InferConst::EffectVar(effect_vid)).into() } - /// Given a set of generics defined on a type or impl, returns the generic parameters mapping each - /// type/region parameter to a fresh inference variable. + /// Given a set of generics defined on a type or impl, returns the generic parameters mapping + /// each type/region parameter to a fresh inference variable. 
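(Aside, not part of the patch.) The new comment in `rustc_infer` above explains that `InferOk` is deliberately `#[must_use]` so that writing `infcx.f()?;` warns instead of silently dropping obligations. A minimal sketch of that mechanism with stand-in types (the names and fields below are illustrative, not the compiler's real definitions):

```rust
// A minimal, illustrative sketch -- stand-in types, not rustc's real ones.
#[must_use = "obligations must be registered with the fulfillment context"]
#[derive(Debug)]
struct InferOk<T> {
    value: T,
    obligations: Vec<String>, // stand-in for `Vec<PredicateObligation<'tcx>>`
}

fn unify() -> Result<InferOk<()>, ()> {
    Ok(InferOk { value: (), obligations: vec!["T: Sized".to_owned()] })
}

fn caller() -> Result<(), ()> {
    // Intended usage: take the wrapper apart and handle its obligations.
    let InferOk { value, obligations } = unify()?;
    println!("value = {value:?}, {} obligation(s) to register", obligations.len());

    // The bug pattern the attribute exists to catch: the `InferOk` produced
    // by `?` is dropped on the floor, so rustc emits `unused_must_use` here.
    unify()?;
    Ok(())
}

fn main() {
    caller().expect("no inference errors in this sketch");
}
```

Compiling this emits an `unused_must_use` warning on the second `unify()?;` statement, which is exactly the signal the new doc comment relies on.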
pub fn fresh_args_for_item(&self, span: Span, def_id: DefId) -> GenericArgsRef<'tcx> { GenericArgs::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param)) } @@ -1036,18 +1044,14 @@ impl<'tcx> InferCtxt<'tcx> { /// Clone the list of variable regions. This is used only during NLL processing /// to put the set of region variables into the NLL region context. pub fn get_region_var_origins(&self) -> VarInfos { - let mut inner = self.inner.borrow_mut(); - let (var_infos, data) = inner - .region_constraint_storage - // We clone instead of taking because borrowck still wants to use - // the inference context after calling this for diagnostics - // and the new trait solver. - .clone() - .expect("regions already resolved") - .with_log(&mut inner.undo_log) - .into_infos_and_data(); - assert!(data.is_empty()); - var_infos + let inner = self.inner.borrow(); + assert!(!UndoLogs::<UndoLog<'_>>::in_snapshot(&inner.undo_log)); + let storage = inner.region_constraint_storage.as_ref().expect("regions already resolved"); + assert!(storage.data.is_empty()); + // We clone instead of taking because borrowck still wants to use the + // inference context after calling this for diagnostics and the new + // trait solver. + storage.var_infos.clone() } #[instrument(level = "debug", skip(self), ret)] @@ -1383,10 +1387,10 @@ impl<'tcx> InferCtxt<'tcx> { /// /// The constant can be located on a trait like `<A as B>::C`, in which case the given /// generic parameters and environment are used to resolve the constant. Alternatively if the - /// constant has generic parameters in scope the instantiations are used to evaluate the value of - /// the constant. For example in `fn foo<T>() { let _ = [0; bar::<T>()]; }` the repeat count - /// constant `bar::<T>()` requires a instantiation for `T`, if the instantiation for `T` is still - /// too generic for the constant to be evaluated then `Err(ErrorHandled::TooGeneric)` is + /// constant has generic parameters in scope the instantiations are used to evaluate the value + /// of the constant. For example in `fn foo<T>() { let _ = [0; bar::<T>()]; }` the repeat count + /// constant `bar::<T>()` requires a instantiation for `T`, if the instantiation for `T` is + /// still too generic for the constant to be evaluated then `Err(ErrorHandled::TooGeneric)` is /// returned. /// /// This handles inferences variables within both `param_env` and `args` by diff --git a/compiler/rustc_infer/src/infer/opaque_types/mod.rs b/compiler/rustc_infer/src/infer/opaque_types/mod.rs index 5a2ffbf029e..365ddaba138 100644 --- a/compiler/rustc_infer/src/infer/opaque_types/mod.rs +++ b/compiler/rustc_infer/src/infer/opaque_types/mod.rs @@ -148,11 +148,11 @@ impl<'tcx> InferCtxt<'tcx> { } if let ty::Alias(ty::Opaque, ty::AliasTy { def_id: b_def_id, .. }) = *b.kind() { - // We could accept this, but there are various ways to handle this situation, and we don't - // want to make a decision on it right now. Likely this case is so super rare anyway, that - // no one encounters it in practice. - // It does occur however in `fn fut() -> impl Future<Output = i32> { async { 42 } }`, - // where it is of no concern, so we only check for TAITs. + // We could accept this, but there are various ways to handle this situation, + // and we don't want to make a decision on it right now. Likely this case is so + // super rare anyway, that no one encounters it in practice. 
It does occur + // however in `fn fut() -> impl Future<Output = i32> { async { 42 } }`, where + // it is of no concern, so we only check for TAITs. if self.can_define_opaque_ty(b_def_id) && self.tcx.is_type_alias_impl_trait(b_def_id) { diff --git a/compiler/rustc_infer/src/infer/outlives/env.rs b/compiler/rustc_infer/src/infer/outlives/env.rs index a071b84a1a0..9300fc574dc 100644 --- a/compiler/rustc_infer/src/infer/outlives/env.rs +++ b/compiler/rustc_infer/src/infer/outlives/env.rs @@ -1,8 +1,7 @@ use rustc_data_structures::fx::FxIndexSet; use rustc_data_structures::transitive_relation::TransitiveRelationBuilder; -use rustc_middle::bug; -use rustc_middle::ty::{self, Region}; -use tracing::{debug, instrument}; +use rustc_middle::{bug, ty}; +use tracing::debug; use super::explicit_outlives_bounds; use crate::infer::GenericKind; @@ -54,37 +53,16 @@ pub struct OutlivesEnvironment<'tcx> { region_bound_pairs: RegionBoundPairs<'tcx>, } -/// Builder of OutlivesEnvironment. -#[derive(Debug)] -struct OutlivesEnvironmentBuilder<'tcx> { - param_env: ty::ParamEnv<'tcx>, - region_relation: TransitiveRelationBuilder<Region<'tcx>>, - region_bound_pairs: RegionBoundPairs<'tcx>, -} - /// "Region-bound pairs" tracks outlives relations that are known to /// be true, either because of explicit where-clauses like `T: 'a` or /// because of implied bounds. pub type RegionBoundPairs<'tcx> = FxIndexSet<ty::OutlivesPredicate<'tcx, GenericKind<'tcx>>>; impl<'tcx> OutlivesEnvironment<'tcx> { - /// Create a builder using `ParamEnv` and add explicit outlives bounds into it. - fn builder(param_env: ty::ParamEnv<'tcx>) -> OutlivesEnvironmentBuilder<'tcx> { - let mut builder = OutlivesEnvironmentBuilder { - param_env, - region_relation: Default::default(), - region_bound_pairs: Default::default(), - }; - - builder.add_outlives_bounds(explicit_outlives_bounds(param_env)); - - builder - } - - #[inline] /// Create a new `OutlivesEnvironment` without extra outlives bounds. + #[inline] pub fn new(param_env: ty::ParamEnv<'tcx>) -> Self { - Self::builder(param_env).build() + Self::with_bounds(param_env, vec![]) } /// Create a new `OutlivesEnvironment` with extra outlives bounds. @@ -92,56 +70,27 @@ impl<'tcx> OutlivesEnvironment<'tcx> { param_env: ty::ParamEnv<'tcx>, extra_bounds: impl IntoIterator<Item = OutlivesBound<'tcx>>, ) -> Self { - let mut builder = Self::builder(param_env); - builder.add_outlives_bounds(extra_bounds); - builder.build() - } + let mut region_relation = TransitiveRelationBuilder::default(); + let mut region_bound_pairs = RegionBoundPairs::default(); - /// Borrows current value of the `free_region_map`. - pub fn free_region_map(&self) -> &FreeRegionMap<'tcx> { - &self.free_region_map - } - - /// Borrows current `region_bound_pairs`. - pub fn region_bound_pairs(&self) -> &RegionBoundPairs<'tcx> { - &self.region_bound_pairs - } -} - -impl<'tcx> OutlivesEnvironmentBuilder<'tcx> { - #[inline] - #[instrument(level = "debug")] - fn build(self) -> OutlivesEnvironment<'tcx> { - OutlivesEnvironment { - param_env: self.param_env, - free_region_map: FreeRegionMap { relation: self.region_relation.freeze() }, - region_bound_pairs: self.region_bound_pairs, - } - } - - /// Processes outlives bounds that are known to hold, whether from implied or other sources. - fn add_outlives_bounds<I>(&mut self, outlives_bounds: I) - where - I: IntoIterator<Item = OutlivesBound<'tcx>>, - { // Record relationships such as `T:'x` that don't go into the // free-region-map but which we use here. 
- for outlives_bound in outlives_bounds { + for outlives_bound in explicit_outlives_bounds(param_env).chain(extra_bounds) { debug!("add_outlives_bounds: outlives_bound={:?}", outlives_bound); match outlives_bound { OutlivesBound::RegionSubParam(r_a, param_b) => { - self.region_bound_pairs + region_bound_pairs .insert(ty::OutlivesPredicate(GenericKind::Param(param_b), r_a)); } OutlivesBound::RegionSubAlias(r_a, alias_b) => { - self.region_bound_pairs + region_bound_pairs .insert(ty::OutlivesPredicate(GenericKind::Alias(alias_b), r_a)); } OutlivesBound::RegionSubRegion(r_a, r_b) => match (*r_a, *r_b) { ( ty::ReStatic | ty::ReEarlyParam(_) | ty::ReLateParam(_), ty::ReStatic | ty::ReEarlyParam(_) | ty::ReLateParam(_), - ) => self.region_relation.add(r_a, r_b), + ) => region_relation.add(r_a, r_b), (ty::ReError(_), _) | (_, ty::ReError(_)) => {} // FIXME(#109628): We shouldn't have existential variables in implied bounds. // Panic here once the linked issue is resolved! @@ -150,5 +99,21 @@ impl<'tcx> OutlivesEnvironmentBuilder<'tcx> { }, } } + + OutlivesEnvironment { + param_env, + free_region_map: FreeRegionMap { relation: region_relation.freeze() }, + region_bound_pairs, + } + } + + /// Borrows current value of the `free_region_map`. + pub fn free_region_map(&self) -> &FreeRegionMap<'tcx> { + &self.free_region_map + } + + /// Borrows current `region_bound_pairs`. + pub fn region_bound_pairs(&self) -> &RegionBoundPairs<'tcx> { + &self.region_bound_pairs } } diff --git a/compiler/rustc_infer/src/infer/outlives/mod.rs b/compiler/rustc_infer/src/infer/outlives/mod.rs index a270f9322f3..e23bb1aaa56 100644 --- a/compiler/rustc_infer/src/infer/outlives/mod.rs +++ b/compiler/rustc_infer/src/infer/outlives/mod.rs @@ -1,11 +1,12 @@ //! Various code related to computing outlives relations. 
+use rustc_data_structures::undo_log::UndoLogs; use rustc_middle::traits::query::{NoSolution, OutlivesBound}; use rustc_middle::ty; use tracing::instrument; use self::env::OutlivesEnvironment; -use super::region_constraints::RegionConstraintData; +use super::region_constraints::{RegionConstraintData, UndoLog}; use super::{InferCtxt, RegionResolutionError, SubregionOrigin}; use crate::infer::free_regions::RegionRelations; use crate::infer::lexical_region_resolve; @@ -63,7 +64,7 @@ impl<'tcx> InferCtxt<'tcx> { } }; - let (var_infos, data) = { + let storage = { let mut inner = self.inner.borrow_mut(); let inner = &mut *inner; assert!( @@ -71,18 +72,14 @@ impl<'tcx> InferCtxt<'tcx> { "region_obligations not empty: {:#?}", inner.region_obligations ); - inner - .region_constraint_storage - .take() - .expect("regions already resolved") - .with_log(&mut inner.undo_log) - .into_infos_and_data() + assert!(!UndoLogs::<UndoLog<'_>>::in_snapshot(&inner.undo_log)); + inner.region_constraint_storage.take().expect("regions already resolved") }; let region_rels = &RegionRelations::new(self.tcx, outlives_env.free_region_map()); let (lexical_region_resolutions, errors) = - lexical_region_resolve::resolve(region_rels, var_infos, data); + lexical_region_resolve::resolve(region_rels, storage.var_infos, storage.data); let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions)); assert!(old_value.is_none()); diff --git a/compiler/rustc_infer/src/infer/outlives/obligations.rs b/compiler/rustc_infer/src/infer/outlives/obligations.rs index 634cda86bc3..e0e03a29220 100644 --- a/compiler/rustc_infer/src/infer/outlives/obligations.rs +++ b/compiler/rustc_infer/src/infer/outlives/obligations.rs @@ -396,11 +396,12 @@ where // 'a` in the environment but `trait Foo<'b> { type Item: 'b // }` in the trait definition. approx_env_bounds.retain(|bound_outlives| { - // OK to skip binder because we only manipulate and compare against other - // values from the same binder. e.g. if we have (e.g.) `for<'a> <T as Trait<'a>>::Item: 'a` - // in `bound`, the `'a` will be a `^1` (bound, debruijn index == innermost) region. - // If the declaration is `trait Trait<'b> { type Item: 'b; }`, then `projection_declared_bounds_from_trait` - // will be invoked with `['b => ^1]` and so we will get `^1` returned. + // OK to skip binder because we only manipulate and compare against other values from + // the same binder. e.g. if we have (e.g.) `for<'a> <T as Trait<'a>>::Item: 'a` in + // `bound`, the `'a` will be a `^1` (bound, debruijn index == innermost) region. If the + // declaration is `trait Trait<'b> { type Item: 'b; }`, then + // `projection_declared_bounds_from_trait` will be invoked with `['b => ^1]` and so we + // will get `^1` returned. 
let bound = bound_outlives.skip_binder(); let ty::Alias(_, alias_ty) = bound.0.kind() else { bug!("expected AliasTy") }; self.verify_bound.declared_bounds_from_definition(*alias_ty).all(|r| r != bound.1) diff --git a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs index 7913f0e340e..3cfc58dea05 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/leak_check.rs @@ -55,8 +55,8 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { /// * what placeholder they must outlive transitively /// * if they must also be equal to a placeholder, report an error because `P1: P2` /// * minimum universe U of all SCCs they must outlive - /// * if they must also be equal to a placeholder P, and U cannot name P, report an error, as that - /// indicates `P: R` and `R` is in an incompatible universe + /// * if they must also be equal to a placeholder P, and U cannot name P, report an error, as + /// that indicates `P: R` and `R` is in an incompatible universe /// /// To improve performance and for the old trait solver caching to be sound, this takes /// an optional snapshot in which case we only look at region constraints added in that @@ -73,7 +73,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { /// * R: P1, R: P2, as above #[instrument(level = "debug", skip(self, tcx, only_consider_snapshot), ret)] pub fn leak_check( - &mut self, + self, tcx: TyCtxt<'tcx>, outer_universe: ty::UniverseIndex, max_universe: ty::UniverseIndex, @@ -83,7 +83,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { return Ok(()); } - let mini_graph = &MiniGraph::new(tcx, self, only_consider_snapshot); + let mini_graph = MiniGraph::new(tcx, &self, only_consider_snapshot); let mut leak_check = LeakCheck::new(tcx, outer_universe, max_universe, mini_graph, self); leak_check.assign_placeholder_values()?; @@ -92,11 +92,11 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { } } -struct LeakCheck<'a, 'b, 'tcx> { +struct LeakCheck<'a, 'tcx> { tcx: TyCtxt<'tcx>, outer_universe: ty::UniverseIndex, - mini_graph: &'a MiniGraph<'tcx>, - rcc: &'a mut RegionConstraintCollector<'b, 'tcx>, + mini_graph: MiniGraph<'tcx>, + rcc: RegionConstraintCollector<'a, 'tcx>, // Initially, for each SCC S, stores a placeholder `P` such that `S = P` // must hold. @@ -115,26 +115,27 @@ struct LeakCheck<'a, 'b, 'tcx> { // either the placeholder `P1` or the empty region in that same universe. // // To detect errors, we look for an SCC S where the values in - // `scc_values[S]` (if any) cannot be stored into `scc_universes[S]`. + // `scc_placeholders[S]` (if any) cannot be stored into `scc_universes[S]`. 
scc_universes: IndexVec<LeakCheckScc, SccUniverse<'tcx>>, } -impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> { +impl<'a, 'tcx> LeakCheck<'a, 'tcx> { fn new( tcx: TyCtxt<'tcx>, outer_universe: ty::UniverseIndex, max_universe: ty::UniverseIndex, - mini_graph: &'a MiniGraph<'tcx>, - rcc: &'a mut RegionConstraintCollector<'b, 'tcx>, + mini_graph: MiniGraph<'tcx>, + rcc: RegionConstraintCollector<'a, 'tcx>, ) -> Self { let dummy_scc_universe = SccUniverse { universe: max_universe, region: None }; + let num_sccs = mini_graph.sccs.num_sccs(); Self { tcx, outer_universe, mini_graph, rcc, - scc_placeholders: IndexVec::from_elem_n(None, mini_graph.sccs.num_sccs()), - scc_universes: IndexVec::from_elem_n(dummy_scc_universe, mini_graph.sccs.num_sccs()), + scc_placeholders: IndexVec::from_elem_n(None, num_sccs), + scc_universes: IndexVec::from_elem_n(dummy_scc_universe, num_sccs), } } @@ -156,7 +157,16 @@ impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> { // Detect those SCCs that directly contain a placeholder if let ty::RePlaceholder(placeholder) = **region { if self.outer_universe.cannot_name(placeholder.universe) { - self.assign_scc_value(scc, placeholder)?; + // Update `scc_placeholders` to account for the fact that `P: S` must hold. + match self.scc_placeholders[scc] { + Some(p) => { + assert_ne!(p, placeholder); + return Err(self.placeholder_error(p, placeholder)); + } + None => { + self.scc_placeholders[scc] = Some(placeholder); + } + } } } } @@ -164,26 +174,6 @@ impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> { Ok(()) } - // assign_scc_value(S, P): Update `scc_values` to account for the fact that `P: S` must hold. - // This may create an error. - fn assign_scc_value( - &mut self, - scc: LeakCheckScc, - placeholder: ty::PlaceholderRegion, - ) -> RelateResult<'tcx, ()> { - match self.scc_placeholders[scc] { - Some(p) => { - assert_ne!(p, placeholder); - return Err(self.placeholder_error(p, placeholder)); - } - None => { - self.scc_placeholders[scc] = Some(placeholder); - } - }; - - Ok(()) - } - /// For each SCC S, iterate over each successor S1 where `S: S1`: /// /// * Compute @@ -216,8 +206,8 @@ impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> { // Walk over each `scc2` such that `scc1: scc2` and compute: // // * `scc1_universe`: the minimum universe of `scc2` and the constituents of `scc1` - // * `succ_bound`: placeholder `P` that the successors must outlive, if any (if there are multiple, - // we pick one arbitrarily) + // * `succ_bound`: placeholder `P` that the successors must outlive, if any (if there + // are multiple, we pick one arbitrarily) let mut scc1_universe = self.scc_universes[scc1]; let mut succ_bound = None; for &scc2 in self.mini_graph.sccs.successors(scc1) { @@ -260,7 +250,8 @@ impl<'a, 'b, 'tcx> LeakCheck<'a, 'b, 'tcx> { self.scc_placeholders[scc1] = succ_bound; } - // At this point, `scc_placeholder[scc1]` stores some placeholder that `scc1` must outlive (if any). + // At this point, `scc_placeholder[scc1]` stores some placeholder that `scc1` must + // outlive (if any). } Ok(()) } diff --git a/compiler/rustc_infer/src/infer/region_constraints/mod.rs b/compiler/rustc_infer/src/infer/region_constraints/mod.rs index 4411f8db72a..270217e26b7 100644 --- a/compiler/rustc_infer/src/infer/region_constraints/mod.rs +++ b/compiler/rustc_infer/src/infer/region_constraints/mod.rs @@ -27,9 +27,9 @@ pub use rustc_middle::infer::MemberConstraint; #[derive(Clone, Default)] pub struct RegionConstraintStorage<'tcx> { /// For each `RegionVid`, the corresponding `RegionVariableOrigin`. 
- var_infos: IndexVec<RegionVid, RegionVariableInfo>, + pub(super) var_infos: IndexVec<RegionVid, RegionVariableInfo>, - data: RegionConstraintData<'tcx>, + pub(super) data: RegionConstraintData<'tcx>, /// For a given pair of regions (R1, R2), maps to a region R3 that /// is designated as their LUB (edges R1 <= R3 and R2 <= R3 @@ -61,21 +61,6 @@ pub struct RegionConstraintCollector<'a, 'tcx> { undo_log: &'a mut InferCtxtUndoLogs<'tcx>, } -impl<'tcx> std::ops::Deref for RegionConstraintCollector<'_, 'tcx> { - type Target = RegionConstraintStorage<'tcx>; - #[inline] - fn deref(&self) -> &RegionConstraintStorage<'tcx> { - self.storage - } -} - -impl<'tcx> std::ops::DerefMut for RegionConstraintCollector<'_, 'tcx> { - #[inline] - fn deref_mut(&mut self) -> &mut RegionConstraintStorage<'tcx> { - self.storage - } -} - pub type VarInfos = IndexVec<RegionVid, RegionVariableInfo>; /// The full set of region constraints gathered up by the collector. @@ -309,10 +294,6 @@ pub(crate) struct RegionSnapshot { } impl<'tcx> RegionConstraintStorage<'tcx> { - pub fn new() -> Self { - Self::default() - } - #[inline] pub(crate) fn with_log<'a>( &'a mut self, @@ -320,46 +301,15 @@ impl<'tcx> RegionConstraintStorage<'tcx> { ) -> RegionConstraintCollector<'a, 'tcx> { RegionConstraintCollector { storage: self, undo_log } } - - fn rollback_undo_entry(&mut self, undo_entry: UndoLog<'tcx>) { - match undo_entry { - AddVar(vid) => { - self.var_infos.pop().unwrap(); - assert_eq!(self.var_infos.len(), vid.index()); - } - AddConstraint(index) => { - self.data.constraints.pop().unwrap(); - assert_eq!(self.data.constraints.len(), index); - } - AddVerify(index) => { - self.data.verifys.pop(); - assert_eq!(self.data.verifys.len(), index); - } - AddCombination(Glb, ref regions) => { - self.glbs.remove(regions); - } - AddCombination(Lub, ref regions) => { - self.lubs.remove(regions); - } - } - } } impl<'tcx> RegionConstraintCollector<'_, 'tcx> { pub fn num_region_vars(&self) -> usize { - self.var_infos.len() + self.storage.var_infos.len() } pub fn region_constraint_data(&self) -> &RegionConstraintData<'tcx> { - &self.data - } - - /// Once all the constraints have been gathered, extract out the final data. - /// - /// Not legal during a snapshot. - pub fn into_infos_and_data(self) -> (VarInfos, RegionConstraintData<'tcx>) { - assert!(!UndoLogs::<UndoLog<'_>>::in_snapshot(&self.undo_log)); - (mem::take(&mut self.storage.var_infos), mem::take(&mut self.storage.data)) + &self.storage.data } /// Takes (and clears) the current set of constraints. 
Note that @@ -415,17 +365,17 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { } pub fn data(&self) -> &RegionConstraintData<'tcx> { - &self.data + &self.storage.data } - pub(super) fn start_snapshot(&mut self) -> RegionSnapshot { + pub(super) fn start_snapshot(&self) -> RegionSnapshot { debug!("RegionConstraintCollector: start_snapshot"); - RegionSnapshot { any_unifications: self.any_unifications } + RegionSnapshot { any_unifications: self.storage.any_unifications } } pub(super) fn rollback_to(&mut self, snapshot: RegionSnapshot) { debug!("RegionConstraintCollector: rollback_to({:?})", snapshot); - self.any_unifications = snapshot.any_unifications; + self.storage.any_unifications = snapshot.any_unifications; } pub(super) fn new_region_var( @@ -433,7 +383,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { universe: ty::UniverseIndex, origin: RegionVariableOrigin, ) -> RegionVid { - let vid = self.var_infos.push(RegionVariableInfo { origin, universe }); + let vid = self.storage.var_infos.push(RegionVariableInfo { origin, universe }); let u_vid = self.unification_table_mut().new_key(RegionVariableValue::Unknown { universe }); assert_eq!(vid, u_vid.vid); @@ -444,7 +394,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { /// Returns the origin for the given variable. pub(super) fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin { - self.var_infos[vid].origin + self.storage.var_infos[vid].origin } fn add_constraint(&mut self, constraint: Constraint<'tcx>, origin: SubregionOrigin<'tcx>) { @@ -467,8 +417,8 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { return; } - let index = self.data.verifys.len(); - self.data.verifys.push(verify); + let index = self.storage.data.verifys.len(); + self.storage.data.verifys.push(verify); self.undo_log.push(AddVerify(index)); } @@ -488,7 +438,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { (ty::ReVar(a), ty::ReVar(b)) => { debug!("make_eqregion: unifying {:?} with {:?}", a, b); if self.unification_table_mut().unify_var_var(a, b).is_ok() { - self.any_unifications = true; + self.storage.any_unifications = true; } } (ty::ReVar(vid), _) => { @@ -498,7 +448,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { .unify_var_value(vid, RegionVariableValue::Known { value: b }) .is_ok() { - self.any_unifications = true; + self.storage.any_unifications = true; }; } (_, ty::ReVar(vid)) => { @@ -508,7 +458,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { .unify_var_value(vid, RegionVariableValue::Known { value: a }) .is_ok() { - self.any_unifications = true; + self.storage.any_unifications = true; }; } (_, _) => {} @@ -530,7 +480,7 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { return; } - self.data.member_constraints.push(MemberConstraint { + self.storage.data.member_constraints.push(MemberConstraint { key, definition_span, hidden_ty, @@ -646,8 +596,8 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { fn combine_map(&mut self, t: CombineMapType) -> &mut CombineMap<'tcx> { match t { - Glb => &mut self.glbs, - Lub => &mut self.lubs, + Glb => &mut self.storage.glbs, + Lub => &mut self.storage.lubs, } } @@ -700,11 +650,12 @@ impl<'tcx> RegionConstraintCollector<'_, 'tcx> { &self, value_count: usize, ) -> (Range<RegionVid>, Vec<RegionVariableOrigin>) { - let range = RegionVid::from(value_count)..RegionVid::from(self.unification_table.len()); + let range = + RegionVid::from(value_count)..RegionVid::from(self.storage.unification_table.len()); ( range.clone(), (range.start.index()..range.end.index()) - .map(|index| 
self.var_infos[ty::RegionVid::from(index)].origin) + .map(|index| self.storage.var_infos[ty::RegionVid::from(index)].origin) .collect(), ) } @@ -801,6 +752,25 @@ impl<'tcx> RegionConstraintData<'tcx> { impl<'tcx> Rollback<UndoLog<'tcx>> for RegionConstraintStorage<'tcx> { fn reverse(&mut self, undo: UndoLog<'tcx>) { - self.rollback_undo_entry(undo) + match undo { + AddVar(vid) => { + self.var_infos.pop().unwrap(); + assert_eq!(self.var_infos.len(), vid.index()); + } + AddConstraint(index) => { + self.data.constraints.pop().unwrap(); + assert_eq!(self.data.constraints.len(), index); + } + AddVerify(index) => { + self.data.verifys.pop(); + assert_eq!(self.data.verifys.len(), index); + } + AddCombination(Glb, ref regions) => { + self.glbs.remove(regions); + } + AddCombination(Lub, ref regions) => { + self.lubs.remove(regions); + } + } } } diff --git a/compiler/rustc_infer/src/infer/relate/generalize.rs b/compiler/rustc_infer/src/infer/relate/generalize.rs index a6d10aa5968..726a2296d11 100644 --- a/compiler/rustc_infer/src/infer/relate/generalize.rs +++ b/compiler/rustc_infer/src/infer/relate/generalize.rs @@ -50,7 +50,8 @@ impl<'tcx> InferCtxt<'tcx> { // Then the `generalized_ty` would be `&'?2 ?3`, where `'?2` and `?3` are fresh // region/type inference variables. // - // We then relate `generalized_ty <: source_ty`,adding constraints like `'x: '?2` and `?1 <: ?3`. + // We then relate `generalized_ty <: source_ty`, adding constraints like `'x: '?2` and + // `?1 <: ?3`. let Generalization { value_may_be_infer: generalized_ty, has_unconstrained_ty_var } = self .generalize( relation.span(), @@ -104,7 +105,8 @@ impl<'tcx> InferCtxt<'tcx> { &ty::Alias(ty::Projection, data) => { // FIXME: This does not handle subtyping correctly, we could // instead create a new inference variable `?normalized_source`, emitting - // `Projection(normalized_source, ?ty_normalized)` and `?normalized_source <: generalized_ty`. + // `Projection(normalized_source, ?ty_normalized)` and + // `?normalized_source <: generalized_ty`. relation.register_predicates([ty::ProjectionPredicate { projection_term: data.into(), term: generalized_ty.into(), diff --git a/compiler/rustc_infer/src/infer/resolve.rs b/compiler/rustc_infer/src/infer/resolve.rs index 025c3a629fa..64cc76f827e 100644 --- a/compiler/rustc_infer/src/infer/resolve.rs +++ b/compiler/rustc_infer/src/infer/resolve.rs @@ -38,7 +38,7 @@ impl<'a, 'tcx> TypeFolder<TyCtxt<'tcx>> for OpportunisticVarResolver<'a, 'tcx> { if !t.has_non_region_infer() { t // micro-optimize -- if there is nothing in this type that this fold affects... } else if let Some(&ty) = self.cache.get(&t) { - return ty; + ty } else { let shallow = self.infcx.shallow_resolve(t); let res = shallow.super_fold_with(self); diff --git a/compiler/rustc_infer/src/infer/type_variable.rs b/compiler/rustc_infer/src/infer/type_variable.rs index c50477b2922..779ce976bec 100644 --- a/compiler/rustc_infer/src/infer/type_variable.rs +++ b/compiler/rustc_infer/src/infer/type_variable.rs @@ -19,7 +19,7 @@ impl<'tcx> Rollback<sv::UndoLog<ut::Delegate<TyVidEqKey<'tcx>>>> for TypeVariabl } } -#[derive(Clone)] +#[derive(Clone, Default)] pub(crate) struct TypeVariableStorage<'tcx> { /// The origins of each type variable. 
values: IndexVec<TyVid, TypeVariableData>, @@ -74,13 +74,6 @@ impl<'tcx> TypeVariableValue<'tcx> { } impl<'tcx> TypeVariableStorage<'tcx> { - pub(crate) fn new() -> TypeVariableStorage<'tcx> { - TypeVariableStorage { - values: Default::default(), - eq_relations: ut::UnificationTableStorage::new(), - } - } - #[inline] pub(crate) fn with_log<'a>( &'a mut self, diff --git a/compiler/rustc_infer/src/traits/project.rs b/compiler/rustc_infer/src/traits/project.rs index fa813d0f90c..64b72de3986 100644 --- a/compiler/rustc_infer/src/traits/project.rs +++ b/compiler/rustc_infer/src/traits/project.rs @@ -92,38 +92,31 @@ pub enum ProjectionCacheEntry<'tcx> { Error, NormalizedTerm { ty: NormalizedTerm<'tcx>, - /// If we were able to successfully evaluate the - /// corresponding cache entry key during predicate - /// evaluation, then this field stores the final - /// result obtained from evaluating all of the projection - /// sub-obligations. During evaluation, we will skip - /// evaluating the cached sub-obligations in `ty` - /// if this field is set. Evaluation only - /// cares about the final result, so we don't - /// care about any region constraint side-effects - /// produced by evaluating the sub-obligations. + /// If we were able to successfully evaluate the corresponding cache + /// entry key during predicate evaluation, then this field stores the + /// final result obtained from evaluating all of the projection + /// sub-obligations. During evaluation, we will skip evaluating the + /// cached sub-obligations in `ty` if this field is set. Evaluation + /// only cares about the final result, so we don't care about any + /// region constraint side-effects produced by evaluating the + /// sub-obligations. /// - /// Additionally, we will clear out the sub-obligations - /// entirely if we ever evaluate the cache entry (along - /// with all its sub obligations) to `EvaluatedToOk`. - /// This affects all users of the cache, not just evaluation. - /// Since a result of `EvaluatedToOk` means that there were - /// no region obligations that need to be tracked, it's - /// fine to forget about the sub-obligations - they - /// don't provide any additional information. However, - /// we do *not* discard any obligations when we see - /// `EvaluatedToOkModuloRegions` - we don't know - /// which sub-obligations may introduce region constraints, - /// so we keep them all to be safe. + /// Additionally, we will clear out the sub-obligations entirely if we + /// ever evaluate the cache entry (along with all its sub obligations) + /// to `EvaluatedToOk`. This affects all users of the cache, not just + /// evaluation. Since a result of `EvaluatedToOk` means that there were + /// no region obligations that need to be tracked, it's fine to forget + /// about the sub-obligations - they don't provide any additional + /// information. However, we do *not* discard any obligations when we + /// see `EvaluatedToOkModuloRegions` - we don't know which + /// sub-obligations may introduce region constraints, so we keep them + /// all to be safe. /// - /// When we are not performing evaluation - /// (e.g. in `FulfillmentContext`), we ignore this field, - /// and always re-process the cached sub-obligations - /// (which may have been cleared out - see the above - /// paragraph). - /// This ensures that we do not lose any regions - /// constraints that arise from processing the - /// sub-obligations. + /// When we are not performing evaluation (e.g. 
in + /// `FulfillmentContext`), we ignore this field, and always re-process + /// the cached sub-obligations (which may have been cleared out - see + /// the above paragraph). This ensures that we do not lose any regions + /// constraints that arise from processing the sub-obligations. complete: Option<EvaluationResult>, }, } diff --git a/compiler/rustc_lexer/src/lib.rs b/compiler/rustc_lexer/src/lib.rs index 60aab668cba..b0ab50dd773 100644 --- a/compiler/rustc_lexer/src/lib.rs +++ b/compiler/rustc_lexer/src/lib.rs @@ -104,6 +104,12 @@ pub enum TokenKind { /// for emoji identifier recovery, as those are not meant to be ever accepted. InvalidPrefix, + /// Guarded string literal prefix: `#"` or `##`. + /// + /// Used for reserving "guarded strings" (RFC 3598) in edition 2024. + /// Split into the component tokens on older editions. + GuardedStrPrefix, + /// Examples: `12u8`, `1.0e-40`, `b"123"`. Note that `_` is an invalid /// suffix, but may be present here on string and float literals. Users of /// this type will need to check for and reject that case. @@ -191,30 +197,41 @@ pub enum DocStyle { /// `rustc_ast::ast::LitKind`). #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum LiteralKind { - /// "12_u8", "0o100", "0b120i99", "1f32". + /// `12_u8`, `0o100`, `0b120i99`, `1f32`. Int { base: Base, empty_int: bool }, - /// "12.34f32", "1e3", but not "1f32". + /// `12.34f32`, `1e3`, but not `1f32`. Float { base: Base, empty_exponent: bool }, - /// "'a'", "'\\'", "'''", "';" + /// `'a'`, `'\\'`, `'''`, `';` Char { terminated: bool }, - /// "b'a'", "b'\\'", "b'''", "b';" + /// `b'a'`, `b'\\'`, `b'''`, `b';` Byte { terminated: bool }, - /// ""abc"", ""abc" + /// `"abc"`, `"abc` Str { terminated: bool }, - /// "b"abc"", "b"abc" + /// `b"abc"`, `b"abc` ByteStr { terminated: bool }, /// `c"abc"`, `c"abc` CStr { terminated: bool }, - /// "r"abc"", "r#"abc"#", "r####"ab"###"c"####", "r#"a". `None` indicates + /// `r"abc"`, `r#"abc"#`, `r####"ab"###"c"####`, `r#"a`. `None` indicates /// an invalid literal. RawStr { n_hashes: Option<u8> }, - /// "br"abc"", "br#"abc"#", "br####"ab"###"c"####", "br#"a". `None` + /// `br"abc"`, `br#"abc"#`, `br####"ab"###"c"####`, `br#"a`. `None` /// indicates an invalid literal. RawByteStr { n_hashes: Option<u8> }, /// `cr"abc"`, "cr#"abc"#", `cr#"a`. `None` indicates an invalid literal. RawCStr { n_hashes: Option<u8> }, } +/// `#"abc"#`, `##"a"` (fewer closing), or even `#"a` (unterminated). +/// +/// Can capture fewer closing hashes than starting hashes, +/// for more efficient lexing and better backwards diagnostics. +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub struct GuardedStr { + pub n_hashes: u32, + pub terminated: bool, + pub token_len: u32, +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum RawStrError { /// Non `#` characters exist between `r` and `"`, e.g. `r##~"abcde"##` @@ -403,6 +420,12 @@ impl Cursor<'_> { TokenKind::Literal { kind: literal_kind, suffix_start } } + // Guarded string literal prefix: `#"` or `##` + '#' if matches!(self.first(), '"' | '#') => { + self.bump(); + TokenKind::GuardedStrPrefix + } + // One-symbol tokens. ';' => Semi, ',' => Comma, @@ -780,6 +803,60 @@ impl Cursor<'_> { false } + /// Attempt to lex for a guarded string literal. + /// + /// Used by `rustc_parse::lexer` to lex for guarded strings + /// conditionally based on edition. + /// + /// Note: this will not reset the `Cursor` when a + /// guarded string is not found. 
It is the caller's + /// responsibility to do so. + pub fn guarded_double_quoted_string(&mut self) -> Option<GuardedStr> { + debug_assert!(self.prev() != '#'); + + let mut n_start_hashes: u32 = 0; + while self.first() == '#' { + n_start_hashes += 1; + self.bump(); + } + + if self.first() != '"' { + return None; + } + self.bump(); + debug_assert!(self.prev() == '"'); + + // Lex the string itself as a normal string literal + // so we can recover that for older editions later. + let terminated = self.double_quoted_string(); + if !terminated { + let token_len = self.pos_within_token(); + self.reset_pos_within_token(); + + return Some(GuardedStr { n_hashes: n_start_hashes, terminated: false, token_len }); + } + + // Consume closing '#' symbols. + // Note that this will not consume extra trailing `#` characters: + // `###"abcde"####` is lexed as a `GuardedStr { n_end_hashes: 3, .. }` + // followed by a `#` token. + let mut n_end_hashes = 0; + while self.first() == '#' && n_end_hashes < n_start_hashes { + n_end_hashes += 1; + self.bump(); + } + + // Reserved syntax, always an error, so it doesn't matter if + // `n_start_hashes != n_end_hashes`. + + self.eat_literal_suffix(); + + let token_len = self.pos_within_token(); + self.reset_pos_within_token(); + + Some(GuardedStr { n_hashes: n_start_hashes, terminated: true, token_len }) + } + /// Eats the double-quoted string and returns `n_hashes` and an error if encountered. fn raw_double_quoted_string(&mut self, prefix_len: u32) -> Result<u8, RawStrError> { // Wrap the actual function to handle the error with too many hashes. diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index d3799594871..e733e92c7cb 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -740,6 +740,9 @@ lint_reserved_prefix = prefix `{$prefix}` is unknown .label = unknown prefix .suggestion = insert whitespace here to avoid this being parsed as a prefix in Rust 2021 +lint_reserved_string = will be parsed as a guarded string in Rust 2024 + .suggestion = insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + lint_shadowed_into_iter = this method call resolves to `<&{$target} as IntoIterator>::into_iter` (due to backwards compatibility), but will resolve to `<{$target} as IntoIterator>::into_iter` in Rust {$edition} .use_iter_suggestion = use `.iter()` instead of `.into_iter()` to avoid ambiguity diff --git a/compiler/rustc_lint/src/context/diagnostics.rs b/compiler/rustc_lint/src/context/diagnostics.rs index b5ab56912cb..565c3c04252 100644 --- a/compiler/rustc_lint/src/context/diagnostics.rs +++ b/compiler/rustc_lint/src/context/diagnostics.rs @@ -176,6 +176,9 @@ pub(super) fn decorate_lint(sess: &Session, diagnostic: BuiltinLintDiag, diag: & lints::RawPrefix { label: label_span, suggestion: label_span.shrink_to_hi() } .decorate_lint(diag); } + BuiltinLintDiag::ReservedString(suggestion) => { + lints::ReservedString { suggestion }.decorate_lint(diag); + } BuiltinLintDiag::UnusedBuiltinAttribute { attr_name, macro_name, invoc_span } => { lints::UnusedBuiltinAttribute { invoc_span, attr_name, macro_name }.decorate_lint(diag); } diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index a861894a444..87afeca0b28 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -3053,3 +3053,10 @@ pub(crate) enum MutRefSugg { #[derive(LintDiagnostic)] #[diag(lint_unqualified_local_imports)] pub(crate) struct UnqualifiedLocalImportsDiag {} + 
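(Aside, not part of the patch.) To make the edition split concrete: the lint `rust_2024_guarded_string_incompatible_syntax` added here concerns token sequences such as `#"hey"#`, which only reach user code through macro input. The sketch below is adapted from the lint documentation in this change, using `literal` matchers to keep the macro simple; on editions up to 2021 it is accepted (the migration lint is allow-by-default), while on Rust 2024 the guarded-string token makes it a hard error:

```rust
// Rust 2021 and earlier: `#"hey"#` lexes as three tokens (`#`, `"hey"`, `#`),
// so the macro arms below can match them. In Rust 2024 the whole sequence is
// reserved as a single guarded-string token and this no longer compiles.
macro_rules! m {
    (# $s:literal #) => { $s };
    (# $s:literal) => { $s };
}

fn main() {
    let a = m!(#"hey"#);
    let b = m!(#"hello");
    // The machine-applicable migration fix is to insert whitespace after the
    // leading `#`, which parses the same way on every edition:
    let c = m!(# "hey" #);
    println!("{a} {b} {c}");
}
```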
+#[derive(LintDiagnostic)] +#[diag(lint_reserved_string)] +pub(crate) struct ReservedString { + #[suggestion(code = " ", applicability = "machine-applicable")] + pub suggestion: Span, +} diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index a123059df8f..827791c54be 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -92,6 +92,7 @@ declare_lint_pass! { RUST_2021_INCOMPATIBLE_OR_PATTERNS, RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2021_PRELUDE_COLLISIONS, + RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX, RUST_2024_INCOMPATIBLE_PAT, RUST_2024_PRELUDE_COLLISIONS, SELF_CONSTRUCTOR_FROM_OUTER_ITEM, @@ -4996,3 +4997,43 @@ declare_lint! { Warn, "detects pointer to integer transmutes in const functions and associated constants", } + +declare_lint! { + /// The `rust_2024_guarded_string_incompatible_syntax` lint detects `#` tokens + /// that will be parsed as part of a guarded string literal in Rust 2024. + /// + /// ### Example + /// + /// ```rust,edition2021,compile_fail + /// #![deny(rust_2024_guarded_string_incompatible_syntax)] + /// + /// macro_rules! m { + /// (# $x:expr #) => (); + /// (# $x:expr) => (); + /// } + /// + /// m!(#"hey"#); + /// m!(#"hello"); + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Prior to Rust 2024, `#"hey"#` is three tokens: the first `#` + /// followed by the string literal `"hey"` then the final `#`. + /// In Rust 2024, the whole sequence is considered a single token. + /// + /// This lint suggests to add whitespace between the leading `#` + /// and the string to keep them separated in Rust 2024. + // Allow this lint -- rustdoc doesn't yet support threading edition into this lint's parser. + #[allow(rustdoc::invalid_rust_codeblocks)] + pub RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX, + Allow, + "will be parsed as a guarded string in Rust 2024", + @future_incompatible = FutureIncompatibleInfo { + reason: FutureIncompatibilityReason::EditionError(Edition::Edition2024), + reference: "issue #123735 <https://github.com/rust-lang/rust/issues/123735>", + }; + crate_level_only +} diff --git a/compiler/rustc_lint_defs/src/lib.rs b/compiler/rustc_lint_defs/src/lib.rs index 386918a5c41..c01fa5c54d6 100644 --- a/compiler/rustc_lint_defs/src/lib.rs +++ b/compiler/rustc_lint_defs/src/lib.rs @@ -614,6 +614,8 @@ pub enum BuiltinLintDiag { ReservedPrefix(Span, String), /// `'r#` in edition < 2021. RawPrefix(Span), + /// `##` or `#"` is edition < 2024. 
+ ReservedString(Span), TrailingMacro(bool, Ident), BreakWithLabelAndLoop(Span), UnicodeTextFlow(Span, String), diff --git a/compiler/rustc_parse/messages.ftl b/compiler/rustc_parse/messages.ftl index 948199fd55c..ba5e2ddf4fc 100644 --- a/compiler/rustc_parse/messages.ftl +++ b/compiler/rustc_parse/messages.ftl @@ -706,6 +706,10 @@ parse_require_colon_after_labeled_expression = labeled expression must be follow .label = the label .suggestion = add `:` after the label +parse_reserved_string = invalid string literal + .note = unprefixed guarded string literals are reserved for future use since Rust 2024 + .suggestion_whitespace = consider inserting whitespace here + parse_return_types_use_thin_arrow = return types are denoted using `->` .suggestion = use `->` instead diff --git a/compiler/rustc_parse/src/errors.rs b/compiler/rustc_parse/src/errors.rs index dade3912751..124975f67f1 100644 --- a/compiler/rustc_parse/src/errors.rs +++ b/compiler/rustc_parse/src/errors.rs @@ -2111,6 +2111,24 @@ pub(crate) enum UnknownPrefixSugg { } #[derive(Diagnostic)] +#[diag(parse_reserved_string)] +#[note] +pub(crate) struct ReservedString { + #[primary_span] + pub span: Span, + #[subdiagnostic] + pub sugg: Option<GuardedStringSugg>, +} +#[derive(Subdiagnostic)] +#[suggestion( + parse_suggestion_whitespace, + code = " ", + applicability = "maybe-incorrect", + style = "verbose" +)] +pub(crate) struct GuardedStringSugg(#[primary_span] pub Span); + +#[derive(Diagnostic)] #[diag(parse_too_many_hashes)] pub(crate) struct TooManyHashes { #[primary_span] diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index 3e46fc93fa4..d627ef3d2cb 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -10,7 +10,8 @@ use rustc_lexer::unescape::{self, EscapeError, Mode}; use rustc_lexer::{Base, Cursor, DocStyle, LiteralKind, RawStrError}; use rustc_session::lint::BuiltinLintDiag; use rustc_session::lint::builtin::{ - RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, TEXT_DIRECTION_CODEPOINT_IN_COMMENT, + RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX, + TEXT_DIRECTION_CODEPOINT_IN_COMMENT, }; use rustc_session::parse::ParseSess; use rustc_span::symbol::Symbol; @@ -251,6 +252,7 @@ impl<'psess, 'src> StringReader<'psess, 'src> { let prefix_span = self.mk_sp(start, lit_start); return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace); } + rustc_lexer::TokenKind::GuardedStrPrefix => self.maybe_report_guarded_str(start, str_before), rustc_lexer::TokenKind::Literal { kind, suffix_start } => { let suffix_start = start + BytePos(suffix_start); let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind); @@ -781,6 +783,86 @@ impl<'psess, 'src> StringReader<'psess, 'src> { } } + /// Detect guarded string literal syntax + /// + /// RFC 3598 reserved this syntax for future use. As of Rust 2024, + /// using this syntax produces an error. In earlier editions, however, it + /// only results in an (allowed by default) lint, and is treated as + /// separate tokens. 
+ fn maybe_report_guarded_str(&mut self, start: BytePos, str_before: &'src str) -> TokenKind { + let span = self.mk_sp(start, self.pos); + let edition2024 = span.edition().at_least_rust_2024(); + + let space_pos = start + BytePos(1); + let space_span = self.mk_sp(space_pos, space_pos); + + let mut cursor = Cursor::new(str_before); + + let (span, unterminated) = match cursor.guarded_double_quoted_string() { + Some(rustc_lexer::GuardedStr { n_hashes, terminated, token_len }) => { + let end = start + BytePos(token_len); + let span = self.mk_sp(start, end); + let str_start = start + BytePos(n_hashes); + + if edition2024 { + self.cursor = cursor; + self.pos = end; + } + + let unterminated = if terminated { None } else { Some(str_start) }; + + (span, unterminated) + } + _ => { + // We should only get here in the `##+` case. + debug_assert_eq!(self.str_from_to(start, start + BytePos(2)), "##"); + + (span, None) + } + }; + if edition2024 { + if let Some(str_start) = unterminated { + // Only a fatal error if string is unterminated. + self.dcx() + .struct_span_fatal( + self.mk_sp(str_start, self.pos), + "unterminated double quote string", + ) + .with_code(E0765) + .emit() + } + + let sugg = if span.from_expansion() { + None + } else { + Some(errors::GuardedStringSugg(space_span)) + }; + + // In Edition 2024 and later, emit a hard error. + let err = self.dcx().emit_err(errors::ReservedString { span, sugg }); + + token::Literal(token::Lit { + kind: token::Err(err), + symbol: self.symbol_from_to(start, self.pos), + suffix: None, + }) + } else { + // Before Rust 2024, only emit a lint for migration. + self.psess.buffer_lint( + RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX, + span, + ast::CRATE_NODE_ID, + BuiltinLintDiag::ReservedString(space_span), + ); + + // For backwards compatibility, roll back to after just the first `#` + // and return the `Pound` token. + self.pos = start + BytePos(1); + self.cursor = Cursor::new(&str_before[1..]); + token::Pound + } + } + fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! { self.dcx().emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num }); } diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs index 95cf55a923e..8ccd1a44ff1 100644 --- a/library/core/src/cell.rs +++ b/library/core/src/cell.rs @@ -666,7 +666,7 @@ impl<T: Default> Cell<T> { impl<T: CoerceUnsized<U>, U> CoerceUnsized<Cell<U>> for Cell<T> {} // Allow types that wrap `Cell` to also implement `DispatchFromDyn` -// and become object safe method receivers. +// and become dyn-compatible method receivers. // Note that currently `Cell` itself cannot be a method receiver // because it does not implement Deref. // In other words: @@ -2247,7 +2247,7 @@ impl<T> From<T> for UnsafeCell<T> { impl<T: CoerceUnsized<U>, U> CoerceUnsized<UnsafeCell<U>> for UnsafeCell<T> {} // Allow types that wrap `UnsafeCell` to also implement `DispatchFromDyn` -// and become object safe method receivers. +// and become dyn-compatible method receivers. // Note that currently `UnsafeCell` itself cannot be a method receiver // because it does not implement Deref. // In other words: @@ -2349,7 +2349,7 @@ impl<T> From<T> for SyncUnsafeCell<T> { impl<T: CoerceUnsized<U>, U> CoerceUnsized<SyncUnsafeCell<U>> for SyncUnsafeCell<T> {} // Allow types that wrap `SyncUnsafeCell` to also implement `DispatchFromDyn` -// and become object safe method receivers. +// and become dyn-compatible method receivers. 
// Note that currently `SyncUnsafeCell` itself cannot be a method receiver // because it does not implement Deref. // In other words: diff --git a/library/core/src/error.rs b/library/core/src/error.rs index cac00b37d1f..95a39cc3aed 100644 --- a/library/core/src/error.rs +++ b/library/core/src/error.rs @@ -335,16 +335,17 @@ impl dyn Error { #[unstable(feature = "error_iter", issue = "58520")] #[inline] pub fn sources(&self) -> Source<'_> { - // You may think this method would be better in the Error trait, and you'd be right. - // Unfortunately that doesn't work, not because of the object safety rules but because we - // save a reference to self in Sources below as a trait object. If this method was - // declared in Error, then self would have the type &T where T is some concrete type which - // implements Error. We would need to coerce self to have type &dyn Error, but that requires - // that Self has a known size (i.e., Self: Sized). We can't put that bound on Error - // since that would forbid Error trait objects, and we can't put that bound on the method - // because that means the method can't be called on trait objects (we'd also need the - // 'static bound, but that isn't allowed because methods with bounds on Self other than - // Sized are not object-safe). Requiring an Unsize bound is not backwards compatible. + // You may think this method would be better in the `Error` trait, and you'd be right. + // Unfortunately that doesn't work, not because of the dyn-incompatibility rules but + // because we save a reference to `self` in `Source`s below as a trait object. + // If this method was declared in `Error`, then `self` would have the type `&T` where + // `T` is some concrete type which implements `Error`. We would need to coerce `self` + // to have type `&dyn Error`, but that requires that `Self` has a known size + // (i.e., `Self: Sized`). We can't put that bound on `Error` since that would forbid + // `Error` trait objects, and we can't put that bound on the method because that means + // the method can't be called on trait objects (we'd also need the `'static` bound, + // but that isn't allowed because methods with bounds on `Self` other than `Sized` are + // dyn-incompatible). Requiring an `Unsize` bound is not backwards compatible. Source { current: Some(self) } } diff --git a/library/core/src/iter/traits/iterator.rs b/library/core/src/iter/traits/iterator.rs index 7963459bfb5..302720eddef 100644 --- a/library/core/src/iter/traits/iterator.rs +++ b/library/core/src/iter/traits/iterator.rs @@ -9,7 +9,7 @@ use crate::cmp::{self, Ordering}; use crate::num::NonZero; use crate::ops::{ChangeOutputType, ControlFlow, FromResidual, Residual, Try}; -fn _assert_is_object_safe(_: &dyn Iterator<Item = ()>) {} +fn _assert_is_dyn_compatible(_: &dyn Iterator<Item = ()>) {} /// A trait for dealing with iterators. /// diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index fd41b80cdbd..aed6be4c627 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -158,7 +158,7 @@ pub trait Sized { /// - Arrays `[T; N]` implement `Unsize<[T]>`. /// - A type implements `Unsize<dyn Trait + 'a>` if all of these conditions are met: /// - The type implements `Trait`. -/// - `Trait` is object safe. +/// - `Trait` is dyn-compatible[^1]. /// - The type is sized. /// - The type outlives `'a`. 
/// - Structs `Foo<..., T1, ..., Tn, ...>` implement `Unsize<Foo<..., U1, ..., Un, ...>>` @@ -178,6 +178,7 @@ pub trait Sized { /// [`Rc`]: ../../std/rc/struct.Rc.html /// [RFC982]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md /// [nomicon-coerce]: ../../nomicon/coercions.html +/// [^1]: Formerly known as *object safe*. #[unstable(feature = "unsize", issue = "18598")] #[lang = "unsize"] #[rustc_deny_explicit_impl(implement_via_object = false)] diff --git a/library/core/src/ops/unsize.rs b/library/core/src/ops/unsize.rs index b51f12580ea..d2a07197f6f 100644 --- a/library/core/src/ops/unsize.rs +++ b/library/core/src/ops/unsize.rs @@ -68,8 +68,8 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *mut T {} #[unstable(feature = "coerce_unsized", issue = "18598")] impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {} -/// `DispatchFromDyn` is used in the implementation of object safety checks (specifically allowing -/// arbitrary self types), to guarantee that a method's receiver type can be dispatched on. +/// `DispatchFromDyn` is used in the implementation of dyn-compatibility[^1] checks (specifically +/// allowing arbitrary self types), to guarantee that a method's receiver type can be dispatched on. /// /// Note: `DispatchFromDyn` was briefly named `CoerceSized` (and had a slightly different /// interpretation). @@ -80,7 +80,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {} /// type). The compiler must generate an implicit conversion from the trait object/wide pointer to /// the concrete reference/narrow pointer. Implementing `DispatchFromDyn` indicates that that /// conversion is allowed and thus that the type implementing `DispatchFromDyn` is safe to use as -/// the self type in an object-safe method. (in the above example, the compiler will require +/// the self type in an dyn-compatible method. (in the above example, the compiler will require /// `DispatchFromDyn` is implemented for `&'a U`). /// /// `DispatchFromDyn` does not specify the conversion from wide pointer to narrow pointer; the @@ -112,6 +112,8 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<*const U> for *const T {} /// T: Unsize<U>, /// {} /// ``` +/// +/// [^1]: Formerly known as *object safety*. 
#[unstable(feature = "dispatch_from_dyn", issue = "none")] #[lang = "dispatch_from_dyn"] pub trait DispatchFromDyn<T> { diff --git a/library/core/src/primitive_docs.rs b/library/core/src/primitive_docs.rs index c25501f1200..89936dc12ac 100644 --- a/library/core/src/primitive_docs.rs +++ b/library/core/src/primitive_docs.rs @@ -862,6 +862,27 @@ mod prim_array {} /// assert_eq!(x, &[1, 7, 3]); /// ``` /// +/// It is possible to slice empty subranges of slices by using empty ranges (including `slice.len()..slice.len()`): +/// ``` +/// let x = [1, 2, 3]; +/// let empty = &x[0..0]; // subslice before the first element +/// assert_eq!(empty, &[]); +/// let empty = &x[..0]; // same as &x[0..0] +/// assert_eq!(empty, &[]); +/// let empty = &x[1..1]; // empty subslice in the middle +/// assert_eq!(empty, &[]); +/// let empty = &x[3..3]; // subslice after the last element +/// assert_eq!(empty, &[]); +/// let empty = &x[3..]; // same as &x[3..3] +/// assert_eq!(empty, &[]); +/// ``` +/// +/// It is not allowed to use subranges that start with lower bound bigger than `slice.len()`: +/// ```should_panic +/// let x = vec![1, 2, 3]; +/// let _ = &x[4..4]; +/// ``` +/// /// As slices store the length of the sequence they refer to, they have twice /// the size of pointers to [`Sized`](marker/trait.Sized.html) types. /// Also see the reference on diff --git a/library/core/tests/hash/mod.rs b/library/core/tests/hash/mod.rs index 03826fc4c92..bf91e9e5df0 100644 --- a/library/core/tests/hash/mod.rs +++ b/library/core/tests/hash/mod.rs @@ -164,7 +164,7 @@ fn test_indirect_hasher() { } #[test] -fn test_build_hasher_object_safe() { +fn test_build_hasher_dyn_compatible() { use std::hash::{DefaultHasher, RandomState}; let _: &dyn BuildHasher<Hasher = DefaultHasher> = &RandomState::new(); diff --git a/library/std/src/keyword_docs.rs b/library/std/src/keyword_docs.rs index 9f4d244b547..453b2708daa 100644 --- a/library/std/src/keyword_docs.rs +++ b/library/std/src/keyword_docs.rs @@ -2349,12 +2349,13 @@ mod async_keyword {} /// [`async`]: ../std/keyword.async.html mod await_keyword {} +// FIXME(dyn_compat_renaming): Update URL and link text. #[doc(keyword = "dyn")] // /// `dyn` is a prefix of a [trait object]'s type. /// /// The `dyn` keyword is used to highlight that calls to methods on the associated `Trait` -/// are [dynamically dispatched]. To use the trait this way, it must be 'object safe'. +/// are [dynamically dispatched]. To use the trait this way, it must be 'dyn-compatible'[^1]. /// /// Unlike generic parameters or `impl Trait`, the compiler does not know the concrete type that /// is being passed. That is, the type has been [erased]. @@ -2382,6 +2383,7 @@ mod await_keyword {} /// [ref-trait-obj]: ../reference/types/trait-object.html /// [ref-obj-safety]: ../reference/items/traits.html#object-safety /// [erased]: https://en.wikipedia.org/wiki/Type_erasure +/// [^1]: Formerly known as 'object safe'. mod dyn_keyword {} #[doc(keyword = "union")] diff --git a/src/bootstrap/src/bin/rustc.rs b/src/bootstrap/src/bin/rustc.rs index 780979ed981..18f5a1a58db 100644 --- a/src/bootstrap/src/bin/rustc.rs +++ b/src/bootstrap/src/bin/rustc.rs @@ -136,6 +136,12 @@ fn main() { cmd.args(lint_flags.split_whitespace()); } + // Conditionally pass `-Zon-broken-pipe=kill` to underlying rustc. Not all binaries want + // `-Zon-broken-pipe=kill`, which includes cargo itself. 
+ if env::var_os("FORCE_ON_BROKEN_PIPE_KILL").is_some() { + cmd.arg("-Z").arg("on-broken-pipe=kill"); + } + if target.is_some() { // The stage0 compiler has a special sysroot distinct from what we // actually downloaded, so we just always pass the `--sysroot` option, diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index b5be7d841dd..27bbc8bd8ff 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -1053,8 +1053,19 @@ pub fn rustc_cargo( cargo.rustdocflag("-Zcrate-attr=warn(rust_2018_idioms)"); - // If the rustc output is piped to e.g. `head -n1` we want the process to be - // killed, rather than having an error bubble up and cause a panic. + // If the rustc output is piped to e.g. `head -n1` we want the process to be killed, rather than + // having an error bubble up and cause a panic. + // + // FIXME(jieyouxu): this flag is load-bearing for rustc to not ICE on broken pipes, because + // rustc internally sometimes uses std `println!` -- but std `println!` by default will panic on + // broken pipes, and uncaught panics will manifest as an ICE. The compiler *should* handle this + // properly, but this flag is set in the meantime to paper over the I/O errors. + // + // See <https://github.com/rust-lang/rust/issues/131059> for details. + // + // Also see the discussion for properly handling I/O errors related to broken pipes, i.e. safe + // variants of `println!` in + // <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Internal.20lint.20for.20raw.20.60print!.60.20and.20.60println!.60.3F>. cargo.rustflag("-Zon-broken-pipe=kill"); if builder.config.llvm_enzyme { diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 7a49b68b91e..0dd3d0b3bd1 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -2082,7 +2082,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the } if builder.config.profiler_enabled(target) { - cmd.arg("--profiler-support"); + cmd.arg("--profiler-runtime"); } cmd.env("RUST_TEST_TMPDIR", builder.tempdir()); diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index e2fcd13efe3..a01497c2bb9 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -209,11 +209,28 @@ pub fn prepare_tool_cargo( // See https://github.com/rust-lang/rust/issues/116538 cargo.rustflag("-Zunstable-options"); - // `-Zon-broken-pipe=kill` breaks cargo tests + // NOTE: The root cause of needing `-Zon-broken-pipe=kill` in the first place is that `rustc` + // and `rustdoc` don't gracefully handle I/O errors due to usages of raw std `println!` macros, + // which panic upon encountering broken pipes. `-Zon-broken-pipe=kill` just papers over that + // and stops rustc/rustdoc ICEing on e.g. `rustc --print=sysroot | false`. + // + // cargo explicitly does not want the `-Zon-broken-pipe=kill` papering because it does actually use + // variants of `println!` that handle I/O errors gracefully. It's also a breaking change for a + // spawned process not written in Rust, especially if the language's default handler is not + // `SIG_IGN`. Thankfully cargo tests will break if we do set the flag.
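(Editor's aside, not code from this PR.) The comments above contrast raw `println!`, which panics when stdout is a closed pipe, with the graceful handling cargo already does. A minimal sketch of the graceful variant, assuming the desired behaviour is to exit quietly on `BrokenPipe`:

```rust
use std::io::{self, Write};

// Unlike `println!`, a failed write surfaces as an `Err` instead of a panic.
fn print_line(msg: &str) -> io::Result<()> {
    let mut out = io::stdout().lock();
    writeln!(out, "{msg}")
}

fn main() {
    if let Err(e) = print_line("example output") {
        if e.kind() == io::ErrorKind::BrokenPipe {
            // The reader went away (e.g. piped into `head -n1`): stop quietly instead of panicking.
            std::process::exit(0);
        }
        eprintln!("I/O error: {e}");
        std::process::exit(1);
    }
}
```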
+ // + // For the cargo discussion, see + // <https://rust-lang.zulipchat.com/#narrow/stream/246057-t-cargo/topic/Applying.20.60-Zon-broken-pipe.3Dkill.60.20flags.20in.20bootstrap.3F>. + // + // For the rustc discussion, see + // <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Internal.20lint.20for.20raw.20.60print!.60.20and.20.60println!.60.3F> + // for proper solutions. if !path.ends_with("cargo") { - // If the output is piped to e.g. `head -n1` we want the process to be killed, - // rather than having an error bubble up and cause a panic. - cargo.rustflag("-Zon-broken-pipe=kill"); + // Use an untracked env var `FORCE_ON_BROKEN_PIPE_KILL` here instead of `RUSTFLAGS`. + // `RUSTFLAGS` is tracked by cargo. Conditionally omitting `-Zon-broken-pipe=kill` from + // `RUSTFLAGS` causes unnecessary tool rebuilds due to cache invalidation from building e.g. + // cargo *without* `-Zon-broken-pipe=kill` but then rustdoc *with* `-Zon-broken-pipe=kill`. + cargo.env("FORCE_ON_BROKEN_PIPE_KILL", "-Zon-broken-pipe=kill"); } cargo diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index 155c6515db8..d32830c0a96 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -414,6 +414,15 @@ impl StepDescription { .map(|desc| (desc.should_run)(ShouldRun::new(builder, desc.kind))) .collect::<Vec<_>>(); + if builder.download_rustc() && (builder.kind == Kind::Dist || builder.kind == Kind::Install) + { + eprintln!( + "ERROR: '{}' subcommand is incompatible with `rust.download-rustc`.", + builder.kind.as_str() + ); + crate::exit!(1); + } + // sanity checks on rules for (desc, should_run) in v.iter().zip(&should_runs) { assert!( diff --git a/src/bootstrap/src/core/builder/tests.rs b/src/bootstrap/src/core/builder/tests.rs index bd81dc930be..ab36193f8b6 100644 --- a/src/bootstrap/src/core/builder/tests.rs +++ b/src/bootstrap/src/core/builder/tests.rs @@ -1,5 +1,7 @@ use std::thread; +use build_helper::git::get_closest_merge_commit; + use super::*; use crate::Flags; use crate::core::build_steps::doc::DocumentationFormat; @@ -212,6 +214,52 @@ fn alias_and_path_for_library() { assert_eq!(first(cache.all::<doc::Std>()), &[doc_std!(A => A, stage = 0)]); } +#[test] +fn ci_rustc_if_unchanged_logic() { + let config = Config::parse_inner( + Flags::parse(&[ + "build".to_owned(), + "--dry-run".to_owned(), + "--set=rust.download-rustc='if-unchanged'".to_owned(), + ]), + |&_| Ok(Default::default()), + ); + + if config.rust_info.is_from_tarball() { + return; + } + + let build = Build::new(config.clone()); + let builder = Builder::new(&build); + + if config.out.exists() { + fs::remove_dir_all(&config.out).unwrap(); + } + + builder.run_step_descriptions(&Builder::get_step_descriptions(config.cmd.kind()), &[]); + + let compiler_path = build.src.join("compiler"); + let library_path = build.src.join("library"); + + let commit = + get_closest_merge_commit(Some(&builder.config.src), &builder.config.git_config(), &[ + compiler_path.clone(), + library_path.clone(), + ]) + .unwrap(); + + let has_changes = !helpers::git(Some(&builder.src)) + .args(["diff-index", "--quiet", &commit]) + .arg("--") + .args([compiler_path, library_path]) + .as_command_mut() + .status() + .unwrap() + .success(); + + assert!(has_changes == config.download_rustc_commit.is_none()); +} + mod defaults { use pretty_assertions::assert_eq; diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index 7dc3b7b081a..9f84b492b80 100644 --- 
a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -13,6 +13,7 @@ use std::str::FromStr; use std::sync::OnceLock; use std::{cmp, env, fs}; +use build_helper::ci::CiEnv; use build_helper::exit; use build_helper::git::{GitConfig, get_closest_merge_commit, output_result}; use serde::{Deserialize, Deserializer}; @@ -22,6 +23,7 @@ use crate::core::build_steps::compile::CODEGEN_BACKEND_PREFIX; use crate::core::build_steps::llvm; pub use crate::core::config::flags::Subcommand; use crate::core::config::flags::{Color, Flags, Warnings}; +use crate::core::download::is_download_ci_available; use crate::utils::cache::{INTERNER, Interned}; use crate::utils::channel::{self, GitInfo}; use crate::utils::helpers::{self, exe, output, t}; @@ -1627,9 +1629,11 @@ impl Config { config.mandir = mandir.map(PathBuf::from); } + config.llvm_assertions = + toml.llvm.as_ref().map_or(false, |llvm| llvm.assertions.unwrap_or(false)); + // Store off these values as options because if they're not provided // we'll infer default values for them later - let mut llvm_assertions = None; let mut llvm_tests = None; let mut llvm_enzyme = None; let mut llvm_plugins = None; @@ -1712,7 +1716,8 @@ impl Config { is_user_configured_rust_channel = channel.is_some(); set(&mut config.channel, channel.clone()); - config.download_rustc_commit = config.download_ci_rustc_commit(download_rustc); + config.download_rustc_commit = + config.download_ci_rustc_commit(download_rustc, config.llvm_assertions); debug = debug_toml; debug_assertions = debug_assertions_toml; @@ -1848,7 +1853,7 @@ impl Config { optimize: optimize_toml, thin_lto, release_debuginfo, - assertions, + assertions: _, tests, enzyme, plugins, @@ -1882,7 +1887,6 @@ impl Config { Some(StringOrBool::Bool(false)) | None => {} } set(&mut config.ninja_in_file, ninja); - llvm_assertions = assertions; llvm_tests = tests; llvm_enzyme = enzyme; llvm_plugins = plugins; @@ -1911,8 +1915,8 @@ impl Config { config.llvm_enable_warnings = enable_warnings.unwrap_or(false); config.llvm_build_config = build_config.clone().unwrap_or(Default::default()); - let asserts = llvm_assertions.unwrap_or(false); - config.llvm_from_ci = config.parse_download_ci_llvm(download_ci_llvm, asserts); + config.llvm_from_ci = + config.parse_download_ci_llvm(download_ci_llvm, config.llvm_assertions); if config.llvm_from_ci { let warn = |option: &str| { @@ -2080,7 +2084,6 @@ impl Config { // Now that we've reached the end of our configuration, infer the // default values for all options that we haven't otherwise stored yet. - config.llvm_assertions = llvm_assertions.unwrap_or(false); config.llvm_tests = llvm_tests.unwrap_or(false); config.llvm_enzyme = llvm_enzyme.unwrap_or(false); config.llvm_plugins = llvm_plugins.unwrap_or(false); @@ -2419,8 +2422,9 @@ impl Config { ci_config_toml, ); - let disable_ci_rustc_if_incompatible = - env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE") + // Primarily used by CI runners to avoid handling download-rustc incompatible + // options one by one on shell scripts. + let disable_ci_rustc_if_incompatible = env::var_os("DISABLE_CI_RUSTC_IF_INCOMPATIBLE") .is_some_and(|s| s == "1" || s == "true"); if disable_ci_rustc_if_incompatible && res.is_err() { @@ -2711,7 +2715,15 @@ impl Config { } /// Returns the commit to download, or `None` if we shouldn't download CI artifacts. 
- fn download_ci_rustc_commit(&self, download_rustc: Option<StringOrBool>) -> Option<String> { + fn download_ci_rustc_commit( + &self, + download_rustc: Option<StringOrBool>, + llvm_assertions: bool, + ) -> Option<String> { + if !is_download_ci_available(&self.build.triple, llvm_assertions) { + return None; + } + // If `download-rustc` is not set, default to rebuilding. let if_unchanged = match download_rustc { None | Some(StringOrBool::Bool(false)) => return None, @@ -2724,7 +2736,11 @@ impl Config { // Look for a version to compare to based on the current commit. // Only commits merged by bors will have CI artifacts. - let commit = get_closest_merge_commit(Some(&self.src), &self.git_config(), &[]).unwrap(); + let commit = get_closest_merge_commit(Some(&self.src), &self.git_config(), &[ + self.src.join("compiler"), + self.src.join("library"), + ]) + .unwrap(); if commit.is_empty() { println!("ERROR: could not find commit hash for downloading rustc"); println!("HELP: maybe your repository history is too shallow?"); @@ -2733,6 +2749,19 @@ impl Config { crate::exit!(1); } + if CiEnv::is_ci() && { + let head_sha = + output(helpers::git(Some(&self.src)).arg("rev-parse").arg("HEAD").as_command_mut()); + let head_sha = head_sha.trim(); + commit == head_sha + } { + eprintln!("CI rustc commit matches with HEAD and we are in CI."); + eprintln!( + "`rustc.download-ci` functionality will be skipped as artifacts are not available." + ); + return None; + } + // Warn if there were changes to the compiler or standard library since the ancestor commit. let has_changes = !t!(helpers::git(Some(&self.src)) .args(["diff-index", "--quiet", &commit]) diff --git a/src/bootstrap/src/core/config/tests.rs b/src/bootstrap/src/core/config/tests.rs index 278becdcbc7..ed89112de90 100644 --- a/src/bootstrap/src/core/config/tests.rs +++ b/src/bootstrap/src/core/config/tests.rs @@ -12,7 +12,7 @@ use super::{ChangeIdWrapper, Config}; use crate::core::build_steps::clippy::get_clippy_rules_in_order; use crate::core::config::{LldMode, Target, TargetSelection, TomlConfig}; -fn parse(config: &str) -> Config { +pub(crate) fn parse(config: &str) -> Config { Config::parse_inner( Flags::parse(&["check".to_string(), "--config=/does/not/exist".to_string()]), |&_| toml::from_str(&config), diff --git a/src/bootstrap/src/core/download.rs b/src/bootstrap/src/core/download.rs index 444b75876f2..db1f5b08338 100644 --- a/src/bootstrap/src/core/download.rs +++ b/src/bootstrap/src/core/download.rs @@ -832,3 +832,43 @@ fn path_is_dylib(path: &Path) -> bool { // The .so is not necessarily the extension, it might be libLLVM.so.18.1 path.to_str().map_or(false, |path| path.contains(".so")) } + +/// Checks whether the CI rustc is available for the given target triple. +pub(crate) fn is_download_ci_available(target_triple: &str, llvm_assertions: bool) -> bool { + // All tier 1 targets and tier 2 targets with host tools. 
+ const SUPPORTED_PLATFORMS: &[&str] = &[ + "aarch64-apple-darwin", + "aarch64-pc-windows-msvc", + "aarch64-unknown-linux-gnu", + "aarch64-unknown-linux-musl", + "arm-unknown-linux-gnueabi", + "arm-unknown-linux-gnueabihf", + "armv7-unknown-linux-gnueabihf", + "i686-pc-windows-gnu", + "i686-pc-windows-msvc", + "i686-unknown-linux-gnu", + "loongarch64-unknown-linux-gnu", + "powerpc-unknown-linux-gnu", + "powerpc64-unknown-linux-gnu", + "powerpc64le-unknown-linux-gnu", + "riscv64gc-unknown-linux-gnu", + "s390x-unknown-linux-gnu", + "x86_64-apple-darwin", + "x86_64-pc-windows-gnu", + "x86_64-pc-windows-msvc", + "x86_64-unknown-freebsd", + "x86_64-unknown-illumos", + "x86_64-unknown-linux-gnu", + "x86_64-unknown-linux-musl", + "x86_64-unknown-netbsd", + ]; + + const SUPPORTED_PLATFORMS_WITH_ASSERTIONS: &[&str] = + &["x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"]; + + if llvm_assertions { + SUPPORTED_PLATFORMS_WITH_ASSERTIONS.contains(&target_triple) + } else { + SUPPORTED_PLATFORMS.contains(&target_triple) + } +} diff --git a/src/ci/docker/host-x86_64/mingw-check/Dockerfile b/src/ci/docker/host-x86_64/mingw-check/Dockerfile index 571378774be..467ca1dac67 100644 --- a/src/ci/docker/host-x86_64/mingw-check/Dockerfile +++ b/src/ci/docker/host-x86_64/mingw-check/Dockerfile @@ -46,7 +46,20 @@ ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1 # Check library crates on all tier 1 targets. # We disable optimized compiler built-ins because that requires a C toolchain for the target. # We also skip the x86_64-unknown-linux-gnu target as it is well-tested by other jobs. -ENV SCRIPT python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \ +ENV SCRIPT \ + # `core::builder::tests::ci_rustc_if_unchanged_logic` bootstrap test covers the `rust.download-rustc=if-unchanged` logic. + # Here we are adding a dummy commit on compiler and running that test to ensure when there is a change on the compiler, + # we never download ci rustc with `rust.download-rustc=if-unchanged` option. + echo \"\" >> ../compiler/rustc/src/main.rs && \ + git config --global user.email \"dummy@dummy.com\" && \ + git config --global user.name \"dummy\" && \ + git add ../compiler/rustc/src/main.rs && \ + git commit -m \"test commit for rust.download-rustc=if-unchanged logic\" && \ + DISABLE_CI_RUSTC_IF_INCOMPATIBLE=0 python3 ../x.py test bootstrap -- core::builder::tests::ci_rustc_if_unchanged_logic && \ + # Revert the dummy commit + git reset --hard HEAD~1 && \ + + python3 ../x.py check --stage 0 --set build.optimized-compiler-builtins=false core alloc std --target=aarch64-unknown-linux-gnu,i686-pc-windows-msvc,i686-unknown-linux-gnu,x86_64-apple-darwin,x86_64-pc-windows-gnu,x86_64-pc-windows-msvc && \ /scripts/check-default-config-profiles.sh && \ python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \ python3 ../x.py clippy bootstrap -Dwarnings && \ diff --git a/src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile b/src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile index ba3e8bdb687..0cae83a85b3 100644 --- a/src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-fuchsia/Dockerfile @@ -58,6 +58,9 @@ RUN mkdir -p $RUST_INSTALL_DIR/etc # Fuchsia only supports LLVM. 
ENV CODEGEN_BACKENDS llvm +# download-rustc is not allowed for `x install` +ENV NO_DOWNLOAD_CI_RUSTC 1 + ENV RUST_CONFIGURE_ARGS \ --prefix=$RUST_INSTALL_DIR \ --sysconfdir=etc \ @@ -70,6 +73,7 @@ ENV RUST_CONFIGURE_ARGS \ --set target.x86_64-unknown-fuchsia.ar=/usr/local/bin/llvm-ar \ --set target.x86_64-unknown-fuchsia.ranlib=/usr/local/bin/llvm-ranlib \ --set target.x86_64-unknown-fuchsia.linker=/usr/local/bin/ld.lld + ENV SCRIPT \ python3 ../x.py install --target $TARGETS compiler/rustc library/std clippy && \ bash ../src/ci/docker/host-x86_64/x86_64-fuchsia/build-fuchsia.sh diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile index 145f41f21e1..17fc1a57492 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-tools/Dockerfile @@ -84,6 +84,7 @@ ENV RUST_CONFIGURE_ARGS \ --enable-new-symbol-mangling ENV HOST_TARGET x86_64-unknown-linux-gnu +ENV FORCE_CI_RUSTC 1 COPY host-x86_64/dist-x86_64-linux/shared.sh /scripts/ COPY host-x86_64/dist-x86_64-linux/build-gccjit.sh /scripts/ diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index fad4b5af095..28487bce482 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -343,6 +343,7 @@ docker \ --env PR_CI_JOB \ --env OBJDIR_ON_HOST="$objdir" \ --env CODEGEN_BACKENDS \ + --env DISABLE_CI_RUSTC_IF_INCOMPATIBLE="$DISABLE_CI_RUSTC_IF_INCOMPATIBLE" \ --init \ --rm \ rust-ci \ diff --git a/src/ci/docker/scripts/rfl-build.sh b/src/ci/docker/scripts/rfl-build.sh index 8011e07e92e..27dbfc6040c 100755 --- a/src/ci/docker/scripts/rfl-build.sh +++ b/src/ci/docker/scripts/rfl-build.sh @@ -2,7 +2,7 @@ set -euo pipefail -LINUX_VERSION=4c7864e81d8bbd51036dacf92fb0a400e13aaeee +LINUX_VERSION=v6.12-rc2 # Build rustc, rustdoc, cargo, clippy-driver and rustfmt ../x.py build --stage 2 library rustdoc clippy rustfmt diff --git a/src/ci/github-actions/jobs.yml b/src/ci/github-actions/jobs.yml index 6379f1ade1c..4bbebbc4697 100644 --- a/src/ci/github-actions/jobs.yml +++ b/src/ci/github-actions/jobs.yml @@ -85,6 +85,9 @@ envs: # it in each job definition. pr: - image: mingw-check + env: + # We are adding (temporarily) a dummy commit on the compiler + READ_ONLY_SRC: "0" <<: *job-linux-4c - image: mingw-check-tidy continue_on_error: true @@ -207,6 +210,8 @@ auto: <<: *job-linux-8c - image: mingw-check + env: + READ_ONLY_SRC: 0 <<: *job-linux-4c - image: test-various diff --git a/src/ci/run.sh b/src/ci/run.sh index c8201d9bcfd..1ce54f9ecb3 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -52,6 +52,13 @@ if [ "$CI" != "" ]; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set change-id=99999999" fi +# If the runner uses an incompatible option and `FORCE_CI_RUSTC` is not defined, +# switch to in-tree rustc. +if [ "$FORCE_CI_RUSTC" == "" ]; then + echo "debug: \`DISABLE_CI_RUSTC_IF_INCOMPATIBLE\` configured." + DISABLE_CI_RUSTC_IF_INCOMPATIBLE=1 +fi + if ! isCI || isCiBranch auto || isCiBranch beta || isCiBranch try || isCiBranch try-perf || \ isCiBranch automation/bors/try; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set build.print-step-timings --enable-verbose-tests" @@ -169,10 +176,16 @@ else if [ "$NO_DOWNLOAD_CI_LLVM" = "" ]; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set llvm.download-ci-llvm=if-unchanged" else + # CI rustc requires CI LLVM to be enabled (see https://github.com/rust-lang/rust/issues/123586).
+ NO_DOWNLOAD_CI_RUSTC=1 # When building for CI we want to use the static C++ Standard library # included with LLVM, since a dynamic libstdcpp may not be available. RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set llvm.static-libstdcpp" fi + + if [ "$NO_DOWNLOAD_CI_RUSTC" = "" ]; then + RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set rust.download-rustc=if-unchanged" + fi fi if [ "$ENABLE_GCC_CODEGEN" = "1" ]; then diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index e7ddd4b73b4..b68b7295096 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -845,6 +845,7 @@ impl<'src> Classifier<'src> { // Number literals. LiteralKind::Float { .. } | LiteralKind::Int { .. } => Class::Number, }, + TokenKind::GuardedStrPrefix => return no_highlight(sink), TokenKind::Ident | TokenKind::RawIdent if lookahead == Some(TokenKind::Bang) => { self.in_macro = true; sink(Highlight::EnterSpan { class: Class::Macro(self.new_span(before, text)) }); diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index 6c5c58754a8..5dacabd031e 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -35,8 +35,7 @@ use std::str::{self, CharIndices}; use std::sync::OnceLock; use pulldown_cmark::{ - BrokenLink, BrokenLinkCallback, CodeBlockKind, CowStr, Event, LinkType, OffsetIter, Options, - Parser, Tag, TagEnd, html, + BrokenLink, CodeBlockKind, CowStr, Event, LinkType, Options, Parser, Tag, TagEnd, html, }; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_errors::{Diag, DiagMessage}; @@ -1686,7 +1685,6 @@ pub(crate) fn html_text_from_events<'a>( pub(crate) struct MarkdownLink { pub kind: LinkType, pub link: String, - pub display_text: Option<String>, pub range: MarkdownLinkRange, } @@ -1848,23 +1846,9 @@ pub(crate) fn markdown_links<'md, R>( LinkType::Autolink | LinkType::Email => unreachable!(), }; - let display_text = if matches!( - link_type, - LinkType::Inline - | LinkType::ReferenceUnknown - | LinkType::Reference - | LinkType::Shortcut - | LinkType::ShortcutUnknown - ) { - collect_link_data(&mut event_iter) - } else { - None - }; - if let Some(link) = preprocess_link(MarkdownLink { kind: link_type, link: dest_url.into_string(), - display_text, range, }) { links.push(link); @@ -1877,37 +1861,6 @@ pub(crate) fn markdown_links<'md, R>( links } -/// Collects additional data of link. -fn collect_link_data<'input, F: BrokenLinkCallback<'input>>( - event_iter: &mut OffsetIter<'input, F>, -) -> Option<String> { - let mut display_text: Option<String> = None; - let mut append_text = |text: CowStr<'_>| { - if let Some(display_text) = &mut display_text { - display_text.push_str(&text); - } else { - display_text = Some(text.to_string()); - } - }; - - while let Some((event, _span)) = event_iter.next() { - match event { - Event::Text(text) => { - append_text(text); - } - Event::Code(code) => { - append_text(code); - } - Event::End(_) => { - break; - } - _ => {} - } - } - - display_text -} - #[derive(Debug)] pub(crate) struct RustCodeBlock { /// The range in the markdown that the code block occupies. 
Note that this includes the fences diff --git a/src/librustdoc/passes/collect_intra_doc_links.rs b/src/librustdoc/passes/collect_intra_doc_links.rs index 2eb0e32b831..db235786cf4 100644 --- a/src/librustdoc/passes/collect_intra_doc_links.rs +++ b/src/librustdoc/passes/collect_intra_doc_links.rs @@ -1040,21 +1040,6 @@ impl LinkCollector<'_, '_> { false, )?; - if ori_link.display_text.is_some() { - self.resolve_display_text( - path_str, - ResolutionInfo { - item_id, - module_id, - dis: disambiguator, - path_str: ori_link.display_text.clone()?.into_boxed_str(), - extra_fragment: extra_fragment.clone(), - }, - &ori_link, - &diag_info, - ); - } - // Check for a primitive which might conflict with a module // Report the ambiguity and require that the user specify which one they meant. // FIXME: could there ever be a primitive not in the type namespace? @@ -1088,7 +1073,7 @@ impl LinkCollector<'_, '_> { // valid omission. See https://github.com/rust-lang/rust/pull/80660#discussion_r551585677 // for discussion on the matter. let kind = self.cx.tcx.def_kind(id); - self.verify_disambiguator(path_str, kind, id, disambiguator, item, &diag_info)?; + self.verify_disambiguator(path_str, kind, id, disambiguator, &diag_info)?; } else { match disambiguator { Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {} @@ -1117,7 +1102,6 @@ impl LinkCollector<'_, '_> { kind_for_dis, id_for_dis, disambiguator, - item, &diag_info, )?; @@ -1138,7 +1122,6 @@ impl LinkCollector<'_, '_> { kind: DefKind, id: DefId, disambiguator: Option<Disambiguator>, - item: &Item, diag_info: &DiagnosticInfo<'_>, ) -> Option<()> { debug!("intra-doc link to {path_str} resolved to {:?}", (kind, id)); @@ -1165,7 +1148,7 @@ impl LinkCollector<'_, '_> { // item can be non-local e.g. when using `#[rustc_doc_primitive = "pointer"]` if let Some((src_id, dst_id)) = id.as_local().and_then(|dst_id| { - item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id)) + diag_info.item.item_id.expect_def_id().as_local().map(|src_id| (src_id, dst_id)) }) { if self.cx.tcx.effective_visibilities(()).is_exported(src_id) && !self.cx.tcx.effective_visibilities(()).is_exported(dst_id) @@ -1398,58 +1381,6 @@ impl LinkCollector<'_, '_> { } } } - - /// Resolve display text if the provided link has separated parts of links. - /// - /// For example: - /// Inline link `[display_text](dest_link)` and reference link `[display_text][reference_link]` has - /// separated parts of links. - fn resolve_display_text( - &mut self, - explicit_link: &Box<str>, - display_res_info: ResolutionInfo, - ori_link: &MarkdownLink, - diag_info: &DiagnosticInfo<'_>, - ) { - // Check if explicit resolution's path is same as resolution of original link's display text path, see - // tests/rustdoc-ui/lint/redundant_explicit_links.rs for more cases. - // - // To avoid disambiguator from panicking, we check if display text path is possible to be disambiguated - // into explicit path. - if !matches!( - ori_link.kind, - LinkType::Inline | LinkType::Reference | LinkType::ReferenceUnknown - ) { - return; - } - - // Algorithm to check if display text could possibly be the explicit link: - // - // Consider 2 links which are display text and explicit link, pick the shorter - // one as symbol and longer one as full qualified path, and tries to match symbol - // to the full qualified path's last symbol. - // - // Otherwise, check if 2 links are same, if so, skip the resolve process. - // - // Notice that this algorithm is passive, might possibly miss actual redundant cases. 
- let explicit_link = explicit_link.to_string(); - let display_text = ori_link.display_text.as_ref().unwrap(); - - if display_text.len() == explicit_link.len() { - // Whether they are same or not, skip the resolve process. - return; - } - - if explicit_link.ends_with(&display_text[..]) || display_text.ends_with(&explicit_link[..]) - { - self.resolve_with_disambiguator_cached( - display_res_info, - diag_info.clone(), // this struct should really be Copy, but Range is not :( - false, - true, - ); - } - } } /// Get the section of a link between the backticks, diff --git a/src/tools/cargo b/src/tools/cargo -Subproject ad074abe3a18ce8444c06f962ceecfd056acfc7 +Subproject 15fbd2f607d4defc87053b8b76bf5038f2483cf diff --git a/src/tools/compiletest/src/command-list.rs b/src/tools/compiletest/src/command-list.rs index bcdd4a69b66..53db55d0c10 100644 --- a/src/tools/compiletest/src/command-list.rs +++ b/src/tools/compiletest/src/command-list.rs @@ -42,6 +42,8 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "ignore-cdb", "ignore-compare-mode-next-solver", "ignore-compare-mode-polonius", + "ignore-coverage-map", + "ignore-coverage-run", "ignore-cross-compile", "ignore-debug", "ignore-eabi", @@ -64,8 +66,6 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "ignore-loongarch64", "ignore-macabi", "ignore-macos", - "ignore-mode-coverage-map", - "ignore-mode-coverage-run", "ignore-msp430", "ignore-msvc", "ignore-musl", @@ -129,7 +129,7 @@ const KNOWN_DIRECTIVE_NAMES: &[&str] = &[ "needs-git-hash", "needs-llvm-components", "needs-llvm-zstd", - "needs-profiler-support", + "needs-profiler-runtime", "needs-relocation-model-pic", "needs-run-enabled", "needs-rust-lld", diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs index 17ec6ea4301..a5418ad8384 100644 --- a/src/tools/compiletest/src/common.rs +++ b/src/tools/compiletest/src/common.rs @@ -385,8 +385,8 @@ pub struct Config { pub git_merge_commit_email: String, /// True if the profiler runtime is enabled for this target. - /// Used by the "needs-profiler-support" header in test files. - pub profiler_support: bool, + /// Used by the "needs-profiler-runtime" directive in test files. + pub profiler_runtime: bool, } impl Config { diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs index 83a10c56208..bd0ed6321bc 100644 --- a/src/tools/compiletest/src/header.rs +++ b/src/tools/compiletest/src/header.rs @@ -5,9 +5,7 @@ use std::io::BufReader; use std::io::prelude::*; use std::path::{Path, PathBuf}; use std::process::Command; -use std::sync::OnceLock; -use regex::Regex; use tracing::*; use crate::common::{Config, Debugger, FailMode, Mode, PassMode}; @@ -797,7 +795,6 @@ struct HeaderLine<'ln> { pub(crate) struct CheckDirectiveResult<'ln> { is_known_directive: bool, - directive_name: &'ln str, trailing_directive: Option<&'ln str>, } @@ -832,11 +829,7 @@ pub(crate) fn check_directive<'a>( } .then_some(trailing); - CheckDirectiveResult { - is_known_directive: is_known(&directive_name), - directive_name: directive_ln, - trailing_directive, - } + CheckDirectiveResult { is_known_directive: is_known(&directive_name), trailing_directive } } fn iter_header( @@ -851,16 +844,17 @@ fn iter_header( return; } - // Coverage tests in coverage-run mode always have these extra directives, - // without needing to specify them manually in every test file. - // (Some of the comments below have been copied over from the old - // `tests/run-make/coverage-reports/Makefile`, which no longer exists.) 
+ // Coverage tests in coverage-run mode always have these extra directives, without needing to + // specify them manually in every test file. (Some of the comments below have been copied over + // from the old `tests/run-make/coverage-reports/Makefile`, which no longer exists.) + // + // FIXME(jieyouxu): I feel like there's a better way to do this, leaving for later. if mode == Mode::CoverageRun { let extra_directives: &[&str] = &[ - "needs-profiler-support", - // FIXME(pietroalbini): this test currently does not work on cross-compiled - // targets because remote-test is not capable of sending back the *.profraw - // files generated by the LLVM instrumentation. + "needs-profiler-runtime", + // FIXME(pietroalbini): this test currently does not work on cross-compiled targets + // because remote-test is not capable of sending back the *.profraw files generated by + // the LLVM instrumentation. "ignore-cross-compile", ]; // Process the extra implied directives, with a dummy line number of 0. @@ -869,17 +863,13 @@ fn iter_header( } } + // NOTE(jieyouxu): once we get rid of `Makefile`s we can unconditionally check for `//@`. let comment = if testfile.extension().is_some_and(|e| e == "rs") { "//@" } else { "#" }; let mut rdr = BufReader::with_capacity(1024, rdr); let mut ln = String::new(); let mut line_number = 0; - // Match on error annotations like `//~ERROR`. - static REVISION_MAGIC_COMMENT_RE: OnceLock<Regex> = OnceLock::new(); - let revision_magic_comment_re = - REVISION_MAGIC_COMMENT_RE.get_or_init(|| Regex::new("//(\\[.*\\])?~.*").unwrap()); - loop { line_number += 1; ln.clear(); @@ -892,85 +882,62 @@ fn iter_header( // with a warm page cache. Maybe with a cold one. let original_line = &ln; let ln = ln.trim(); + + // Assume that any directives will be found before the first module or function. This + // doesn't seem to be an optimization with a warm page cache. Maybe with a cold one. + // FIXME(jieyouxu): this will cause `//@` directives in the rest of the test file to + // not be checked. if ln.starts_with("fn") || ln.starts_with("mod") { return; + } - // First try to accept `ui_test` style comments (`//@`) - } else if let Some((header_revision, non_revisioned_directive_line)) = - line_directive(comment, ln) - { - // Perform unknown directive check on Rust files. - if testfile.extension().map(|e| e == "rs").unwrap_or(false) { - let directive_ln = non_revisioned_directive_line.trim(); - - let CheckDirectiveResult { is_known_directive, trailing_directive, .. } = - check_directive(directive_ln, mode, ln); - - if !is_known_directive { - *poisoned = true; - - eprintln!( - "error: detected unknown compiletest test directive `{}` in {}:{}", - directive_ln, - testfile.display(), - line_number, - ); - - return; - } + let Some((header_revision, non_revisioned_directive_line)) = line_directive(comment, ln) + else { + continue; + }; - if let Some(trailing_directive) = &trailing_directive { - *poisoned = true; + // Perform unknown directive check on Rust files. 
+ if testfile.extension().map(|e| e == "rs").unwrap_or(false) { + let directive_ln = non_revisioned_directive_line.trim(); - eprintln!( - "error: detected trailing compiletest test directive `{}` in {}:{}\n \ - help: put the trailing directive in it's own line: `//@ {}`", - trailing_directive, - testfile.display(), - line_number, - trailing_directive, - ); + let CheckDirectiveResult { is_known_directive, trailing_directive } = + check_directive(directive_ln, mode, ln); - return; - } - } + if !is_known_directive { + *poisoned = true; - it(HeaderLine { - line_number, - original_line, - header_revision, - directive: non_revisioned_directive_line, - }); - // Then we try to check for legacy-style candidates, which are not the magic ~ERROR family - // error annotations. - } else if !revision_magic_comment_re.is_match(ln) { - let Some((_, rest)) = line_directive("//", ln) else { - continue; - }; + eprintln!( + "error: detected unknown compiletest test directive `{}` in {}:{}", + directive_ln, + testfile.display(), + line_number, + ); - if rest.trim_start().starts_with(':') { - // This is likely a markdown link: - // `[link_name]: https://example.org` - continue; + return; } - let rest = rest.trim_start(); - - let CheckDirectiveResult { is_known_directive, directive_name, .. } = - check_directive(rest, mode, ln); - - if is_known_directive { + if let Some(trailing_directive) = &trailing_directive { *poisoned = true; + eprintln!( - "error: detected legacy-style directive {} in compiletest test: {}:{}, please use `ui_test`-style directives `//@` instead: {:#?}", - directive_name, + "error: detected trailing compiletest test directive `{}` in {}:{}\n \ + help: put the trailing directive in it's own line: `//@ {}`", + trailing_directive, testfile.display(), line_number, - line_directive("//", ln), + trailing_directive, ); + return; } } + + it(HeaderLine { + line_number, + original_line, + header_revision, + directive: non_revisioned_directive_line, + }); } } diff --git a/src/tools/compiletest/src/header/cfg.rs b/src/tools/compiletest/src/header/cfg.rs index f3835637a1e..6e351aa27b9 100644 --- a/src/tools/compiletest/src/header/cfg.rs +++ b/src/tools/compiletest/src/header/cfg.rs @@ -217,13 +217,10 @@ pub(super) fn parse_cfg_name_directive<'a>( } // Coverage tests run the same test file in multiple modes. // If a particular test should not be run in one of the modes, ignore it - // with "ignore-mode-coverage-map" or "ignore-mode-coverage-run". + // with "ignore-coverage-map" or "ignore-coverage-run". condition! 
{ - name: format!("mode-{}", config.mode.to_str()), - allowed_names: ContainsPrefixed { - prefix: "mode-", - inner: ["coverage-run", "coverage-map"], - }, + name: config.mode.to_str(), + allowed_names: ["coverage-map", "coverage-run"], message: "when the test mode is {name}", } diff --git a/src/tools/compiletest/src/header/needs.rs b/src/tools/compiletest/src/header/needs.rs index f5dd722ed37..a744fb61b9c 100644 --- a/src/tools/compiletest/src/header/needs.rs +++ b/src/tools/compiletest/src/header/needs.rs @@ -100,9 +100,9 @@ pub(super) fn handle_needs( ignore_reason: "ignored on targets without unwinding support", }, Need { - name: "needs-profiler-support", - condition: cache.profiler_support, - ignore_reason: "ignored when profiler support is disabled", + name: "needs-profiler-runtime", + condition: config.profiler_runtime, + ignore_reason: "ignored when the profiler runtime is not available", }, Need { name: "needs-force-clang-based-tests", @@ -220,7 +220,6 @@ pub(super) struct CachedNeedsConditions { sanitizer_memtag: bool, sanitizer_shadow_call_stack: bool, sanitizer_safestack: bool, - profiler_support: bool, xray: bool, rust_lld: bool, dlltool: bool, @@ -247,7 +246,6 @@ impl CachedNeedsConditions { sanitizer_memtag: sanitizers.contains(&Sanitizer::Memtag), sanitizer_shadow_call_stack: sanitizers.contains(&Sanitizer::ShadowCallStack), sanitizer_safestack: sanitizers.contains(&Sanitizer::Safestack), - profiler_support: config.profiler_support, xray: config.target_cfg().xray, // For tests using the `needs-rust-lld` directive (e.g. for `-Clink-self-contained=+linker`), diff --git a/src/tools/compiletest/src/header/test-auxillary/known_legacy_directive.rs b/src/tools/compiletest/src/header/test-auxillary/known_legacy_directive.rs deleted file mode 100644 index 108ca432e13..00000000000 --- a/src/tools/compiletest/src/header/test-auxillary/known_legacy_directive.rs +++ /dev/null @@ -1 +0,0 @@ -// ignore-wasm diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs index 76a8b129198..10ec2a1806f 100644 --- a/src/tools/compiletest/src/header/tests.rs +++ b/src/tools/compiletest/src/header/tests.rs @@ -69,7 +69,7 @@ struct ConfigBuilder { llvm_version: Option<String>, git_hash: bool, system_llvm: bool, - profiler_support: bool, + profiler_runtime: bool, } impl ConfigBuilder { @@ -113,8 +113,8 @@ impl ConfigBuilder { self } - fn profiler_support(&mut self, s: bool) -> &mut Self { - self.profiler_support = s; + fn profiler_runtime(&mut self, is_available: bool) -> &mut Self { + self.profiler_runtime = is_available; self } @@ -162,8 +162,8 @@ impl ConfigBuilder { if self.system_llvm { args.push("--system-llvm".to_owned()); } - if self.profiler_support { - args.push("--profiler-support".to_owned()); + if self.profiler_runtime { + args.push("--profiler-runtime".to_owned()); } args.push("--rustc-path".to_string()); @@ -368,12 +368,12 @@ fn sanitizers() { } #[test] -fn profiler_support() { - let config: Config = cfg().profiler_support(false).build(); - assert!(check_ignore(&config, "//@ needs-profiler-support")); +fn profiler_runtime() { + let config: Config = cfg().profiler_runtime(false).build(); + assert!(check_ignore(&config, "//@ needs-profiler-runtime")); - let config: Config = cfg().profiler_support(true).build(); - assert!(!check_ignore(&config, "//@ needs-profiler-support")); + let config: Config = cfg().profiler_runtime(true).build(); + assert!(!check_ignore(&config, "//@ needs-profiler-runtime")); } #[test] @@ -572,17 +572,15 @@ fn families() { } 
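(Editor's sketch, not part of the patch.) With the renames above, a coverage-related test would declare its requirements using the new directive spellings; the file below is hypothetical and only illustrates the directive syntax:

```rust
//@ needs-profiler-runtime
//@ ignore-coverage-map

fn main() {
    // Hypothetical test body; only the `//@` header lines matter to compiletest.
    println!("covered");
}
```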
#[test] -fn ignore_mode() { - for mode in ["coverage-map", "coverage-run"] { - // Indicate profiler support so that "coverage-run" tests aren't skipped. - let config: Config = cfg().mode(mode).profiler_support(true).build(); - let other = if mode == "coverage-run" { "coverage-map" } else { "coverage-run" }; +fn ignore_coverage() { + // Indicate profiler runtime availability so that "coverage-run" tests aren't skipped. + let config = cfg().mode("coverage-map").profiler_runtime(true).build(); + assert!(check_ignore(&config, "//@ ignore-coverage-map")); + assert!(!check_ignore(&config, "//@ ignore-coverage-run")); - assert_ne!(mode, other); - - assert!(check_ignore(&config, &format!("//@ ignore-mode-{mode}"))); - assert!(!check_ignore(&config, &format!("//@ ignore-mode-{other}"))); - } + let config = cfg().mode("coverage-run").profiler_runtime(true).build(); + assert!(!check_ignore(&config, "//@ ignore-coverage-map")); + assert!(check_ignore(&config, "//@ ignore-coverage-run")); } #[test] @@ -619,17 +617,6 @@ fn test_unknown_directive_check() { } #[test] -fn test_known_legacy_directive_check() { - let mut poisoned = false; - run_path( - &mut poisoned, - Path::new("a.rs"), - include_bytes!("./test-auxillary/known_legacy_directive.rs"), - ); - assert!(poisoned); -} - -#[test] fn test_known_directive_check_no_error() { let mut poisoned = false; run_path( diff --git a/src/tools/compiletest/src/lib.rs b/src/tools/compiletest/src/lib.rs index d9f64cddf5d..98375a21b04 100644 --- a/src/tools/compiletest/src/lib.rs +++ b/src/tools/compiletest/src/lib.rs @@ -153,7 +153,7 @@ pub fn parse_config(args: Vec<String>) -> Config { .optflag("", "force-rerun", "rerun tests even if the inputs are unchanged") .optflag("", "only-modified", "only run tests that result been modified") .optflag("", "nocapture", "") - .optflag("", "profiler-support", "is the profiler runtime enabled for this target") + .optflag("", "profiler-runtime", "is the profiler runtime enabled for this target") .optflag("h", "help", "show this message") .reqopt("", "channel", "current Rust channel", "CHANNEL") .optflag( @@ -355,7 +355,7 @@ pub fn parse_config(args: Vec<String>) -> Config { nightly_branch: matches.opt_str("nightly-branch").unwrap(), git_merge_commit_email: matches.opt_str("git-merge-commit-email").unwrap(), - profiler_support: matches.opt_present("profiler-support"), + profiler_runtime: matches.opt_present("profiler-runtime"), } } diff --git a/src/tools/compiletest/src/main.rs b/src/tools/compiletest/src/main.rs index 9f3eef3776d..b0f87593f95 100644 --- a/src/tools/compiletest/src/main.rs +++ b/src/tools/compiletest/src/main.rs @@ -22,7 +22,7 @@ fn main() { eprintln!("warning: `tidy` is not installed; diffs will not be generated"); } - if !config.profiler_support && config.mode == Mode::CoverageRun { + if !config.profiler_runtime && config.mode == Mode::CoverageRun { let actioned = if config.bless { "blessed" } else { "checked" }; eprintln!( r#" diff --git a/src/tools/rust-analyzer/.github/workflows/release.yaml b/src/tools/rust-analyzer/.github/workflows/release.yaml index e11d6e15d10..39ac652de0f 100644 --- a/src/tools/rust-analyzer/.github/workflows/release.yaml +++ b/src/tools/rust-analyzer/.github/workflows/release.yaml @@ -16,7 +16,7 @@ env: RUSTFLAGS: "-D warnings -W unreachable-pub" RUSTUP_MAX_RETRIES: 10 FETCH_DEPTH: 0 # pull in the tags for the version string - MACOSX_DEPLOYMENT_TARGET: 10.15 + MACOSX_DEPLOYMENT_TARGET: 13.0 CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc 
CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc @@ -43,10 +43,10 @@ jobs: - os: ubuntu-20.04 target: arm-unknown-linux-gnueabihf code-target: linux-armhf - - os: macos-12 + - os: macos-13 target: x86_64-apple-darwin code-target: darwin-x64 - - os: macos-12 + - os: macos-13 target: aarch64-apple-darwin code-target: darwin-arm64 diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index dc820fcb28d..7891edc2447 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -145,9 +145,12 @@ dependencies = [ [[package]] name = "cc" -version = "1.1.10" +version = "1.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9e8aabfac534be767c909e0690571677d49f41bd8465ae876fe043d52ba5292" +checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0" +dependencies = [ + "shlex", +] [[package]] name = "cfg" @@ -1853,6 +1856,12 @@ dependencies = [ ] [[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] name = "smallvec" version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index b4587a37961..0b3d6e2a1ef 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-srv/proc-macro-test/imp"] resolver = "2" [workspace.package] -rust-version = "1.80" +rust-version = "1.81" edition = "2021" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index e9daaf7de3c..c2d40086056 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -49,6 +49,10 @@ impl CfgOptions { cfg.fold(&|atom| self.enabled.contains(atom)) } + pub fn check_atom(&self, cfg: &CfgAtom) -> bool { + self.enabled.contains(cfg) + } + pub fn insert_atom(&mut self, key: Symbol) { self.enabled.insert(CfgAtom::Flag(key)); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index 85fb90fdfb6..d568f6faa72 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -36,7 +36,7 @@ macro_rules! 
f { } struct#0:1@58..64#1# MyTraitMap2#0:2@31..42#0# {#0:1@72..73#1# - map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..92#1#std#0:1@93..96#1#::#0:1@96..97#1#collections#0:1@98..109#1#::#0:1@109..110#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1# + map#0:1@86..89#1#:#0:1@89..90#1# #0:1@89..90#1#::#0:1@91..93#1#std#0:1@93..96#1#::#0:1@96..98#1#collections#0:1@98..109#1#::#0:1@109..111#1#HashSet#0:1@111..118#1#<#0:1@118..119#1#(#0:1@119..120#1#)#0:1@120..121#1#>#0:1@121..122#1#,#0:1@122..123#1# }#0:1@132..133#1# "#]], ); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index e09ef4f205d..7f1d19719da 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -6,7 +6,7 @@ use std::{cmp::Ordering, iter, mem, ops::Not}; use base_db::{CrateId, CrateOrigin, Dependency, LangCrateOrigin}; -use cfg::{CfgExpr, CfgOptions}; +use cfg::{CfgAtom, CfgExpr, CfgOptions}; use either::Either; use hir_expand::{ attrs::{Attr, AttrId}, @@ -1324,13 +1324,21 @@ impl DefCollector<'_> { }; // Skip #[test]/#[bench] expansion, which would merely result in more memory usage - // due to duplicating functions into macro expansions + // due to duplicating functions into macro expansions, but only if `cfg(test)` is active, + // otherwise they are expanded to nothing and this can impact e.g. diagnostics (due to things + // being cfg'ed out). + // Ideally we will just expand them to nothing here. But we are only collecting macro calls, + // not expanding them, so we have no way to do that. if matches!( def.kind, MacroDefKind::BuiltInAttr(_, expander) if expander.is_test() || expander.is_bench() ) { - return recollect_without(self); + let test_is_active = + self.cfg_options.check_atom(&CfgAtom::Flag(sym::test.clone())); + if test_is_active { + return recollect_without(self); + } } let call_id = || { diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs index b9afc666f75..2a8691b461c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin/attr_macro.rs @@ -4,6 +4,8 @@ use span::{MacroCallId, Span}; use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind}; +use super::quote; + macro_rules! register_builtin { ($(($name:ident, $variant:ident) => $expand:ident),* ) => { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -52,15 +54,15 @@ impl BuiltinAttrExpander { } register_builtin! { - (bench, Bench) => dummy_attr_expand, + (bench, Bench) => dummy_gate_test_expand, (cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand, (derive, Derive) => derive_expand, // derive const is equivalent to derive for our proposes. 
(derive_const, DeriveConst) => derive_expand, (global_allocator, GlobalAllocator) => dummy_attr_expand, - (test, Test) => dummy_attr_expand, - (test_case, TestCase) => dummy_attr_expand + (test, Test) => dummy_gate_test_expand, + (test_case, TestCase) => dummy_gate_test_expand } pub fn find_builtin_attr(ident: &name::Name) -> Option<BuiltinAttrExpander> { @@ -76,6 +78,19 @@ fn dummy_attr_expand( ExpandResult::ok(tt.clone()) } +fn dummy_gate_test_expand( + _db: &dyn ExpandDatabase, + _id: MacroCallId, + tt: &tt::Subtree, + span: Span, +) -> ExpandResult<tt::Subtree> { + let result = quote::quote! { span=> + #[cfg(test)] + #tt + }; + ExpandResult::ok(result) +} + /// We generate a very specific expansion here, as we do not actually expand the `#[derive]` attribute /// itself in name res, but we do want to expand it to something for the IDE layer, so that the input /// derive attributes can be downmapped, and resolved as proper paths. diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 584f9631e34..484a8662eb1 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -16,7 +16,10 @@ use crate::{ cfg_process, declarative::DeclarativeMacroExpander, fixup::{self, SyntaxFixupUndoInfo}, - hygiene::{span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt}, + hygiene::{ + span_with_call_site_ctxt, span_with_def_site_ctxt, span_with_mixed_site_ctxt, + SyntaxContextExt as _, + }, proc_macro::ProcMacros, span_map::{RealSpanMap, SpanMap, SpanMapRef}, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, @@ -300,14 +303,16 @@ pub fn expand_speculative( token_tree_to_syntax_node(&speculative_expansion.value, expand_to, loc.def.edition); let syntax_node = node.syntax_node(); - let token = rev_tmap + let (token, _) = rev_tmap .ranges_with_span(span_map.span_for_range(token_to_map.text_range())) - .filter_map(|range| syntax_node.covering_element(range).into_token()) - .min_by_key(|t| { - // prefer tokens of the same kind and text + .filter_map(|(range, ctx)| syntax_node.covering_element(range).into_token().zip(Some(ctx))) + .min_by_key(|(t, ctx)| { + // prefer tokens of the same kind and text, as well as non opaque marked ones // Note the inversion of the score here, as we want to prefer the first token in case // of all tokens having the same score - (t.kind() != token_to_map.kind()) as u8 + 2 * ((t.text() != token_to_map.text()) as u8) + ctx.is_opaque(db) as u8 + + 2 * (t.kind() != token_to_map.kind()) as u8 + + 4 * ((t.text() != token_to_map.text()) as u8) })?; Some((node.syntax_node(), token)) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs index cc02332207d..5e1448f7950 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs @@ -151,6 +151,7 @@ pub trait SyntaxContextExt { fn remove_mark(&mut self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency); fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency); fn marks(self, db: &dyn ExpandDatabase) -> Vec<(MacroCallId, Transparency)>; + fn is_opaque(self, db: &dyn ExpandDatabase) -> bool; } impl SyntaxContextExt for SyntaxContextId { @@ -177,6 +178,9 @@ impl SyntaxContextExt for SyntaxContextId { marks.reverse(); marks } + fn is_opaque(self, db: &dyn ExpandDatabase) -> bool { + 
!self.is_root() && db.lookup_intern_syntax_context(self).outer_transparency.is_opaque() + } } // FIXME: Make this a SyntaxContextExt method once we have RPIT diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index 95380979492..56cb5fd375c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -25,6 +25,7 @@ mod prettify_macro_expansion_; use attrs::collect_attrs; use rustc_hash::FxHashMap; +use stdx::TupleExt; use triomphe::Arc; use std::hash::Hash; @@ -772,14 +773,15 @@ impl ExpansionInfo { /// Maps the passed in file range down into a macro expansion if it is the input to a macro call. /// /// Note this does a linear search through the entire backing vector of the spanmap. + // FIXME: Consider adding a reverse map to ExpansionInfo to get rid of the linear search which + // potentially results in quadratic look ups (notably this might improve semantic highlighting perf) pub fn map_range_down_exact( &self, span: Span, - ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> { - let tokens = self - .exp_map - .ranges_with_span_exact(span) - .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); + ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> { + let tokens = self.exp_map.ranges_with_span_exact(span).flat_map(move |(range, ctx)| { + self.expanded.value.covering_element(range).into_token().zip(Some(ctx)) + }); Some(InMacroFile::new(self.expanded.file_id, tokens)) } @@ -791,11 +793,10 @@ impl ExpansionInfo { pub fn map_range_down( &self, span: Span, - ) -> Option<InMacroFile<impl Iterator<Item = SyntaxToken> + '_>> { - let tokens = self - .exp_map - .ranges_with_span(span) - .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); + ) -> Option<InMacroFile<impl Iterator<Item = (SyntaxToken, SyntaxContextId)> + '_>> { + let tokens = self.exp_map.ranges_with_span(span).flat_map(move |(range, ctx)| { + self.expanded.value.covering_element(range).into_token().zip(Some(ctx)) + }); Some(InMacroFile::new(self.expanded.file_id, tokens)) } @@ -845,7 +846,8 @@ impl ExpansionInfo { self.arg.file_id, arg_map .ranges_with_span_exact(span) - .filter(|range| range.intersect(arg_range).is_some()) + .filter(|(range, _)| range.intersect(arg_range).is_some()) + .map(TupleExt::head) .collect(), ) } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index e74e3d78988..f7bacbd49b3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -382,8 +382,9 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> { } fn is_object_safe(&self, trait_id: chalk_ir::TraitId<Interner>) -> bool { + // FIXME: When cargo is updated, change to dyn_compatibility let trait_ = from_chalk_trait_id(trait_id); - crate::object_safety::object_safety(self.db, trait_).is_none() + crate::dyn_compatibility::dyn_compatibility(self.db, trait_).is_none() } fn closure_kind( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index ce5a821ea2b..5620d80adb5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -20,11 +20,11 @@ use triomphe::Arc; use crate::{ chalk_db, consteval::ConstEvalError, + 
dyn_compatibility::DynCompatibilityViolation, layout::{Layout, LayoutError}, lower::{GenericDefaults, GenericPredicates}, method_resolution::{InherentImpls, TraitImpls, TyFingerprint}, mir::{BorrowckResult, MirBody, MirLowerError}, - object_safety::ObjectSafetyViolation, Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner, PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId, }; @@ -108,8 +108,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { #[salsa::invoke(crate::layout::target_data_layout_query)] fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>; - #[salsa::invoke(crate::object_safety::object_safety_of_trait_query)] - fn object_safety_of_trait(&self, trait_: TraitId) -> Option<ObjectSafetyViolation>; + #[salsa::invoke(crate::dyn_compatibility::dyn_compatibility_of_trait_query)] + fn dyn_compatibility_of_trait(&self, trait_: TraitId) -> Option<DynCompatibilityViolation>; #[salsa::invoke(crate::lower::ty_query)] #[salsa::cycle(crate::lower::ty_recover)] @@ -280,8 +280,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { } #[test] -fn hir_database_is_object_safe() { - fn _assert_object_safe(_: &dyn HirDatabase) {} +fn hir_database_is_dyn_compatible() { + fn _assert_dyn_compatible(_: &dyn HirDatabase) {} } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 82517e69917..7f6b7e392b3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -58,7 +58,7 @@ impl fmt::Display for CaseType { let repr = match self { CaseType::LowerSnakeCase => "snake_case", CaseType::UpperSnakeCase => "UPPER_SNAKE_CASE", - CaseType::UpperCamelCase => "CamelCase", + CaseType::UpperCamelCase => "UpperCamelCase", }; repr.fmt(f) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs index cbe1af15703..aa0c9e30be1 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check/case_conv.rs @@ -111,7 +111,7 @@ mod tests { check(to_lower_snake_case, "lower_snake_case", expect![[""]]); check(to_lower_snake_case, "UPPER_SNAKE_CASE", expect![["upper_snake_case"]]); check(to_lower_snake_case, "Weird_Case", expect![["weird_case"]]); - check(to_lower_snake_case, "CamelCase", expect![["camel_case"]]); + check(to_lower_snake_case, "UpperCamelCase", expect![["upper_camel_case"]]); check(to_lower_snake_case, "lowerCamelCase", expect![["lower_camel_case"]]); check(to_lower_snake_case, "a", expect![[""]]); check(to_lower_snake_case, "abc", expect![[""]]); @@ -121,8 +121,8 @@ mod tests { #[test] fn test_to_camel_case() { - check(to_camel_case, "CamelCase", expect![[""]]); - check(to_camel_case, "CamelCase_", expect![[""]]); + check(to_camel_case, "UpperCamelCase", expect![[""]]); + check(to_camel_case, "UpperCamelCase_", expect![[""]]); check(to_camel_case, "_CamelCase", expect![[""]]); check(to_camel_case, "lowerCamelCase", expect![["LowerCamelCase"]]); check(to_camel_case, "lower_snake_case", expect![["LowerSnakeCase"]]); @@ -143,7 +143,7 @@ mod tests { check(to_upper_snake_case, "UPPER_SNAKE_CASE", expect![[""]]); check(to_upper_snake_case, 
"lower_snake_case", expect![["LOWER_SNAKE_CASE"]]); check(to_upper_snake_case, "Weird_Case", expect![["WEIRD_CASE"]]); - check(to_upper_snake_case, "CamelCase", expect![["CAMEL_CASE"]]); + check(to_upper_snake_case, "UpperCamelCase", expect![["UPPER_CAMEL_CASE"]]); check(to_upper_snake_case, "lowerCamelCase", expect![["LOWER_CAMEL_CASE"]]); check(to_upper_snake_case, "A", expect![[""]]); check(to_upper_snake_case, "ABC", expect![[""]]); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/object_safety.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs index a4c66268555..e0d1758210e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/object_safety.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility.rs @@ -1,4 +1,4 @@ -//! Compute the object-safety of a trait +//! Compute the dyn-compatibility of a trait use std::ops::ControlFlow; @@ -28,14 +28,14 @@ use crate::{ }; #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum ObjectSafetyViolation { +pub enum DynCompatibilityViolation { SizedSelf, SelfReferential, Method(FunctionId, MethodViolationCode), AssocConst(ConstId), GAT(TypeAliasId), // This doesn't exist in rustc, but added for better visualization - HasNonSafeSuperTrait(TraitId), + HasNonCompatibleSuperTrait(TraitId), } #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -50,70 +50,73 @@ pub enum MethodViolationCode { UndispatchableReceiver, } -pub fn object_safety(db: &dyn HirDatabase, trait_: TraitId) -> Option<ObjectSafetyViolation> { +pub fn dyn_compatibility( + db: &dyn HirDatabase, + trait_: TraitId, +) -> Option<DynCompatibilityViolation> { for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() { - if db.object_safety_of_trait(super_trait).is_some() { - return Some(ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait)); + if db.dyn_compatibility_of_trait(super_trait).is_some() { + return Some(DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait)); } } - db.object_safety_of_trait(trait_) + db.dyn_compatibility_of_trait(trait_) } -pub fn object_safety_with_callback<F>( +pub fn dyn_compatibility_with_callback<F>( db: &dyn HirDatabase, trait_: TraitId, cb: &mut F, ) -> ControlFlow<()> where - F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>, + F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>, { for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() { - if db.object_safety_of_trait(super_trait).is_some() { - cb(ObjectSafetyViolation::HasNonSafeSuperTrait(trait_))?; + if db.dyn_compatibility_of_trait(super_trait).is_some() { + cb(DynCompatibilityViolation::HasNonCompatibleSuperTrait(trait_))?; } } - object_safety_of_trait_with_callback(db, trait_, cb) + dyn_compatibility_of_trait_with_callback(db, trait_, cb) } -pub fn object_safety_of_trait_with_callback<F>( +pub fn dyn_compatibility_of_trait_with_callback<F>( db: &dyn HirDatabase, trait_: TraitId, cb: &mut F, ) -> ControlFlow<()> where - F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>, + F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>, { // Check whether this has a `Sized` bound if generics_require_sized_self(db, trait_.into()) { - cb(ObjectSafetyViolation::SizedSelf)?; + cb(DynCompatibilityViolation::SizedSelf)?; } // Check if there exist bounds that referencing self if predicates_reference_self(db, trait_) { - cb(ObjectSafetyViolation::SelfReferential)?; + cb(DynCompatibilityViolation::SelfReferential)?; } if bounds_reference_self(db, trait_) { - cb(ObjectSafetyViolation::SelfReferential)?; + 
cb(DynCompatibilityViolation::SelfReferential)?; } // rustc checks for non-lifetime binders here, but we don't support HRTB yet let trait_data = db.trait_data(trait_); for (_, assoc_item) in &trait_data.items { - object_safety_violation_for_assoc_item(db, trait_, *assoc_item, cb)?; + dyn_compatibility_violation_for_assoc_item(db, trait_, *assoc_item, cb)?; } ControlFlow::Continue(()) } -pub fn object_safety_of_trait_query( +pub fn dyn_compatibility_of_trait_query( db: &dyn HirDatabase, trait_: TraitId, -) -> Option<ObjectSafetyViolation> { +) -> Option<DynCompatibilityViolation> { let mut res = None; - object_safety_of_trait_with_callback(db, trait_, &mut |osv| { + dyn_compatibility_of_trait_with_callback(db, trait_, &mut |osv| { res = Some(osv); ControlFlow::Break(()) }); @@ -321,14 +324,14 @@ fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>( t.visit_with(visitor.as_dyn(), outer_binder).is_break() } -fn object_safety_violation_for_assoc_item<F>( +fn dyn_compatibility_violation_for_assoc_item<F>( db: &dyn HirDatabase, trait_: TraitId, item: AssocItemId, cb: &mut F, ) -> ControlFlow<()> where - F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>, + F: FnMut(DynCompatibilityViolation) -> ControlFlow<()>, { // Any item that has a `Self : Sized` requisite is otherwise // exempt from the regulations. @@ -337,10 +340,10 @@ where } match item { - AssocItemId::ConstId(it) => cb(ObjectSafetyViolation::AssocConst(it)), + AssocItemId::ConstId(it) => cb(DynCompatibilityViolation::AssocConst(it)), AssocItemId::FunctionId(it) => { virtual_call_violations_for_method(db, trait_, it, &mut |mvc| { - cb(ObjectSafetyViolation::Method(it, mvc)) + cb(DynCompatibilityViolation::Method(it, mvc)) }) } AssocItemId::TypeAliasId(it) => { @@ -350,7 +353,7 @@ where } else { let generic_params = db.generic_params(item.into()); if !generic_params.is_empty() { - cb(ObjectSafetyViolation::GAT(it)) + cb(DynCompatibilityViolation::GAT(it)) } else { ControlFlow::Continue(()) } @@ -469,7 +472,7 @@ fn receiver_is_dispatchable( return false; }; - // `self: Self` can't be dispatched on, but this is already considered object safe. 
+ // `self: Self` can't be dispatched on, but this is already considered dyn compatible // See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437 if sig .skip_binders() diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/object_safety/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs index c2a9117c5be..3f3e68eeb1c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/object_safety/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/dyn_compatibility/tests.rs @@ -5,29 +5,29 @@ use rustc_hash::{FxHashMap, FxHashSet}; use syntax::ToSmolStr; use test_fixture::WithFixture; -use crate::{object_safety::object_safety_with_callback, test_db::TestDB}; +use crate::{dyn_compatibility::dyn_compatibility_with_callback, test_db::TestDB}; use super::{ + DynCompatibilityViolation, MethodViolationCode::{self, *}, - ObjectSafetyViolation, }; -use ObjectSafetyViolationKind::*; +use DynCompatibilityViolationKind::*; #[allow(clippy::upper_case_acronyms)] #[derive(Debug, Clone, PartialEq, Eq, Hash)] -enum ObjectSafetyViolationKind { +enum DynCompatibilityViolationKind { SizedSelf, SelfReferential, Method(MethodViolationCode), AssocConst, GAT, - HasNonSafeSuperTrait, + HasNonCompatibleSuperTrait, } -fn check_object_safety<'a>( +fn check_dyn_compatibility<'a>( ra_fixture: &str, - expected: impl IntoIterator<Item = (&'a str, Vec<ObjectSafetyViolationKind>)>, + expected: impl IntoIterator<Item = (&'a str, Vec<DynCompatibilityViolationKind>)>, ) { let mut expected: FxHashMap<_, _> = expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect(); @@ -53,18 +53,20 @@ fn check_object_safety<'a>( continue; }; let mut osvs = FxHashSet::default(); - object_safety_with_callback(&db, trait_id, &mut |osv| { + dyn_compatibility_with_callback(&db, trait_id, &mut |osv| { osvs.insert(match osv { - ObjectSafetyViolation::SizedSelf => SizedSelf, - ObjectSafetyViolation::SelfReferential => SelfReferential, - ObjectSafetyViolation::Method(_, mvc) => Method(mvc), - ObjectSafetyViolation::AssocConst(_) => AssocConst, - ObjectSafetyViolation::GAT(_) => GAT, - ObjectSafetyViolation::HasNonSafeSuperTrait(_) => HasNonSafeSuperTrait, + DynCompatibilityViolation::SizedSelf => SizedSelf, + DynCompatibilityViolation::SelfReferential => SelfReferential, + DynCompatibilityViolation::Method(_, mvc) => Method(mvc), + DynCompatibilityViolation::AssocConst(_) => AssocConst, + DynCompatibilityViolation::GAT(_) => GAT, + DynCompatibilityViolation::HasNonCompatibleSuperTrait(_) => { + HasNonCompatibleSuperTrait + } }); ControlFlow::Continue(()) }); - assert_eq!(osvs, expected, "Object safety violations for `{name}` do not match;"); + assert_eq!(osvs, expected, "Dyn Compatibility violations for `{name}` do not match;"); } let remains: Vec<_> = expected.keys().collect(); @@ -73,7 +75,7 @@ fn check_object_safety<'a>( #[test] fn item_bounds_can_reference_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: eq pub trait Foo { @@ -88,7 +90,7 @@ pub trait Foo { #[test] fn associated_consts() { - check_object_safety( + check_dyn_compatibility( r#" trait Bar { const X: usize; @@ -100,7 +102,7 @@ trait Bar { #[test] fn bounds_reference_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: eq trait X { @@ -113,7 +115,7 @@ trait X { #[test] fn by_value_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn 
trait Bar { @@ -135,7 +137,7 @@ trait Quux { #[test] fn generic_methods() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar { @@ -157,7 +159,7 @@ trait Qax { #[test] fn mentions_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar { @@ -182,7 +184,7 @@ trait Quux { #[test] fn no_static() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Foo { @@ -195,7 +197,7 @@ trait Foo { #[test] fn sized_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar: Sized { @@ -205,7 +207,7 @@ trait Bar: Sized { [("Bar", vec![SizedSelf])], ); - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar @@ -220,7 +222,7 @@ trait Bar #[test] fn supertrait_gat() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait GatTrait { @@ -229,13 +231,13 @@ trait GatTrait { trait SuperTrait<T>: GatTrait {} "#, - [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonSafeSuperTrait])], + [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonCompatibleSuperTrait])], ); } #[test] fn supertrait_mentions_self() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Bar<T> { @@ -251,7 +253,7 @@ trait Baz : Bar<Self> { #[test] fn rustc_issue_19538() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Foo { @@ -260,13 +262,13 @@ trait Foo { trait Bar: Foo {} "#, - [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonSafeSuperTrait])], + [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonCompatibleSuperTrait])], ); } #[test] fn rustc_issue_22040() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: fmt, eq, dispatch_from_dyn use core::fmt::Debug; @@ -281,7 +283,7 @@ trait Expr: Debug + PartialEq { #[test] fn rustc_issue_102762() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: future, send, sync, dispatch_from_dyn, deref use core::pin::Pin; @@ -313,7 +315,7 @@ pub trait Fetcher: Send + Sync { #[test] fn rustc_issue_102933() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: future, dispatch_from_dyn, deref use core::future::Future; @@ -351,7 +353,7 @@ pub trait B2: Service<Response = i32> + B1 { #[test] fn rustc_issue_106247() { - check_object_safety( + check_dyn_compatibility( r#" //- minicore: sync, dispatch_from_dyn pub trait Trait { @@ -363,8 +365,8 @@ pub trait Trait { } #[test] -fn std_error_is_object_safe() { - check_object_safety( +fn std_error_is_dyn_compatible() { + check_dyn_compatibility( r#" //- minicore: fmt, dispatch_from_dyn trait Erased<'a>: 'a {} @@ -380,14 +382,14 @@ pub trait Error: core::fmt::Debug + core::fmt::Display { } #[test] -fn lifetime_gat_is_object_unsafe() { - check_object_safety( +fn lifetime_gat_is_dyn_incompatible() { + check_dyn_compatibility( r#" //- minicore: dispatch_from_dyn trait Foo { type Bar<'a>; } "#, - [("Foo", vec![ObjectSafetyViolationKind::GAT])], + [("Foo", vec![DynCompatibilityViolationKind::GAT])], ); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 5ed41b99ba3..ef570a20556 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -38,11 +38,11 @@ pub mod consteval; pub mod db; pub mod diagnostics; pub mod display; +pub mod dyn_compatibility; pub mod 
lang_items; pub mod layout; pub mod method_resolution; pub mod mir; -pub mod object_safety; pub mod primitive; pub mod traits; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index 878d584a4ef..9830fa1ca7b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -386,82 +386,91 @@ fn ever_initialized_map( fn dfs( db: &dyn HirDatabase, body: &MirBody, - b: BasicBlockId, l: LocalId, + stack: &mut Vec<BasicBlockId>, result: &mut ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>>, ) { - let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs - let block = &body.basic_blocks[b]; - for statement in &block.statements { - match &statement.kind { - StatementKind::Assign(p, _) => { - if p.projection.lookup(&body.projection_store).is_empty() && p.local == l { - is_ever_initialized = true; + while let Some(b) = stack.pop() { + let mut is_ever_initialized = result[b][l]; // It must be filled, as we use it as mark for dfs + let block = &body.basic_blocks[b]; + for statement in &block.statements { + match &statement.kind { + StatementKind::Assign(p, _) => { + if p.projection.lookup(&body.projection_store).is_empty() && p.local == l { + is_ever_initialized = true; + } } - } - StatementKind::StorageDead(p) => { - if *p == l { - is_ever_initialized = false; + StatementKind::StorageDead(p) => { + if *p == l { + is_ever_initialized = false; + } } + StatementKind::Deinit(_) + | StatementKind::FakeRead(_) + | StatementKind::Nop + | StatementKind::StorageLive(_) => (), } - StatementKind::Deinit(_) - | StatementKind::FakeRead(_) - | StatementKind::Nop - | StatementKind::StorageLive(_) => (), - } - } - let Some(terminator) = &block.terminator else { - never!( - "Terminator should be none only in construction.\nThe body:\n{}", - body.pretty_print(db) - ); - return; - }; - let mut process = |target, is_ever_initialized| { - if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized { - result[target].insert(l, is_ever_initialized); - dfs(db, body, target, l, result); - } - }; - match &terminator.kind { - TerminatorKind::Goto { target } => process(*target, is_ever_initialized), - TerminatorKind::SwitchInt { targets, .. } => { - targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized)); } - TerminatorKind::UnwindResume - | TerminatorKind::Abort - | TerminatorKind::Return - | TerminatorKind::Unreachable => (), - TerminatorKind::Call { target, cleanup, destination, .. } => { - if destination.projection.lookup(&body.projection_store).is_empty() - && destination.local == l - { - is_ever_initialized = true; + let Some(terminator) = &block.terminator else { + never!( + "Terminator should be none only in construction.\nThe body:\n{}", + body.pretty_print(db) + ); + return; + }; + let mut process = |target, is_ever_initialized| { + if !result[target].contains_idx(l) || !result[target][l] && is_ever_initialized { + result[target].insert(l, is_ever_initialized); + stack.push(target); + } + }; + match &terminator.kind { + TerminatorKind::Goto { target } => process(*target, is_ever_initialized), + TerminatorKind::SwitchInt { targets, .. 
} => { + targets.all_targets().iter().for_each(|&it| process(it, is_ever_initialized)); + } + TerminatorKind::UnwindResume + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable => (), + TerminatorKind::Call { target, cleanup, destination, .. } => { + if destination.projection.lookup(&body.projection_store).is_empty() + && destination.local == l + { + is_ever_initialized = true; + } + target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized)); + } + TerminatorKind::Drop { target, unwind, place: _ } => { + iter::once(target) + .chain(unwind) + .for_each(|&it| process(it, is_ever_initialized)); + } + TerminatorKind::DropAndReplace { .. } + | TerminatorKind::Assert { .. } + | TerminatorKind::Yield { .. } + | TerminatorKind::CoroutineDrop + | TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } => { + never!("We don't emit these MIR terminators yet"); } - target.iter().chain(cleanup).for_each(|&it| process(it, is_ever_initialized)); - } - TerminatorKind::Drop { target, unwind, place: _ } => { - iter::once(target).chain(unwind).for_each(|&it| process(it, is_ever_initialized)); - } - TerminatorKind::DropAndReplace { .. } - | TerminatorKind::Assert { .. } - | TerminatorKind::Yield { .. } - | TerminatorKind::CoroutineDrop - | TerminatorKind::FalseEdge { .. } - | TerminatorKind::FalseUnwind { .. } => { - never!("We don't emit these MIR terminators yet"); } } } + let mut stack = Vec::new(); for &l in &body.param_locals { result[body.start_block].insert(l, true); - dfs(db, body, body.start_block, l, &mut result); + stack.clear(); + stack.push(body.start_block); + dfs(db, body, l, &mut stack, &mut result); } for l in body.locals.iter().map(|it| it.0) { db.unwind_if_cancelled(); if !result[body.start_block].contains_idx(l) { result[body.start_block].insert(l, false); - dfs(db, body, body.start_block, l, &mut result); + stack.clear(); + stack.push(body.start_block); + dfs(db, body, l, &mut stack, &mut result); } } result diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 8f5db32f957..30e023e1a47 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -144,9 +144,9 @@ pub use { hir_ty::{ consteval::ConstEvalError, display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite}, + dyn_compatibility::{DynCompatibilityViolation, MethodViolationCode}, layout::LayoutError, mir::{MirEvalError, MirLowerError}, - object_safety::{MethodViolationCode, ObjectSafetyViolation}, CastError, FnAbi, PointerCast, Safety, }, // FIXME: Properly encapsulate mir @@ -497,10 +497,9 @@ impl Module { /// Finds a parent module. pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> { - // FIXME: handle block expressions as modules (their parent is in a different DefMap) let def_map = self.id.def_map(db.upcast()); - let parent_id = def_map[self.id.local_id].parent?; - Some(Module { id: def_map.module_id(parent_id) }) + let parent_id = def_map.containing_module(self.id.local_id)?; + Some(Module { id: parent_id }) } /// Finds nearest non-block ancestor `Module` (`self` included). 
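The `ever_initialized_map` change above rewrites the recursive `dfs` over basic blocks into an iterative walk driven by an explicit `stack: &mut Vec<BasicBlockId>`, so deeply nested control flow can no longer overflow the call stack. A minimal sketch of the same recursion-to-work-list pattern, using a hypothetical graph keyed by `u32` node ids rather than the rust-analyzer MIR types, might look like this:

use std::collections::HashMap;

// Propagate a boolean flag from `start` across a directed graph without recursion.
// Nodes are pushed onto an explicit stack instead of re-entering the function,
// and a successor is only revisited when its stored value would actually change.
fn propagate(successors: &HashMap<u32, Vec<u32>>, start: u32) -> HashMap<u32, bool> {
    let mut result: HashMap<u32, bool> = HashMap::new();
    result.insert(start, true);
    let mut stack = vec![start];
    while let Some(node) = stack.pop() {
        let flag = result[&node];
        for &succ in successors.get(&node).into_iter().flatten() {
            let needs_update = match result.get(&succ) {
                None => true,
                Some(&old) => !old && flag,
            };
            if needs_update {
                result.insert(succ, flag);
                stack.push(succ);
            }
        }
    }
    result
}

The termination argument is the same as in the patched `dfs`: each node is pushed only when its recorded value changes, so the loop visits every (node, value) pair at most once regardless of how deep the graph is.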
@@ -557,7 +556,7 @@ impl Module { acc: &mut Vec<AnyDiagnostic>, style_lints: bool, ) { - let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered(); + let _p = tracing::info_span!("diagnostics", name = ?self.name(db)).entered(); let edition = db.crate_graph()[self.id.krate()].edition; let def_map = self.id.def_map(db.upcast()); for diag in def_map.diagnostics() { @@ -2690,8 +2689,8 @@ impl Trait { .count() } - pub fn object_safety(&self, db: &dyn HirDatabase) -> Option<ObjectSafetyViolation> { - hir_ty::object_safety::object_safety(db, self.id) + pub fn dyn_compatibility(&self, db: &dyn HirDatabase) -> Option<DynCompatibilityViolation> { + hir_ty::dyn_compatibility::dyn_compatibility(db, self.id) } fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> { diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index fa14b53dbc3..b27f1fbb5db 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -24,6 +24,7 @@ use hir_expand::{ builtin::{BuiltinFnLikeExpander, EagerExpander}, db::ExpandDatabase, files::InRealFile, + hygiene::SyntaxContextExt as _, inert_attr_macro::find_builtin_attr_idx, name::AsName, FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, @@ -32,13 +33,13 @@ use intern::Symbol; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; -use span::{EditionedFileId, FileId, HirFileIdRepr}; +use span::{EditionedFileId, FileId, HirFileIdRepr, SyntaxContextId}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, - ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _}, - match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, - TextRange, TextSize, + ast::{self, HasAttrs as _, HasGenericParams, IsString as _}, + AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, + TextSize, }; use crate::{ @@ -608,7 +609,7 @@ impl<'db> SemanticsImpl<'db> { let quote = string.open_quote_text_range()?; let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?; - self.descend_into_macros_breakable(token, |token| { + self.descend_into_macros_breakable(token, |token, _| { (|| { let token = token.value; let string = ast::String::cast(token)?; @@ -655,7 +656,7 @@ impl<'db> SemanticsImpl<'db> { let original_string = ast::String::cast(original_token.clone())?; let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?; let quote = original_string.open_quote_text_range()?; - self.descend_into_macros_breakable(original_token, |token| { + self.descend_into_macros_breakable(original_token, |token, _| { (|| { let token = token.value; self.resolve_offset_in_format_args( @@ -718,7 +719,7 @@ impl<'db> SemanticsImpl<'db> { // node is just the token, so descend the token self.descend_into_macros_impl( InRealFile::new(file_id, first), - &mut |InFile { value, .. }| { + &mut |InFile { value, .. 
}, _ctx| { if let Some(node) = value .parent_ancestors() .take_while(|it| it.text_range() == value.text_range()) @@ -732,7 +733,7 @@ impl<'db> SemanticsImpl<'db> { } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; - self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| { + self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token, _ctx| { scratch.push(token); CONTINUE_NO_BREAKS }); @@ -740,7 +741,7 @@ impl<'db> SemanticsImpl<'db> { let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( InRealFile::new(file_id, last), - &mut |InFile { value: last, file_id: last_fid }| { + &mut |InFile { value: last, file_id: last_fid }, _ctx| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { if let Some(p) = first.parent() { @@ -763,7 +764,9 @@ impl<'db> SemanticsImpl<'db> { res } - fn is_inside_macro_call(token: &SyntaxToken) -> bool { + // FIXME: This isn't quite right wrt to inner attributes + /// Does a syntactic traversal to check whether this token might be inside a macro call + pub fn might_be_inside_macro_call(&self, token: &SyntaxToken) -> bool { token.parent_ancestors().any(|ancestor| { if ast::MacroCall::can_cast(ancestor.kind()) { return true; @@ -781,25 +784,14 @@ impl<'db> SemanticsImpl<'db> { }) } - pub fn descend_into_macros_exact_if_in_macro( - &self, - token: SyntaxToken, - ) -> SmallVec<[SyntaxToken; 1]> { - if Self::is_inside_macro_call(&token) { - self.descend_into_macros_exact(token) - } else { - smallvec![token] - } - } - pub fn descend_into_macros_cb( &self, token: SyntaxToken, - mut cb: impl FnMut(InFile<SyntaxToken>), + mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId), ) { if let Ok(token) = self.wrap_token_infile(token).into_real_file() { - self.descend_into_macros_impl(token, &mut |t| { - cb(t); + self.descend_into_macros_impl(token, &mut |t, ctx| { + cb(t, ctx); CONTINUE_NO_BREAKS }); } @@ -808,7 +800,7 @@ impl<'db> SemanticsImpl<'db> { pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { let mut res = smallvec![]; if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { - self.descend_into_macros_impl(token, &mut |t| { + self.descend_into_macros_impl(token, &mut |t, _ctx| { res.push(t.value); CONTINUE_NO_BREAKS }); @@ -819,10 +811,27 @@ impl<'db> SemanticsImpl<'db> { res } + pub fn descend_into_macros_no_opaque(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> { + let mut res = smallvec![]; + if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { + self.descend_into_macros_impl(token, &mut |t, ctx| { + if !ctx.is_opaque(self.db.upcast()) { + // Don't descend into opaque contexts + res.push(t.value); + } + CONTINUE_NO_BREAKS + }); + } + if res.is_empty() { + res.push(token); + } + res + } + pub fn descend_into_macros_breakable<T>( &self, token: InRealFile<SyntaxToken>, - mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>, + mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>, ) -> Option<T> { self.descend_into_macros_impl(token.clone(), &mut cb) } @@ -834,10 +843,12 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); - self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| { + self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }, ctx| { let mapped_kind = value.kind(); let any_ident_match = || 
kind.is_any_identifier() && value.kind().is_any_identifier(); - let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); + let matches = (kind == mapped_kind || any_ident_match()) + && text == value.text() + && !ctx.is_opaque(self.db.upcast()); if matches { r.push(value); } @@ -854,17 +865,21 @@ impl<'db> SemanticsImpl<'db> { let text = token.text(); let kind = token.kind(); if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() { - self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| { - let mapped_kind = value.kind(); - let any_ident_match = - || kind.is_any_identifier() && value.kind().is_any_identifier(); - let matches = (kind == mapped_kind || any_ident_match()) && text == value.text(); - if matches { - ControlFlow::Break(value) - } else { - ControlFlow::Continue(()) - } - }) + self.descend_into_macros_breakable( + token.clone(), + |InFile { value, file_id: _ }, _ctx| { + let mapped_kind = value.kind(); + let any_ident_match = + || kind.is_any_identifier() && value.kind().is_any_identifier(); + let matches = + (kind == mapped_kind || any_ident_match()) && text == value.text(); + if matches { + ControlFlow::Break(value) + } else { + ControlFlow::Continue(()) + } + }, + ) } else { None } @@ -874,7 +889,7 @@ impl<'db> SemanticsImpl<'db> { fn descend_into_macros_impl<T>( &self, InRealFile { value: token, file_id }: InRealFile<SyntaxToken>, - f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>, + f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContextId) -> ControlFlow<T>, ) -> Option<T> { let _p = tracing::info_span!("descend_into_macros_impl").entered(); let (sa, span, file_id) = token @@ -898,7 +913,8 @@ impl<'db> SemanticsImpl<'db> { // These are tracked to know which macro calls we still have to look into // the tokens themselves aren't that interesting as the span that is being used to map // things down never changes. 
- let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; + let mut stack: Vec<(_, SmallVec<[_; 2]>)> = + vec![(file_id, smallvec![(token, SyntaxContextId::ROOT)])]; // Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { @@ -921,11 +937,11 @@ impl<'db> SemanticsImpl<'db> { // Filters out all tokens that contain the given range (usually the macro call), any such // token is redundant as the corresponding macro call has already been processed let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| { - tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range())) + tokens.retain(|(t, _): &mut (SyntaxToken, _)| !range.contains_range(t.text_range())) }; while let Some((expansion, ref mut tokens)) = stack.pop() { - while let Some(token) = tokens.pop() { + while let Some((token, ctx)) = tokens.pop() { let was_not_remapped = (|| { // First expand into attribute invocations let containing_attribute_macro_call = self.with_ctx(|ctx| { @@ -1036,7 +1052,7 @@ impl<'db> SemanticsImpl<'db> { let text_range = attr.syntax().text_range(); // remove any other token in this macro input, all their mappings are the // same as this - tokens.retain(|t| { + tokens.retain(|(t, _)| { !text_range.contains_range(t.text_range()) }); return process_expansion_for_token( @@ -1093,7 +1109,7 @@ impl<'db> SemanticsImpl<'db> { .is_none(); if was_not_remapped { - if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) { + if let ControlFlow::Break(b) = f(InFile::new(expansion, token), ctx) { return Some(b); } } @@ -1221,26 +1237,10 @@ impl<'db> SemanticsImpl<'db> { ToDef::to_def(self, src.as_ref()) } - pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> { - let text = lifetime.text(); - let label = lifetime.syntax().ancestors().find_map(|syn| { - let label = match_ast! 
{ - match syn { - ast::ForExpr(it) => it.label(), - ast::WhileExpr(it) => it.label(), - ast::LoopExpr(it) => it.label(), - ast::BlockExpr(it) => it.label(), - _ => None, - } - }; - label.filter(|l| { - l.lifetime() - .and_then(|lt| lt.lifetime_ident_token()) - .map_or(false, |lt| lt.text() == text) - }) - })?; - let src = self.wrap_node_infile(label); - ToDef::to_def(self, src.as_ref()) + pub fn resolve_label(&self, label: &ast::Lifetime) -> Option<Label> { + let (parent, label_id) = self + .with_ctx(|ctx| ctx.label_ref_to_def(self.wrap_node_infile(label.clone()).as_ref()))?; + Some(Label { parent, label_id }) } pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> { diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index c1e4e1d1e27..fd6d52d6c9d 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -92,7 +92,7 @@ use hir_def::{ keys::{self, Key}, DynMap, }, - hir::{BindingId, LabelId}, + hir::{BindingId, Expr, LabelId}, AdtId, BlockId, ConstId, ConstParamId, DefWithBodyId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, Lookup, MacroId, ModuleId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, @@ -343,6 +343,20 @@ impl SourceToDefCtx<'_, '_> { Some((container, label_id)) } + pub(super) fn label_ref_to_def( + &mut self, + src: InFile<&ast::Lifetime>, + ) -> Option<(DefWithBodyId, LabelId)> { + let break_or_continue = ast::Expr::cast(src.value.syntax().parent()?)?; + let container = self.find_pat_or_label_container(src.syntax_ref())?; + let (body, source_map) = self.db.body_with_source_map(container); + let break_or_continue = source_map.node_expr(src.with_value(&break_or_continue))?; + let (Expr::Break { label, .. 
} | Expr::Continue { label }) = body[break_or_continue] else { + return None; + }; + Some((container, label?)) + } + pub(super) fn item_to_macro_call(&mut self, src: InFile<&ast::Item>) -> Option<MacroCallId> { let map = self.dyn_map(src)?; map[keys::ATTR_MACRO_CALL].get(&AstPtr::new(src.value)).copied() diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs index a43a4b5e1a0..61dc72e0b33 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_variable.rs @@ -1,8 +1,12 @@ use hir::TypeInfo; use ide_db::syntax_helpers::suggest_name; use syntax::{ - ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName}, - ted, NodeOrToken, + ast::{ + self, edit::IndentLevel, edit_in_place::Indent, make, syntax_factory::SyntaxFactory, + AstNode, + }, + syntax_editor::Position, + NodeOrToken, SyntaxKind::{BLOCK_EXPR, BREAK_EXPR, COMMENT, LOOP_EXPR, MATCH_GUARD, PATH_EXPR, RETURN_EXPR}, SyntaxNode, T, }; @@ -105,39 +109,46 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op ), }; + let make = SyntaxFactory::new(); + let mut editor = edit.make_editor(&expr_replace); + + let pat_name = make.name(&var_name); + let name_expr = make.expr_path(make::ext::ident_path(&var_name)); + + if let Some(cap) = ctx.config.snippet_cap { + let tabstop = edit.make_tabstop_before(cap); + editor.add_annotation(pat_name.syntax().clone(), tabstop); + } + let ident_pat = match parent { Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => { - make::ident_pat(false, true, make::name(&var_name)) + make.ident_pat(false, true, pat_name) } _ if needs_adjust && !needs_ref && ty.as_ref().is_some_and(|ty| ty.is_mutable_reference()) => { - make::ident_pat(false, true, make::name(&var_name)) + make.ident_pat(false, true, pat_name) } - _ => make::ident_pat(false, false, make::name(&var_name)), + _ => make.ident_pat(false, false, pat_name), }; let to_extract_no_ref = match ty.as_ref().filter(|_| needs_ref) { Some(receiver_type) if receiver_type.is_mutable_reference() => { - make::expr_ref(to_extract_no_ref, true) + make.expr_ref(to_extract_no_ref, true) } Some(receiver_type) if receiver_type.is_reference() => { - make::expr_ref(to_extract_no_ref, false) + make.expr_ref(to_extract_no_ref, false) } _ => to_extract_no_ref, }; - let expr_replace = edit.make_syntax_mut(expr_replace); - let let_stmt = - make::let_stmt(ident_pat.into(), None, Some(to_extract_no_ref)).clone_for_update(); - let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update(); + let let_stmt = make.let_stmt(ident_pat.into(), None, Some(to_extract_no_ref)); match anchor { Anchor::Before(place) => { let prev_ws = place.prev_sibling_or_token().and_then(|it| it.into_token()); let indent_to = IndentLevel::from_node(&place); - let insert_place = edit.make_syntax_mut(place); // Adjust ws to insert depending on if this is all inline or on separate lines let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) { @@ -146,37 +157,20 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op " ".to_owned() }; - ted::insert_all_raw( - ted::Position::before(insert_place), + editor.insert_all( + Position::before(place), vec![ let_stmt.syntax().clone().into(), make::tokens::whitespace(&trailing_ws).into(), ], ); - ted::replace(expr_replace, 
name_expr.syntax()); - - if let Some(cap) = ctx.config.snippet_cap { - if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() { - if let Some(name) = ident_pat.name() { - edit.add_tabstop_before(cap, name); - } - } - } + editor.replace(expr_replace, name_expr.syntax()); } Anchor::Replace(stmt) => { cov_mark::hit!(test_extract_var_expr_stmt); - let stmt_replace = edit.make_mut(stmt); - ted::replace(stmt_replace.syntax(), let_stmt.syntax()); - - if let Some(cap) = ctx.config.snippet_cap { - if let Some(ast::Pat::IdentPat(ident_pat)) = let_stmt.pat() { - if let Some(name) = ident_pat.name() { - edit.add_tabstop_before(cap, name); - } - } - } + editor.replace(stmt.syntax(), let_stmt.syntax()); } Anchor::WrapInBlock(to_wrap) => { let indent_to = to_wrap.indent_level(); @@ -184,47 +178,22 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op let block = if to_wrap.syntax() == &expr_replace { // Since `expr_replace` is the same that needs to be wrapped in a block, // we can just directly replace it with a block - let block = - make::block_expr([let_stmt.into()], Some(name_expr)).clone_for_update(); - ted::replace(expr_replace, block.syntax()); - - block + make.block_expr([let_stmt.into()], Some(name_expr)) } else { - // `expr_replace` is a descendant of `to_wrap`, so both steps need to be - // handled separately, otherwise we wrap the wrong expression - let to_wrap = edit.make_mut(to_wrap); - - // Replace the target expr first so that we don't need to find where - // `expr_replace` is in the wrapped `to_wrap` - ted::replace(expr_replace, name_expr.syntax()); - - // Wrap `to_wrap` in a block - let block = make::block_expr([let_stmt.into()], Some(to_wrap.clone())) - .clone_for_update(); - ted::replace(to_wrap.syntax(), block.syntax()); - - block + // `expr_replace` is a descendant of `to_wrap`, so we just replace it with `name_expr`. 
+ editor.replace(expr_replace, name_expr.syntax()); + make.block_expr([let_stmt.into()], Some(to_wrap.clone())) }; - if let Some(cap) = ctx.config.snippet_cap { - // Adding a tabstop to `name` requires finding the let stmt again, since - // the existing `let_stmt` is not actually added to the tree - let pat = block.statements().find_map(|stmt| { - let ast::Stmt::LetStmt(let_stmt) = stmt else { return None }; - let_stmt.pat() - }); - - if let Some(ast::Pat::IdentPat(ident_pat)) = pat { - if let Some(name) = ident_pat.name() { - edit.add_tabstop_before(cap, name); - } - } - } + editor.replace(to_wrap.syntax(), block.syntax()); // fixup indentation of block block.indent(indent_to); } } + + editor.add_mappings(make.finish_with_mappings()); + edit.add_file_edits(ctx.file_id(), editor); edit.rename(); }, ) diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs index 0d403f49b7a..1d05419c96d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/config.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/config.rs @@ -7,7 +7,7 @@ use hir::ImportPathConfig; use ide_db::{imports::insert_use::InsertUseConfig, SnippetCap}; -use crate::snippet::Snippet; +use crate::{snippet::Snippet, CompletionFieldsToResolve}; #[derive(Clone, Debug, PartialEq, Eq)] pub struct CompletionConfig { @@ -27,6 +27,7 @@ pub struct CompletionConfig { pub prefer_absolute: bool, pub snippets: Vec<Snippet>, pub limit: Option<usize>, + pub fields_to_resolve: CompletionFieldsToResolve, } #[derive(Clone, Debug, PartialEq, Eq)] diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 192f1b43fac..e49a9e3b064 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -7,7 +7,8 @@ mod tests; use std::{iter, ops::ControlFlow}; use hir::{ - HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo, + HasAttrs, Local, ModuleSource, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, + TypeInfo, }; use ide_db::{ base_db::SourceDatabase, famous_defs::FamousDefs, helpers::is_editable_crate, FilePosition, @@ -743,7 +744,12 @@ impl<'a> CompletionContext<'a> { } }); - let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count(); + let depth_from_crate_root = iter::successors(Some(module), |m| m.parent(db)) + // `BlockExpr` modules are not count as module depth + .filter(|m| !matches!(m.definition_source(db).value, ModuleSource::BlockExpr(_))) + .count() + // exclude `m` itself + .saturating_sub(1); let complete_semicolon = if config.add_semicolon_to_unit { let inside_closure_ret = token.parent_ancestors().try_for_each(|ancestor| { diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 58d1fad0950..a78976d3fd8 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -37,6 +37,31 @@ pub use crate::{ snippet::{Snippet, SnippetScope}, }; +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct CompletionFieldsToResolve { + pub resolve_label_details: bool, + pub resolve_tags: bool, + pub resolve_detail: bool, + pub resolve_documentation: bool, + pub resolve_filter_text: bool, + pub resolve_text_edit: bool, + pub resolve_command: bool, +} + +impl 
CompletionFieldsToResolve { + pub const fn empty() -> Self { + Self { + resolve_label_details: false, + resolve_tags: false, + resolve_detail: false, + resolve_documentation: false, + resolve_filter_text: false, + resolve_text_edit: false, + resolve_command: false, + } + } +} + //FIXME: split the following feature into fine-grained features. // Feature: Magic Completions diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs index 9d77d970071..f371012de3f 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests.rs @@ -37,8 +37,8 @@ use test_fixture::ChangeFixture; use test_utils::assert_eq_text; use crate::{ - resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionItem, - CompletionItemKind, + resolve_completion_edits, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, + CompletionItem, CompletionItemKind, }; /// Lots of basic item definitions @@ -84,6 +84,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig { prefer_absolute: false, snippets: Vec::new(), limit: None, + fields_to_resolve: CompletionFieldsToResolve::empty(), }; pub(crate) fn completion_list(ra_fixture: &str) -> String { diff --git a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs index abf4438a71f..266109765ab 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/generated/lints.rs @@ -7081,8 +7081,8 @@ This feature has no tracking issue, and is therefore likely internal to the comp "##, }, Lint { - label: "object_safe_for_dispatch", - description: r##"# `object_safe_for_dispatch` + label: "dyn_compatible_for_dispatch", + description: r##"# `dyn_compatible_for_dispatch` The tracking issue for this feature is: [#43561] diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs index 926fae0d317..4c197b45338 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/merge_imports.rs @@ -614,7 +614,7 @@ fn path_segment_cmp(a: &ast::PathSegment, b: &ast::PathSegment) -> Ordering { (Some(_), None) => Ordering::Greater, (None, Some(_)) => Ordering::Less, (Some(a_name), Some(b_name)) => { - // snake_case < CamelCase < UPPER_SNAKE_CASE + // snake_case < UpperCamelCase < UPPER_SNAKE_CASE let a_text = a_name.as_str().trim_start_matches("r#"); let b_text = b_name.as_str().trim_start_matches("r#"); if a_text.starts_with(char::is_lowercase) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs index bb121f4a80a..19d8a15422e 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/prime_caches.rs @@ -13,6 +13,7 @@ use crate::{ salsa::{Database, ParallelDatabase, Snapshot}, Cancelled, CrateId, SourceDatabase, SourceRootDatabase, }, + symbol_index::SymbolsDatabase, FxIndexMap, RootDatabase, }; @@ -54,11 +55,13 @@ pub fn parallel_prime_caches( let (progress_sender, progress_receiver) = crossbeam_channel::unbounded(); let (work_sender, work_receiver) = crossbeam_channel::unbounded(); let graph = graph.clone(); + let local_roots = db.local_roots(); let prime_caches_worker = move |db: 
Snapshot<RootDatabase>| { while let Ok((crate_id, crate_name)) = work_receiver.recv() { progress_sender .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?; + // Compute the DefMap and possibly ImportMap let file_id = graph[crate_id].root_file_id; let root_id = db.file_source_root(file_id); if db.source_root(root_id).is_library { @@ -68,6 +71,19 @@ pub fn parallel_prime_caches( db.import_map(crate_id); } + // Compute the symbol search index. + // This primes the cache for `ide_db::symbol_index::world_symbols()`. + // + // We do this for workspace crates only (members of local_roots), because doing it + // for all dependencies could be *very* unnecessarily slow in a large project. + // + // FIXME: We should do it unconditionally if the configuration is set to default to + // searching dependencies (rust-analyzer.workspace.symbol.search.scope), but we + // would need to pipe that configuration information down here. + if local_roots.contains(&root_id) { + db.crate_symbols(crate_id.into()); + } + progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?; } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index 4166b08339b..852ee595be4 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -529,9 +529,13 @@ impl<'a> FindUsages<'a> { }) .into_iter() .flat_map(move |token| { - sema.descend_into_macros_exact_if_in_macro(token) - .into_iter() - .filter_map(|it| it.parent()) + if sema.might_be_inside_macro_call(&token) { + sema.descend_into_macros_exact(token) + } else { + <_>::from([token]) + } + .into_iter() + .filter_map(|it| it.parent()) }) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs index 83a1eb44a61..bbdeb7cf085 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -229,10 +229,10 @@ fn foo() { check_diagnostics( r#" struct non_camel_case_name {} - // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName` + // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have UpperCamelCase name, e.g. `NonCamelCaseName` struct SCREAMING_CASE {} - // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase` + // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have UpperCamelCase name, e.g. `ScreamingCase` "#, ); } @@ -261,10 +261,10 @@ struct SomeStruct { SomeField: u8 } check_diagnostics( r#" enum some_enum { Val(u8) } - // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum` + // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have UpperCamelCase name, e.g. `SomeEnum` enum SOME_ENUM {} - // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum` + // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have UpperCamelCase name, e.g. `SomeEnum` "#, ); } @@ -283,7 +283,7 @@ enum AABB {} check_diagnostics( r#" enum SomeEnum { SOME_VARIANT(u8) } - // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant` + // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have UpperCamelCase name, e.g. 
`SomeVariant` "#, ); } @@ -313,7 +313,7 @@ static some_weird_const: u8 = 10; check_diagnostics( r#" struct someStruct; - // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct` + // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have UpperCamelCase name, e.g. `SomeStruct` impl someStruct { fn SomeFunc(&self) { @@ -530,11 +530,11 @@ extern { check_diagnostics( r#" trait BAD_TRAIT { - // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait` + // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait` const bad_const: u8; // ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST` type BAD_TYPE; - // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType` + // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType` fn BAD_FUNCTION(); // ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function` fn BadFunction(); @@ -552,11 +552,11 @@ trait BAD_TRAIT { check_diagnostics_with_disabled( r#" trait BAD_TRAIT { - // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait` + // ^^^^^^^^^ 💡 warn: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait` const bad_const: u8; // ^^^^^^^^^ 💡 warn: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST` type BAD_TYPE; - // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType` + // ^^^^^^^^ 💡 warn: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType` fn BAD_FUNCTION(BAD_PARAM: u8); // ^^^^^^^^^^^^ 💡 warn: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function` // ^^^^^^^^^ 💡 warn: Parameter `BAD_PARAM` should have snake_case name, e.g. `bad_param` @@ -664,7 +664,7 @@ mod CheckNonstandardStyle { mod CheckBadStyle { //^^^^^^^^^^^^^ 💡 error: Module `CheckBadStyle` should have snake_case name, e.g. `check_bad_style` struct fooo; - //^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo` + //^^^^ 💡 error: Structure `fooo` should have UpperCamelCase name, e.g. `Fooo` } mod F { @@ -676,7 +676,7 @@ mod F { #[deny(non_snake_case, non_camel_case_types)] pub struct some_type { - //^^^^^^^^^ 💡 error: Structure `some_type` should have CamelCase name, e.g. `SomeType` + //^^^^^^^^^ 💡 error: Structure `some_type` should have UpperCamelCase name, e.g. `SomeType` SOME_FIELD: u8, //^^^^^^^^^^ 💡 error: Field `SOME_FIELD` should have snake_case name, e.g. `some_field` SomeField: u16, @@ -693,11 +693,11 @@ pub static SomeStatic: u8 = 10; #[deny(non_snake_case, non_camel_case_types, non_upper_case_globals)] trait BAD_TRAIT { - // ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have CamelCase name, e.g. `BadTrait` + // ^^^^^^^^^ 💡 error: Trait `BAD_TRAIT` should have UpperCamelCase name, e.g. `BadTrait` const bad_const: u8; // ^^^^^^^^^ 💡 error: Constant `bad_const` should have UPPER_SNAKE_CASE name, e.g. `BAD_CONST` type BAD_TYPE; - // ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have CamelCase name, e.g. `BadType` + // ^^^^^^^^ 💡 error: Type alias `BAD_TYPE` should have UpperCamelCase name, e.g. `BadType` fn BAD_FUNCTION(BAD_PARAM: u8); // ^^^^^^^^^^^^ 💡 error: Function `BAD_FUNCTION` should have snake_case name, e.g. `bad_function` // ^^^^^^^^^ 💡 error: Parameter `BAD_PARAM` should have snake_case name, e.g. 
`bad_param` @@ -952,7 +952,7 @@ fn foo() { let FOO; #[allow(non_snake_case)] struct qux; - // ^^^ 💡 warn: Structure `qux` should have CamelCase name, e.g. `Qux` + // ^^^ 💡 warn: Structure `qux` should have UpperCamelCase name, e.g. `Qux` fn BAZ() { // ^^^ 💡 error: Function `BAZ` should have snake_case name, e.g. `baz` diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs index 6ae9dde84be..9dacbd8badf 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_declaration.rs @@ -29,7 +29,7 @@ pub(crate) fn goto_declaration( .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?; let range = original_token.text_range(); let info: Vec<NavigationTarget> = sema - .descend_into_macros(original_token) + .descend_into_macros_no_opaque(original_token) .iter() .filter_map(|token| { let parent = token.parent()?; diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs index 8836166d969..c61b2ba84f2 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_definition.rs @@ -83,7 +83,7 @@ pub(crate) fn goto_definition( } let navs = sema - .descend_into_macros(original_token.clone()) + .descend_into_macros_no_opaque(original_token.clone()) .into_iter() .filter_map(|token| { let parent = token.parent()?; @@ -2661,6 +2661,24 @@ fn foo() { } #[test] + fn label_inside_macro() { + check( + r#" +macro_rules! m { + ($s:stmt) => { $s }; +} + +fn foo() { + 'label: loop { + // ^^^^^^ + m!(continue 'label$0); + } +} +"#, + ); + } + + #[test] fn goto_def_on_return_in_try() { check( r#" diff --git a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs index ca04b7bb5a9..c7ebd9a3531 100644 --- a/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs +++ b/src/tools/rust-analyzer/crates/ide/src/goto_type_definition.rs @@ -69,7 +69,7 @@ pub(crate) fn goto_type_definition( } let range = token.text_range(); - sema.descend_into_macros(token) + sema.descend_into_macros_no_opaque(token) .into_iter() .filter_map(|token| { let ty = sema diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 83adf6548a8..01fa316d5fc 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -3,9 +3,9 @@ use std::{mem, ops::Not}; use either::Either; use hir::{ - db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource, - HirDisplay, Layout, LayoutError, MethodViolationCode, Name, ObjectSafetyViolation, Semantics, - Trait, Type, TypeInfo, + db::ExpandDatabase, Adt, AsAssocItem, AsExternAssocItem, AssocItemContainer, CaptureKind, + DynCompatibilityViolation, HasCrate, HasSource, HirDisplay, Layout, LayoutError, + MethodViolationCode, Name, Semantics, Trait, Type, TypeInfo, }; use ide_db::{ base_db::SourceDatabase, @@ -529,10 +529,10 @@ pub(super) fn definition( _ => None, }; - let object_safety_info = if let Definition::Trait(it) = def { - let mut object_safety_info = String::new(); - render_object_safety(db, &mut object_safety_info, it.object_safety(db)); - Some(object_safety_info) + let dyn_compatibility_info = if let Definition::Trait(it) = def { + let mut dyn_compatibility_info 
= String::new(); + render_dyn_compatibility(db, &mut dyn_compatibility_info, it.dyn_compatibility(db)); + Some(dyn_compatibility_info) } else { None }; @@ -546,8 +546,8 @@ pub(super) fn definition( desc.push_str(&layout_info); desc.push('\n'); } - if let Some(object_safety_info) = object_safety_info { - desc.push_str(&object_safety_info); + if let Some(dyn_compatibility_info) = dyn_compatibility_info { + desc.push_str(&dyn_compatibility_info); desc.push('\n'); } desc.push_str(&label); @@ -813,7 +813,15 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition, edition: Edition) -> if matches!(def, Definition::GenericParam(_) | Definition::Local(_) | Definition::Label(_)) { return None; } - def.module(db).map(|module| path(db, module, definition_owner_name(db, def, edition), edition)) + let container: Option<Definition> = + def.as_assoc_item(db).and_then(|assoc| match assoc.container(db) { + AssocItemContainer::Trait(trait_) => Some(trait_.into()), + AssocItemContainer::Impl(impl_) => impl_.self_ty(db).as_adt().map(|adt| adt.into()), + }); + container + .unwrap_or(*def) + .module(db) + .map(|module| path(db, module, definition_owner_name(db, def, edition), edition)) } fn markup(docs: Option<String>, desc: String, mod_path: Option<String>) -> Markup { @@ -980,24 +988,24 @@ fn keyword_hints( } } -fn render_object_safety( +fn render_dyn_compatibility( db: &RootDatabase, buf: &mut String, - safety: Option<ObjectSafetyViolation>, + safety: Option<DynCompatibilityViolation>, ) { let Some(osv) = safety else { - buf.push_str("// Object Safety: Yes"); + buf.push_str("// Dyn Compatible: Yes"); return; }; - buf.push_str("// Object Safety: No\n// - Reason: "); + buf.push_str("// Dyn Compatible: No\n// - Reason: "); match osv { - ObjectSafetyViolation::SizedSelf => { + DynCompatibilityViolation::SizedSelf => { buf.push_str("has a `Self: Sized` bound"); } - ObjectSafetyViolation::SelfReferential => { + DynCompatibilityViolation::SelfReferential => { buf.push_str("has a bound that references `Self`"); } - ObjectSafetyViolation::Method(func, mvc) => { + DynCompatibilityViolation::Method(func, mvc) => { let name = hir::Function::from(func).name(db); format_to!( buf, @@ -1020,7 +1028,7 @@ fn render_object_safety( }; buf.push_str(desc); } - ObjectSafetyViolation::AssocConst(const_) => { + DynCompatibilityViolation::AssocConst(const_) => { let name = hir::Const::from(const_).name(db); if let Some(name) = name { format_to!(buf, "has an associated constant `{}`", name.as_str()); @@ -1028,11 +1036,11 @@ fn render_object_safety( buf.push_str("has an associated constant"); } } - ObjectSafetyViolation::GAT(alias) => { + DynCompatibilityViolation::GAT(alias) => { let name = hir::TypeAlias::from(alias).name(db); format_to!(buf, "has a generic associated type `{}`", name.as_str()); } - ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait) => { + DynCompatibilityViolation::HasNonCompatibleSuperTrait(super_trait) => { let name = hir::Trait::from(super_trait).name(db); format_to!(buf, "has a object unsafe supertrait `{}`", name.as_str()); } diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index cca62d2181f..e60be577f79 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -7175,7 +7175,7 @@ impl T$0 for () {} ``` ```rust - // Object Safety: Yes + // Dyn Compatible: Yes trait T {} ``` "#]], @@ -7195,7 +7195,7 @@ impl T$0 for () {} ``` ```rust - // Object Safety: Yes + 
// Dyn Compatible: Yes trait T {} ``` "#]], @@ -7219,7 +7219,7 @@ impl T$0 for () {} ``` ```rust - // Object Safety: No + // Dyn Compatible: No // - Reason: has a method `func` that is non dispatchable because of: // - missing a receiver trait T { /* … */ } @@ -7245,7 +7245,7 @@ impl T$0 for () {} ``` ```rust - // Object Safety: No + // Dyn Compatible: No // - Reason: has a method `func` that is non dispatchable because of: // - missing a receiver trait T { @@ -7275,7 +7275,7 @@ impl T$0 for () {} ``` ```rust - // Object Safety: No + // Dyn Compatible: No // - Reason: has a method `func` that is non dispatchable because of: // - missing a receiver trait T { @@ -7305,7 +7305,7 @@ impl T$0 for () {} ``` ```rust - // Object Safety: No + // Dyn Compatible: No // - Reason: has a method `func` that is non dispatchable because of: // - missing a receiver trait T { @@ -8962,3 +8962,29 @@ fn test_hover_function_with_pat_param() { "#]], ); } + +#[test] +fn hover_path_inside_block_scope() { + check( + r#" +mod m { + const _: () = { + mod m2 { + const C$0: () = (); + } + }; +} +"#, + expect![[r#" + *C* + + ```rust + test::m::m2 + ``` + + ```rust + const C: () = () + ``` + "#]], + ); +} diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index 547286c3f4d..c46c4c8ce94 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -119,8 +119,8 @@ pub use ide_assists::{ Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve, }; pub use ide_completion::{ - CallableSnippets, CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance, - Snippet, SnippetScope, + CallableSnippets, CompletionConfig, CompletionFieldsToResolve, CompletionItem, + CompletionItemKind, CompletionRelevance, Snippet, SnippetScope, }; pub use ide_db::{ base_db::{Cancelled, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootId}, diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index 927fdaa178c..961b2a4c938 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -409,7 +409,8 @@ fn traverse( let mut r = 0; sema.descend_into_macros_breakable( InRealFile::new(file_id, token.clone()), - |tok| { + |tok, _ctx| { + // FIXME: Consider checking ctx transparency for being opaque? let tok = tok.value; let tok_kind = tok.kind(); diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs index baa45174236..0e1606a6991 100644 --- a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -265,6 +265,11 @@ impl ProjectFolders { entries.push(manifest.to_owned()); } + for buildfile in ws.buildfiles() { + file_set_roots.push(VfsPath::from(buildfile.to_owned())); + entries.push(buildfile.to_owned()); + } + // In case of detached files we do **not** look for a rust-analyzer.toml. if !matches!(ws.kind, ProjectWorkspaceKind::DetachedFile { .. 
}) { let ws_root = ws.workspace_root(); @@ -521,7 +526,7 @@ mod tests { #[test] fn test_loading_rust_analyzer() { let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); - let cargo_config = CargoConfig::default(); + let cargo_config = CargoConfig { set_test: true, ..CargoConfig::default() }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: false, with_proc_macro_server: ProcMacroServerChoice::None, diff --git a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs index dceac815e0b..7ea23b4f752 100644 --- a/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs +++ b/src/tools/rust-analyzer/crates/parser/src/lexed_str.rs @@ -187,6 +187,12 @@ impl<'a> Converter<'a> { } rustc_lexer::TokenKind::RawIdent => IDENT, + + rustc_lexer::TokenKind::GuardedStrPrefix => { + err = "Invalid string literal (reserved syntax)"; + ERROR + }, + rustc_lexer::TokenKind::Literal { kind, .. } => { self.extend_literal(token_text.len(), kind); return; diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 7cc21bcf131..2dc6f0357e3 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -100,6 +100,7 @@ pub struct CargoConfig { pub invocation_strategy: InvocationStrategy, /// Optional path to use instead of `target` when building pub target_dir: Option<Utf8PathBuf>, + pub set_test: bool, } pub type Package = Idx<PackageData>; diff --git a/src/tools/rust-analyzer/crates/project-model/src/tests.rs b/src/tools/rust-analyzer/crates/project-model/src/tests.rs index 30d1ddb636e..5099697a696 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/tests.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/tests.rs @@ -35,6 +35,7 @@ fn load_cargo_with_overrides( rustc: Err(None), cargo_config_extra_env: Default::default(), error: None, + set_test: true, }, cfg_overrides, sysroot: Sysroot::empty(), @@ -242,6 +243,7 @@ fn smoke_test_real_sysroot_cargo() { rustc: Err(None), cargo_config_extra_env: Default::default(), error: None, + set_test: true, }, sysroot, rustc_cfg: Vec::new(), diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index 17b40a87cda..71b9b61e205 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -78,6 +78,7 @@ pub enum ProjectWorkspaceKind { rustc: Result<Box<(CargoWorkspace, WorkspaceBuildScripts)>, Option<String>>, /// Environment variables set in the `.cargo/config` file. cargo_config_extra_env: FxHashMap<String, String>, + set_test: bool, }, /// Project workspace was specified using a `rust-project.json` file. Json(ProjectJson), @@ -98,6 +99,7 @@ pub enum ProjectWorkspaceKind { cargo: Option<(CargoWorkspace, WorkspaceBuildScripts, Option<Arc<anyhow::Error>>)>, /// Environment variables set in the `.cargo/config` file. 
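The `set_test` flag added to `CargoConfig` here is threaded through `ProjectWorkspaceKind` and down into crate-graph construction (see the `workspace.rs` hunks below), so `cfg(test)` is only enabled for local crates when the option asks for it, while sysroot crates now pass `false`. A minimal standalone sketch of that gating, using plain std collections instead of rust-analyzer's `CfgOptions` and `sym` atoms:

```rust
use std::collections::HashSet;

// Hypothetical stand-ins for rust-analyzer's CargoConfig / CfgOptions,
// only to illustrate how the new `set_test` flag gates `cfg(test)`.
struct CargoConfig {
    set_test: bool,
}

fn cfg_options_for_local_crate(config: &CargoConfig) -> HashSet<&'static str> {
    let mut cfgs = HashSet::new();
    if config.set_test {
        // Local workspace crates get `cfg(test)` only when the
        // (default-on) `set_test` option requests it.
        cfgs.insert("test");
    }
    cfgs.insert("rust_analyzer");
    cfgs
}

fn main() {
    assert!(cfg_options_for_local_crate(&CargoConfig { set_test: true }).contains("test"));
    assert!(!cfg_options_for_local_crate(&CargoConfig { set_test: false }).contains("test"));
}
```

The CLI entry points touched below (`analysis-stats`, `lsif`, `run-tests`, `ssr`, ...) all pass `set_test: true`, which preserves the previous behaviour for those tools.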
cargo_config_extra_env: FxHashMap<String, String>, + set_test: bool, }, } @@ -112,6 +114,7 @@ impl fmt::Debug for ProjectWorkspace { build_scripts, rustc, cargo_config_extra_env, + set_test, } => f .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) @@ -126,6 +129,7 @@ impl fmt::Debug for ProjectWorkspace { .field("toolchain", &toolchain) .field("data_layout", &target_layout) .field("cargo_config_extra_env", &cargo_config_extra_env) + .field("set_test", set_test) .field("build_scripts", &build_scripts.error().unwrap_or("ok")) .finish(), ProjectWorkspaceKind::Json(project) => { @@ -137,12 +141,14 @@ impl fmt::Debug for ProjectWorkspace { .field("toolchain", &toolchain) .field("data_layout", &target_layout) .field("n_cfg_overrides", &cfg_overrides.len()); + debug_struct.finish() } ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, cargo_config_extra_env, + set_test, } => f .debug_struct("DetachedFiles") .field("file", &file) @@ -154,6 +160,7 @@ impl fmt::Debug for ProjectWorkspace { .field("data_layout", &target_layout) .field("n_cfg_overrides", &cfg_overrides.len()) .field("cargo_config_extra_env", &cargo_config_extra_env) + .field("set_test", set_test) .finish(), } } @@ -329,6 +336,7 @@ impl ProjectWorkspace { rustc, cargo_config_extra_env, error: error.map(Arc::new), + set_test: config.set_test, }, sysroot, rustc_cfg, @@ -423,6 +431,7 @@ impl ProjectWorkspace { file: detached_file.to_owned(), cargo: cargo_script, cargo_config_extra_env, + set_test: config.set_test, }, sysroot, rustc_cfg, @@ -539,6 +548,17 @@ impl ProjectWorkspace { } } + pub fn buildfiles(&self) -> Vec<AbsPathBuf> { + match &self.kind { + ProjectWorkspaceKind::Json(project) => project + .crates() + .filter_map(|(_, krate)| krate.build.as_ref().map(|build| build.build_file.clone())) + .map(AbsPathBuf::assert) + .collect(), + _ => vec![], + } + } + pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> { self.sysroot.discover_proc_macro_srv() } @@ -598,6 +618,7 @@ impl ProjectWorkspace { build_scripts, cargo_config_extra_env: _, error: _, + set_test: _, } => { cargo .packages() @@ -739,6 +760,7 @@ impl ProjectWorkspace { build_scripts, cargo_config_extra_env: _, error: _, + set_test, } => ( cargo_to_crate_graph( load, @@ -748,10 +770,11 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, + *set_test, ), sysroot, ), - ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, .. } => ( + ProjectWorkspaceKind::DetachedFile { file, cargo: cargo_script, set_test, .. 
} => ( if let Some((cargo, build_scripts, _)) = cargo_script { cargo_to_crate_graph( &mut |path| load(path), @@ -761,6 +784,7 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, + *set_test, ) } else { detached_file_to_crate_graph( @@ -769,6 +793,7 @@ impl ProjectWorkspace { file, sysroot, cfg_overrides, + *set_test, ) }, sysroot, @@ -802,6 +827,7 @@ impl ProjectWorkspace { cargo_config_extra_env, build_scripts: _, error: _, + set_test: _, }, ProjectWorkspaceKind::Cargo { cargo: o_cargo, @@ -809,6 +835,7 @@ impl ProjectWorkspace { cargo_config_extra_env: o_cargo_config_extra_env, build_scripts: _, error: _, + set_test: _, }, ) => { cargo == o_cargo @@ -823,11 +850,13 @@ impl ProjectWorkspace { file, cargo: Some((cargo_script, _, _)), cargo_config_extra_env, + set_test: _, }, ProjectWorkspaceKind::DetachedFile { file: o_file, cargo: Some((o_cargo_script, _, _)), cargo_config_extra_env: o_cargo_config_extra_env, + set_test: _, }, ) => { file == o_file @@ -976,6 +1005,7 @@ fn cargo_to_crate_graph( rustc_cfg: Vec<CfgAtom>, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, + set_test: bool, ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::info_span!("cargo_to_crate_graph").entered(); let mut res = (CrateGraph::default(), ProcMacroPaths::default()); @@ -1000,8 +1030,10 @@ fn cargo_to_crate_graph( let mut cfg_options = cfg_options.clone(); if cargo[pkg].is_local { - // Add test cfg for local crates - cfg_options.insert_atom(sym::test.clone()); + if set_test { + // Add test cfg for local crates + cfg_options.insert_atom(sym::test.clone()); + } cfg_options.insert_atom(sym::rust_analyzer.clone()); } @@ -1162,6 +1194,7 @@ fn detached_file_to_crate_graph( detached_file: &ManifestPath, sysroot: &Sysroot, override_cfg: &CfgOverrides, + set_test: bool, ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::info_span!("detached_file_to_crate_graph").entered(); let mut crate_graph = CrateGraph::default(); @@ -1169,7 +1202,9 @@ fn detached_file_to_crate_graph( sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load); let mut cfg_options = CfgOptions::from_iter(rustc_cfg); - cfg_options.insert_atom(sym::test.clone()); + if set_test { + cfg_options.insert_atom(sym::test.clone()); + } cfg_options.insert_atom(sym::rust_analyzer.clone()); override_cfg.apply(&mut cfg_options, ""); let cfg_options = Arc::new(cfg_options); @@ -1415,6 +1450,7 @@ fn sysroot_to_crate_graph( ..Default::default() }, &WorkspaceBuildScripts::default(), + false, ); let mut pub_deps = vec![]; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 41b42573f08..ecc8333503e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -137,6 +137,7 @@ fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> { filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()), chalk_filter: env::var("CHALK_DEBUG").ok(), profile_filter: env::var("RA_PROFILE").ok(), + json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(), } .init()?; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index e899e0e8eea..4844c514ae9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -65,6 +65,7 @@ 
impl flags::AnalysisStats { false => Some(RustLibSource::Discover), }, all_targets: true, + set_test: true, ..Default::default() }; let no_progress = &|_| (); @@ -81,7 +82,13 @@ impl flags::AnalysisStats { with_proc_macro_server: if self.disable_proc_macros { ProcMacroServerChoice::None } else { - ProcMacroServerChoice::Sysroot + match self.proc_macro_srv { + Some(ref path) => { + let path = vfs::AbsPathBuf::assert_utf8(path.to_owned()); + ProcMacroServerChoice::Explicit(path) + } + None => ProcMacroServerChoice::Sysroot, + } }, prefill_caches: false, }; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs index 73e71658d17..60d621b214a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs @@ -76,6 +76,8 @@ xflags::xflags! { optional --disable-build-scripts /// Don't use expand proc macros. optional --disable-proc-macros + /// Run the proc-macro-srv binary at the specified path. + optional --proc-macro-srv path: PathBuf /// Skip body lowering. optional --skip-lowering /// Skip type inference. @@ -120,7 +122,7 @@ xflags::xflags! { optional --disable-build-scripts /// Don't use expand proc macros. optional --disable-proc-macros - /// Run a custom proc-macro-srv binary. + /// Run the proc-macro-srv binary at the specified path. optional --proc-macro-srv path: PathBuf } @@ -133,7 +135,7 @@ xflags::xflags! { optional --disable-build-scripts /// Don't use expand proc macros. optional --disable-proc-macros - /// Run a custom proc-macro-srv binary. + /// Run the proc-macro-srv binary at the specified path. optional --proc-macro-srv path: PathBuf } @@ -233,6 +235,7 @@ pub struct AnalysisStats { pub no_sysroot: bool, pub disable_build_scripts: bool, pub disable_proc_macros: bool, + pub proc_macro_srv: Option<PathBuf>, pub skip_lowering: bool, pub skip_inference: bool, pub skip_mir_stats: bool, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index e4263a3f667..ca8acf57bff 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -277,6 +277,7 @@ impl flags::Lsif { let cargo_config = &CargoConfig { sysroot: Some(RustLibSource::Discover), all_targets: true, + set_test: true, ..Default::default() }; let no_progress = &|_| (); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs index f90ebcfdb2e..11534bbeba9 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs @@ -16,6 +16,7 @@ impl flags::RunTests { let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), all_targets: true, + set_test: true, ..Default::default() }; let load_cargo_config = LoadCargoConfig { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs index 730f3c08abb..30378db0b38 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -70,6 +70,7 @@ impl Tester { let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), all_targets: true, + set_test: true, ..Default::default() }; @@ 
-85,6 +86,7 @@ impl Tester { file: ManifestPath::try_from(tmp_file).unwrap(), cargo: None, cargo_config_extra_env: Default::default(), + set_test: true, }, sysroot, rustc_cfg: vec![], diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index e9198977dea..ff009e69547 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -24,11 +24,6 @@ impl flags::Scip { let now = Instant::now(); let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}")); - let load_cargo_config = LoadCargoConfig { - load_out_dirs_from_check: true, - with_proc_macro_server: ProcMacroServerChoice::Sysroot, - prefill_caches: true, - }; let root = vfs::AbsPathBuf::assert_utf8(std::env::current_dir()?.join(&self.path)).normalize(); @@ -51,6 +46,11 @@ impl flags::Scip { // FIXME @alibektas : What happens to errors without logging? error!(?error_sink, "Config Error(s)"); } + let load_cargo_config = LoadCargoConfig { + load_out_dirs_from_check: true, + with_proc_macro_server: ProcMacroServerChoice::Sysroot, + prefill_caches: true, + }; let cargo_config = config.cargo(None); let (db, vfs, _) = load_workspace_at( root.as_path().as_ref(), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs index bdca800a0d6..c03688e8009 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs @@ -13,6 +13,7 @@ impl flags::Ssr { let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), all_targets: true, + set_test: true, ..Default::default() }; let load_cargo_config = LoadCargoConfig { @@ -50,7 +51,8 @@ impl flags::Search { pub fn run(self) -> anyhow::Result<()> { use ide_db::base_db::SourceRootDatabase; use ide_db::symbol_index::SymbolsDatabase; - let cargo_config = CargoConfig::default(); + let cargo_config = + CargoConfig { all_targets: true, set_test: true, ..CargoConfig::default() }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: true, with_proc_macro_server: ProcMacroServerChoice::Sysroot, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 4cc60695fe6..ef2e542cf22 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -12,10 +12,10 @@ use std::{ use cfg::{CfgAtom, CfgDiff}; use hir::Symbol; use ide::{ - AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode, - GenericParameterHints, HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, - InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig, MemoryLayoutHoverConfig, - MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId, + AssistConfig, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig, + ExprFillDefaultMode, GenericParameterHints, HighlightConfig, HighlightRelatedConfig, + HoverConfig, HoverDocFormat, InlayFieldsToResolve, InlayHintsConfig, JoinLinesConfig, + MemoryLayoutHoverConfig, MemoryLayoutHoverRenderKind, Snippet, SnippetScope, SourceRootId, }; use ide_db::{ imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind}, @@ -574,6 +574,9 @@ config_data! { /// set to a path relative to the workspace to use that path. 
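The `--proc-macro-srv` flag added to `analysis-stats` above follows the pattern the `lsif`/`scip` commands already use: an explicitly supplied path selects an explicit server binary, otherwise the sysroot-bundled one is used, or none at all when proc macros are disabled. A small sketch of that selection with a simplified stand-in enum (not the real `ProcMacroServerChoice` type):

```rust
use std::path::PathBuf;

// Simplified stand-in for rust-analyzer's ProcMacroServerChoice.
#[derive(Debug, PartialEq)]
enum ProcMacroServerChoice {
    Sysroot,
    Explicit(PathBuf),
    None,
}

fn choose_server(
    disable_proc_macros: bool,
    proc_macro_srv: Option<PathBuf>,
) -> ProcMacroServerChoice {
    if disable_proc_macros {
        return ProcMacroServerChoice::None;
    }
    match proc_macro_srv {
        // An explicitly provided binary wins over the sysroot-bundled one.
        Some(path) => ProcMacroServerChoice::Explicit(path),
        None => ProcMacroServerChoice::Sysroot,
    }
}

fn main() {
    assert_eq!(choose_server(true, None), ProcMacroServerChoice::None);
    assert_eq!(choose_server(false, None), ProcMacroServerChoice::Sysroot);
    assert_eq!(
        choose_server(false, Some(PathBuf::from("/opt/ra/proc-macro-srv"))),
        ProcMacroServerChoice::Explicit(PathBuf::from("/opt/ra/proc-macro-srv"))
    );
}
```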
cargo_targetDir | rust_analyzerTargetDir: Option<TargetDirectory> = None, + /// Set `cfg(test)` for local crates. Defaults to true. + cfg_setTest: bool = true, + /// Run the check command for diagnostics on save. checkOnSave | checkOnSave_enable: bool = true, @@ -695,7 +698,6 @@ config_data! { workspace_symbol_search_limit: usize = 128, /// Workspace symbol search scope. workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = WorkspaceSymbolSearchScopeDef::Workspace, - } } @@ -1391,6 +1393,7 @@ impl Config { } pub fn completion(&self, source_root: Option<SourceRootId>) -> CompletionConfig { + let client_capability_fields = self.completion_resolve_support_properties(); CompletionConfig { enable_postfix_completions: self.completion_postfix_enable(source_root).to_owned(), enable_imports_on_the_fly: self.completion_autoimport_enable(source_root).to_owned() @@ -1415,6 +1418,15 @@ impl Config { limit: self.completion_limit(source_root).to_owned(), enable_term_search: self.completion_termSearch_enable(source_root).to_owned(), term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64, + fields_to_resolve: CompletionFieldsToResolve { + resolve_label_details: client_capability_fields.contains("labelDetails"), + resolve_tags: client_capability_fields.contains("tags"), + resolve_detail: client_capability_fields.contains("detail"), + resolve_documentation: client_capability_fields.contains("documentation"), + resolve_filter_text: client_capability_fields.contains("filterText"), + resolve_text_edit: client_capability_fields.contains("textEdit"), + resolve_command: client_capability_fields.contains("command"), + }, } } @@ -1859,9 +1871,14 @@ impl Config { extra_args: self.cargo_extraArgs(source_root).clone(), extra_env: self.cargo_extraEnv(source_root).clone(), target_dir: self.target_dir_from_config(source_root), + set_test: *self.cfg_setTest(source_root), } } + pub fn cfg_set_test(&self, source_root: Option<SourceRootId>) -> bool { + *self.cfg_setTest(source_root) + } + pub(crate) fn completion_snippets_default() -> FxHashMap<String, SnippetDef> { serde_json::from_str( r#"{ diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs index 7e9162eee6e..96b164228ef 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/discover.rs @@ -7,6 +7,7 @@ use paths::{AbsPathBuf, Utf8Path, Utf8PathBuf}; use project_model::ProjectJsonData; use serde::{Deserialize, Serialize}; use serde_json::Value; +use tracing::{info_span, span::EnteredSpan}; use crate::command::{CommandHandle, ParseFromLine}; @@ -60,7 +61,10 @@ impl DiscoverCommand { let mut cmd = Command::new(command); cmd.args(args); - Ok(DiscoverHandle { _handle: CommandHandle::spawn(cmd, self.sender.clone())? }) + Ok(DiscoverHandle { + _handle: CommandHandle::spawn(cmd, self.sender.clone())?, + span: info_span!("discover_command").entered(), + }) } } @@ -68,6 +72,8 @@ impl DiscoverCommand { #[derive(Debug)] pub(crate) struct DiscoverHandle { _handle: CommandHandle<DiscoverProjectMessage>, + #[allow(dead_code)] // not accessed, but used to log on drop. 
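The `fields_to_resolve` value built in `Config::completion` above mirrors the client's `completionItem.resolveSupport` properties: any field the client can resolve lazily may be left out of the initial completion response and filled in later via `completionItem/resolve` (see the `to_proto` and `handle_completion_resolve` changes further down). A minimal standalone sketch of that defer-or-fill decision, with plain structs standing in for the LSP types:

```rust
// Simplified stand-ins for CompletionFieldsToResolve and an LSP completion item.
#[derive(Default)]
struct FieldsToResolve {
    resolve_detail: bool,
    resolve_documentation: bool,
}

#[derive(Default, Debug)]
struct LspCompletionItem {
    label: String,
    detail: Option<String>,
    documentation: Option<String>,
    needs_resolve: bool, // stands in for attaching `CompletionResolveData`
}

fn to_lsp_item(
    label: &str,
    detail: Option<String>,
    docs: Option<String>,
    resolve: &FieldsToResolve,
) -> LspCompletionItem {
    let mut item = LspCompletionItem { label: label.to_owned(), ..Default::default() };
    // For each field the client can resolve lazily, skip sending it now and
    // remember that a later `completionItem/resolve` round-trip is needed.
    if resolve.resolve_detail {
        item.needs_resolve |= detail.is_some();
    } else {
        item.detail = detail;
    }
    if resolve.resolve_documentation {
        item.needs_resolve |= docs.is_some();
    } else {
        item.documentation = docs;
    }
    item
}

fn main() {
    let lazy = FieldsToResolve { resolve_detail: true, resolve_documentation: true };
    let item = to_lsp_item("foo()", Some("fn foo()".into()), Some("Does foo.".into()), &lazy);
    assert!(item.detail.is_none() && item.needs_resolve);
}
```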
+ span: EnteredSpan, } /// An enum containing either progress messages, an error, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index 89487aa673b..c3142c9cfca 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -46,6 +46,11 @@ pub(crate) struct FetchWorkspaceRequest { pub(crate) force_crate_graph_reload: bool, } +pub(crate) struct FetchWorkspaceResponse { + pub(crate) workspaces: Vec<anyhow::Result<ProjectWorkspace>>, + pub(crate) force_crate_graph_reload: bool, +} + // Enforces drop order pub(crate) struct Handle<H, C> { pub(crate) handle: H, @@ -111,6 +116,9 @@ pub(crate) struct GlobalState { pub(crate) vfs_config_version: u32, pub(crate) vfs_progress_config_version: u32, pub(crate) vfs_done: bool, + // used to track how long VFS loading takes. this can't be on `vfs::loader::Handle`, + // as that handle's lifetime is the same as `GlobalState` itself. + pub(crate) vfs_span: Option<tracing::span::EnteredSpan>, pub(crate) wants_to_switch: Option<Cause>, /// `workspaces` field stores the data we actually use, while the `OpQueue` @@ -143,8 +151,7 @@ pub(crate) struct GlobalState { pub(crate) detached_files: FxHashSet<ManifestPath>, // op queues - pub(crate) fetch_workspaces_queue: - OpQueue<FetchWorkspaceRequest, Option<(Vec<anyhow::Result<ProjectWorkspace>>, bool)>>, + pub(crate) fetch_workspaces_queue: OpQueue<FetchWorkspaceRequest, FetchWorkspaceResponse>, pub(crate) fetch_build_data_queue: OpQueue<(), (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>, pub(crate) fetch_proc_macros_queue: OpQueue<Vec<ProcMacroPaths>, bool>, @@ -253,6 +260,7 @@ impl GlobalState { vfs: Arc::new(RwLock::new((vfs::Vfs::default(), IntMap::default()))), vfs_config_version: 0, vfs_progress_config_version: 0, + vfs_span: None, vfs_done: true, wants_to_switch: None, @@ -498,7 +506,7 @@ impl GlobalState { mem_docs: self.mem_docs.clone(), semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache), proc_macros_loaded: !self.config.expand_proc_macros() - || *self.fetch_proc_macros_queue.last_op_result(), + || self.fetch_proc_macros_queue.last_op_result().copied().unwrap_or(false), flycheck: self.flycheck.clone(), } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index 49b1ba32a79..bb03eb3c89b 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -380,7 +380,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { if id == flycheck.id() { updated = true; match package.filter(|_| { - !world.config.flycheck_workspace(source_root_id) || target.is_some() + !world.config.flycheck_workspace(source_root_id) && target.is_some() }) { Some(package) => flycheck .restart_for_package(package, target.clone().map(TupleExt::head)), diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index bcbd970a0d2..9773d8dbce0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -10,9 +10,9 @@ use std::{ use anyhow::Context; use ide::{ - AnnotationConfig, AssistKind, 
AssistResolveStrategy, Cancellable, FilePosition, FileRange, - HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, ReferenceCategory, - Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, + AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve, + FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, + RangeInfo, ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, }; use ide_db::SymbolKind; use itertools::Itertools; @@ -1019,9 +1019,11 @@ pub(crate) fn handle_completion( let items = to_proto::completion_items( &snap.config, + &completion_config.fields_to_resolve, &line_index, snap.file_version(position.file_id), text_document_position, + completion_trigger_character, items, ); @@ -1054,36 +1056,70 @@ pub(crate) fn handle_completion_resolve( }; let source_root = snap.analysis.source_root_id(file_id)?; - let additional_edits = snap - .analysis - .resolve_completion_edits( - &snap.config.completion(Some(source_root)), - FilePosition { file_id, offset }, - resolve_data - .imports - .into_iter() - .map(|import| (import.full_import_path, import.imported_name)), - )? - .into_iter() - .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel))) - .collect::<Vec<_>>(); + let mut forced_resolve_completions_config = snap.config.completion(Some(source_root)); + forced_resolve_completions_config.fields_to_resolve = CompletionFieldsToResolve::empty(); - if !all_edits_are_disjoint(&original_completion, &additional_edits) { - return Err(LspError::new( - ErrorCode::InternalError as i32, - "Import edit overlaps with the original completion edits, this is not LSP-compliant" - .into(), - ) - .into()); - } + let position = FilePosition { file_id, offset }; + let Some(resolved_completions) = snap.analysis.completions( + &forced_resolve_completions_config, + position, + resolve_data.trigger_character, + )? + else { + return Ok(original_completion); + }; + let resolved_completions = to_proto::completion_items( + &snap.config, + &forced_resolve_completions_config.fields_to_resolve, + &line_index, + snap.file_version(position.file_id), + resolve_data.position, + resolve_data.trigger_character, + resolved_completions, + ); + let Some(mut resolved_completion) = resolved_completions.into_iter().find(|completion| { + completion.label == original_completion.label + && completion.kind == original_completion.kind + && completion.deprecated == original_completion.deprecated + && completion.preselect == original_completion.preselect + && completion.sort_text == original_completion.sort_text + }) else { + return Ok(original_completion); + }; - if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() { - original_additional_edits.extend(additional_edits) - } else { - original_completion.additional_text_edits = Some(additional_edits); + if !resolve_data.imports.is_empty() { + let additional_edits = snap + .analysis + .resolve_completion_edits( + &forced_resolve_completions_config, + position, + resolve_data + .imports + .into_iter() + .map(|import| (import.full_import_path, import.imported_name)), + )? 
+ .into_iter() + .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel))) + .collect::<Vec<_>>(); + + if !all_edits_are_disjoint(&resolved_completion, &additional_edits) { + return Err(LspError::new( + ErrorCode::InternalError as i32, + "Import edit overlaps with the original completion edits, this is not LSP-compliant" + .into(), + ) + .into()); + } + + if let Some(original_additional_edits) = resolved_completion.additional_text_edits.as_mut() + { + original_additional_edits.extend(additional_edits) + } else { + resolved_completion.additional_text_edits = Some(additional_edits); + } } - Ok(original_completion) + Ok(resolved_completion) } pub(crate) fn handle_folding_range( diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index 118469df730..8946c7acb93 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -12,7 +12,8 @@ use hir::ChangeWithProcMacros; use ide::{ - AnalysisHost, CallableSnippets, CompletionConfig, DiagnosticsConfig, FilePosition, TextSize, + AnalysisHost, CallableSnippets, CompletionConfig, CompletionFieldsToResolve, DiagnosticsConfig, + FilePosition, TextSize, }; use ide_db::{ imports::insert_use::{ImportGranularity, InsertUseConfig}, @@ -36,6 +37,8 @@ fn integrated_highlighting_benchmark() { let cargo_config = CargoConfig { sysroot: Some(project_model::RustLibSource::Discover), + all_targets: true, + set_test: true, ..CargoConfig::default() }; let load_cargo_config = LoadCargoConfig { @@ -102,6 +105,8 @@ fn integrated_completion_benchmark() { let cargo_config = CargoConfig { sysroot: Some(project_model::RustLibSource::Discover), + all_targets: true, + set_test: true, ..CargoConfig::default() }; let load_cargo_config = LoadCargoConfig { @@ -168,6 +173,7 @@ fn integrated_completion_benchmark() { snippets: Vec::new(), limit: None, add_semicolon_to_unit: true, + fields_to_resolve: CompletionFieldsToResolve::empty(), }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; @@ -215,6 +221,7 @@ fn integrated_completion_benchmark() { snippets: Vec::new(), limit: None, add_semicolon_to_unit: true, + fields_to_resolve: CompletionFieldsToResolve::empty(), }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; @@ -260,6 +267,7 @@ fn integrated_completion_benchmark() { snippets: Vec::new(), limit: None, add_semicolon_to_unit: true, + fields_to_resolve: CompletionFieldsToResolve::empty(), }; let position = FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() }; @@ -279,6 +287,8 @@ fn integrated_diagnostics_benchmark() { let cargo_config = CargoConfig { sysroot: Some(project_model::RustLibSource::Discover), + all_targets: true, + set_test: true, ..CargoConfig::default() }; let load_cargo_config = LoadCargoConfig { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs index 714991e8116..234204695cb 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs @@ -34,6 +34,7 @@ mod handlers { pub mod tracing { pub mod config; + pub mod json; pub use config::Config; pub mod hprof; } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs 
b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs index 9610808c27e..3b19284f241 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/capabilities.rs @@ -448,7 +448,7 @@ impl ClientCapabilities { .unwrap_or_default() } - pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<String> { + pub fn inlay_hint_resolve_support_properties(&self) -> FxHashSet<&str> { self.0 .text_document .as_ref() @@ -457,8 +457,22 @@ impl ClientCapabilities { .map(|inlay_resolve| inlay_resolve.properties.iter()) .into_iter() .flatten() - .cloned() - .collect::<FxHashSet<_>>() + .map(|s| s.as_str()) + .collect() + } + + pub fn completion_resolve_support_properties(&self) -> FxHashSet<&str> { + self.0 + .text_document + .as_ref() + .and_then(|text| text.completion.as_ref()) + .and_then(|completion_caps| completion_caps.completion_item.as_ref()) + .and_then(|completion_item_caps| completion_item_caps.resolve_support.as_ref()) + .map(|resolve_support| resolve_support.properties.iter()) + .into_iter() + .flatten() + .map(|s| s.as_str()) + .collect() } pub fn hover_markdown_support(&self) -> bool { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs index 618481bbc66..8039f0644ee 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/ext.rs @@ -825,6 +825,7 @@ pub struct CompletionResolveData { pub position: lsp_types::TextDocumentPositionParams, pub imports: Vec<CompletionImport>, pub version: Option<i32>, + pub trigger_character: Option<char>, } #[derive(Debug, Serialize, Deserialize)] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs index 4902c9f88c1..375b7428c2d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp/to_proto.rs @@ -6,9 +6,9 @@ use std::{ }; use ide::{ - Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem, - CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit, - Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, + Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionFieldsToResolve, + CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, + FileSystemEdit, Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayFieldsToResolve, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayKind, Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity, SignatureHelp, SnippetEdit, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize, @@ -227,9 +227,11 @@ pub(crate) fn snippet_text_edit_vec( pub(crate) fn completion_items( config: &Config, + fields_to_resolve: &CompletionFieldsToResolve, line_index: &LineIndex, version: Option<i32>, tdpp: lsp_types::TextDocumentPositionParams, + completion_trigger_character: Option<char>, mut items: Vec<CompletionItem>, ) -> Vec<lsp_types::CompletionItem> { if config.completion_hide_deprecated() { @@ -239,7 +241,17 @@ pub(crate) fn completion_items( let max_relevance = items.iter().map(|it| it.relevance.score()).max().unwrap_or_default(); let mut res = Vec::with_capacity(items.len()); for item in items { - 
completion_item(&mut res, config, line_index, version, &tdpp, max_relevance, item); + completion_item( + &mut res, + config, + fields_to_resolve, + line_index, + version, + &tdpp, + max_relevance, + completion_trigger_character, + item, + ); } if let Some(limit) = config.completion(None).limit { @@ -253,21 +265,33 @@ pub(crate) fn completion_items( fn completion_item( acc: &mut Vec<lsp_types::CompletionItem>, config: &Config, + fields_to_resolve: &CompletionFieldsToResolve, line_index: &LineIndex, version: Option<i32>, tdpp: &lsp_types::TextDocumentPositionParams, max_relevance: u32, + completion_trigger_character: Option<char>, item: CompletionItem, ) { let insert_replace_support = config.insert_replace_support().then_some(tdpp.position); let ref_match = item.ref_match(); - let lookup = item.lookup().to_owned(); let mut additional_text_edits = Vec::new(); + let mut something_to_resolve = false; - // LSP does not allow arbitrary edits in completion, so we have to do a - // non-trivial mapping here. - let text_edit = { + let filter_text = if fields_to_resolve.resolve_filter_text { + something_to_resolve = !item.lookup().is_empty(); + None + } else { + Some(item.lookup().to_owned()) + }; + + let text_edit = if fields_to_resolve.resolve_text_edit { + something_to_resolve = true; + None + } else { + // LSP does not allow arbitrary edits in completion, so we have to do a + // non-trivial mapping here. let mut text_edit = None; let source_range = item.source_range; for indel in item.text_edit { @@ -290,25 +314,49 @@ fn completion_item( additional_text_edits.push(text_edit); } } - text_edit.unwrap() + Some(text_edit.unwrap()) }; let insert_text_format = item.is_snippet.then_some(lsp_types::InsertTextFormat::SNIPPET); - let tags = item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]); + let tags = if fields_to_resolve.resolve_tags { + something_to_resolve = item.deprecated; + None + } else { + item.deprecated.then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]) + }; let command = if item.trigger_call_info && config.client_commands().trigger_parameter_hints { - Some(command::trigger_parameter_hints()) + if fields_to_resolve.resolve_command { + something_to_resolve = true; + None + } else { + Some(command::trigger_parameter_hints()) + } + } else { + None + }; + + let detail = if fields_to_resolve.resolve_detail { + something_to_resolve = item.detail.is_some(); + None } else { + item.detail + }; + + let documentation = if fields_to_resolve.resolve_documentation { + something_to_resolve = item.documentation.is_some(); None + } else { + item.documentation.map(documentation) }; let mut lsp_item = lsp_types::CompletionItem { label: item.label.to_string(), - detail: item.detail, - filter_text: Some(lookup), + detail, + filter_text, kind: Some(completion_item_kind(item.kind)), - text_edit: Some(text_edit), + text_edit, additional_text_edits: Some(additional_text_edits), - documentation: item.documentation.map(documentation), + documentation, deprecated: Some(item.deprecated), tags, command, @@ -317,29 +365,40 @@ fn completion_item( }; if config.completion_label_details_support() { - lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails { - detail: item.label_detail.as_ref().map(ToString::to_string), - description: lsp_item.detail.clone(), - }); + if fields_to_resolve.resolve_label_details { + something_to_resolve = true; + } else { + lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails { + detail: item.label_detail.as_ref().map(ToString::to_string), + 
description: lsp_item.detail.clone(), + }); + } } else if let Some(label_detail) = item.label_detail { lsp_item.label.push_str(label_detail.as_str()); } set_score(&mut lsp_item, max_relevance, item.relevance); - if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() { - let imports = item - .import_to_add - .into_iter() - .map(|(import_path, import_name)| lsp_ext::CompletionImport { - full_import_path: import_path, - imported_name: import_name, - }) - .collect::<Vec<_>>(); - if !imports.is_empty() { - let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports, version }; - lsp_item.data = Some(to_value(data).unwrap()); - } + let imports = + if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() { + item.import_to_add + .into_iter() + .map(|(import_path, import_name)| lsp_ext::CompletionImport { + full_import_path: import_path, + imported_name: import_name, + }) + .collect() + } else { + Vec::new() + }; + if something_to_resolve || !imports.is_empty() { + let data = lsp_ext::CompletionResolveData { + position: tdpp.clone(), + imports, + version, + trigger_character: completion_trigger_character, + }; + lsp_item.data = Some(to_value(data).unwrap()); } if let Some((label, indel, relevance)) = ref_match { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 83559230257..ef289720568 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -22,7 +22,9 @@ use crate::{ diagnostics::{fetch_native_diagnostics, DiagnosticsGeneration, NativeDiagnosticsFetchKind}, discover::{DiscoverArgument, DiscoverCommand, DiscoverProjectMessage}, flycheck::{self, FlycheckMessage}, - global_state::{file_id_to_url, url_to_file_id, FetchWorkspaceRequest, GlobalState}, + global_state::{ + file_id_to_url, url_to_file_id, FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState, + }, hack_recover_crate_name, handlers::dispatch::{NotificationDispatcher, RequestDispatcher}, lsp::{ @@ -695,9 +697,9 @@ impl GlobalState { let (state, msg) = match progress { ProjectWorkspaceProgress::Begin => (Progress::Begin, None), ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)), - ProjectWorkspaceProgress::End(workspaces, force_reload_crate_graph) => { - self.fetch_workspaces_queue - .op_completed(Some((workspaces, force_reload_crate_graph))); + ProjectWorkspaceProgress::End(workspaces, force_crate_graph_reload) => { + let resp = FetchWorkspaceResponse { workspaces, force_crate_graph_reload }; + self.fetch_workspaces_queue.op_completed(resp); if let Err(e) = self.fetch_workspace_error() { error!("FetchWorkspaceError: {e}"); } @@ -794,13 +796,20 @@ impl GlobalState { } } vfs::loader::Message::Progress { n_total, n_done, dir, config_version } => { - let _p = tracing::info_span!("GlobalState::handle_vfs_mgs/progress").entered(); + let _p = span!(Level::INFO, "GlobalState::handle_vfs_mgs/progress").entered(); always!(config_version <= self.vfs_config_version); let (n_done, state) = match n_done { - LoadingProgress::Started => (0, Progress::Begin), + LoadingProgress::Started => { + self.vfs_span = + Some(span!(Level::INFO, "vfs_load", total = n_total).entered()); + (0, Progress::Begin) + } LoadingProgress::Progress(n_done) => (n_done.min(n_total), Progress::Report), - LoadingProgress::Finished => (n_total, Progress::End), + LoadingProgress::Finished => { + 
self.vfs_span = None; + (n_total, Progress::End) + } }; self.vfs_progress_config_version = config_version; @@ -881,6 +890,7 @@ impl GlobalState { .expect("No title could be found; this is a bug"); match message { DiscoverProjectMessage::Finished { project, buildfile } => { + self.discover_handle = None; self.report_progress(&title, Progress::End, None, None, None); self.discover_workspace_queue.op_completed(()); @@ -892,6 +902,7 @@ impl GlobalState { self.report_progress(&title, Progress::Report, Some(message), None, None) } DiscoverProjectMessage::Error { error, source } => { + self.discover_handle = None; let message = format!("Project discovery failed: {error}"); self.discover_workspace_queue.op_completed(()); self.show_and_log_error(message.clone(), source); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs index 5c4c858e150..123f20605ab 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/op_queue.rs @@ -27,12 +27,12 @@ pub(crate) type Cause = String; pub(crate) struct OpQueue<Args = (), Output = ()> { op_requested: Option<(Cause, Args)>, op_in_progress: bool, - last_op_result: Output, + last_op_result: Option<Output>, } -impl<Args, Output: Default> Default for OpQueue<Args, Output> { +impl<Args, Output> Default for OpQueue<Args, Output> { fn default() -> Self { - Self { op_requested: None, op_in_progress: false, last_op_result: Default::default() } + Self { op_requested: None, op_in_progress: false, last_op_result: None } } } @@ -56,12 +56,12 @@ impl<Args, Output> OpQueue<Args, Output> { pub(crate) fn op_completed(&mut self, result: Output) { assert!(self.op_in_progress); self.op_in_progress = false; - self.last_op_result = result; + self.last_op_result = Some(result); } /// Get the result of the last operation. - pub(crate) fn last_op_result(&self) -> &Output { - &self.last_op_result + pub(crate) fn last_op_result(&self) -> Option<&Output> { + self.last_op_result.as_ref() } // Is there an operation that has started, but hasn't yet finished? 
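With `OpQueue` now storing `Option<Output>`, the `Output: Default` bound is gone and "no operation has completed yet" is represented explicitly instead of by a default value; callers such as `switch_workspaces` and `fetch_workspace_error` in `reload.rs` below handle the `None` case. A compact sketch of the reworked shape, assuming nothing beyond std:

```rust
// Minimal sketch of the reworked OpQueue result storage.
struct OpQueue<Output> {
    op_in_progress: bool,
    last_op_result: Option<Output>, // None until the first operation completes
}

impl<Output> OpQueue<Output> {
    fn new() -> Self {
        Self { op_in_progress: false, last_op_result: None }
    }

    fn op_completed(&mut self, result: Output) {
        assert!(self.op_in_progress);
        self.op_in_progress = false;
        self.last_op_result = Some(result);
    }

    fn last_op_result(&self) -> Option<&Output> {
        self.last_op_result.as_ref()
    }
}

fn main() {
    let mut queue: OpQueue<Vec<&'static str>> = OpQueue::new();
    // Before any operation has finished there is simply no result to act on.
    assert!(queue.last_op_result().is_none());
    queue.op_in_progress = true;
    queue.op_completed(vec!["workspace-a"]);
    assert_eq!(queue.last_op_result().map(|ws| ws.len()), Some(1));
}
```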
diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index f6765715c5a..60ee0295a3a 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -33,7 +33,7 @@ use vfs::{AbsPath, AbsPathBuf, ChangeKind}; use crate::{ config::{Config, FilesWatcher, LinkedProject}, flycheck::{FlycheckConfig, FlycheckHandle}, - global_state::{FetchWorkspaceRequest, GlobalState}, + global_state::{FetchWorkspaceRequest, FetchWorkspaceResponse, GlobalState}, lsp_ext, main_loop::{DiscoverProjectParam, Task}, op_queue::Cause, @@ -448,15 +448,15 @@ impl GlobalState { let _p = tracing::info_span!("GlobalState::switch_workspaces").entered(); tracing::info!(%cause, "will switch workspaces"); - let Some((workspaces, force_reload_crate_graph)) = + let Some(FetchWorkspaceResponse { workspaces, force_crate_graph_reload }) = self.fetch_workspaces_queue.last_op_result() else { return; }; - info!(%cause, ?force_reload_crate_graph); + info!(%cause, ?force_crate_graph_reload); if self.fetch_workspace_error().is_err() && !self.workspaces.is_empty() { - if *force_reload_crate_graph { + if *force_crate_graph_reload { self.recreate_crate_graph(cause); } // It only makes sense to switch to a partially broken workspace @@ -474,8 +474,12 @@ impl GlobalState { .all(|(l, r)| l.eq_ignore_build_data(r)); if same_workspaces { - let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result(); - if Arc::ptr_eq(workspaces, &self.workspaces) { + let (workspaces, build_scripts) = match self.fetch_build_data_queue.last_op_result() { + Some((workspaces, build_scripts)) => (workspaces.clone(), build_scripts.as_slice()), + None => (Default::default(), Default::default()), + }; + + if Arc::ptr_eq(&workspaces, &self.workspaces) { info!("set build scripts to workspaces"); let workspaces = workspaces @@ -492,7 +496,7 @@ impl GlobalState { self.workspaces = Arc::new(workspaces); } else { info!("build scripts do not match the version of the active workspace"); - if *force_reload_crate_graph { + if *force_crate_graph_reload { self.recreate_crate_graph(cause); } @@ -739,22 +743,18 @@ impl GlobalState { pub(super) fn fetch_workspace_error(&self) -> Result<(), String> { let mut buf = String::new(); - let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else { + let Some(FetchWorkspaceResponse { workspaces, .. 
}) = + self.fetch_workspaces_queue.last_op_result() + else { return Ok(()); }; - if !self.discover_workspace_queue.op_in_progress() { - if last_op_result.is_empty() { - stdx::format_to!(buf, "rust-analyzer failed to discover workspace"); - } else { - for ws in last_op_result { - if let Err(err) = ws { - stdx::format_to!( - buf, - "rust-analyzer failed to load workspace: {:#}\n", - err - ); - } + if workspaces.is_empty() && self.config.discover_workspace_config().is_none() { + stdx::format_to!(buf, "rust-analyzer failed to fetch workspace"); + } else { + for ws in workspaces { + if let Err(err) = ws { + stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err); } } } @@ -769,7 +769,11 @@ impl GlobalState { pub(super) fn fetch_build_data_error(&self) -> Result<(), String> { let mut buf = String::new(); - for ws in &self.fetch_build_data_queue.last_op_result().1 { + let Some((_, ws)) = &self.fetch_build_data_queue.last_op_result() else { + return Ok(()); + }; + + for ws in ws { match ws { Ok(data) => { if let Some(stderr) = data.error() { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs index f330754f19a..b73f6e77514 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/config.rs @@ -1,17 +1,20 @@ //! Simple logger that logs either to stderr or to a file, using `tracing_subscriber` //! filter syntax and `tracing_appender` for non blocking output. -use std::io; +use std::io::{self}; use anyhow::Context; use tracing::level_filters::LevelFilter; use tracing_subscriber::{ - filter::Targets, fmt::MakeWriter, layer::SubscriberExt, util::SubscriberInitExt, Layer, - Registry, + filter::{filter_fn, Targets}, + fmt::MakeWriter, + layer::SubscriberExt, + Layer, Registry, }; use tracing_tree::HierarchicalLayer; use crate::tracing::hprof; +use crate::tracing::json; #[derive(Debug)] pub struct Config<T> { @@ -34,6 +37,12 @@ pub struct Config<T> { /// env RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 /// ``` pub profile_filter: Option<String>, + + /// Filtering syntax, set in a shell: + /// ``` + /// env RA_PROFILE_JSON=foo|bar|baz + /// ``` + pub json_profile_filter: Option<String>, } impl<T> Config<T> @@ -41,7 +50,7 @@ where T: for<'writer> MakeWriter<'writer> + Send + Sync + 'static, { pub fn init(self) -> anyhow::Result<()> { - let filter: Targets = self + let targets_filter: Targets = self .filter .parse() .with_context(|| format!("invalid log filter: `{}`", self.filter))?; @@ -50,31 +59,60 @@ where let ra_fmt_layer = tracing_subscriber::fmt::layer() .with_target(false) + .with_ansi(false) .with_writer(writer) - .with_filter(filter); - - let mut chalk_layer = None; - if let Some(chalk_filter) = self.chalk_filter { - let level: LevelFilter = - chalk_filter.parse().with_context(|| "invalid chalk log filter")?; - - let chalk_filter = Targets::new() - .with_target("chalk_solve", level) - .with_target("chalk_ir", level) - .with_target("chalk_recursive", level); - chalk_layer = Some( + .with_filter(targets_filter); + + let chalk_layer = match self.chalk_filter { + Some(chalk_filter) => { + let level: LevelFilter = + chalk_filter.parse().with_context(|| "invalid chalk log filter")?; + + let chalk_filter = Targets::new() + .with_target("chalk_solve", level) + .with_target("chalk_ir", level) + .with_target("chalk_recursive", level); + // TODO: remove 
`.with_filter(LevelFilter::OFF)` on the `None` branch. HierarchicalLayer::default() .with_indent_lines(true) .with_ansi(false) .with_indent_amount(2) .with_writer(io::stderr) - .with_filter(chalk_filter), - ); + .with_filter(chalk_filter) + .boxed() + } + None => None::<HierarchicalLayer>.with_filter(LevelFilter::OFF).boxed(), + }; + + // TODO: remove `.with_filter(LevelFilter::OFF)` on the `None` branch. + let profiler_layer = match self.profile_filter { + Some(spec) => Some(hprof::SpanTree::new(&spec)).with_filter(LevelFilter::INFO), + None => None.with_filter(LevelFilter::OFF), + }; + + let json_profiler_layer = match self.json_profile_filter { + Some(spec) => { + let filter = json::JsonFilter::from_spec(&spec); + let filter = filter_fn(move |metadata| { + let allowed = match &filter.allowed_names { + Some(names) => names.contains(metadata.name()), + None => true, + }; + + allowed && metadata.is_span() + }); + Some(json::TimingLayer::new(std::io::stderr).with_filter(filter)) + } + None => None, }; - let profiler_layer = self.profile_filter.map(|spec| hprof::layer(&spec)); + let subscriber = Registry::default() + .with(ra_fmt_layer) + .with(json_profiler_layer) + .with(profiler_layer) + .with(chalk_layer); - Registry::default().with(ra_fmt_layer).with(chalk_layer).with(profiler_layer).try_init()?; + tracing::subscriber::set_global_default(subscriber)?; Ok(()) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs index 2d1604e70be..cad92962f34 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/hprof.rs @@ -33,6 +33,7 @@ use std::{ fmt::Write, + marker::PhantomData, mem, time::{Duration, Instant}, }; @@ -50,53 +51,42 @@ use tracing_subscriber::{ Layer, Registry, }; -use crate::tracing::hprof; - pub fn init(spec: &str) -> tracing::subscriber::DefaultGuard { - let subscriber = Registry::default().with(layer(spec)); + let subscriber = Registry::default().with(SpanTree::new(spec)); tracing::subscriber::set_default(subscriber) } -pub fn layer<S>(spec: &str) -> impl Layer<S> -where - S: Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span>, -{ - let (write_filter, allowed_names) = WriteFilter::from_spec(spec); - - // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like - // span depth or duration are not filtered here: that only occurs at write time. - let profile_filter = filter::filter_fn(move |metadata| { - let allowed = match &allowed_names { - Some(names) => names.contains(metadata.name()), - None => true, - }; - - allowed - && metadata.is_span() - && metadata.level() >= &Level::INFO - && !metadata.target().starts_with("salsa") - && metadata.name() != "compute_exhaustiveness_and_usefulness" - && !metadata.target().starts_with("chalk") - }); - - hprof::SpanTree::default().aggregate(true).spec_filter(write_filter).with_filter(profile_filter) -} - -#[derive(Default, Debug)] -pub(crate) struct SpanTree { +#[derive(Debug)] +pub(crate) struct SpanTree<S> { aggregate: bool, write_filter: WriteFilter, + _inner: PhantomData<fn(S)>, } -impl SpanTree { - /// Merge identical sibling spans together. - pub(crate) fn aggregate(self, yes: bool) -> SpanTree { - SpanTree { aggregate: yes, ..self } - } - - /// Add a write-time filter for span duration or tree depth. 
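The rewritten `Config::init` above builds each optional layer as either a real layer or a disabled stand-in and stacks them all onto a single `Registry`; this works because `Option<L>` itself implements `Layer`, so an absent layer is simply `None` (hence the TODO about dropping the `LevelFilter::OFF` workaround). A small sketch of that composition using `tracing-subscriber` directly, with placeholder layers rather than rust-analyzer's:

```rust
use tracing_subscriber::{
    filter::LevelFilter, fmt, layer::SubscriberExt, util::SubscriberInitExt, Layer, Registry,
};

fn init_tracing(enable_profile_layer: bool) {
    // Always-on stderr logging.
    let fmt_layer = fmt::layer().with_writer(std::io::stderr).with_filter(LevelFilter::INFO);

    // Optional layer: `Option<L>` implements `Layer`, so `None` simply disables it.
    let profile_layer = enable_profile_layer
        .then(|| fmt::layer().with_target(true).with_filter(LevelFilter::DEBUG));

    Registry::default().with(fmt_layer).with(profile_layer).init();
}

fn main() {
    init_tracing(std::env::var("MY_PROFILE").is_ok());
    tracing::info!("tracing initialized");
}
```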
- pub(crate) fn spec_filter(self, write_filter: WriteFilter) -> SpanTree { - SpanTree { write_filter, ..self } +impl<S> SpanTree<S> +where + S: Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span>, +{ + pub(crate) fn new(spec: &str) -> impl Layer<S> { + let (write_filter, allowed_names) = WriteFilter::from_spec(spec); + + // this filter the first pass for `tracing`: these are all the "profiling" spans, but things like + // span depth or duration are not filtered here: that only occurs at write time. + let profile_filter = filter::filter_fn(move |metadata| { + let allowed = match &allowed_names { + Some(names) => names.contains(metadata.name()), + None => true, + }; + + allowed + && metadata.is_span() + && metadata.level() >= &Level::INFO + && !metadata.target().starts_with("salsa") + && metadata.name() != "compute_exhaustiveness_and_usefulness" + && !metadata.target().starts_with("chalk") + }); + + Self { aggregate: true, write_filter, _inner: PhantomData }.with_filter(profile_filter) } } @@ -136,7 +126,7 @@ impl<'a> Visit for DataVisitor<'a> { } } -impl<S> Layer<S> for SpanTree +impl<S> Layer<S> for SpanTree<S> where S: Subscriber + for<'span> LookupSpan<'span>, { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs new file mode 100644 index 00000000000..f540a33b451 --- /dev/null +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/tracing/json.rs @@ -0,0 +1,90 @@ +//! A [tracing_subscriber::layer::Layer] that exports new-line delinated JSON. +//! +//! Usage: +//! +//! ```rust +//! let layer = json::TimingLayer::new(std::io::stderr); +//! Registry::default().with(layer).init(); +//! ``` + +use std::{io::Write as _, marker::PhantomData, time::Instant}; + +use ide_db::FxHashSet; +use tracing::{ + span::{Attributes, Id}, + Event, Subscriber, +}; +use tracing_subscriber::{fmt::MakeWriter, layer::Context, registry::LookupSpan, Layer}; + +struct JsonData { + name: &'static str, + start: std::time::Instant, +} + +impl JsonData { + fn new(name: &'static str) -> Self { + Self { name, start: Instant::now() } + } +} + +#[derive(Debug)] +pub(crate) struct TimingLayer<S, W> { + writer: W, + _inner: PhantomData<fn(S)>, +} + +impl<S, W> TimingLayer<S, W> { + pub(crate) fn new(writer: W) -> Self { + Self { writer, _inner: PhantomData } + } +} + +impl<S, W> Layer<S> for TimingLayer<S, W> +where + S: Subscriber + for<'span> LookupSpan<'span>, + W: for<'writer> MakeWriter<'writer> + Send + Sync + 'static, +{ + fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) { + let span = ctx.span(id).unwrap(); + + let data = JsonData::new(attrs.metadata().name()); + span.extensions_mut().insert(data); + } + + fn on_event(&self, _event: &Event<'_>, _ctx: Context<'_, S>) {} + + fn on_close(&self, id: Id, ctx: Context<'_, S>) { + #[derive(serde::Serialize)] + struct JsonDataInner { + name: &'static str, + elapsed_ms: u128, + } + + let span = ctx.span(&id).unwrap(); + let Some(data) = span.extensions_mut().remove::<JsonData>() else { + return; + }; + + let data = JsonDataInner { name: data.name, elapsed_ms: data.start.elapsed().as_millis() }; + let mut out = serde_json::to_string(&data).expect("Unable to serialize data"); + out.push('\n'); + self.writer.make_writer().write_all(out.as_bytes()).expect("Unable to write data"); + } +} + +#[derive(Default, Clone, Debug)] +pub(crate) struct JsonFilter { + pub(crate) allowed_names: Option<FxHashSet<String>>, +} + +impl JsonFilter { + pub(crate) 
fn from_spec(spec: &str) -> Self { + let allowed_names = if spec == "*" { + None + } else { + Some(FxHashSet::from_iter(spec.split('|').map(String::from))) + }; + + Self { allowed_names } + } +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index 06ce9846818..18aface632d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -103,6 +103,7 @@ impl Project<'_> { filter: std::env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_owned()), chalk_filter: std::env::var("CHALK_DEBUG").ok(), profile_filter: std::env::var("RA_PROFILE").ok(), + json_profile_filter: std::env::var("RA_PROFILE_JSON").ok(), }; }); diff --git a/src/tools/rust-analyzer/crates/span/src/hygiene.rs b/src/tools/rust-analyzer/crates/span/src/hygiene.rs index cb9c092f5fc..3863b3e809c 100644 --- a/src/tools/rust-analyzer/crates/span/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/span/src/hygiene.rs @@ -142,3 +142,12 @@ pub enum Transparency { /// Def-site spans in procedural macros, identifiers from `macro` by default use this. Opaque, } + +impl Transparency { + /// Returns `true` if the transparency is [`Opaque`]. + /// + /// [`Opaque`]: Transparency::Opaque + pub fn is_opaque(&self) -> bool { + matches!(self, Self::Opaque) + } +} diff --git a/src/tools/rust-analyzer/crates/span/src/map.rs b/src/tools/rust-analyzer/crates/span/src/map.rs index f80de05ec65..66bbce18594 100644 --- a/src/tools/rust-analyzer/crates/span/src/map.rs +++ b/src/tools/rust-analyzer/crates/span/src/map.rs @@ -55,7 +55,10 @@ where /// Returns all [`TextRange`]s that correspond to the given span. /// /// Note this does a linear search through the entire backing vector. - pub fn ranges_with_span_exact(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_ + pub fn ranges_with_span_exact( + &self, + span: SpanData<S>, + ) -> impl Iterator<Item = (TextRange, S)> + '_ where S: Copy, { @@ -64,14 +67,14 @@ where return None; } let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0); - Some(TextRange::new(start, end)) + Some((TextRange::new(start, end), s.ctx)) }) } /// Returns all [`TextRange`]s whose spans contain the given span. /// /// Note this does a linear search through the entire backing vector. 
- pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = TextRange> + '_ + pub fn ranges_with_span(&self, span: SpanData<S>) -> impl Iterator<Item = (TextRange, S)> + '_ where S: Copy, { @@ -83,7 +86,7 @@ where return None; } let start = idx.checked_sub(1).map_or(TextSize::new(0), |prev| self.spans[prev].0); - Some(TextRange::new(start, end)) + Some((TextRange::new(start, end), s.ctx)) }) } diff --git a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs index 0ccd0886760..3a05b83e497 100644 --- a/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax-bridge/src/lib.rs @@ -148,6 +148,7 @@ pub fn token_tree_to_syntax_node<Ctx>( ) -> (Parse<SyntaxNode>, SpanMap<Ctx>) where SpanData<Ctx>: Copy + fmt::Debug, + Ctx: PartialEq, { let buffer = match tt { tt::Subtree { @@ -892,6 +893,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { impl<Ctx> TtTreeSink<'_, Ctx> where SpanData<Ctx>: Copy + fmt::Debug, + Ctx: PartialEq, { /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. @@ -949,6 +951,7 @@ where } let mut last = self.cursor; + let mut combined_span = None; 'tokens: for _ in 0..n_tokens { let tmp: u8; if self.cursor.eof() { @@ -982,7 +985,10 @@ where format_to!(self.buf, "{lit}"); debug_assert_ne!(self.buf.len() - buf_l, 0); self.text_pos += TextSize::new((self.buf.len() - buf_l) as u32); - self.token_map.push(self.text_pos, lit.span); + combined_span = match combined_span { + None => Some(lit.span), + Some(prev_span) => Some(Self::merge_spans(prev_span, lit.span)), + }; self.cursor = self.cursor.bump(); continue 'tokens; } @@ -1006,9 +1012,13 @@ where }; self.buf += text; self.text_pos += TextSize::of(text); - self.token_map.push(self.text_pos, span); + combined_span = match combined_span { + None => Some(span), + Some(prev_span) => Some(Self::merge_spans(prev_span, span)), + } } + self.token_map.push(self.text_pos, combined_span.expect("expected at least one token")); self.inner.token(kind, self.buf.as_str()); self.buf.clear(); // FIXME: Emitting whitespace for this is really just a hack, we should get rid of it. @@ -1043,4 +1053,22 @@ where fn error(&mut self, error: String) { self.inner.error(error, self.text_pos) } + + fn merge_spans(a: SpanData<Ctx>, b: SpanData<Ctx>) -> SpanData<Ctx> { + // We don't do what rustc does exactly, rustc does something clever when the spans have different syntax contexts + // but this runs afoul of our separation between `span` and `hir-expand`. + SpanData { + range: if a.ctx == b.ctx { + TextRange::new( + std::cmp::min(a.range.start(), b.range.start()), + std::cmp::max(a.range.end(), b.range.end()), + ) + } else { + // Combining ranges make no sense when they come from different syntax contexts. 
+ a.range + }, + anchor: a.anchor, + ctx: a.ctx, + } + } } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast.rs b/src/tools/rust-analyzer/crates/syntax/src/ast.rs index 3282bd6eff2..32b1f5f7544 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast.rs @@ -8,6 +8,7 @@ pub mod make; mod node_ext; mod operators; pub mod prec; +pub mod syntax_factory; mod token_ext; mod traits; @@ -166,7 +167,7 @@ mod support { } #[test] -fn assert_ast_is_object_safe() { +fn assert_ast_is_dyn_compatible() { fn _f(_: &dyn AstNode, _: &dyn HasName) {} } diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs new file mode 100644 index 00000000000..73bbe49105d --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory.rs @@ -0,0 +1,45 @@ +//! Builds upon [`crate::ast::make`] constructors to create ast fragments with +//! optional syntax mappings. +//! +//! Instead of forcing make constructors to perform syntax mapping, we instead +//! let [`SyntaxFactory`] handle constructing the mappings. Care must be taken +//! to remember to feed the syntax mappings into a [`SyntaxEditor`](crate::syntax_editor::SyntaxEditor), +//! if applicable. + +mod constructors; + +use std::cell::{RefCell, RefMut}; + +use crate::syntax_editor::SyntaxMapping; + +pub struct SyntaxFactory { + // Stored in a refcell so that the factory methods can be &self + mappings: Option<RefCell<SyntaxMapping>>, +} + +impl SyntaxFactory { + /// Creates a new [`SyntaxFactory`], generating mappings between input nodes and generated nodes. + pub fn new() -> Self { + Self { mappings: Some(RefCell::new(SyntaxMapping::new())) } + } + + /// Creates a [`SyntaxFactory`] without generating mappings. + pub fn without_mappings() -> Self { + Self { mappings: None } + } + + /// Gets all of the tracked syntax mappings, if any. + pub fn finish_with_mappings(self) -> SyntaxMapping { + self.mappings.unwrap_or_default().into_inner() + } + + fn mappings(&self) -> Option<RefMut<'_, SyntaxMapping>> { + self.mappings.as_ref().map(|it| it.borrow_mut()) + } +} + +impl Default for SyntaxFactory { + fn default() -> Self { + Self::without_mappings() + } +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs new file mode 100644 index 00000000000..9f88add0f78 --- /dev/null +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/syntax_factory/constructors.rs @@ -0,0 +1,110 @@ +//! 
Wrappers over [`make`] constructors +use itertools::Itertools; + +use crate::{ + ast::{self, make, HasName}, + syntax_editor::SyntaxMappingBuilder, + AstNode, +}; + +use super::SyntaxFactory; + +impl SyntaxFactory { + pub fn name(&self, name: &str) -> ast::Name { + make::name(name).clone_for_update() + } + + pub fn ident_pat(&self, ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat { + let ast = make::ident_pat(ref_, mut_, name.clone()).clone_for_update(); + + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone()); + builder.finish(&mut mapping); + } + + ast + } + + pub fn block_expr( + &self, + stmts: impl IntoIterator<Item = ast::Stmt>, + tail_expr: Option<ast::Expr>, + ) -> ast::BlockExpr { + let stmts = stmts.into_iter().collect_vec(); + let input = stmts.iter().map(|it| it.syntax().clone()).collect_vec(); + + let ast = make::block_expr(stmts, tail_expr.clone()).clone_for_update(); + + if let Some((mut mapping, stmt_list)) = self.mappings().zip(ast.stmt_list()) { + let mut builder = SyntaxMappingBuilder::new(stmt_list.syntax().clone()); + + builder.map_children( + input.into_iter(), + stmt_list.statements().map(|it| it.syntax().clone()), + ); + + if let Some((input, output)) = tail_expr.zip(stmt_list.tail_expr()) { + builder.map_node(input.syntax().clone(), output.syntax().clone()); + } + + builder.finish(&mut mapping); + } + + ast + } + + pub fn expr_path(&self, path: ast::Path) -> ast::Expr { + let ast::Expr::PathExpr(ast) = make::expr_path(path.clone()).clone_for_update() else { + unreachable!() + }; + + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(path.syntax().clone(), ast.path().unwrap().syntax().clone()); + builder.finish(&mut mapping); + } + + ast.into() + } + + pub fn expr_ref(&self, expr: ast::Expr, exclusive: bool) -> ast::Expr { + let ast::Expr::RefExpr(ast) = make::expr_ref(expr.clone(), exclusive).clone_for_update() + else { + unreachable!() + }; + + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(expr.syntax().clone(), ast.expr().unwrap().syntax().clone()); + builder.finish(&mut mapping); + } + + ast.into() + } + + pub fn let_stmt( + &self, + pattern: ast::Pat, + ty: Option<ast::Type>, + initializer: Option<ast::Expr>, + ) -> ast::LetStmt { + let ast = + make::let_stmt(pattern.clone(), ty.clone(), initializer.clone()).clone_for_update(); + + if let Some(mut mapping) = self.mappings() { + let mut builder = SyntaxMappingBuilder::new(ast.syntax().clone()); + builder.map_node(pattern.syntax().clone(), ast.pat().unwrap().syntax().clone()); + if let Some(input) = ty { + builder.map_node(input.syntax().clone(), ast.ty().unwrap().syntax().clone()); + } + if let Some(input) = initializer { + builder + .map_node(input.syntax().clone(), ast.initializer().unwrap().syntax().clone()); + } + builder.finish(&mut mapping); + } + + ast + } +} diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs index eb114f5e5f1..714f5a99111 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor.rs @@ -100,6 +100,10 @@ impl SyntaxEditor { pub fn finish(self) -> SyntaxEdit { edit_algo::apply_edits(self) } + + pub fn add_mappings(&mut 
self, other: SyntaxMapping) { + self.mappings.merge(other); + } } /// Represents a completed [`SyntaxEditor`] operation. @@ -319,85 +323,14 @@ fn is_ancestor_or_self_of_element(node: &SyntaxElement, ancestor: &SyntaxNode) - #[cfg(test)] mod tests { use expect_test::expect; - use itertools::Itertools; use crate::{ - ast::{self, make, HasName}, + ast::{self, make, syntax_factory::SyntaxFactory}, AstNode, }; use super::*; - fn make_ident_pat( - editor: Option<&mut SyntaxEditor>, - ref_: bool, - mut_: bool, - name: ast::Name, - ) -> ast::IdentPat { - let ast = make::ident_pat(ref_, mut_, name.clone()).clone_for_update(); - - if let Some(editor) = editor { - let mut mapping = SyntaxMappingBuilder::new(ast.syntax().clone()); - mapping.map_node(name.syntax().clone(), ast.name().unwrap().syntax().clone()); - mapping.finish(editor); - } - - ast - } - - fn make_let_stmt( - editor: Option<&mut SyntaxEditor>, - pattern: ast::Pat, - ty: Option<ast::Type>, - initializer: Option<ast::Expr>, - ) -> ast::LetStmt { - let ast = - make::let_stmt(pattern.clone(), ty.clone(), initializer.clone()).clone_for_update(); - - if let Some(editor) = editor { - let mut mapping = SyntaxMappingBuilder::new(ast.syntax().clone()); - mapping.map_node(pattern.syntax().clone(), ast.pat().unwrap().syntax().clone()); - if let Some(input) = ty { - mapping.map_node(input.syntax().clone(), ast.ty().unwrap().syntax().clone()); - } - if let Some(input) = initializer { - mapping - .map_node(input.syntax().clone(), ast.initializer().unwrap().syntax().clone()); - } - mapping.finish(editor); - } - - ast - } - - fn make_block_expr( - editor: Option<&mut SyntaxEditor>, - stmts: impl IntoIterator<Item = ast::Stmt>, - tail_expr: Option<ast::Expr>, - ) -> ast::BlockExpr { - let stmts = stmts.into_iter().collect_vec(); - let input = stmts.iter().map(|it| it.syntax().clone()).collect_vec(); - - let ast = make::block_expr(stmts, tail_expr.clone()).clone_for_update(); - - if let Some((editor, stmt_list)) = editor.zip(ast.stmt_list()) { - let mut mapping = SyntaxMappingBuilder::new(stmt_list.syntax().clone()); - - mapping.map_children( - input.into_iter(), - stmt_list.statements().map(|it| it.syntax().clone()), - ); - - if let Some((input, output)) = tail_expr.zip(stmt_list.tail_expr()) { - mapping.map_node(input.syntax().clone(), output.syntax().clone()); - } - - mapping.finish(editor); - } - - ast - } - #[test] fn basic_usage() { let root = make::match_arm( @@ -417,6 +350,7 @@ mod tests { let to_replace = root.syntax().descendants().find_map(ast::BinExpr::cast).unwrap(); let mut editor = SyntaxEditor::new(root.syntax().clone()); + let make = SyntaxFactory::new(); let name = make::name("var_name"); let name_ref = make::name_ref("var_name").clone_for_update(); @@ -425,21 +359,20 @@ mod tests { editor.add_annotation(name.syntax(), placeholder_snippet); editor.add_annotation(name_ref.syntax(), placeholder_snippet); - let make_ident_pat = make_ident_pat(Some(&mut editor), false, false, name); - let make_let_stmt = make_let_stmt( - Some(&mut editor), - make_ident_pat.into(), - None, - Some(to_replace.clone().into()), - ); - let new_block = make_block_expr( - Some(&mut editor), - [make_let_stmt.into()], + let new_block = make.block_expr( + [make + .let_stmt( + make.ident_pat(false, false, name.clone()).into(), + None, + Some(to_replace.clone().into()), + ) + .into()], Some(to_wrap.clone().into()), ); editor.replace(to_replace.syntax(), name_ref.syntax()); editor.replace(to_wrap.syntax(), new_block.syntax()); + 
editor.add_mappings(make.finish_with_mappings()); let edit = editor.finish(); @@ -473,11 +406,11 @@ mod tests { let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap(); let mut editor = SyntaxEditor::new(root.syntax().clone()); + let make = SyntaxFactory::without_mappings(); editor.insert( Position::first_child_of(root.stmt_list().unwrap().syntax()), - make_let_stmt( - None, + make.let_stmt( make::ext::simple_ident_pat(make::name("first")).into(), None, Some(make::expr_literal("1").into()), @@ -487,8 +420,7 @@ mod tests { editor.insert( Position::after(second_let.syntax()), - make_let_stmt( - None, + make.let_stmt( make::ext::simple_ident_pat(make::name("third")).into(), None, Some(make::expr_literal("3").into()), @@ -528,19 +460,17 @@ mod tests { let second_let = root.syntax().descendants().find_map(ast::LetStmt::cast).unwrap(); let mut editor = SyntaxEditor::new(root.syntax().clone()); + let make = SyntaxFactory::new(); - let new_block_expr = - make_block_expr(Some(&mut editor), [], Some(ast::Expr::BlockExpr(inner_block.clone()))); + let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone()))); - let first_let = make_let_stmt( - Some(&mut editor), + let first_let = make.let_stmt( make::ext::simple_ident_pat(make::name("first")).into(), None, Some(make::expr_literal("1").into()), ); - let third_let = make_let_stmt( - Some(&mut editor), + let third_let = make.let_stmt( make::ext::simple_ident_pat(make::name("third")).into(), None, Some(make::expr_literal("3").into()), @@ -552,6 +482,7 @@ mod tests { ); editor.insert(Position::after(second_let.syntax()), third_let.syntax()); editor.replace(inner_block.syntax(), new_block_expr.syntax()); + editor.add_mappings(make.finish_with_mappings()); let edit = editor.finish(); @@ -581,12 +512,11 @@ mod tests { let inner_block = root.clone(); let mut editor = SyntaxEditor::new(root.syntax().clone()); + let make = SyntaxFactory::new(); - let new_block_expr = - make_block_expr(Some(&mut editor), [], Some(ast::Expr::BlockExpr(inner_block.clone()))); + let new_block_expr = make.block_expr([], Some(ast::Expr::BlockExpr(inner_block.clone()))); - let first_let = make_let_stmt( - Some(&mut editor), + let first_let = make.let_stmt( make::ext::simple_ident_pat(make::name("first")).into(), None, Some(make::expr_literal("1").into()), @@ -597,6 +527,7 @@ mod tests { first_let.syntax(), ); editor.replace(inner_block.syntax(), new_block_expr.syntax()); + editor.add_mappings(make.finish_with_mappings()); let edit = editor.finish(); diff --git a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs index 9bb5e6d9338..16bc55ed2d4 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/syntax_editor/mapping.rs @@ -7,8 +7,6 @@ use rustc_hash::FxHashMap; use crate::{SyntaxElement, SyntaxNode}; -use super::SyntaxEditor; - #[derive(Debug, Default)] pub struct SyntaxMapping { // important information to keep track of: @@ -209,7 +207,7 @@ impl SyntaxMapping { Some(output) } - fn add_mapping(&mut self, syntax_mapping: SyntaxMappingBuilder) { + pub fn add_mapping(&mut self, syntax_mapping: SyntaxMappingBuilder) { let SyntaxMappingBuilder { parent_node, node_mappings } = syntax_mapping; let parent_entry: u32 = self.entry_parents.len().try_into().unwrap(); @@ -257,8 +255,8 @@ impl SyntaxMappingBuilder { } } - pub fn finish(self, editor: &mut SyntaxEditor) { - 
editor.mappings.add_mapping(self);
+    pub fn finish(self, mappings: &mut SyntaxMapping) {
+        mappings.add_mapping(self);
     }
 }
diff --git a/src/tools/rust-analyzer/crates/vfs/src/loader.rs b/src/tools/rust-analyzer/crates/vfs/src/loader.rs
index f24354cb493..c49e4c4322d 100644
--- a/src/tools/rust-analyzer/crates/vfs/src/loader.rs
+++ b/src/tools/rust-analyzer/crates/vfs/src/loader.rs
@@ -1,4 +1,4 @@
-//! Object safe interface for file watching and reading.
+//! Dynamically compatible interface for file watching and reading.
 use std::fmt;
 
 use paths::{AbsPath, AbsPathBuf};
@@ -232,6 +232,6 @@ impl fmt::Debug for Message {
 }
 
 #[test]
-fn handle_is_object_safe() {
+fn handle_is_dyn_compatible() {
     fn _assert(_: &dyn Handle) {}
 }
diff --git a/src/tools/rust-analyzer/docs/dev/README.md b/src/tools/rust-analyzer/docs/dev/README.md
index 002b8ba2a66..12e6d829a08 100644
--- a/src/tools/rust-analyzer/docs/dev/README.md
+++ b/src/tools/rust-analyzer/docs/dev/README.md
@@ -178,7 +178,15 @@ RA_PROFILE=foo|bar|baz   // enabled only selected entries
 RA_PROFILE=*@3>10  // dump everything, up to depth 3, if it takes more than 10 ms
 ```
 
-In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
+Some rust-analyzer contributors have `export RA_PROFILE='*>10'` in their shell profile.
+
+For machine-readable JSON output, we have the `RA_PROFILE_JSON` env variable. We support
+filtering only by span name:
+
+```
+RA_PROFILE=*   // dump everything
+RA_PROFILE_JSON="vfs_load|parallel_prime_caches|discover_command" // dump selected spans
+```
 
 We also have a "counting" profiler which counts number of instances of popular structs.
 It is enabled by `RA_COUNT=1`.
diff --git a/src/tools/rust-analyzer/docs/dev/architecture.md b/src/tools/rust-analyzer/docs/dev/architecture.md
index 4f8723a9368..6aa57b2f9be 100644
--- a/src/tools/rust-analyzer/docs/dev/architecture.md
+++ b/src/tools/rust-analyzer/docs/dev/architecture.md
@@ -42,7 +42,7 @@ The underlying engine makes sure that model is computed lazily (on-demand) and c
 `crates/rust-analyzer/src/bin/main.rs` contains the main function which spawns LSP.
 This is *the* entry point, but it front-loads a lot of complexity, so it's fine to just skim through it.
 
-`crates/rust-analyzer/src/handlers.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
+`crates/rust-analyzer/src/handlers/requests.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
 
 `Analysis` and `AnalysisHost` types define the main API for consumers of IDE services.
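As documented in the README hunk above, `RA_PROFILE_JSON` selects spans by name, and the `TimingLayer` added earlier in this patch emits one JSON object per closed span with the `name` and `elapsed_ms` fields of `JsonDataInner`. The sketch below is an editor's illustration of how that newline-delimited output could be consumed; it is not part of the patch, and the `serde`/`serde_json` dependencies and the per-span aggregation are assumptions made for the example.

```rust
use std::collections::HashMap;
use std::io::BufRead;

// Mirrors the fields serialized by `JsonDataInner` in tracing/json.rs.
#[derive(serde::Deserialize)]
struct Record {
    name: String,
    elapsed_ms: u64,
}

fn main() -> std::io::Result<()> {
    // Sum the elapsed time per span name across all records read from stdin.
    let mut totals: HashMap<String, u64> = HashMap::new();
    for line in std::io::stdin().lock().lines() {
        let line = line?;
        if line.trim().is_empty() {
            continue;
        }
        // Each line is a self-contained object, e.g. {"name":"vfs_load","elapsed_ms":12}
        match serde_json::from_str::<Record>(&line) {
            Ok(record) => *totals.entry(record.name).or_default() += record.elapsed_ms,
            Err(err) => eprintln!("skipping malformed line: {err}"),
        }
    }
    for (name, ms) in &totals {
        println!("{name}: {ms} ms total");
    }
    Ok(())
}
```

Assuming the layer is installed on stderr as in the module-level usage example, redirecting rust-analyzer's stderr to a file while `RA_PROFILE_JSON` is set would produce suitable input for such a script; the exact wiring of the writer is an assumption here.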
diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index b7bac4d29fa..7764f7843a0 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ <!--- -lsp/ext.rs hash: 6292ee8d88d4c9ec +lsp/ext.rs hash: 90cf7718d54fe3c2 If you need to change the above hash to make the test pass, please check if you need to adjust this doc as well and ping this issue: diff --git a/src/tools/rust-analyzer/docs/dev/syntax.md b/src/tools/rust-analyzer/docs/dev/syntax.md index 6c4daecc58f..3dcd430cea5 100644 --- a/src/tools/rust-analyzer/docs/dev/syntax.md +++ b/src/tools/rust-analyzer/docs/dev/syntax.md @@ -378,7 +378,7 @@ impl AstNode for AssocItem { } ``` -Shared AST substructures are modeled via (object safe) traits: +Shared AST substructures are modeled via (dynamically compatible) traits: ```rust trait HasVisibility: AstNode { diff --git a/src/tools/rust-analyzer/docs/user/generated_config.adoc b/src/tools/rust-analyzer/docs/user/generated_config.adoc index f37fd7f4ab3..708fc2b7891 100644 --- a/src/tools/rust-analyzer/docs/user/generated_config.adoc +++ b/src/tools/rust-analyzer/docs/user/generated_config.adoc @@ -158,6 +158,11 @@ building from locking the `Cargo.lock` at the expense of duplicating build artif Set to `true` to use a subdirectory of the existing target directory or set to a path relative to the workspace to use that path. -- +[[rust-analyzer.cfg.setTest]]rust-analyzer.cfg.setTest (default: `true`):: ++ +-- +Set `cfg(test)` for local crates. Defaults to true. +-- [[rust-analyzer.checkOnSave]]rust-analyzer.checkOnSave (default: `true`):: + -- diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index b66f0a64d57..a823e5bb96c 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -512,6 +512,11 @@ "type": "boolean", "default": false }, + "rust-analyzer.debug.buildBeforeRestart": { + "markdownDescription": "Whether to rebuild the project modules before debugging the same test again", + "type": "boolean", + "default": false + }, "rust-analyzer.debug.engineSettings": { "type": "object", "default": {}, @@ -849,6 +854,16 @@ } }, { + "title": "cfg", + "properties": { + "rust-analyzer.cfg.setTest": { + "markdownDescription": "Set `cfg(test)` for local crates. Defaults to true.", + "default": true, + "type": "boolean" + } + } + }, + { "title": "general", "properties": { "rust-analyzer.checkOnSave": { diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts index daead47e942..35867f710d5 100644 --- a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts +++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts @@ -23,10 +23,11 @@ export async function bootstrap( if (!isValidExecutable(path, config.serverExtraEnv)) { throw new Error( - `Failed to execute ${path} --version.` + config.serverPath - ? `\`config.server.path\` or \`config.serverPath\` has been set explicitly.\ + `Failed to execute ${path} --version.` + + (config.serverPath + ? 
`\`config.server.path\` or \`config.serverPath\` has been set explicitly.\ Consider removing this config or making a valid server binary available at that path.` - : "", + : ""), ); } diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index 1e3dc608095..abb4099f9f5 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -24,6 +24,7 @@ export class Config { "serverPath", "server", "files", + "cfg", ].map((opt) => `${this.rootSection}.${opt}`); private readonly requiresWindowReloadOpts = ["testExplorer"].map( @@ -299,6 +300,7 @@ export class Config { engine: this.get<string>("debug.engine"), engineSettings: this.get<object>("debug.engineSettings") ?? {}, openDebugPane: this.get<boolean>("debug.openDebugPane"), + buildBeforeRestart: this.get<boolean>("debug.buildBeforeRestart"), sourceFileMap: sourceFileMap, }; } diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts index 3aae0f9ce6e..fb7e340e517 100644 --- a/src/tools/rust-analyzer/editors/code/src/debug.ts +++ b/src/tools/rust-analyzer/editors/code/src/debug.ts @@ -5,12 +5,15 @@ import type * as ra from "./lsp_ext"; import { Cargo } from "./toolchain"; import type { Ctx } from "./ctx"; -import { prepareEnv } from "./run"; +import { createTaskFromRunnable, prepareEnv } from "./run"; import { execute, isCargoRunnableArgs, unwrapUndefinable } from "./util"; import type { Config } from "./config"; const debugOutput = vscode.window.createOutputChannel("Debug"); +// Here we want to keep track on everything that's currently running +const activeDebugSessionIds: string[] = []; + export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise<void> { const scope = ctx.activeRustEditor?.document.uri; if (!scope) return; @@ -45,6 +48,8 @@ export async function startDebugSession(ctx: Ctx, runnable: ra.Runnable): Promis const wsLaunchSection = vscode.workspace.getConfiguration("launch"); const configurations = wsLaunchSection.get<any[]>("configurations") || []; + // The runnable label is the name of the test with the "test prefix" + // e.g. test test_feature_x const index = configurations.findIndex((c) => c.name === runnable.label); if (-1 !== index) { debugConfig = configurations[index]; @@ -168,6 +173,8 @@ async function getDebugConfiguration( if (debugConfig.name === "run binary") { // The LSP side: crates\rust-analyzer\src\main_loop\handlers.rs, // fn to_lsp_runnable(...) with RunnableKind::Bin + // FIXME: Neither crates\rust-analyzer\src\main_loop\handlers.rs + // nor to_lsp_runnable exist anymore debugConfig.name = `run ${path.basename(executable)}`; } @@ -359,3 +366,49 @@ function quote(xs: string[]) { }) .join(" "); } + +async function recompileTestFromDebuggingSession(session: vscode.DebugSession, ctx: Ctx) { + const { cwd, args: sessionArgs }: vscode.DebugConfiguration = session.configuration; + + const args: ra.CargoRunnableArgs = { + cwd: cwd, + cargoArgs: ["test", "--no-run", "--test", "lib"], + + // The first element of the debug configuration args is the test path e.g. "test_bar::foo::test_a::test_b" + executableArgs: sessionArgs, + }; + const runnable: ra.Runnable = { + kind: "cargo", + label: "compile-test", + args, + }; + const task: vscode.Task = await createTaskFromRunnable(runnable, ctx.config); + + // It is not needed to call the language server, since the test path is already resolved in the + // configuration option. 
We can simply call a debug configuration with the --no-run option to compile + await vscode.tasks.executeTask(task); +} + +export function initializeDebugSessionTrackingAndRebuild(ctx: Ctx) { + vscode.debug.onDidStartDebugSession((session: vscode.DebugSession) => { + if (!activeDebugSessionIds.includes(session.id)) { + activeDebugSessionIds.push(session.id); + } + }); + + vscode.debug.onDidTerminateDebugSession(async (session: vscode.DebugSession) => { + // The id of the session will be the same when pressing restart the restart button + if (activeDebugSessionIds.find((s) => s === session.id)) { + await recompileTestFromDebuggingSession(session, ctx); + } + removeActiveSession(session); + }); +} + +function removeActiveSession(session: vscode.DebugSession) { + const activeSessionId = activeDebugSessionIds.findIndex((id) => id === session.id); + + if (activeSessionId !== -1) { + activeDebugSessionIds.splice(activeSessionId, 1); + } +} diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index 4769fdd864a..0ddc5619e99 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -6,6 +6,7 @@ import { type CommandFactory, Ctx, fetchWorkspace } from "./ctx"; import * as diagnostics from "./diagnostics"; import { activateTaskProvider } from "./tasks"; import { setContextValue } from "./util"; +import { initializeDebugSessionTrackingAndRebuild } from "./debug"; const RUST_PROJECT_CONTEXT_NAME = "inRustProject"; @@ -102,6 +103,10 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> { ctx.subscriptions, ); + if (ctx.config.debug.buildBeforeRestart) { + initializeDebugSessionTrackingAndRebuild(ctx); + } + await ctx.start(); return ctx; } diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts index dd0da6b62c8..8a82a5a58cf 100644 --- a/src/tools/rust-analyzer/editors/code/src/run.ts +++ b/src/tools/rust-analyzer/editors/code/src/run.ts @@ -36,7 +36,7 @@ export async function selectRunnable( if (runnables.length === 0) { // it is the debug case, run always has at least 'cargo check ...' - // see crates\rust-analyzer\src\main_loop\handlers.rs, handle_runnables + // see crates\rust-analyzer\src\handlers\request.rs, handle_runnables await vscode.window.showErrorMessage("There's no debug target!"); quickPick.dispose(); return; diff --git a/src/tools/rust-analyzer/editors/code/src/toolchain.ts b/src/tools/rust-analyzer/editors/code/src/toolchain.ts index 850a6a55616..e8bab9c3d84 100644 --- a/src/tools/rust-analyzer/editors/code/src/toolchain.ts +++ b/src/tools/rust-analyzer/editors/code/src/toolchain.ts @@ -29,7 +29,7 @@ export class Cargo { static artifactSpec(cargoArgs: string[], executableArgs?: string[]): ArtifactSpec { cargoArgs = [...cargoArgs, "--message-format=json"]; // arguments for a runnable from the quick pick should be updated. 
- // see crates\rust-analyzer\src\main_loop\handlers.rs, handle_code_lens + // see crates\rust-analyzer\src\handlers\request.rs, handle_code_lens switch (cargoArgs[0]) { case "run": cargoArgs[0] = "build"; diff --git a/src/tools/rust-analyzer/rust-version b/src/tools/rust-analyzer/rust-version index 79ed6cc7d74..f217c6a19cb 100644 --- a/src/tools/rust-analyzer/rust-version +++ b/src/tools/rust-analyzer/rust-version @@ -1 +1 @@ -1b5aa96d6016bafe50e071b45d4d2e3c90fd766f +cf24c73141a77db730f4b7fda69dcd7e8b113b51 diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index 1ffad06457f..5163f039a23 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -129,11 +129,14 @@ const EXCEPTIONS_STDARCH: ExceptionList = &[ const EXCEPTIONS_CARGO: ExceptionList = &[ // tidy-alphabetical-start + ("arrayref", "BSD-2-Clause"), ("bitmaps", "MPL-2.0+"), + ("blake3", "CC0-1.0 OR Apache-2.0 OR Apache-2.0 WITH LLVM-exception"), ("bytesize", "Apache-2.0"), ("ciborium", "Apache-2.0"), ("ciborium-io", "Apache-2.0"), ("ciborium-ll", "Apache-2.0"), + ("constant_time_eq", "CC0-1.0 OR MIT-0 OR Apache-2.0"), ("dunce", "CC0-1.0 OR MIT-0 OR Apache-2.0"), ("encoding_rs", "(Apache-2.0 OR MIT) AND BSD-3-Clause"), ("fiat-crypto", "MIT OR Apache-2.0 OR BSD-1-Clause"), diff --git a/tests/coverage/color.coverage b/tests/coverage/color.coverage index b12f20204b4..4e6ef6b60ce 100644 --- a/tests/coverage/color.coverage +++ b/tests/coverage/color.coverage @@ -1,5 +1,5 @@ LL| |//@ edition: 2021 - LL| |//@ ignore-mode-coverage-map + LL| |//@ ignore-coverage-map LL| |//@ ignore-windows LL| |//@ llvm-cov-flags: --use-color LL| | diff --git a/tests/coverage/color.rs b/tests/coverage/color.rs index 144e798ba5d..bdb81c088f5 100644 --- a/tests/coverage/color.rs +++ b/tests/coverage/color.rs @@ -1,5 +1,5 @@ //@ edition: 2021 -//@ ignore-mode-coverage-map +//@ ignore-coverage-map //@ ignore-windows //@ llvm-cov-flags: --use-color diff --git a/tests/coverage/ignore_map.coverage b/tests/coverage/ignore_map.coverage index a796a7375a7..466f9e29815 100644 --- a/tests/coverage/ignore_map.coverage +++ b/tests/coverage/ignore_map.coverage @@ -1,4 +1,4 @@ - LL| |//@ ignore-mode-coverage-map + LL| |//@ ignore-coverage-map LL| | LL| 1|fn main() {} diff --git a/tests/coverage/ignore_map.rs b/tests/coverage/ignore_map.rs index deee6e27d99..95df6cbbf0d 100644 --- a/tests/coverage/ignore_map.rs +++ b/tests/coverage/ignore_map.rs @@ -1,3 +1,3 @@ -//@ ignore-mode-coverage-map +//@ ignore-coverage-map fn main() {} diff --git a/tests/coverage/ignore_run.rs b/tests/coverage/ignore_run.rs index 0363524d369..2d67ebe6f3a 100644 --- a/tests/coverage/ignore_run.rs +++ b/tests/coverage/ignore_run.rs @@ -1,3 +1,3 @@ -//@ ignore-mode-coverage-run +//@ ignore-coverage-run fn main() {} diff --git a/tests/run-make/broken-pipe-no-ice/rmake.rs b/tests/run-make/broken-pipe-no-ice/rmake.rs new file mode 100644 index 00000000000..d1db0bc7368 --- /dev/null +++ b/tests/run-make/broken-pipe-no-ice/rmake.rs @@ -0,0 +1,73 @@ +//! Check that `rustc` and `rustdoc` does not ICE upon encountering a broken pipe due to unhandled +//! panics from raw std `println!` usages. +//! +//! Regression test for <https://github.com/rust-lang/rust/issues/34376>. 
+ +//@ ignore-cross-compile (needs to run test binary) + +#![feature(anonymous_pipe)] + +use std::io::Read; +use std::process::{Command, Stdio}; + +use run_make_support::env_var; + +#[derive(Debug, PartialEq)] +enum Binary { + Rustc, + Rustdoc, +} + +fn check_broken_pipe_handled_gracefully(bin: Binary, mut cmd: Command) { + let (reader, writer) = std::pipe::pipe().unwrap(); + drop(reader); // close read-end + cmd.stdout(writer).stderr(Stdio::piped()); + + let mut child = cmd.spawn().unwrap(); + + let mut stderr = String::new(); + child.stderr.as_mut().unwrap().read_to_string(&mut stderr).unwrap(); + let status = child.wait().unwrap(); + + assert!(!status.success(), "{bin:?} unexpectedly succeeded"); + + const PANIC_ICE_EXIT_CODE: i32 = 101; + + #[cfg(not(windows))] + { + // On non-Windows, rustc/rustdoc built with `-Zon-broken-pipe=kill` shouldn't have an exit + // code of 101 because it should have an wait status that corresponds to SIGPIPE signal + // number. + assert_ne!(status.code(), Some(PANIC_ICE_EXIT_CODE), "{bin:?}"); + // And the stderr should be empty because rustc/rustdoc should've gotten killed. + assert!(stderr.is_empty(), "{bin:?} stderr:\n{}", stderr); + } + + #[cfg(windows)] + { + match bin { + // On Windows, rustc has a paper that propagates the panic exit code of 101 but converts + // broken pipe errors into fatal errors instead of ICEs. + Binary::Rustc => { + assert_eq!(status.code(), Some(PANIC_ICE_EXIT_CODE), "{bin:?}"); + // But make sure it doesn't manifest as an ICE. + assert!(!stderr.contains("internal compiler error"), "{bin:?} ICE'd"); + } + // On Windows, rustdoc seems to cleanly exit with exit code of 1. + Binary::Rustdoc => { + assert_eq!(status.code(), Some(1), "{bin:?}"); + assert!(!stderr.contains("panic"), "{bin:?} stderr contains panic"); + } + } + } +} + +fn main() { + let mut rustc = Command::new(env_var("RUSTC")); + rustc.arg("--print=sysroot"); + check_broken_pipe_handled_gracefully(Binary::Rustc, rustc); + + let mut rustdoc = Command::new(env_var("RUSTDOC")); + rustdoc.arg("--version"); + check_broken_pipe_handled_gracefully(Binary::Rustdoc, rustdoc); +} diff --git a/tests/run-make/cross-lang-lto-pgo-smoketest-clang/rmake.rs b/tests/run-make/cross-lang-lto-pgo-smoketest-clang/rmake.rs index 03c9af4bb89..50790e18cec 100644 --- a/tests/run-make/cross-lang-lto-pgo-smoketest-clang/rmake.rs +++ b/tests/run-make/cross-lang-lto-pgo-smoketest-clang/rmake.rs @@ -9,7 +9,7 @@ // RUSTBUILD_FORCE_CLANG_BASED_TESTS and only runs tests which contain "clang" in their // name. -//@ needs-profiler-support +//@ needs-profiler-runtime // FIXME(Oneirical): Except that due to the reliance on llvm-profdata, this test // never runs, because `x86_64-gnu-debug` does not have the `profiler_builtins` crate. diff --git a/tests/run-make/optimization-remarks-dir-pgo/rmake.rs b/tests/run-make/optimization-remarks-dir-pgo/rmake.rs index 228c43cc5f1..471ce89f188 100644 --- a/tests/run-make/optimization-remarks-dir-pgo/rmake.rs +++ b/tests/run-make/optimization-remarks-dir-pgo/rmake.rs @@ -4,7 +4,7 @@ // the output remark files. 
// See https://github.com/rust-lang/rust/pull/114439 -//@ needs-profiler-support +//@ needs-profiler-runtime //@ ignore-cross-compile use run_make_support::{ diff --git a/tests/run-make/pgo-branch-weights/rmake.rs b/tests/run-make/pgo-branch-weights/rmake.rs index 105c2fafc5a..1893248e307 100644 --- a/tests/run-make/pgo-branch-weights/rmake.rs +++ b/tests/run-make/pgo-branch-weights/rmake.rs @@ -7,7 +7,7 @@ // If the test passes, the expected function call count was added to the use-phase LLVM-IR. // See https://github.com/rust-lang/rust/pull/66631 -//@ needs-profiler-support +//@ needs-profiler-runtime //@ ignore-cross-compile use std::path::Path; diff --git a/tests/run-make/pgo-gen-lto/rmake.rs b/tests/run-make/pgo-gen-lto/rmake.rs index 53d1623bf58..4f7ae9fb24c 100644 --- a/tests/run-make/pgo-gen-lto/rmake.rs +++ b/tests/run-make/pgo-gen-lto/rmake.rs @@ -2,7 +2,7 @@ // should be generated. // See https://github.com/rust-lang/rust/pull/48346 -//@ needs-profiler-support +//@ needs-profiler-runtime // Reason: this exercises LTO profiling //@ ignore-cross-compile // Reason: the compiled binary is executed diff --git a/tests/run-make/pgo-gen/rmake.rs b/tests/run-make/pgo-gen/rmake.rs index ad2f6388e8f..5cd5a4583ed 100644 --- a/tests/run-make/pgo-gen/rmake.rs +++ b/tests/run-make/pgo-gen/rmake.rs @@ -3,7 +3,7 @@ // optimizes code. This test checks that these files are generated. // See https://github.com/rust-lang/rust/pull/48346 -//@ needs-profiler-support +//@ needs-profiler-runtime //@ ignore-cross-compile use run_make_support::{cwd, has_extension, has_prefix, run, rustc, shallow_find_files}; diff --git a/tests/run-make/pgo-indirect-call-promotion/rmake.rs b/tests/run-make/pgo-indirect-call-promotion/rmake.rs index 28232eb2566..ce9754f13b9 100644 --- a/tests/run-make/pgo-indirect-call-promotion/rmake.rs +++ b/tests/run-make/pgo-indirect-call-promotion/rmake.rs @@ -5,7 +5,7 @@ // whether it can make a direct call instead of the indirect call. // See https://github.com/rust-lang/rust/pull/66631 -//@ needs-profiler-support +//@ needs-profiler-runtime // Reason: llvm_profdata is used //@ ignore-cross-compile // Reason: the compiled binary is executed diff --git a/tests/run-make/pgo-use/rmake.rs b/tests/run-make/pgo-use/rmake.rs index 276af9ea263..c09a82353b9 100644 --- a/tests/run-make/pgo-use/rmake.rs +++ b/tests/run-make/pgo-use/rmake.rs @@ -5,7 +5,7 @@ // be marked as cold. 
// See https://github.com/rust-lang/rust/pull/60262 -//@ needs-profiler-support +//@ needs-profiler-runtime //@ ignore-cross-compile use run_make_support::{ diff --git a/tests/run-make/profile/rmake.rs b/tests/run-make/profile/rmake.rs index 4287ab0a931..58a1b53c040 100644 --- a/tests/run-make/profile/rmake.rs +++ b/tests/run-make/profile/rmake.rs @@ -6,7 +6,7 @@ // See https://github.com/rust-lang/rust/pull/42433 //@ ignore-cross-compile -//@ needs-profiler-support +//@ needs-profiler-runtime use run_make_support::{path, run, rustc}; diff --git a/tests/run-make/track-pgo-dep-info/rmake.rs b/tests/run-make/track-pgo-dep-info/rmake.rs index 84f4e0bd383..5869dbf9c24 100644 --- a/tests/run-make/track-pgo-dep-info/rmake.rs +++ b/tests/run-make/track-pgo-dep-info/rmake.rs @@ -6,7 +6,7 @@ //@ ignore-cross-compile // Reason: the binary is executed -//@ needs-profiler-support +//@ needs-profiler-runtime use run_make_support::{llvm_profdata, rfs, run, rustc}; diff --git a/tests/ui/coverage-attr/bad-attr-ice.rs b/tests/ui/coverage-attr/bad-attr-ice.rs index ae4d27d65eb..55c86d260d4 100644 --- a/tests/ui/coverage-attr/bad-attr-ice.rs +++ b/tests/ui/coverage-attr/bad-attr-ice.rs @@ -1,7 +1,7 @@ #![cfg_attr(feat, feature(coverage_attribute))] //@ revisions: feat nofeat //@ compile-flags: -Cinstrument-coverage -//@ needs-profiler-support +//@ needs-profiler-runtime // Malformed `#[coverage(..)]` attributes should not cause an ICE when built // with `-Cinstrument-coverage`. diff --git a/tests/ui/issues/issue-85461.rs b/tests/ui/issues/issue-85461.rs index 7fe7a4aa579..72538081ccb 100644 --- a/tests/ui/issues/issue-85461.rs +++ b/tests/ui/issues/issue-85461.rs @@ -1,6 +1,6 @@ //@ compile-flags: -Cinstrument-coverage -Ccodegen-units=4 --crate-type dylib -Copt-level=0 //@ build-pass -//@ needs-profiler-support +//@ needs-profiler-runtime //@ needs-dynamic-linking // Regression test for #85461 where MSVC sometimes fails to link instrument-coverage binaries diff --git a/tests/ui/rust-2024/auxiliary/reserved-guarded-strings-macro-2021.rs b/tests/ui/rust-2024/auxiliary/reserved-guarded-strings-macro-2021.rs new file mode 100644 index 00000000000..81080fcdce3 --- /dev/null +++ b/tests/ui/rust-2024/auxiliary/reserved-guarded-strings-macro-2021.rs @@ -0,0 +1,20 @@ +//@ force-host +//@ edition:2021 +//@ no-prefer-dynamic + +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; +use std::str::FromStr; + +#[proc_macro] +pub fn number_of_tokens_in_a_guarded_string_literal(_: TokenStream) -> TokenStream { + TokenStream::from_str("#\"abc\"#").unwrap().into_iter().count().to_string().parse().unwrap() +} + +#[proc_macro] +pub fn number_of_tokens_in_a_guarded_unterminated_string_literal(_: TokenStream) -> TokenStream { + TokenStream::from_str("#\"abc\"").unwrap().into_iter().count().to_string().parse().unwrap() +} diff --git a/tests/ui/rust-2024/auxiliary/reserved-guarded-strings-macro-2024.rs b/tests/ui/rust-2024/auxiliary/reserved-guarded-strings-macro-2024.rs new file mode 100644 index 00000000000..2c3dc30f0ae --- /dev/null +++ b/tests/ui/rust-2024/auxiliary/reserved-guarded-strings-macro-2024.rs @@ -0,0 +1,21 @@ +//@ force-host +//@ compile-flags: -Zunstable-options +//@ edition:2024 +//@ no-prefer-dynamic + +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; +use std::str::FromStr; + +#[proc_macro] +pub fn number_of_tokens_in_a_guarded_string_literal(_: TokenStream) -> TokenStream { + 
TokenStream::from_str("#\"abc\"#").unwrap().into_iter().count().to_string().parse().unwrap() +} + +#[proc_macro] +pub fn number_of_tokens_in_a_guarded_unterminated_string_literal(_: TokenStream) -> TokenStream { + TokenStream::from_str("#\"abc\"").unwrap().into_iter().count().to_string().parse().unwrap() +} diff --git a/tests/ui/rust-2024/reserved-guarded-strings-lexing.rs b/tests/ui/rust-2024/reserved-guarded-strings-lexing.rs new file mode 100644 index 00000000000..83e0dcbb4be --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings-lexing.rs @@ -0,0 +1,80 @@ +//@ edition:2021 +// ignore-tidy-linelength + +#![warn(rust_2024_guarded_string_incompatible_syntax)] + +macro_rules! demo2 { + ( $a:tt $b:tt ) => { println!("two tokens") }; +} + +macro_rules! demo3 { + ( $a:tt $b:tt $c:tt ) => { println!("three tokens") }; +} + +macro_rules! demo4 { + ( $a:tt $b:tt $c:tt $d:tt ) => { println!("four tokens") }; +} + +macro_rules! demo5 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt ) => { println!("five tokens") }; +} + +macro_rules! demo7 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt $f:tt $g:tt ) => { println!("seven tokens") }; +} + + +fn main() { + demo3!(## "foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!(### "foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!(## "foo"#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo7!(### "foo"###); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + + demo5!(###"foo"#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo5!(#"foo"###); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!("foo"###); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + 
+ // Non-ascii identifiers + demo2!(Ñ"foo"); + //~^ ERROR prefix `Ñ` is unknown + demo4!(Ñ#""#); + //~^ ERROR prefix `Ñ` is unknown + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(🙃#""); + //~^ ERROR identifiers cannot contain emoji + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 +} diff --git a/tests/ui/rust-2024/reserved-guarded-strings-lexing.stderr b/tests/ui/rust-2024/reserved-guarded-strings-lexing.stderr new file mode 100644 index 00000000000..e2e1ac42f05 --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings-lexing.stderr @@ -0,0 +1,271 @@ +error: prefix `Ñ` is unknown + --> $DIR/reserved-guarded-strings-lexing.rs:70:12 + | +LL | demo2!(Ñ"foo"); + | ^ unknown prefix + | + = note: prefixed identifiers and literals are reserved since Rust 2021 +help: consider inserting whitespace here + | +LL | demo2!(Ñ "foo"); + | + + +error: prefix `Ñ` is unknown + --> $DIR/reserved-guarded-strings-lexing.rs:72:12 + | +LL | demo4!(Ñ#""#); + | ^ unknown prefix + | + = note: prefixed identifiers and literals are reserved since Rust 2021 +help: consider inserting whitespace here + | +LL | demo4!(Ñ #""#); + | + + +error: identifiers cannot contain emoji: `🙃` + --> $DIR/reserved-guarded-strings-lexing.rs:76:12 + | +LL | demo3!(🙃#""); + | ^^ + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:28:12 + | +LL | demo3!(## "foo"); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +note: the lint level is defined here + --> $DIR/reserved-guarded-strings-lexing.rs:4:9 + | +LL | #![warn(rust_2024_guarded_string_incompatible_syntax)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(# # "foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:31:12 + | +LL | demo4!(### "foo"); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(# ## "foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:31:13 + | +LL | demo4!(### "foo"); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(## # "foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:36:12 + | +LL | demo4!(## "foo"#); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! 
+ = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(# # "foo"#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:39:12 + | +LL | demo7!(### "foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo7!(# ## "foo"###); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:39:13 + | +LL | demo7!(### "foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo7!(## # "foo"###); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:39:21 + | +LL | demo7!(### "foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo7!(### "foo"# ##); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:39:22 + | +LL | demo7!(### "foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo7!(### "foo"## #); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:49:12 + | +LL | demo5!(###"foo"#); + | ^^^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(# ##"foo"#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:49:13 + | +LL | demo5!(###"foo"#); + | ^^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(## #"foo"#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:49:14 + | +LL | demo5!(###"foo"#); + | ^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! 
+ = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(### "foo"#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:56:12 + | +LL | demo5!(#"foo"###); + | ^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(# "foo"###); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:56:18 + | +LL | demo5!(#"foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(#"foo"# ##); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:56:19 + | +LL | demo5!(#"foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(#"foo"## #); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:63:17 + | +LL | demo4!("foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!("foo"# ##); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:63:18 + | +LL | demo4!("foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!("foo"## #); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:72:13 + | +LL | demo4!(Ñ#""#); + | ^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(Ñ# ""#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-lexing.rs:76:13 + | +LL | demo3!(🙃#""); + | ^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! 
+ = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(🙃# ""); + | + + +error: aborting due to 3 previous errors; 18 warnings emitted + diff --git a/tests/ui/rust-2024/reserved-guarded-strings-migration.fixed b/tests/ui/rust-2024/reserved-guarded-strings-migration.fixed new file mode 100644 index 00000000000..d92df7b5375 --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings-migration.fixed @@ -0,0 +1,99 @@ +//@ check-pass +//@ run-rustfix +//@ edition:2021 + +#![warn(rust_2024_guarded_string_incompatible_syntax)] + +macro_rules! demo1 { + ( $a:tt ) => { println!("one tokens") }; +} + +macro_rules! demo2 { + ( $a:tt $b:tt ) => { println!("two tokens") }; +} + +macro_rules! demo3 { + ( $a:tt $b:tt $c:tt ) => { println!("three tokens") }; +} + +macro_rules! demo4 { + ( $a:tt $b:tt $c:tt $d:tt ) => { println!("four tokens") }; +} + +macro_rules! demo5 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt ) => { println!("five tokens") }; +} + +macro_rules! demo6 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt $f:tt ) => { println!("six tokens") }; +} + + +fn main() { + demo1!(""); + demo2!(# ""); + demo3!(# ""#); + demo2!(# "foo"); + demo3!(# "foo"#); + demo2!("foo"#); + + demo3!(# # "foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!(# # # "foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!(# # "foo"#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo6!(# # # "foo"# #); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + + demo4!("foo"# # #); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + + demo2!(# ""); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(# ""#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(# # ""); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo2!(# "foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(# # "foo"); + //~^ WARNING parsed as a guarded string in Rust 
2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(# "foo"#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!(# # "foo"#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo5!(# # "foo"# #); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 +} diff --git a/tests/ui/rust-2024/reserved-guarded-strings-migration.rs b/tests/ui/rust-2024/reserved-guarded-strings-migration.rs new file mode 100644 index 00000000000..5905f2abe32 --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings-migration.rs @@ -0,0 +1,99 @@ +//@ check-pass +//@ run-rustfix +//@ edition:2021 + +#![warn(rust_2024_guarded_string_incompatible_syntax)] + +macro_rules! demo1 { + ( $a:tt ) => { println!("one tokens") }; +} + +macro_rules! demo2 { + ( $a:tt $b:tt ) => { println!("two tokens") }; +} + +macro_rules! demo3 { + ( $a:tt $b:tt $c:tt ) => { println!("three tokens") }; +} + +macro_rules! demo4 { + ( $a:tt $b:tt $c:tt $d:tt ) => { println!("four tokens") }; +} + +macro_rules! demo5 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt ) => { println!("five tokens") }; +} + +macro_rules! 
demo6 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt $f:tt ) => { println!("six tokens") }; +} + + +fn main() { + demo1!(""); + demo2!(# ""); + demo3!(# ""#); + demo2!(# "foo"); + demo3!(# "foo"#); + demo2!("foo"#); + + demo3!(## "foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!(### "foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!(## "foo"#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo6!(### "foo"##); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + + demo4!("foo"###); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + + demo2!(#""); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(#""#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(##""); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo2!(#"foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(##"foo"); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo3!(#"foo"#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo4!(##"foo"#); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + demo5!(##"foo"##); + //~^ WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in Rust 2024 + //~| WARNING parsed as a guarded string in Rust 2024 [rust_2024_guarded_string_incompatible_syntax] + //~| WARNING hard error in 
Rust 2024 +} diff --git a/tests/ui/rust-2024/reserved-guarded-strings-migration.stderr b/tests/ui/rust-2024/reserved-guarded-strings-migration.stderr new file mode 100644 index 00000000000..d7f8e5c9b4b --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings-migration.stderr @@ -0,0 +1,293 @@ +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:40:12 + | +LL | demo3!(## "foo"); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +note: the lint level is defined here + --> $DIR/reserved-guarded-strings-migration.rs:5:9 + | +LL | #![warn(rust_2024_guarded_string_incompatible_syntax)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(# # "foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:43:12 + | +LL | demo4!(### "foo"); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(# ## "foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:43:13 + | +LL | demo4!(### "foo"); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(## # "foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:48:12 + | +LL | demo4!(## "foo"#); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(# # "foo"#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:51:12 + | +LL | demo6!(### "foo"##); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo6!(# ## "foo"##); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:51:13 + | +LL | demo6!(### "foo"##); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! 
+ = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo6!(## # "foo"##); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:51:21 + | +LL | demo6!(### "foo"##); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo6!(### "foo"# #); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:59:17 + | +LL | demo4!("foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!("foo"# ##); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:59:18 + | +LL | demo4!("foo"###); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!("foo"## #); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:65:12 + | +LL | demo2!(#""); + | ^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo2!(# ""); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:68:12 + | +LL | demo3!(#""#); + | ^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(# ""#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:71:12 + | +LL | demo3!(##""); + | ^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(# #""); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:71:13 + | +LL | demo3!(##""); + | ^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! 
+ = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(## ""); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:76:12 + | +LL | demo2!(#"foo"); + | ^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo2!(# "foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:79:12 + | +LL | demo3!(##"foo"); + | ^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(# #"foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:79:13 + | +LL | demo3!(##"foo"); + | ^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(## "foo"); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:84:12 + | +LL | demo3!(#"foo"#); + | ^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo3!(# "foo"#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:87:12 + | +LL | demo4!(##"foo"#); + | ^^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(# #"foo"#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:87:13 + | +LL | demo4!(##"foo"#); + | ^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo4!(## "foo"#); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:92:12 + | +LL | demo5!(##"foo"##); + | ^^^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! 
+ = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(# #"foo"##); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:92:13 + | +LL | demo5!(##"foo"##); + | ^^^^^^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(## "foo"##); + | + + +warning: will be parsed as a guarded string in Rust 2024 + --> $DIR/reserved-guarded-strings-migration.rs:92:19 + | +LL | demo5!(##"foo"##); + | ^^ + | + = warning: this is accepted in the current edition (Rust 2021) but is a hard error in Rust 2024! + = note: for more information, see issue #123735 <https://github.com/rust-lang/rust/issues/123735> +help: insert whitespace here to avoid this being parsed as a guarded string in Rust 2024 + | +LL | demo5!(##"foo"# #); + | + + +warning: 22 warnings emitted + diff --git a/tests/ui/rust-2024/reserved-guarded-strings-via-macro-2.rs b/tests/ui/rust-2024/reserved-guarded-strings-via-macro-2.rs new file mode 100644 index 00000000000..3f9f373ba22 --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings-via-macro-2.rs @@ -0,0 +1,18 @@ +//@ edition:2021 +//@ aux-build:reserved-guarded-strings-macro-2021.rs +//@ aux-build:reserved-guarded-strings-macro-2024.rs + +extern crate reserved_guarded_strings_macro_2021 as m2021; +extern crate reserved_guarded_strings_macro_2024 as m2024; + +fn main() { + // Ok: + m2021::number_of_tokens_in_a_guarded_string_literal!(); + m2021::number_of_tokens_in_a_guarded_unterminated_string_literal!(); + + // Error, even though *this* crate is 2021: + m2024::number_of_tokens_in_a_guarded_string_literal!(); + //~^ ERROR invalid string literal + m2024::number_of_tokens_in_a_guarded_unterminated_string_literal!(); + //~^ ERROR invalid string literal +} diff --git a/tests/ui/rust-2024/reserved-guarded-strings-via-macro-2.stderr b/tests/ui/rust-2024/reserved-guarded-strings-via-macro-2.stderr new file mode 100644 index 00000000000..1074c8a682b --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings-via-macro-2.stderr @@ -0,0 +1,20 @@ +error: invalid string literal + --> $DIR/reserved-guarded-strings-via-macro-2.rs:14:5 + | +LL | m2024::number_of_tokens_in_a_guarded_string_literal!(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 + = note: this error originates in the macro `m2024::number_of_tokens_in_a_guarded_string_literal` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: invalid string literal + --> $DIR/reserved-guarded-strings-via-macro-2.rs:16:5 + | +LL | m2024::number_of_tokens_in_a_guarded_unterminated_string_literal!(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 + = note: this error originates in the macro `m2024::number_of_tokens_in_a_guarded_unterminated_string_literal` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 2 previous errors + diff --git a/tests/ui/rust-2024/reserved-guarded-strings-via-macro.rs 
b/tests/ui/rust-2024/reserved-guarded-strings-via-macro.rs new file mode 100644 index 00000000000..f9e3c1e3c51 --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings-via-macro.rs @@ -0,0 +1,12 @@ +//@ run-pass +//@ compile-flags: -Zunstable-options +//@ edition:2024 +//@ aux-build:reserved-guarded-strings-macro-2021.rs + +extern crate reserved_guarded_strings_macro_2021 as m2021; + +fn main() { + // Ok, even though *this* crate is 2024: + assert_eq!(m2021::number_of_tokens_in_a_guarded_string_literal!(), 3); + assert_eq!(m2021::number_of_tokens_in_a_guarded_unterminated_string_literal!(), 2); +} diff --git a/tests/ui/rust-2024/reserved-guarded-strings.rs b/tests/ui/rust-2024/reserved-guarded-strings.rs new file mode 100644 index 00000000000..dab97039be0 --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings.rs @@ -0,0 +1,74 @@ +//@ compile-flags: -Zunstable-options +//@ edition:2024 +// ignore-tidy-linelength + +macro_rules! demo1 { + ( $a:tt ) => { println!("one tokens") }; +} + +macro_rules! demo2 { + ( $a:tt $b:tt ) => { println!("two tokens") }; +} + +macro_rules! demo3 { + ( $a:tt $b:tt $c:tt ) => { println!("three tokens") }; +} + +macro_rules! demo4 { + ( $a:tt $b:tt $c:tt $d:tt ) => { println!("four tokens") }; +} + +macro_rules! demo5 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt ) => { println!("five tokens") }; +} + +macro_rules! demo6 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt $f:tt ) => { println!("six tokens") }; +} + +macro_rules! demo7 { + ( $a:tt $b:tt $c:tt $d:tt $e:tt $f:tt $g:tt ) => { println!("seven tokens") }; +} + +macro_rules! demon { + ( $($n:tt)* ) => { println!("unknown number of tokens") }; +} + +fn main() { + demo1!(""); + demo2!(# ""); + demo3!(# ""#); + demo2!(# "foo"); + demo3!(# "foo"#); + demo2!("foo"#); + + demo2!(blah"xx"); //~ ERROR prefix `blah` is unknown + demo2!(blah#"xx"#); + //~^ ERROR prefix `blah` is unknown + //~| ERROR invalid string literal + + demo2!(## "foo"); //~ ERROR invalid string literal + demo3!("foo"###); //~ ERROR invalid string literal + demo3!(### "foo"); //~ ERROR invalid string literal + demo3!(## "foo"#); //~ ERROR invalid string literal + demo5!(### "foo"###); + //~^ ERROR invalid string literal + //~| ERROR invalid string literal + + demo1!(#""); //~ ERROR invalid string literal + demo1!(#""#); //~ ERROR invalid string literal + demo1!(####""); //~ ERROR invalid string literal + demo1!(#"foo"); //~ ERROR invalid string literal + demo1!(###"foo"); //~ ERROR invalid string literal + demo1!(#"foo"#); //~ ERROR invalid string literal + demo1!(###"foo"#); //~ ERROR invalid string literal + demo1!(###"foo"##); //~ ERROR invalid string literal + demo1!(###"foo"###); //~ ERROR invalid string literal + demo2!(#"foo"###); + //~^ ERROR invalid string literal + //~| ERROR invalid string literal + + // More than 255 hashes + demon!(####################################################################################################################################################################################################################################################################"foo"); + //~^ ERROR invalid string literal +} diff --git a/tests/ui/rust-2024/reserved-guarded-strings.stderr b/tests/ui/rust-2024/reserved-guarded-strings.stderr new file mode 100644 index 00000000000..f465ba7944a --- /dev/null +++ b/tests/ui/rust-2024/reserved-guarded-strings.stderr @@ -0,0 +1,254 @@ +error: prefix `blah` is unknown + --> $DIR/reserved-guarded-strings.rs:45:12 + | +LL | demo2!(blah"xx"); + | ^^^^ unknown prefix + | + = note: prefixed 
identifiers and literals are reserved since Rust 2021 +help: consider inserting whitespace here + | +LL | demo2!(blah "xx"); + | + + +error: prefix `blah` is unknown + --> $DIR/reserved-guarded-strings.rs:46:12 + | +LL | demo2!(blah#"xx"#); + | ^^^^ unknown prefix + | + = note: prefixed identifiers and literals are reserved since Rust 2021 +help: consider inserting whitespace here + | +LL | demo2!(blah #"xx"#); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:46:16 + | +LL | demo2!(blah#"xx"#); + | ^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo2!(blah# "xx"#); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:50:12 + | +LL | demo2!(## "foo"); + | ^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo2!(# # "foo"); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:51:17 + | +LL | demo3!("foo"###); + | ^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo3!("foo"# ##); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:52:12 + | +LL | demo3!(### "foo"); + | ^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo3!(# ## "foo"); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:53:12 + | +LL | demo3!(## "foo"#); + | ^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo3!(# # "foo"#); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:54:12 + | +LL | demo5!(### "foo"###); + | ^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo5!(# ## "foo"###); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:54:21 + | +LL | demo5!(### "foo"###); + | ^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo5!(### "foo"# ##); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:58:12 + | +LL | demo1!(#""); + | ^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# ""); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:59:12 + | +LL | demo1!(#""#); + | ^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# ""#); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:60:12 + | +LL | demo1!(####""); + | ^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# ###""); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:61:12 + | +LL | demo1!(#"foo"); + | ^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# "foo"); + | + + 
+error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:62:12 + | +LL | demo1!(###"foo"); + | ^^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# ##"foo"); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:63:12 + | +LL | demo1!(#"foo"#); + | ^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# "foo"#); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:64:12 + | +LL | demo1!(###"foo"#); + | ^^^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# ##"foo"#); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:65:12 + | +LL | demo1!(###"foo"##); + | ^^^^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# ##"foo"##); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:66:12 + | +LL | demo1!(###"foo"###); + | ^^^^^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo1!(# ##"foo"###); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:67:12 + | +LL | demo2!(#"foo"###); + | ^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo2!(# "foo"###); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:67:19 + | +LL | demo2!(#"foo"###); + | ^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demo2!(#"foo"## #); + | + + +error: invalid string literal + --> $DIR/reserved-guarded-strings.rs:72:12 + | +LL | ...n!(####################################################################################################################################################################################################################################################################"foo... + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: unprefixed guarded string literals are reserved for future use since Rust 2024 +help: consider inserting whitespace here + | +LL | demon!(# ###################################################################################################################################################################################################################################################################"foo"); + | + + +error: aborting due to 21 previous errors + |
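Taken together, these fixtures exercise the reserved-guarded-strings migration: in edition 2021 a leading `#` before a string literal is still lexed as separate tokens, while Rust 2024 reserves `#"..."` as a guarded string, and the lint's rustfix suggestion simply inserts whitespace. As a rough illustration only (this sketch is not part of the diff above, and the macro name `count2` is hypothetical, mirroring the `demo*` macros in the tests), the pattern looks like this:

```rust
// Illustrative sketch, assuming a 2021-edition crate with the
// rust_2024_guarded_string_incompatible_syntax lint enabled.
#![warn(rust_2024_guarded_string_incompatible_syntax)]

macro_rules! count2 {
    // Matches exactly two token trees, like the demo2! macro in the tests.
    ( $a:tt $b:tt ) => { println!("two tokens") };
}

fn main() {
    // count2!(#"foo");  // warns today; a hard lexer error once the crate moves to Rust 2024
    count2!(# "foo");    // the rustfix result: `#` and `"foo"` remain two separate tokens
}
```

The whitespace keeps the macro's token-count behavior identical across editions, which is why every suggested fix in the .fixed files above is a pure insertion.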
