1162 files changed, 25151 insertions, 13697 deletions
diff --git a/.mailmap b/.mailmap index 92be3174750..d526b43e4eb 100644 --- a/.mailmap +++ b/.mailmap @@ -299,10 +299,14 @@ Joseph T. Lyons <JosephTLyons@gmail.com> <JosephTLyons@users.noreply.github.com> Josh Cotton <jcotton42@outlook.com> Josh Driver <keeperofdakeys@gmail.com> Josh Holmer <jholmer.in@gmail.com> -Joshua Nelson <jyn514@gmail.com> <joshua@yottadb.com> Julian Knodt <julianknodt@gmail.com> jumbatm <jumbatm@gmail.com> <30644300+jumbatm@users.noreply.github.com> Junyoung Cho <june0.cho@samsung.com> +Jynn Nelson <github@jyn.dev> <jyn514@gmail.com> +Jynn Nelson <github@jyn.dev> <joshua@yottadb.com> +Jynn Nelson <github@jyn.dev> <jyn.nelson@redjack.com> +Jynn Nelson <github@jyn.dev> <jnelson@cloudflare.com> +Jynn Nelson <github@jyn.dev> Jyun-Yan You <jyyou.tw@gmail.com> <jyyou@cs.nctu.edu.tw> Kalita Alexey <kalita.alexey@outlook.com> Kampfkarren <boynedmaster@gmail.com> diff --git a/Cargo.lock b/Cargo.lock index 9f1561b503d..fb619d3870f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -513,16 +513,25 @@ checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" [[package]] name = "clippy" -version = "0.1.72" +version = "0.1.73" dependencies = [ "clippy_lints", + "clippy_utils", + "derive-new", "filetime", + "futures", + "if_chain", "itertools", + "parking_lot 0.12.1", + "quote", "regex", "rustc_tools_util", + "serde", + "syn 2.0.8", "tempfile", "termize", "tester", + "tokio", "toml 0.7.5", "ui_test", "walkdir", @@ -543,7 +552,7 @@ dependencies = [ [[package]] name = "clippy_lints" -version = "0.1.72" +version = "0.1.73" dependencies = [ "arrayvec", "cargo_metadata", @@ -567,26 +576,8 @@ dependencies = [ ] [[package]] -name = "clippy_test_deps" -version = "0.1.0" -dependencies = [ - "clippy_lints", - "clippy_utils", - "derive-new", - "futures", - "if_chain", - "itertools", - "parking_lot 0.12.1", - "quote", - "regex", - "serde", - "syn 2.0.8", - "tokio", -] - -[[package]] name = "clippy_utils" -version = "0.1.72" +version = "0.1.73" dependencies = [ "arrayvec", "if_chain", @@ -847,7 +838,7 @@ checksum = "a0afaad2b26fa326569eb264b1363e8ae3357618c43982b3f285f0774ce76b69" [[package]] name = "declare_clippy_lint" -version = "0.1.72" +version = "0.1.73" dependencies = [ "itertools", "quote", @@ -3665,6 +3656,7 @@ name = "rustc_hir_typeck" version = "0.1.0" dependencies = [ "rustc_ast", + "rustc_attr", "rustc_data_structures", "rustc_errors", "rustc_fluent_macro", diff --git a/Cargo.toml b/Cargo.toml index 53bf9a8af2c..d2e84d5426f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -9,7 +9,6 @@ members = [ "src/tools/cargotest", "src/tools/clippy", "src/tools/clippy/clippy_dev", - "src/tools/clippy/clippy_test_deps", "src/tools/compiletest", "src/tools/error_index_generator", "src/tools/linkchecker", diff --git a/RELEASES.md b/RELEASES.md index 419c20b9071..3205b02e5c4 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -203,7 +203,7 @@ Stabilized APIs - [`Default for std::collections::binary_heap::IntoIter`](https://doc.rust-lang.org/stable/std/collections/binary_heap/struct.IntoIter.html) - [`Default for std::collections::btree_map::{IntoIter, Iter, IterMut}`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.IntoIter.html) - [`Default for std::collections::btree_map::{IntoKeys, Keys}`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.IntoKeys.html) -- [`Default for std::collections::btree_map::{IntoValues, Values}`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.IntoKeys.html) +- [`Default for std::collections::btree_map::{IntoValues, 
Values}`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.IntoValues.html) - [`Default for std::collections::btree_map::Range`](https://doc.rust-lang.org/stable/std/collections/btree_map/struct.Range.html) - [`Default for std::collections::btree_set::{IntoIter, Iter}`](https://doc.rust-lang.org/stable/std/collections/btree_set/struct.IntoIter.html) - [`Default for std::collections::btree_set::Range`](https://doc.rust-lang.org/stable/std/collections/btree_set/struct.Range.html) @@ -2618,7 +2618,7 @@ related tools. [`OsStr::to_ascii_lowercase`]: https://doc.rust-lang.org/std/ffi/struct.OsStr.html#method.to_ascii_lowercase [`OsStr::to_ascii_uppercase`]: https://doc.rust-lang.org/std/ffi/struct.OsStr.html#method.to_ascii_uppercase [`Peekable::peek_mut`]: https://doc.rust-lang.org/std/iter/struct.Peekable.html#method.peek_mut -[`Rc::decrement_strong_count`]: https://doc.rust-lang.org/std/rc/struct.Rc.html#method.increment_strong_count +[`Rc::decrement_strong_count`]: https://doc.rust-lang.org/std/rc/struct.Rc.html#method.decrement_strong_count [`Rc::increment_strong_count`]: https://doc.rust-lang.org/std/rc/struct.Rc.html#method.increment_strong_count [`Vec::extend_from_within`]: https://doc.rust-lang.org/beta/std/vec/struct.Vec.html#method.extend_from_within [`array::from_mut`]: https://doc.rust-lang.org/beta/std/array/fn.from_mut.html @@ -2627,7 +2627,7 @@ related tools. [`cmp::max_by`]: https://doc.rust-lang.org/beta/std/cmp/fn.max_by.html [`cmp::min_by_key`]: https://doc.rust-lang.org/beta/std/cmp/fn.min_by_key.html [`cmp::min_by`]: https://doc.rust-lang.org/beta/std/cmp/fn.min_by.html -[`f32::is_subnormal`]: https://doc.rust-lang.org/std/primitive.f64.html#method.is_subnormal +[`f32::is_subnormal`]: https://doc.rust-lang.org/std/primitive.f32.html#method.is_subnormal [`f64::is_subnormal`]: https://doc.rust-lang.org/std/primitive.f64.html#method.is_subnormal [ietf6943]: https://datatracker.ietf.org/doc/html/rfc6943#section-3.1.1 @@ -2963,7 +2963,7 @@ Internal Only [`sync::OnceState`]: https://doc.rust-lang.org/stable/std/sync/struct.OnceState.html [`panic::panic_any`]: https://doc.rust-lang.org/stable/std/panic/fn.panic_any.html [`slice::strip_prefix`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.strip_prefix -[`slice::strip_suffix`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.strip_prefix +[`slice::strip_suffix`]: https://doc.rust-lang.org/stable/std/primitive.slice.html#method.strip_suffix [`Arc::increment_strong_count`]: https://doc.rust-lang.org/nightly/std/sync/struct.Arc.html#method.increment_strong_count [`Arc::decrement_strong_count`]: https://doc.rust-lang.org/nightly/std/sync/struct.Arc.html#method.decrement_strong_count [`slice::fill_with`]: https://doc.rust-lang.org/nightly/std/primitive.slice.html#method.fill_with @@ -8033,7 +8033,7 @@ Compatibility Notes [39379]: https://github.com/rust-lang/rust/pull/39379 [41105]: https://github.com/rust-lang/rust/issues/41105 [`<*const T>::wrapping_offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_offset -[`<*mut T>::wrapping_offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_offset +[`<*mut T>::wrapping_offset`]: https://doc.rust-lang.org/std/primitive.pointer.html#method.wrapping_offset-1 [`Duration::checked_add`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.checked_add [`Duration::checked_div`]: https://doc.rust-lang.org/std/time/struct.Duration.html#method.checked_div [`Duration::checked_mul`]: 
https://doc.rust-lang.org/std/time/struct.Duration.html#method.checked_mul @@ -9011,7 +9011,7 @@ Stabilized APIs * [`f64::to_radians`](https://doc.rust-lang.org/std/primitive.f64.html#method.to_radians) (in libcore - previously stabilized in libstd) * [`Iterator::sum`](https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.sum) -* [`Iterator::product`](https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.sum) +* [`Iterator::product`](https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.product) * [`Cell::get_mut`](https://doc.rust-lang.org/std/cell/struct.Cell.html#method.get_mut) * [`RefCell::get_mut`](https://doc.rust-lang.org/std/cell/struct.RefCell.html#method.get_mut) diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs index f6875d895d3..aea88641f82 100644 --- a/compiler/rustc_abi/src/layout.rs +++ b/compiler/rustc_abi/src/layout.rs @@ -40,6 +40,8 @@ pub trait LayoutCalculator { largest_niche, align, size, + max_repr_align: None, + unadjusted_abi_align: align.abi, } } @@ -122,6 +124,8 @@ pub trait LayoutCalculator { largest_niche: None, align: dl.i8_align, size: Size::ZERO, + max_repr_align: None, + unadjusted_abi_align: dl.i8_align.abi, } } @@ -289,6 +293,9 @@ pub trait LayoutCalculator { } let mut align = dl.aggregate_align; + let mut max_repr_align = repr.align; + let mut unadjusted_abi_align = align.abi; + let mut variant_layouts = variants .iter_enumerated() .map(|(j, v)| { @@ -296,6 +303,8 @@ pub trait LayoutCalculator { st.variants = Variants::Single { index: j }; align = align.max(st.align); + max_repr_align = max_repr_align.max(st.max_repr_align); + unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align); Some(st) }) @@ -422,6 +431,8 @@ pub trait LayoutCalculator { largest_niche, size, align, + max_repr_align, + unadjusted_abi_align, }; Some(TmpLayout { layout, variants: variant_layouts }) @@ -456,6 +467,9 @@ pub trait LayoutCalculator { let (min_ity, signed) = discr_range_of_repr(min, max); //Integer::repr_discr(tcx, ty, &repr, min, max); let mut align = dl.aggregate_align; + let mut max_repr_align = repr.align; + let mut unadjusted_abi_align = align.abi; + let mut size = Size::ZERO; // We're interested in the smallest alignment, so start large. @@ -498,6 +512,8 @@ pub trait LayoutCalculator { } size = cmp::max(size, st.size); align = align.max(st.align); + max_repr_align = max_repr_align.max(st.max_repr_align); + unadjusted_abi_align = unadjusted_abi_align.max(st.unadjusted_abi_align); Some(st) }) .collect::<Option<IndexVec<VariantIdx, _>>>()?; @@ -691,6 +707,8 @@ pub trait LayoutCalculator { abi, align, size, + max_repr_align, + unadjusted_abi_align, }; let tagged_layout = TmpLayout { layout: tagged_layout, variants: layout_variants }; @@ -730,10 +748,7 @@ pub trait LayoutCalculator { let dl = self.current_data_layout(); let dl = dl.borrow(); let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align }; - - if let Some(repr_align) = repr.align { - align = align.max(AbiAndPrefAlign::new(repr_align)); - } + let mut max_repr_align = repr.align; // If all the non-ZST fields have the same ABI and union ABI optimizations aren't // disabled, we can use that common ABI for the union as a whole. 
@@ -751,6 +766,7 @@ pub trait LayoutCalculator { assert!(field.0.is_sized()); align = align.max(field.align()); + max_repr_align = max_repr_align.max(field.max_repr_align()); size = cmp::max(size, field.size()); if field.0.is_zst() { @@ -787,6 +803,14 @@ pub trait LayoutCalculator { if let Some(pack) = repr.pack { align = align.min(AbiAndPrefAlign::new(pack)); } + // The unadjusted ABI alignment does not include repr(align), but does include repr(pack). + // See documentation on `LayoutS::unadjusted_abi_align`. + let unadjusted_abi_align = align.abi; + if let Some(repr_align) = repr.align { + align = align.max(AbiAndPrefAlign::new(repr_align)); + } + // `align` must not be modified after this, or `unadjusted_abi_align` could be inaccurate. + let align = align; // If all non-ZST fields have the same ABI, we may forward that ABI // for the union as a whole, unless otherwise inhibited. @@ -809,6 +833,8 @@ pub trait LayoutCalculator { largest_niche: None, align, size: size.align_to(align.abi), + max_repr_align, + unadjusted_abi_align, }) } } @@ -829,6 +855,7 @@ fn univariant( ) -> Option<LayoutS> { let pack = repr.pack; let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align }; + let mut max_repr_align = repr.align; let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect(); let optimize = !repr.inhibit_struct_field_reordering_opt(); if optimize && fields.len() > 1 { @@ -997,6 +1024,7 @@ fn univariant( }; offset = offset.align_to(field_align.abi); align = align.max(field_align); + max_repr_align = max_repr_align.max(field.max_repr_align()); debug!("univariant offset: {:?} field: {:#?}", offset, field); offsets[i] = offset; @@ -1018,9 +1046,16 @@ fn univariant( offset = offset.checked_add(field.size(), dl)?; } + + // The unadjusted ABI alignment does not include repr(align), but does include repr(pack). + // See documentation on `LayoutS::unadjusted_abi_align`. + let unadjusted_abi_align = align.abi; if let Some(repr_align) = repr.align { align = align.max(AbiAndPrefAlign::new(repr_align)); } + // `align` must not be modified after this point, or `unadjusted_abi_align` could be inaccurate. + let align = align; + debug!("univariant min_size: {:?}", offset); let min_size = offset; // As stated above, inverse_memory_index holds field indices by increasing offset. @@ -1036,6 +1071,7 @@ fn univariant( inverse_memory_index.into_iter().map(FieldIdx::as_u32).collect() }; let size = min_size.align_to(align.abi); + let mut layout_of_single_non_zst_field = None; let mut abi = Abi::Aggregate { sized }; // Unpack newtype ABIs and find scalar pairs. if sized && size.bytes() > 0 { @@ -1045,6 +1081,8 @@ fn univariant( match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) { // We have exactly one non-ZST field. (Some((i, field)), None, None) => { + layout_of_single_non_zst_field = Some(field); + // Field fills the struct and it has a scalar or scalar pair ABI. if offsets[i].bytes() == 0 && align.abi == field.align().abi && size == field.size() { @@ -1102,6 +1140,19 @@ fn univariant( if fields.iter().any(|f| f.abi().is_uninhabited()) { abi = Abi::Uninhabited; } + + let unadjusted_abi_align = if repr.transparent() { + match layout_of_single_non_zst_field { + Some(l) => l.unadjusted_abi_align(), + None => { + // `repr(transparent)` with all ZST fields. 
+ align.abi + } + } + } else { + unadjusted_abi_align + }; + Some(LayoutS { variants: Variants::Single { index: FIRST_VARIANT }, fields: FieldsShape::Arbitrary { offsets, memory_index }, @@ -1109,6 +1160,8 @@ fn univariant( largest_niche, align, size, + max_repr_align, + unadjusted_abi_align, }) } diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs index e1b9987f578..ef0c763ac20 100644 --- a/compiler/rustc_abi/src/lib.rs +++ b/compiler/rustc_abi/src/lib.rs @@ -1531,6 +1531,16 @@ pub struct LayoutS { pub align: AbiAndPrefAlign, pub size: Size, + + /// The largest alignment explicitly requested with `repr(align)` on this type or any field. + /// Only used on i686-windows, where the argument passing ABI is different when alignment is + /// requested, even if the requested alignment is equal to the natural alignment. + pub max_repr_align: Option<Align>, + + /// The alignment the type would have, ignoring any `repr(align)` but including `repr(packed)`. + /// Only used on aarch64-linux, where the argument passing ABI ignores the requested alignment + /// in some cases. + pub unadjusted_abi_align: Align, } impl LayoutS { @@ -1545,6 +1555,8 @@ impl LayoutS { largest_niche, size, align, + max_repr_align: None, + unadjusted_abi_align: align.abi, } } } @@ -1554,7 +1566,16 @@ impl fmt::Debug for LayoutS { // This is how `Layout` used to print before it become // `Interned<LayoutS>`. We print it like this to avoid having to update // expected output in a lot of tests. - let LayoutS { size, align, abi, fields, largest_niche, variants } = self; + let LayoutS { + size, + align, + abi, + fields, + largest_niche, + variants, + max_repr_align, + unadjusted_abi_align, + } = self; f.debug_struct("Layout") .field("size", size) .field("align", align) @@ -1562,6 +1583,8 @@ impl fmt::Debug for LayoutS { .field("fields", fields) .field("largest_niche", largest_niche) .field("variants", variants) + .field("max_repr_align", max_repr_align) + .field("unadjusted_abi_align", unadjusted_abi_align) .finish() } } @@ -1602,6 +1625,14 @@ impl<'a> Layout<'a> { self.0.0.size } + pub fn max_repr_align(self) -> Option<Align> { + self.0.0.max_repr_align + } + + pub fn unadjusted_abi_align(self) -> Align { + self.0.0.unadjusted_abi_align + } + /// Whether the layout is from a type that implements [`std::marker::PointerLike`]. /// /// Currently, that means that the type is pointer-sized, pointer-aligned, diff --git a/compiler/rustc_attr/src/builtin.rs b/compiler/rustc_attr/src/builtin.rs index 372a58857f3..6cce3a56f08 100644 --- a/compiler/rustc_attr/src/builtin.rs +++ b/compiler/rustc_attr/src/builtin.rs @@ -1217,3 +1217,20 @@ pub fn parse_alignment(node: &ast::LitKind) -> Result<u32, &'static str> { Err("not an unsuffixed integer") } } + +/// Read the content of a `rustc_confusables` attribute, and return the list of candidate names. +pub fn parse_confusables(attr: &Attribute) -> Option<Vec<Symbol>> { + let meta = attr.meta()?; + let MetaItem { kind: MetaItemKind::List(ref metas), .. 
} = meta else { return None }; + + let mut candidates = Vec::new(); + + for meta in metas { + let NestedMetaItem::Lit(meta_lit) = meta else { + return None; + }; + candidates.push(meta_lit.symbol); + } + + return Some(candidates); +} diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index b08d2f90d26..97c3e0b879a 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -363,21 +363,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { } } let hir = self.infcx.tcx.hir(); - if let Some(hir::Node::Item(hir::Item { - kind: hir::ItemKind::Fn(_, _, body_id), - .. - })) = hir.find(self.mir_hir_id()) - && let Some(hir::Node::Expr(expr)) = hir.find(body_id.hir_id) - { + if let Some(body_id) = hir.maybe_body_owned_by(self.mir_def_id()) { + let expr = hir.body(body_id).value; let place = &self.move_data.move_paths[mpi].place; - let span = place.as_local() - .map(|local| self.body.local_decls[local].source_info.span); - let mut finder = ExpressionFinder { - expr_span: move_span, - expr: None, - pat: None, - parent_pat: None, - }; + let span = place.as_local().map(|local| self.body.local_decls[local].source_info.span); + let mut finder = + ExpressionFinder { expr_span: move_span, expr: None, pat: None, parent_pat: None }; finder.visit_expr(expr); if let Some(span) = span && let Some(expr) = finder.expr { for (_, expr) in hir.parent_iter(expr.hir_id) { @@ -461,7 +452,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { } = move_spans { // We already suggest cloning for these cases in `explain_captures`. } else { - self.suggest_cloning(err, ty, move_span); + self.suggest_cloning(err, ty, expr, move_span); } } } @@ -704,7 +695,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { .find_map(find_fn_kind_from_did), ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. 
}) => tcx .explicit_item_bounds(def_id) - .arg_iter_copied(tcx, args) + .iter_instantiated_copied(tcx, args) .find_map(|(clause, span)| find_fn_kind_from_did((clause, span))), ty::Closure(_, args) => match args.as_closure().kind() { ty::ClosureKind::Fn => Some(hir::Mutability::Not), @@ -736,9 +727,21 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { true } - fn suggest_cloning(&self, err: &mut Diagnostic, ty: Ty<'tcx>, span: Span) { + fn suggest_cloning( + &self, + err: &mut Diagnostic, + ty: Ty<'tcx>, + expr: &hir::Expr<'_>, + span: Span, + ) { let tcx = self.infcx.tcx; // Try to find predicates on *generic params* that would allow copying `ty` + let suggestion = + if let Some(symbol) = tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { + format!(": {}.clone()", symbol) + } else { + ".clone()".to_owned() + }; if let Some(clone_trait_def) = tcx.lang_items().clone_trait() && self.infcx .type_implements_trait( @@ -751,7 +754,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { err.span_suggestion_verbose( span.shrink_to_hi(), "consider cloning the value if the performance cost is acceptable", - ".clone()", + suggestion, Applicability::MachineApplicable, ); } diff --git a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs index a0a145ef70a..3c32121a51a 100644 --- a/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/mutability_errors.rs @@ -2,7 +2,6 @@ use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed use rustc_hir as hir; use rustc_hir::intravisit::Visitor; use rustc_hir::Node; -use rustc_middle::hir::map::Map; use rustc_middle::mir::{Mutability, Place, PlaceRef, ProjectionElem}; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::{ @@ -646,14 +645,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { } let hir_map = self.infcx.tcx.hir(); let def_id = self.body.source.def_id(); - let hir_id = hir_map.local_def_id_to_hir_id(def_id.as_local().unwrap()); - let node = hir_map.find(hir_id); - let Some(hir::Node::Item(item)) = node else { - return; - }; - let hir::ItemKind::Fn(.., body_id) = item.kind else { - return; - }; + let Some(local_def_id) = def_id.as_local() else { return }; + let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id) else { return }; let body = self.infcx.tcx.hir().body(body_id); let mut v = V { assign_span: span, err, ty, suggested: false }; @@ -790,23 +783,12 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { // In the future, attempt in all path but initially for RHS of for_loop fn suggest_similar_mut_method_for_for_loop(&self, err: &mut Diagnostic) { use hir::{ - BodyId, Expr, + Expr, ExprKind::{Block, Call, DropTemps, Match, MethodCall}, - HirId, ImplItem, ImplItemKind, Item, ItemKind, }; - fn maybe_body_id_of_fn(hir_map: Map<'_>, id: HirId) -> Option<BodyId> { - match hir_map.find(id) { - Some(Node::Item(Item { kind: ItemKind::Fn(_, _, body_id), .. })) - | Some(Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(_, body_id), .. })) => { - Some(*body_id) - } - _ => None, - } - } let hir_map = self.infcx.tcx.hir(); - let mir_body_hir_id = self.mir_hir_id(); - if let Some(fn_body_id) = maybe_body_id_of_fn(hir_map, mir_body_hir_id) { + if let Some(body_id) = hir_map.maybe_body_owned_by(self.mir_def_id()) { if let Block( hir::Block { expr: @@ -840,7 +822,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { .. 
}, _, - ) = hir_map.body(fn_body_id).value.kind + ) = hir_map.body(body_id).value.kind { let opt_suggestions = self .infcx @@ -1102,46 +1084,45 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { } let hir_map = self.infcx.tcx.hir(); let def_id = self.body.source.def_id(); - let hir_id = hir_map.local_def_id_to_hir_id(def_id.expect_local()); - let node = hir_map.find(hir_id); - let hir_id = if let Some(hir::Node::Item(item)) = node - && let hir::ItemKind::Fn(.., body_id) = item.kind - { - let body = hir_map.body(body_id); - let mut v = BindingFinder { - span: err_label_span, - hir_id: None, + let hir_id = if let Some(local_def_id) = def_id.as_local() && + let Some(body_id) = hir_map.maybe_body_owned_by(local_def_id) + { + let body = hir_map.body(body_id); + let mut v = BindingFinder { + span: err_label_span, + hir_id: None, + }; + v.visit_body(body); + v.hir_id + } else { + None }; - v.visit_body(body); - v.hir_id - } else { - None - }; + if let Some(hir_id) = hir_id && let Some(hir::Node::Local(local)) = hir_map.find(hir_id) - { - let (changing, span, sugg) = match local.ty { - Some(ty) => ("changing", ty.span, message), - None => ( - "specifying", - local.pat.span.shrink_to_hi(), - format!(": {message}"), - ), - }; - err.span_suggestion_verbose( - span, - format!("consider {changing} this binding's type"), - sugg, - Applicability::HasPlaceholders, - ); - } else { - err.span_label( - err_label_span, - format!( - "consider changing this binding's type to be: `{message}`" - ), - ); - } + { + let (changing, span, sugg) = match local.ty { + Some(ty) => ("changing", ty.span, message), + None => ( + "specifying", + local.pat.span.shrink_to_hi(), + format!(": {message}"), + ), + }; + err.span_suggestion_verbose( + span, + format!("consider {changing} this binding's type"), + sugg, + Applicability::HasPlaceholders, + ); + } else { + err.span_label( + err_label_span, + format!( + "consider changing this binding's type to be: `{message}`" + ), + ); + } } None => {} } diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 4ba09335cb7..9865b6a72ee 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -1134,9 +1134,14 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef<'b>, enum_def: &'b EnumDef, type_ident: Ident, - selflike_args: ThinVec<P<Expr>>, + mut selflike_args: ThinVec<P<Expr>>, nonselflike_args: &[P<Expr>], ) -> BlockOrExpr { + assert!( + !selflike_args.is_empty(), + "static methods must use `expand_static_enum_method_body`", + ); + let span = trait_.span; let variants = &enum_def.variants; @@ -1144,10 +1149,15 @@ impl<'a> MethodDef<'a> { let unify_fieldless_variants = self.fieldless_variants_strategy == FieldlessVariantsStrategy::Unify; - // There is no sensible code to be generated for *any* deriving on a - // zero-variant enum. So we just generate a failing expression. + // For zero-variant enum, this function body is unreachable. Generate + // `match *self {}`. This produces machine code identical to `unsafe { + // core::intrinsics::unreachable() }` while being safe and stable. 
if variants.is_empty() { - return BlockOrExpr(ThinVec::new(), Some(deriving::call_unreachable(cx, span))); + selflike_args.truncate(1); + let match_arg = cx.expr_deref(span, selflike_args.pop().unwrap()); + let match_arms = ThinVec::new(); + let expr = cx.expr_match(span, match_arg, match_arms); + return BlockOrExpr(ThinVec::new(), Some(expr)); } let prefixes = iter::once("__self".to_string()) diff --git a/compiler/rustc_builtin_macros/src/standard_library_imports.rs b/compiler/rustc_builtin_macros/src/standard_library_imports.rs index 6493c6f13d5..07e6288ed8c 100644 --- a/compiler/rustc_builtin_macros/src/standard_library_imports.rs +++ b/compiler/rustc_builtin_macros/src/standard_library_imports.rs @@ -44,20 +44,29 @@ pub fn inject( // .rev() to preserve ordering above in combination with insert(0, ...) for &name in names.iter().rev() { - let ident = if edition >= Edition2018 { - Ident::new(name, span) + let ident_span = if edition >= Edition2018 { span } else { call_site }; + let item = if name == sym::compiler_builtins { + // compiler_builtins is a private implementation detail. We only + // need to insert it into the crate graph for linking and should not + // expose any of its public API. + // + // FIXME(#113634) We should inject this during post-processing like + // we do for the panic runtime, profiler runtime, etc. + cx.item( + span, + Ident::new(kw::Underscore, ident_span), + thin_vec![], + ast::ItemKind::ExternCrate(Some(name)), + ) } else { - Ident::new(name, call_site) - }; - krate.items.insert( - 0, cx.item( span, - ident, + Ident::new(name, ident_span), thin_vec![cx.attr_word(sym::macro_use, span)], ast::ItemKind::ExternCrate(None), - ), - ); + ) + }; + krate.items.insert(0, item); } // The crates have been injected, the assumption is that the first one is diff --git a/compiler/rustc_codegen_cranelift/src/abi/comments.rs b/compiler/rustc_codegen_cranelift/src/abi/comments.rs index 364503fd363..ade6968de2b 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/comments.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/comments.rs @@ -80,14 +80,7 @@ pub(super) fn add_local_place_comments<'tcx>( return; } let TyAndLayout { ty, layout } = place.layout(); - let rustc_target::abi::LayoutS { - size, - align, - abi: _, - variants: _, - fields: _, - largest_niche: _, - } = layout.0.0; + let rustc_target::abi::LayoutS { size, align, .. } = layout.0.0; let (kind, extra) = place.debug_comment(); diff --git a/compiler/rustc_codegen_cranelift/src/driver/mod.rs b/compiler/rustc_codegen_cranelift/src/driver/mod.rs index 5c52c9c18ad..12e90b58410 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/mod.rs @@ -5,7 +5,7 @@ //! 
[`codegen_static`]: crate::constant::codegen_static use rustc_data_structures::profiling::SelfProfilerRef; -use rustc_middle::mir::mono::{Linkage as RLinkage, MonoItem, Visibility}; +use rustc_middle::mir::mono::{MonoItem, MonoItemData}; use crate::prelude::*; @@ -16,11 +16,11 @@ pub(crate) mod jit; fn predefine_mono_items<'tcx>( tcx: TyCtxt<'tcx>, module: &mut dyn Module, - mono_items: &[(MonoItem<'tcx>, (RLinkage, Visibility))], + mono_items: &[(MonoItem<'tcx>, MonoItemData)], ) { tcx.prof.generic_activity("predefine functions").run(|| { let is_compiler_builtins = tcx.is_compiler_builtins(LOCAL_CRATE); - for &(mono_item, (linkage, visibility)) in mono_items { + for &(mono_item, data) in mono_items { match mono_item { MonoItem::Fn(instance) => { let name = tcx.symbol_name(instance).name; @@ -29,8 +29,8 @@ fn predefine_mono_items<'tcx>( get_function_sig(tcx, module.target_config().default_call_conv, instance); let linkage = crate::linkage::get_clif_linkage( mono_item, - linkage, - visibility, + data.linkage, + data.visibility, is_compiler_builtins, ); module.declare_function(name, linkage, &sig).unwrap(); diff --git a/compiler/rustc_codegen_gcc/src/base.rs b/compiler/rustc_codegen_gcc/src/base.rs index dcd560b3dcd..9e614ca4ace 100644 --- a/compiler/rustc_codegen_gcc/src/base.rs +++ b/compiler/rustc_codegen_gcc/src/base.rs @@ -159,8 +159,8 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_i let cx = CodegenCx::new(&context, cgu, tcx, supports_128bit_integers); let mono_items = cgu.items_in_deterministic_order(tcx); - for &(mono_item, (linkage, visibility)) in &mono_items { - mono_item.predefine::<Builder<'_, '_, '_>>(&cx, linkage, visibility); + for &(mono_item, data) in &mono_items { + mono_item.predefine::<Builder<'_, '_, '_>>(&cx, data.linkage, data.visibility); } // ... and now that we have everything pre-defined, fill out those definitions. diff --git a/compiler/rustc_codegen_llvm/src/base.rs b/compiler/rustc_codegen_llvm/src/base.rs index 5b2bbdb4bde..5b5f81c0329 100644 --- a/compiler/rustc_codegen_llvm/src/base.rs +++ b/compiler/rustc_codegen_llvm/src/base.rs @@ -86,8 +86,8 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen { let cx = CodegenCx::new(tcx, cgu, &llvm_module); let mono_items = cx.codegen_unit.items_in_deterministic_order(cx.tcx); - for &(mono_item, (linkage, visibility)) in &mono_items { - mono_item.predefine::<Builder<'_, '_, '_>>(&cx, linkage, visibility); + for &(mono_item, data) in &mono_items { + mono_item.predefine::<Builder<'_, '_, '_>>(&cx, data.linkage, data.visibility); } // ... and now that we have everything pre-defined, fill out those definitions. 
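For context on the `repr` semantics that the new `max_repr_align` and `unadjusted_abi_align` layout fields (added in the `rustc_abi` hunks earlier in this diff) keep track of: `repr(align)` can only raise a type's alignment above its natural alignment, while `repr(packed)` lowers it. A minimal stand-alone sketch of those effects, in plain stable Rust (illustrative only, not code from this diff):

#![allow(dead_code)]
use std::mem::align_of;

// `repr(align(8))` raises the struct's alignment above the natural 1-byte alignment of `u8`.
#[repr(align(8))]
struct Aligned(u8);

// `repr(packed)` lowers the alignment of the whole struct (and its fields) to 1.
#[repr(packed)]
struct Packed(u16, u8);

// A plain struct keeps the natural alignment of its most-aligned field (`u16` here).
struct Natural(u16, u8);

fn main() {
    assert_eq!(align_of::<Aligned>(), 8);
    assert_eq!(align_of::<Packed>(), 1);
    assert_eq!(align_of::<Natural>(), 2);
}

Per the doc comments added on `LayoutS`, the distinction matters because the i686-windows argument-passing ABI differs when an alignment was explicitly requested, and the aarch64-linux ABI ignores the requested alignment in some cases, so the layout now records both the `repr(align)`-adjusted and the unadjusted alignment.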
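Similarly, the derive-expansion change above replaces `call_unreachable` with `match *self {}` for zero-variant enums. A minimal sketch of why an empty match on an uninhabited type is accepted and diverges (stable Rust, names are illustrative):

// An enum with no variants can never be constructed.
enum Never {}

fn diverge(x: &Never) -> ! {
    // `Never` has no variants, so a match with zero arms is exhaustive,
    // and the whole expression has type `!` without any unsafe code.
    match *x {}
}

fn main() {
    // `diverge` can never actually be called, since no `Never` value exists;
    // we only demonstrate that it type-checks as a function pointer.
    let _f: fn(&Never) -> ! = diverge;
}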
diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs index a1ff2aa6625..6e815ab3ff5 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mapgen.rs @@ -63,7 +63,7 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) { let mut function_data = Vec::new(); for (instance, function_coverage) in function_coverage_map { debug!("Generate function coverage for {}, {:?}", cx.codegen_unit.name(), instance); - let mangled_function_name = tcx.symbol_name(instance).to_string(); + let mangled_function_name = tcx.symbol_name(instance).name; let source_hash = function_coverage.source_hash(); let is_used = function_coverage.is_used(); let (expressions, counter_regions) = @@ -95,7 +95,7 @@ pub fn finalize(cx: &CodegenCx<'_, '_>) { let filenames_size = filenames_buffer.len(); let filenames_val = cx.const_bytes(&filenames_buffer); - let filenames_ref = coverageinfo::hash_bytes(filenames_buffer); + let filenames_ref = coverageinfo::hash_bytes(&filenames_buffer); // Generate the LLVM IR representation of the coverage map and store it in a well-known global let cov_data_val = mapgen.generate_coverage_map(cx, version, filenames_size, filenames_val); @@ -228,7 +228,7 @@ impl CoverageMapGenerator { /// specific, well-known section and name. fn save_function_record( cx: &CodegenCx<'_, '_>, - mangled_function_name: String, + mangled_function_name: &str, source_hash: u64, filenames_ref: u64, coverage_mapping_buffer: Vec<u8>, @@ -238,7 +238,7 @@ fn save_function_record( let coverage_mapping_size = coverage_mapping_buffer.len(); let coverage_mapping_val = cx.const_bytes(&coverage_mapping_buffer); - let func_name_hash = coverageinfo::hash_str(&mangled_function_name); + let func_name_hash = coverageinfo::hash_bytes(mangled_function_name.as_bytes()); let func_name_hash_val = cx.const_u64(func_name_hash); let coverage_mapping_size_val = cx.const_u32(coverage_mapping_size as u32); let source_hash_val = cx.const_u64(source_hash); diff --git a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs index 56a5c9e9061..1f8f4033931 100644 --- a/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs +++ b/compiler/rustc_codegen_llvm/src/coverageinfo/mod.rs @@ -373,12 +373,7 @@ pub(crate) fn write_mapping_to_buffer( } } -pub(crate) fn hash_str(strval: &str) -> u64 { - let strval = CString::new(strval).expect("null error converting hashable str to C string"); - unsafe { llvm::LLVMRustCoverageHashCString(strval.as_ptr()) } -} - -pub(crate) fn hash_bytes(bytes: Vec<u8>) -> u64 { +pub(crate) fn hash_bytes(bytes: &[u8]) -> u64 { unsafe { llvm::LLVMRustCoverageHashByteArray(bytes.as_ptr().cast(), bytes.len()) } } diff --git a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs index e1dfc1b2dd6..605f0154773 100644 --- a/compiler/rustc_codegen_llvm/src/llvm/ffi.rs +++ b/compiler/rustc_codegen_llvm/src/llvm/ffi.rs @@ -1916,7 +1916,6 @@ extern "C" { ); pub fn LLVMRustCoverageCreatePGOFuncNameVar(F: &Value, FuncName: *const c_char) -> &Value; - pub fn LLVMRustCoverageHashCString(StrVal: *const c_char) -> u64; pub fn LLVMRustCoverageHashByteArray(Bytes: *const c_char, NumBytes: size_t) -> u64; #[allow(improper_ctypes)] diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index b603a878746..0dfb41f42f0 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ 
b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -1392,6 +1392,11 @@ fn print_native_static_libs( let mut lib_args: Vec<_> = all_native_libs .iter() .filter(|l| relevant_lib(sess, l)) + // Deduplication of successive repeated libraries, see rust-lang/rust#113209 + // + // note: we don't use PartialEq/Eq because NativeLib transitively depends on local + // elements like spans, which we don't care about and would make the deduplication impossible + .dedup_by(|l1, l2| l1.name == l2.name && l1.kind == l2.kind && l1.verbatim == l2.verbatim) .filter_map(|lib| { let name = lib.name; match lib.kind { diff --git a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs index 406048bfe05..cbe7e519079 100644 --- a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs +++ b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs @@ -328,14 +328,14 @@ fn exported_symbols_provider_local( let (_, cgus) = tcx.collect_and_partition_mono_items(()); - for (mono_item, &(linkage, visibility)) in cgus.iter().flat_map(|cgu| cgu.items().iter()) { - if linkage != Linkage::External { + for (mono_item, data) in cgus.iter().flat_map(|cgu| cgu.items().iter()) { + if data.linkage != Linkage::External { // We can only re-use things with external linkage, otherwise // we'll get a linker error continue; } - if need_visibility && visibility == Visibility::Hidden { + if need_visibility && data.visibility == Visibility::Hidden { // If we potentially share things from Rust dylibs, they must // not be hidden continue; diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 269c12e7a68..ed608bdbe9a 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -23,6 +23,8 @@ use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode, Reg}; use rustc_target::abi::{self, HasDataLayout, WrappingRange}; use rustc_target::spec::abi::Abi; +use std::cmp; + // Indicates if we are in the middle of merging a BB's successor into it. This // can happen when BB jumps directly to its successor and the successor has no // other predecessors. @@ -1360,36 +1362,58 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // Force by-ref if we have to load through a cast pointer. let (mut llval, align, by_ref) = match op.val { Immediate(_) | Pair(..) => match arg.mode { - PassMode::Indirect { .. } | PassMode::Cast(..) => { + PassMode::Indirect { attrs, .. } => { + // Indirect argument may have higher alignment requirements than the type's alignment. + // This can happen, e.g. when passing types with <4 byte alignment on the stack on x86. + let required_align = match attrs.pointee_align { + Some(pointee_align) => cmp::max(pointee_align, arg.layout.align.abi), + None => arg.layout.align.abi, + }; + let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align); + op.val.store(bx, scratch); + (scratch.llval, scratch.align, true) + } + PassMode::Cast(..) => { let scratch = PlaceRef::alloca(bx, arg.layout); op.val.store(bx, scratch); (scratch.llval, scratch.align, true) } _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false), }, - Ref(llval, _, align) => { - if arg.is_indirect() && align < arg.layout.align.abi { - // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I - // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't - // have scary latent bugs around. 
- - let scratch = PlaceRef::alloca(bx, arg.layout); - base::memcpy_ty( - bx, - scratch.llval, - scratch.align, - llval, - align, - op.layout, - MemFlags::empty(), - ); - (scratch.llval, scratch.align, true) - } else { - (llval, align, true) + Ref(llval, _, align) => match arg.mode { + PassMode::Indirect { attrs, .. } => { + let required_align = match attrs.pointee_align { + Some(pointee_align) => cmp::max(pointee_align, arg.layout.align.abi), + None => arg.layout.align.abi, + }; + if align < required_align { + // For `foo(packed.large_field)`, and types with <4 byte alignment on x86, + // alignment requirements may be higher than the type's alignment, so copy + // to a higher-aligned alloca. + let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align); + base::memcpy_ty( + bx, + scratch.llval, + scratch.align, + llval, + align, + op.layout, + MemFlags::empty(), + ); + (scratch.llval, scratch.align, true) + } else { + (llval, align, true) + } } - } + _ => (llval, align, true), + }, ZeroSized => match arg.mode { - PassMode::Indirect { .. } => { + PassMode::Indirect { on_stack, .. } => { + if on_stack { + // It doesn't seem like any target can have `byval` ZSTs, so this assert + // is here to replace a would-be untested codepath. + bug!("ZST {op:?} passed on stack with abi {arg:?}"); + } // Though `extern "Rust"` doesn't pass ZSTs, some ABIs pass // a pointer for `repr(C)` structs even when empty, so get // one from an `alloca` (which can be left uninitialized). diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs index ab493ae5c1f..90eab55f76e 100644 --- a/compiler/rustc_codegen_ssa/src/mir/place.rs +++ b/compiler/rustc_codegen_ssa/src/mir/place.rs @@ -48,9 +48,17 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { bx: &mut Bx, layout: TyAndLayout<'tcx>, ) -> Self { + Self::alloca_aligned(bx, layout, layout.align.abi) + } + + pub fn alloca_aligned<Bx: BuilderMethods<'a, 'tcx, Value = V>>( + bx: &mut Bx, + layout: TyAndLayout<'tcx>, + align: Align, + ) -> Self { assert!(layout.is_sized(), "tried to statically allocate unsized place"); - let tmp = bx.alloca(bx.cx().backend_type(layout), layout.align.abi); - Self::new_sized(tmp, layout) + let tmp = bx.alloca(bx.cx().backend_type(layout), align); + Self::new_sized_aligned(tmp, layout, align) } /// Returns a place for an indirect reference to an unsized place. diff --git a/compiler/rustc_const_eval/messages.ftl b/compiler/rustc_const_eval/messages.ftl index e99005316b3..d8eade5bd2a 100644 --- a/compiler/rustc_const_eval/messages.ftl +++ b/compiler/rustc_const_eval/messages.ftl @@ -399,6 +399,9 @@ const_eval_unallowed_mutable_refs_raw = const_eval_unallowed_op_in_const_context = {$msg} +const_eval_unavailable_target_features_for_fn = + calling a function that requires unavailable target features: {$unavailable_feats} + const_eval_undefined_behavior = it is undefined behavior to use this value diff --git a/compiler/rustc_const_eval/src/interpret/terminator.rs b/compiler/rustc_const_eval/src/interpret/terminator.rs index c944782b487..7964c6be008 100644 --- a/compiler/rustc_const_eval/src/interpret/terminator.rs +++ b/compiler/rustc_const_eval/src/interpret/terminator.rs @@ -503,6 +503,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } } + // Check that all target features required by the callee (i.e., from + // the attribute `#[target_feature(enable = ...)]`) are enabled at + // compile time. 
+ self.check_fn_target_features(instance)?; + if !callee_fn_abi.can_unwind { // The callee cannot unwind, so force the `Unreachable` unwind handling. unwind = mir::UnwindAction::Unreachable; @@ -786,6 +791,31 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { } } + fn check_fn_target_features(&self, instance: ty::Instance<'tcx>) -> InterpResult<'tcx, ()> { + let attrs = self.tcx.codegen_fn_attrs(instance.def_id()); + if attrs + .target_features + .iter() + .any(|feature| !self.tcx.sess.target_features.contains(feature)) + { + throw_ub_custom!( + fluent::const_eval_unavailable_target_features_for_fn, + unavailable_feats = attrs + .target_features + .iter() + .filter(|&feature| !self.tcx.sess.target_features.contains(feature)) + .fold(String::new(), |mut s, feature| { + if !s.is_empty() { + s.push_str(", "); + } + s.push_str(feature.as_str()); + s + }), + ); + } + Ok(()) + } + fn drop_in_place( &mut self, place: &PlaceTy<'tcx, M::Provenance>, diff --git a/compiler/rustc_driver_impl/src/lib.rs b/compiler/rustc_driver_impl/src/lib.rs index 2e26ce111f0..25c043149e8 100644 --- a/compiler/rustc_driver_impl/src/lib.rs +++ b/compiler/rustc_driver_impl/src/lib.rs @@ -279,9 +279,6 @@ fn run_compiler( let sopts = config::build_session_options(&mut early_error_handler, &matches); - // Set parallel mode before thread pool creation, which will create `Lock`s. - interface::set_thread_safe_mode(&sopts.unstable_opts); - if let Some(ref code) = matches.opt_str("explain") { handle_explain(&early_error_handler, diagnostics_registry(), code, sopts.color); return Ok(()); diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index a70671dd9fb..a183cfd8776 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -625,6 +625,12 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ ErrorFollowing, INTERNAL_UNSTABLE ), + rustc_attr!( + rustc_confusables, Normal, + template!(List: r#""name1", "name2", ..."#), + ErrorFollowing, + INTERNAL_UNSTABLE, + ), // Enumerates "identity-like" conversion methods to suggest on type mismatch. 
rustc_attr!( rustc_conversion_suggestion, Normal, template!(Word), WarnFollowing, INTERNAL_UNSTABLE diff --git a/compiler/rustc_hir_analysis/src/astconv/errors.rs b/compiler/rustc_hir_analysis/src/astconv/errors.rs index 8aaa005a24d..7f4927bbb98 100644 --- a/compiler/rustc_hir_analysis/src/astconv/errors.rs +++ b/compiler/rustc_hir_analysis/src/astconv/errors.rs @@ -123,7 +123,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { let all_candidate_names: Vec<_> = all_candidates() .flat_map(|r| self.tcx().associated_items(r.def_id()).in_definition_order()) .filter_map(|item| { - if item.opt_rpitit_info.is_none() && item.kind == ty::AssocKind::Type { + if !item.is_impl_trait_in_trait() && item.kind == ty::AssocKind::Type { Some(item.name) } else { None @@ -164,7 +164,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { self.tcx().associated_items(*trait_def_id).in_definition_order() }) .filter_map(|item| { - if item.opt_rpitit_info.is_none() && item.kind == ty::AssocKind::Type { + if !item.is_impl_trait_in_trait() && item.kind == ty::AssocKind::Type { Some(item.name) } else { None diff --git a/compiler/rustc_hir_analysis/src/astconv/object_safety.rs b/compiler/rustc_hir_analysis/src/astconv/object_safety.rs index 034f2e4da01..a36ec0c5730 100644 --- a/compiler/rustc_hir_analysis/src/astconv/object_safety.rs +++ b/compiler/rustc_hir_analysis/src/astconv/object_safety.rs @@ -173,7 +173,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { tcx.associated_items(pred.def_id()) .in_definition_order() .filter(|item| item.kind == ty::AssocKind::Type) - .filter(|item| item.opt_rpitit_info.is_none()) + .filter(|item| !item.is_impl_trait_in_trait()) .map(|item| item.def_id), ); } diff --git a/compiler/rustc_hir_analysis/src/check/check.rs b/compiler/rustc_hir_analysis/src/check/check.rs index a4ef8e43527..d9e14096954 100644 --- a/compiler/rustc_hir_analysis/src/check/check.rs +++ b/compiler/rustc_hir_analysis/src/check/check.rs @@ -8,7 +8,7 @@ use rustc_attr as attr; use rustc_errors::{Applicability, ErrorGuaranteed, MultiSpan}; use rustc_hir as hir; use rustc_hir::def::{CtorKind, DefKind, Res}; -use rustc_hir::def_id::{DefId, LocalDefId}; +use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_ID}; use rustc_hir::intravisit::Visitor; use rustc_hir::{ItemKind, Node, PathSegment}; use rustc_infer::infer::opaque_types::ConstrainOpaqueTypeRegionVisitor; @@ -1378,6 +1378,9 @@ pub(super) fn check_mod_item_types(tcx: TyCtxt<'_>, module_def_id: LocalDefId) { for id in module.items() { check_item_type(tcx, id); } + if module_def_id == CRATE_DEF_ID { + super::entry::check_for_entry_fn(tcx); + } } fn async_opaque_type_cycle_error(tcx: TyCtxt<'_>, span: Span) -> ErrorGuaranteed { diff --git a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs index 2075b1add39..89877280a73 100644 --- a/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs +++ b/compiler/rustc_hir_analysis/src/check/compare_impl_item.rs @@ -867,7 +867,7 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for ImplTraitInTraitCollector<'_, 'tcx> { }); self.types.insert(proj.def_id, (infer_ty, proj.args)); // Recurse into bounds - for (pred, pred_span) in self.interner().explicit_item_bounds(proj.def_id).arg_iter_copied(self.interner(), proj.args) { + for (pred, pred_span) in self.interner().explicit_item_bounds(proj.def_id).iter_instantiated_copied(self.interner(), proj.args) { let pred = pred.fold_with(self); let pred = self.ocx.normalize( &ObligationCause::misc(self.span, self.body_id), @@ -1322,7 +1322,7 @@ fn 
compare_number_of_generics<'tcx>( // has mismatched type or const generic arguments, then the method that it's // inheriting the generics from will also have mismatched arguments, and // we'll report an error for that instead. Delay a bug for safety, though. - if trait_.opt_rpitit_info.is_some() { + if trait_.is_impl_trait_in_trait() { return Err(tcx.sess.delay_span_bug( rustc_span::DUMMY_SP, "errors comparing numbers of generics of trait/impl functions were not emitted", @@ -2116,7 +2116,7 @@ pub(super) fn check_type_bounds<'tcx>( // A synthetic impl Trait for RPITIT desugaring has no HIR, which we currently use to get the // span for an impl's associated type. Instead, for these, use the def_span for the synthesized // associated type. - let impl_ty_span = if impl_ty.opt_rpitit_info.is_some() { + let impl_ty_span = if impl_ty.is_impl_trait_in_trait() { tcx.def_span(impl_ty_def_id) } else { match tcx.hir().get_by_def_id(impl_ty_def_id) { @@ -2149,7 +2149,7 @@ pub(super) fn check_type_bounds<'tcx>( let obligations: Vec<_> = tcx .explicit_item_bounds(trait_ty.def_id) - .arg_iter_copied(tcx, rebased_args) + .iter_instantiated_copied(tcx, rebased_args) .map(|(concrete_ty_bound, span)| { debug!("check_type_bounds: concrete_ty_bound = {:?}", concrete_ty_bound); traits::Obligation::new(tcx, mk_cause(span), param_env, concrete_ty_bound) diff --git a/compiler/rustc_hir_analysis/src/check/entry.rs b/compiler/rustc_hir_analysis/src/check/entry.rs new file mode 100644 index 00000000000..fcaefe0261b --- /dev/null +++ b/compiler/rustc_hir_analysis/src/check/entry.rs @@ -0,0 +1,277 @@ +use rustc_hir as hir; +use rustc_hir::Node; +use rustc_infer::infer::TyCtxtInferExt; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_session::config::EntryFnType; +use rustc_span::def_id::{DefId, LocalDefId, CRATE_DEF_ID}; +use rustc_span::{symbol::sym, Span}; +use rustc_target::spec::abi::Abi; +use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _; +use rustc_trait_selection::traits::{self, ObligationCause, ObligationCauseCode}; + +use std::ops::Not; + +use crate::errors; +use crate::require_same_types; + +pub(crate) fn check_for_entry_fn(tcx: TyCtxt<'_>) { + match tcx.entry_fn(()) { + Some((def_id, EntryFnType::Main { .. })) => check_main_fn_ty(tcx, def_id), + Some((def_id, EntryFnType::Start)) => check_start_fn_ty(tcx, def_id), + _ => {} + } +} + +fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { + let main_fnsig = tcx.fn_sig(main_def_id).instantiate_identity(); + let main_span = tcx.def_span(main_def_id); + + fn main_fn_diagnostics_def_id(tcx: TyCtxt<'_>, def_id: DefId, sp: Span) -> LocalDefId { + if let Some(local_def_id) = def_id.as_local() { + let hir_type = tcx.type_of(local_def_id).instantiate_identity(); + if !matches!(hir_type.kind(), ty::FnDef(..)) { + span_bug!(sp, "main has a non-function type: found `{}`", hir_type); + } + local_def_id + } else { + CRATE_DEF_ID + } + } + + fn main_fn_generics_params_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> { + if !def_id.is_local() { + return None; + } + let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local()); + match tcx.hir().find(hir_id) { + Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. 
})) => { + generics.params.is_empty().not().then_some(generics.span) + } + _ => { + span_bug!(tcx.def_span(def_id), "main has a non-function type"); + } + } + } + + fn main_fn_where_clauses_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> { + if !def_id.is_local() { + return None; + } + let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local()); + match tcx.hir().find(hir_id) { + Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. })) => { + Some(generics.where_clause_span) + } + _ => { + span_bug!(tcx.def_span(def_id), "main has a non-function type"); + } + } + } + + fn main_fn_asyncness_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> { + if !def_id.is_local() { + return None; + } + Some(tcx.def_span(def_id)) + } + + fn main_fn_return_type_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> { + if !def_id.is_local() { + return None; + } + let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local()); + match tcx.hir().find(hir_id) { + Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(fn_sig, _, _), .. })) => { + Some(fn_sig.decl.output.span()) + } + _ => { + span_bug!(tcx.def_span(def_id), "main has a non-function type"); + } + } + } + + let mut error = false; + let main_diagnostics_def_id = main_fn_diagnostics_def_id(tcx, main_def_id, main_span); + let main_fn_generics = tcx.generics_of(main_def_id); + let main_fn_predicates = tcx.predicates_of(main_def_id); + if main_fn_generics.count() != 0 || !main_fnsig.bound_vars().is_empty() { + let generics_param_span = main_fn_generics_params_span(tcx, main_def_id); + tcx.sess.emit_err(errors::MainFunctionGenericParameters { + span: generics_param_span.unwrap_or(main_span), + label_span: generics_param_span, + }); + error = true; + } else if !main_fn_predicates.predicates.is_empty() { + // generics may bring in implicit predicates, so we skip this check if generics is present. + let generics_where_clauses_span = main_fn_where_clauses_span(tcx, main_def_id); + tcx.sess.emit_err(errors::WhereClauseOnMain { + span: generics_where_clauses_span.unwrap_or(main_span), + generics_span: generics_where_clauses_span, + }); + error = true; + } + + let main_asyncness = tcx.asyncness(main_def_id); + if let hir::IsAsync::Async = main_asyncness { + let asyncness_span = main_fn_asyncness_span(tcx, main_def_id); + tcx.sess.emit_err(errors::MainFunctionAsync { span: main_span, asyncness: asyncness_span }); + error = true; + } + + for attr in tcx.get_attrs(main_def_id, sym::track_caller) { + tcx.sess.emit_err(errors::TrackCallerOnMain { span: attr.span, annotated: main_span }); + error = true; + } + + if !tcx.codegen_fn_attrs(main_def_id).target_features.is_empty() + // Calling functions with `#[target_feature]` is not unsafe on WASM, see #84988 + && !tcx.sess.target.is_like_wasm + && !tcx.sess.opts.actually_rustdoc + { + tcx.sess.emit_err(errors::TargetFeatureOnMain { main: main_span }); + error = true; + } + + if error { + return; + } + + // Main should have no WC, so empty param env is OK here. 
+ let param_env = ty::ParamEnv::empty(); + let expected_return_type; + if let Some(term_did) = tcx.lang_items().termination() { + let return_ty = main_fnsig.output(); + let return_ty_span = main_fn_return_type_span(tcx, main_def_id).unwrap_or(main_span); + if !return_ty.bound_vars().is_empty() { + tcx.sess.emit_err(errors::MainFunctionReturnTypeGeneric { span: return_ty_span }); + error = true; + } + let return_ty = return_ty.skip_binder(); + let infcx = tcx.infer_ctxt().build(); + let cause = traits::ObligationCause::new( + return_ty_span, + main_diagnostics_def_id, + ObligationCauseCode::MainFunctionType, + ); + let ocx = traits::ObligationCtxt::new(&infcx); + let norm_return_ty = ocx.normalize(&cause, param_env, return_ty); + ocx.register_bound(cause, param_env, norm_return_ty, term_did); + let errors = ocx.select_all_or_error(); + if !errors.is_empty() { + infcx.err_ctxt().report_fulfillment_errors(&errors); + error = true; + } + // now we can take the return type of the given main function + expected_return_type = main_fnsig.output(); + } else { + // standard () main return type + expected_return_type = ty::Binder::dummy(Ty::new_unit(tcx)); + } + + if error { + return; + } + + let se_ty = Ty::new_fn_ptr( + tcx, + expected_return_type.map_bound(|expected_return_type| { + tcx.mk_fn_sig([], expected_return_type, false, hir::Unsafety::Normal, Abi::Rust) + }), + ); + + require_same_types( + tcx, + &ObligationCause::new( + main_span, + main_diagnostics_def_id, + ObligationCauseCode::MainFunctionType, + ), + param_env, + se_ty, + Ty::new_fn_ptr(tcx, main_fnsig), + ); +} + +fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) { + let start_def_id = start_def_id.expect_local(); + let start_id = tcx.hir().local_def_id_to_hir_id(start_def_id); + let start_span = tcx.def_span(start_def_id); + let start_t = tcx.type_of(start_def_id).instantiate_identity(); + match start_t.kind() { + ty::FnDef(..) => { + if let Some(Node::Item(it)) = tcx.hir().find(start_id) { + if let hir::ItemKind::Fn(sig, generics, _) = &it.kind { + let mut error = false; + if !generics.params.is_empty() { + tcx.sess.emit_err(errors::StartFunctionParameters { span: generics.span }); + error = true; + } + if generics.has_where_clause_predicates { + tcx.sess.emit_err(errors::StartFunctionWhere { + span: generics.where_clause_span, + }); + error = true; + } + if let hir::IsAsync::Async = sig.header.asyncness { + let span = tcx.def_span(it.owner_id); + tcx.sess.emit_err(errors::StartAsync { span: span }); + error = true; + } + + let attrs = tcx.hir().attrs(start_id); + for attr in attrs { + if attr.has_name(sym::track_caller) { + tcx.sess.emit_err(errors::StartTrackCaller { + span: attr.span, + start: start_span, + }); + error = true; + } + if attr.has_name(sym::target_feature) + // Calling functions with `#[target_feature]` is + // not unsafe on WASM, see #84988 + && !tcx.sess.target.is_like_wasm + && !tcx.sess.opts.actually_rustdoc + { + tcx.sess.emit_err(errors::StartTargetFeature { + span: attr.span, + start: start_span, + }); + error = true; + } + } + + if error { + return; + } + } + } + + let se_ty = Ty::new_fn_ptr( + tcx, + ty::Binder::dummy(tcx.mk_fn_sig( + [tcx.types.isize, Ty::new_imm_ptr(tcx, Ty::new_imm_ptr(tcx, tcx.types.u8))], + tcx.types.isize, + false, + hir::Unsafety::Normal, + Abi::Rust, + )), + ); + + require_same_types( + tcx, + &ObligationCause::new( + start_span, + start_def_id, + ObligationCauseCode::StartFunctionType, + ), + ty::ParamEnv::empty(), // start should not have any where bounds. 
+ se_ty, + Ty::new_fn_ptr(tcx, tcx.fn_sig(start_def_id).instantiate_identity()), + ); + } + _ => { + span_bug!(start_span, "start has a non-function type: found `{}`", start_t); + } + } +} diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index 1805a08bbb2..def7a3a9d88 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -65,6 +65,7 @@ a type parameter). mod check; mod compare_impl_item; pub mod dropck; +mod entry; pub mod intrinsic; pub mod intrinsicck; mod region; @@ -188,7 +189,7 @@ fn missing_items_err( full_impl_span: Span, ) { let missing_items = - missing_items.iter().filter(|trait_item| trait_item.opt_rpitit_info.is_none()); + missing_items.iter().filter(|trait_item| !trait_item.is_impl_trait_in_trait()); let missing_items_msg = missing_items .clone() @@ -408,7 +409,7 @@ fn fn_sig_suggestion<'tcx>( let asyncness = if tcx.asyncness(assoc.def_id).is_async() { output = if let ty::Alias(_, alias_ty) = *output.kind() { tcx.explicit_item_bounds(alias_ty.def_id) - .arg_iter_copied(tcx, alias_ty.args) + .iter_instantiated_copied(tcx, alias_ty.args) .find_map(|(bound, _)| bound.as_projection_clause()?.no_bound_vars()?.term.ty()) .unwrap_or_else(|| { span_bug!( diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index 443d072f992..4e194f1c381 100644 --- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -1567,7 +1567,7 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for ImplTraitInTraitFinder<'_, 'tcx> { }); for (bound, bound_span) in tcx .explicit_item_bounds(opaque_ty.def_id) - .arg_iter_copied(tcx, opaque_ty.args) + .iter_instantiated_copied(tcx, opaque_ty.args) { let bound = self.wfcx.normalize(bound_span, None, bound); self.wfcx.register_obligations(traits::wf::predicate_obligations( diff --git a/compiler/rustc_hir_analysis/src/lib.rs b/compiler/rustc_hir_analysis/src/lib.rs index 677d94f1731..d7e62457f36 100644 --- a/compiler/rustc_hir_analysis/src/lib.rs +++ b/compiler/rustc_hir_analysis/src/lib.rs @@ -99,20 +99,16 @@ use rustc_errors::ErrorGuaranteed; use rustc_errors::{DiagnosticMessage, SubdiagnosticMessage}; use rustc_fluent_macro::fluent_messages; use rustc_hir as hir; -use rustc_hir::Node; use rustc_infer::infer::TyCtxtInferExt; use rustc_middle::middle; use rustc_middle::query::Providers; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_middle::util; -use rustc_session::{config::EntryFnType, parse::feature_err}; -use rustc_span::def_id::{DefId, LocalDefId, CRATE_DEF_ID}; +use rustc_session::parse::feature_err; use rustc_span::{symbol::sym, Span, DUMMY_SP}; use rustc_target::spec::abi::Abi; use rustc_trait_selection::traits::error_reporting::TypeErrCtxtExt as _; -use rustc_trait_selection::traits::{self, ObligationCause, ObligationCauseCode, ObligationCtxt}; - -use std::ops::Not; +use rustc_trait_selection::traits::{self, ObligationCause, ObligationCtxt}; use astconv::{AstConv, OnlySelfBounds}; use bounds::Bounds; @@ -177,267 +173,6 @@ fn require_same_types<'tcx>( } } -fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) { - let main_fnsig = tcx.fn_sig(main_def_id).instantiate_identity(); - let main_span = tcx.def_span(main_def_id); - - fn main_fn_diagnostics_def_id(tcx: TyCtxt<'_>, def_id: DefId, sp: Span) -> LocalDefId { - if let Some(local_def_id) = def_id.as_local() { - let hir_type = tcx.type_of(local_def_id).instantiate_identity(); - if 
!matches!(hir_type.kind(), ty::FnDef(..)) { - span_bug!(sp, "main has a non-function type: found `{}`", hir_type); - } - local_def_id - } else { - CRATE_DEF_ID - } - } - - fn main_fn_generics_params_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> { - if !def_id.is_local() { - return None; - } - let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local()); - match tcx.hir().find(hir_id) { - Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. })) => { - generics.params.is_empty().not().then_some(generics.span) - } - _ => { - span_bug!(tcx.def_span(def_id), "main has a non-function type"); - } - } - } - - fn main_fn_where_clauses_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> { - if !def_id.is_local() { - return None; - } - let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local()); - match tcx.hir().find(hir_id) { - Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(_, generics, _), .. })) => { - Some(generics.where_clause_span) - } - _ => { - span_bug!(tcx.def_span(def_id), "main has a non-function type"); - } - } - } - - fn main_fn_asyncness_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> { - if !def_id.is_local() { - return None; - } - Some(tcx.def_span(def_id)) - } - - fn main_fn_return_type_span(tcx: TyCtxt<'_>, def_id: DefId) -> Option<Span> { - if !def_id.is_local() { - return None; - } - let hir_id = tcx.hir().local_def_id_to_hir_id(def_id.expect_local()); - match tcx.hir().find(hir_id) { - Some(Node::Item(hir::Item { kind: hir::ItemKind::Fn(fn_sig, _, _), .. })) => { - Some(fn_sig.decl.output.span()) - } - _ => { - span_bug!(tcx.def_span(def_id), "main has a non-function type"); - } - } - } - - let mut error = false; - let main_diagnostics_def_id = main_fn_diagnostics_def_id(tcx, main_def_id, main_span); - let main_fn_generics = tcx.generics_of(main_def_id); - let main_fn_predicates = tcx.predicates_of(main_def_id); - if main_fn_generics.count() != 0 || !main_fnsig.bound_vars().is_empty() { - let generics_param_span = main_fn_generics_params_span(tcx, main_def_id); - tcx.sess.emit_err(errors::MainFunctionGenericParameters { - span: generics_param_span.unwrap_or(main_span), - label_span: generics_param_span, - }); - error = true; - } else if !main_fn_predicates.predicates.is_empty() { - // generics may bring in implicit predicates, so we skip this check if generics is present. - let generics_where_clauses_span = main_fn_where_clauses_span(tcx, main_def_id); - tcx.sess.emit_err(errors::WhereClauseOnMain { - span: generics_where_clauses_span.unwrap_or(main_span), - generics_span: generics_where_clauses_span, - }); - error = true; - } - - let main_asyncness = tcx.asyncness(main_def_id); - if let hir::IsAsync::Async = main_asyncness { - let asyncness_span = main_fn_asyncness_span(tcx, main_def_id); - tcx.sess.emit_err(errors::MainFunctionAsync { span: main_span, asyncness: asyncness_span }); - error = true; - } - - for attr in tcx.get_attrs(main_def_id, sym::track_caller) { - tcx.sess.emit_err(errors::TrackCallerOnMain { span: attr.span, annotated: main_span }); - error = true; - } - - if !tcx.codegen_fn_attrs(main_def_id).target_features.is_empty() - // Calling functions with `#[target_feature]` is not unsafe on WASM, see #84988 - && !tcx.sess.target.is_like_wasm - && !tcx.sess.opts.actually_rustdoc - { - tcx.sess.emit_err(errors::TargetFeatureOnMain { main: main_span }); - error = true; - } - - if error { - return; - } - - // Main should have no WC, so empty param env is OK here. 
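// Illustrative aside (standalone example, not code from this patch): the
// `Termination` bound that this check registers on `main`'s return type is
// what allows user programs to return something other than `()` from `main`,
// e.g. a `Result`:
use std::num::ParseIntError;

fn main() -> Result<(), ParseIntError> {
    let n: i32 = "7".parse()?;
    assert_eq!(n, 7);
    Ok(())
}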
- let param_env = ty::ParamEnv::empty(); - let expected_return_type; - if let Some(term_did) = tcx.lang_items().termination() { - let return_ty = main_fnsig.output(); - let return_ty_span = main_fn_return_type_span(tcx, main_def_id).unwrap_or(main_span); - if !return_ty.bound_vars().is_empty() { - tcx.sess.emit_err(errors::MainFunctionReturnTypeGeneric { span: return_ty_span }); - error = true; - } - let return_ty = return_ty.skip_binder(); - let infcx = tcx.infer_ctxt().build(); - let cause = traits::ObligationCause::new( - return_ty_span, - main_diagnostics_def_id, - ObligationCauseCode::MainFunctionType, - ); - let ocx = traits::ObligationCtxt::new(&infcx); - let norm_return_ty = ocx.normalize(&cause, param_env, return_ty); - ocx.register_bound(cause, param_env, norm_return_ty, term_did); - let errors = ocx.select_all_or_error(); - if !errors.is_empty() { - infcx.err_ctxt().report_fulfillment_errors(&errors); - error = true; - } - // now we can take the return type of the given main function - expected_return_type = main_fnsig.output(); - } else { - // standard () main return type - expected_return_type = ty::Binder::dummy(Ty::new_unit(tcx)); - } - - if error { - return; - } - - let se_ty = Ty::new_fn_ptr( - tcx, - expected_return_type.map_bound(|expected_return_type| { - tcx.mk_fn_sig([], expected_return_type, false, hir::Unsafety::Normal, Abi::Rust) - }), - ); - - require_same_types( - tcx, - &ObligationCause::new( - main_span, - main_diagnostics_def_id, - ObligationCauseCode::MainFunctionType, - ), - param_env, - se_ty, - Ty::new_fn_ptr(tcx, main_fnsig), - ); -} -fn check_start_fn_ty(tcx: TyCtxt<'_>, start_def_id: DefId) { - let start_def_id = start_def_id.expect_local(); - let start_id = tcx.hir().local_def_id_to_hir_id(start_def_id); - let start_span = tcx.def_span(start_def_id); - let start_t = tcx.type_of(start_def_id).instantiate_identity(); - match start_t.kind() { - ty::FnDef(..) => { - if let Some(Node::Item(it)) = tcx.hir().find(start_id) { - if let hir::ItemKind::Fn(sig, generics, _) = &it.kind { - let mut error = false; - if !generics.params.is_empty() { - tcx.sess.emit_err(errors::StartFunctionParameters { span: generics.span }); - error = true; - } - if generics.has_where_clause_predicates { - tcx.sess.emit_err(errors::StartFunctionWhere { - span: generics.where_clause_span, - }); - error = true; - } - if let hir::IsAsync::Async = sig.header.asyncness { - let span = tcx.def_span(it.owner_id); - tcx.sess.emit_err(errors::StartAsync { span: span }); - error = true; - } - - let attrs = tcx.hir().attrs(start_id); - for attr in attrs { - if attr.has_name(sym::track_caller) { - tcx.sess.emit_err(errors::StartTrackCaller { - span: attr.span, - start: start_span, - }); - error = true; - } - if attr.has_name(sym::target_feature) - // Calling functions with `#[target_feature]` is - // not unsafe on WASM, see #84988 - && !tcx.sess.target.is_like_wasm - && !tcx.sess.opts.actually_rustdoc - { - tcx.sess.emit_err(errors::StartTargetFeature { - span: attr.span, - start: start_span, - }); - error = true; - } - } - - if error { - return; - } - } - } - - let se_ty = Ty::new_fn_ptr( - tcx, - ty::Binder::dummy(tcx.mk_fn_sig( - [tcx.types.isize, Ty::new_imm_ptr(tcx, Ty::new_imm_ptr(tcx, tcx.types.u8))], - tcx.types.isize, - false, - hir::Unsafety::Normal, - Abi::Rust, - )), - ); - - require_same_types( - tcx, - &ObligationCause::new( - start_span, - start_def_id, - ObligationCauseCode::StartFunctionType, - ), - ty::ParamEnv::empty(), // start should not have any where bounds. 
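// Illustrative aside (standalone example, not code from this patch): the
// expected `#[start]` signature that this check compares against, written as
// an ordinary fn-pointer type. `StartFn` and `my_start` are made-up names
// used only for the sketch.
type StartFn = fn(isize, *const *const u8) -> isize;

fn my_start(argc: isize, _argv: *const *const u8) -> isize {
    argc
}

fn main() {
    // The coercion succeeds only when the signature matches exactly, which
    // is essentially what `require_same_types` enforces for `#[start]`.
    let f: StartFn = my_start;
    assert_eq!(f(3, std::ptr::null()), 3);
}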
- se_ty, - Ty::new_fn_ptr(tcx, tcx.fn_sig(start_def_id).instantiate_identity()), - ); - } - _ => { - span_bug!(start_span, "start has a non-function type: found `{}`", start_t); - } - } -} - -fn check_for_entry_fn(tcx: TyCtxt<'_>) { - match tcx.entry_fn(()) { - Some((def_id, EntryFnType::Main { .. })) => check_main_fn_ty(tcx, def_id), - Some((def_id, EntryFnType::Start)) => check_start_fn_ty(tcx, def_id), - _ => {} - } -} - pub fn provide(providers: &mut Providers) { collect::provide(providers); coherence::provide(providers); @@ -513,7 +248,6 @@ pub fn check_crate(tcx: TyCtxt<'_>) -> Result<(), ErrorGuaranteed> { }); check_unused::check_crate(tcx); - check_for_entry_fn(tcx); if let Some(reported) = tcx.sess.has_errors() { Err(reported) } else { Ok(()) } } diff --git a/compiler/rustc_hir_analysis/src/variance/mod.rs b/compiler/rustc_hir_analysis/src/variance/mod.rs index 199cdabb7e9..1ef257e87d6 100644 --- a/compiler/rustc_hir_analysis/src/variance/mod.rs +++ b/compiler/rustc_hir_analysis/src/variance/mod.rs @@ -145,7 +145,7 @@ fn variance_of_opaque(tcx: TyCtxt<'_>, item_def_id: LocalDefId) -> &[ty::Varianc let mut collector = OpaqueTypeLifetimeCollector { tcx, root_def_id: item_def_id.to_def_id(), variances }; let id_args = ty::GenericArgs::identity_for_item(tcx, item_def_id); - for (pred, _) in tcx.explicit_item_bounds(item_def_id).arg_iter_copied(tcx, id_args) { + for (pred, _) in tcx.explicit_item_bounds(item_def_id).iter_instantiated_copied(tcx, id_args) { debug!(?pred); // We only ignore opaque type args if the opaque type is the outermost type. diff --git a/compiler/rustc_hir_typeck/Cargo.toml b/compiler/rustc_hir_typeck/Cargo.toml index 13e1ea31c4d..ce91d023a0a 100644 --- a/compiler/rustc_hir_typeck/Cargo.toml +++ b/compiler/rustc_hir_typeck/Cargo.toml @@ -9,6 +9,7 @@ edition = "2021" smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" rustc_ast = { path = "../rustc_ast" } +rustc_attr = { path = "../rustc_attr" } rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_graphviz = { path = "../rustc_graphviz" } diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs index f2a43cc414d..119ed2fa408 100644 --- a/compiler/rustc_hir_typeck/src/_match.rs +++ b/compiler/rustc_hir_typeck/src/_match.rs @@ -551,8 +551,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } for ty in [first_ty, second_ty] { - for (clause, _) in - self.tcx.explicit_item_bounds(rpit_def_id).arg_iter_copied(self.tcx, args) + for (clause, _) in self + .tcx + .explicit_item_bounds(rpit_def_id) + .iter_instantiated_copied(self.tcx, args) { let pred = clause.kind().rebind(match clause.kind().skip_binder() { ty::ClauseKind::Trait(trait_pred) => { diff --git a/compiler/rustc_hir_typeck/src/closure.rs b/compiler/rustc_hir_typeck/src/closure.rs index e7df9ecf383..affeee55e79 100644 --- a/compiler/rustc_hir_typeck/src/closure.rs +++ b/compiler/rustc_hir_typeck/src/closure.rs @@ -177,7 +177,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { expected_ty, self.tcx .explicit_item_bounds(def_id) - .arg_iter_copied(self.tcx, args) + .iter_instantiated_copied(self.tcx, args) .map(|(c, s)| (c.as_predicate(), s)), ), ty::Dynamic(ref object_type, ..) => { @@ -720,13 +720,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. 
}) => self .tcx .explicit_item_bounds(def_id) - .arg_iter_copied(self.tcx, args) + .iter_instantiated_copied(self.tcx, args) .find_map(|(p, s)| get_future_output(p.as_predicate(), s))?, ty::Error(_) => return None, ty::Alias(ty::Projection, proj) if self.tcx.is_impl_trait_in_trait(proj.def_id) => self .tcx .explicit_item_bounds(proj.def_id) - .arg_iter_copied(self.tcx, proj.args) + .iter_instantiated_copied(self.tcx, proj.args) .find_map(|(p, s)| get_future_output(p.as_predicate(), s))?, _ => span_bug!( self.tcx.def_span(expr_def_id), diff --git a/compiler/rustc_hir_typeck/src/demand.rs b/compiler/rustc_hir_typeck/src/demand.rs index 8fecb82259f..429c3366fed 100644 --- a/compiler/rustc_hir_typeck/src/demand.rs +++ b/compiler/rustc_hir_typeck/src/demand.rs @@ -15,7 +15,7 @@ use rustc_middle::ty::error::{ExpectedFound, TypeError}; use rustc_middle::ty::fold::BottomUpFolder; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, Article, AssocItem, Ty, TypeAndMut, TypeFoldable}; -use rustc_span::symbol::{sym, Symbol}; +use rustc_span::symbol::sym; use rustc_span::{BytePos, Span, DUMMY_SP}; use rustc_trait_selection::infer::InferCtxtExt as _; use rustc_trait_selection::traits::ObligationCause; @@ -997,7 +997,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { .collect(); let suggestions_for = |variant: &_, ctor_kind, field_name| { - let prefix = match self.maybe_get_struct_pattern_shorthand_field(expr) { + let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!("{ident}: "), None => String::new(), }; @@ -1240,39 +1240,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - pub(crate) fn maybe_get_struct_pattern_shorthand_field( - &self, - expr: &hir::Expr<'_>, - ) -> Option<Symbol> { - let hir = self.tcx.hir(); - let local = match expr { - hir::Expr { - kind: - hir::ExprKind::Path(hir::QPath::Resolved( - None, - hir::Path { - res: hir::def::Res::Local(_), - segments: [hir::PathSegment { ident, .. }], - .. - }, - )), - .. - } => Some(ident), - _ => None, - }?; - - match hir.find_parent(expr.hir_id)? 
{ - Node::ExprField(field) => { - if field.ident.name == local.name && field.is_shorthand { - return Some(local.name); - } - } - _ => {} - } - - None - } - /// If the given `HirId` corresponds to a block with a trailing expression, return that expression pub(crate) fn maybe_get_block_expr( &self, @@ -1467,7 +1434,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { )); } - let prefix = match self.maybe_get_struct_pattern_shorthand_field(expr) { + let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!("{ident}: "), None => String::new(), }; @@ -1661,7 +1628,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) }; - let prefix = match self.maybe_get_struct_pattern_shorthand_field(expr) { + let prefix = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!("{ident}: "), None => String::new(), }; diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs index 15ca5808a93..c44d12e61e3 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs @@ -2,7 +2,7 @@ use crate::FnCtxt; use rustc_hir as hir; use rustc_hir::def::Res; use rustc_hir::def_id::DefId; -use rustc_infer::traits::ObligationCauseCode; +use rustc_infer::{infer::type_variable::TypeVariableOriginKind, traits::ObligationCauseCode}; use rustc_middle::ty::{self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitor}; use rustc_span::{self, symbol::kw, Span}; use rustc_trait_selection::traits; @@ -267,8 +267,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { type BreakTy = ty::GenericArg<'tcx>; fn visit_ty(&mut self, ty: Ty<'tcx>) -> std::ops::ControlFlow<Self::BreakTy> { if let Some(origin) = self.0.type_var_origin(ty) - && let rustc_infer::infer::type_variable::TypeVariableOriginKind::TypeParameterDefinition(_, def_id) = - origin.kind + && let TypeVariableOriginKind::TypeParameterDefinition(_, def_id) = origin.kind && let generics = self.0.tcx.generics_of(self.1) && let Some(index) = generics.param_def_id_to_index(self.0.tcx, def_id) && let Some(subst) = ty::GenericArgs::identity_for_item(self.0.tcx, self.1) diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs index 1e8af6c6ed7..6a82b00211e 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/mod.rs @@ -302,7 +302,9 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { match ty.kind() { ty::Adt(adt_def, _) => Some(*adt_def), // FIXME(#104767): Should we handle bound regions here? 
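// Illustrative aside (standalone example, not code from this patch): the case
// the shorthand-field helper detects. In `Wrapper { text }` the initializer is
// shorthand for `text: text`, so a suggestion that wraps the expression must
// also spell out the field name. `Wrapper` and `build` are invented names.
struct Wrapper {
    text: String,
}

fn build(text: &String) -> Wrapper {
    // With the shorthand `Wrapper { text }` this is a type mismatch
    // (`String` expected, `&String` found); the machine-applicable fix is the
    // prefixed form `text: text.clone()` rather than a bare `.clone()`.
    Wrapper { text: text.clone() }
}

fn main() {
    let w = build(&String::from("hi"));
    assert_eq!(w.text, "hi");
}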
- ty::Alias(ty::Projection | ty::Inherent, _) if !ty.has_escaping_bound_vars() => { + ty::Alias(ty::Projection | ty::Inherent | ty::Weak, _) + if !ty.has_escaping_bound_vars() => + { self.normalize(span, ty).ty_adt_def() } _ => None, diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index 92cae84afd9..ec19d017c25 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -395,7 +395,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { vec![(expr.span.shrink_to_hi(), format!(".{}()", conversion_method.name))] }; let struct_pat_shorthand_field = - self.maybe_get_struct_pattern_shorthand_field(expr); + self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr); if let Some(name) = struct_pat_shorthand_field { sugg.insert(0, (expr.span.shrink_to_lo(), format!("{}: ", name))); } @@ -1069,7 +1069,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ) .must_apply_modulo_regions() { - let suggestion = match self.maybe_get_struct_pattern_shorthand_field(expr) { + let suggestion = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!(": {}.clone()", ident), None => ".clone()".to_string() }; @@ -1247,7 +1247,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return false; } - let suggestion = match self.maybe_get_struct_pattern_shorthand_field(expr) { + let suggestion = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) { Some(ident) => format!(": {}.is_some()", ident), None => ".is_some()".to_string(), }; diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index e3e0eff23d2..3d7187cb16f 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -2,13 +2,12 @@ //! found or is otherwise invalid. 
use crate::errors; -use crate::errors::CandidateTraitNote; -use crate::errors::NoAssociatedItem; +use crate::errors::{CandidateTraitNote, NoAssociatedItem}; use crate::Expectation; use crate::FnCtxt; use rustc_ast::ast::Mutability; -use rustc_data_structures::fx::FxIndexMap; -use rustc_data_structures::fx::FxIndexSet; +use rustc_attr::parse_confusables; +use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::unord::UnordSet; use rustc_errors::StashKey; use rustc_errors::{ @@ -1038,6 +1037,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { "the {item_kind} was found for\n{}{}", type_candidates, additional_types )); + } else { + 'outer: for inherent_impl_did in self.tcx.inherent_impls(adt.did()) { + for inherent_method in + self.tcx.associated_items(inherent_impl_did).in_definition_order() + { + if let Some(attr) = self.tcx.get_attr(inherent_method.def_id, sym::rustc_confusables) + && let Some(candidates) = parse_confusables(attr) + && candidates.contains(&item_name.name) + { + err.span_suggestion_verbose( + item_name.span, + format!( + "you might have meant to use `{}`", + inherent_method.name.as_str() + ), + inherent_method.name.as_str(), + Applicability::MaybeIncorrect, + ); + break 'outer; + } + } + } } } } else { diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index bf46f9881d4..8e3c76d6a4b 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -403,7 +403,7 @@ impl<'tcx> InferCtxt<'tcx> { let future_trait = self.tcx.require_lang_item(LangItem::Future, None); let item_def_id = self.tcx.associated_item_def_ids(future_trait)[0]; - self.tcx.explicit_item_bounds(def_id).arg_iter_copied(self.tcx, args).find_map( + self.tcx.explicit_item_bounds(def_id).iter_instantiated_copied(self.tcx, args).find_map( |(predicate, _)| { predicate .kind() diff --git a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs index b4d8205fd6d..36b56fe782c 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/need_type_info.rs @@ -163,7 +163,7 @@ fn fmt_printer<'a, 'tcx>(infcx: &'a InferCtxt<'tcx>, ns: Namespace) -> FmtPrinte let ty_vars = infcx_inner.type_variables(); let var_origin = ty_vars.var_origin(ty_vid); if let TypeVariableOriginKind::TypeParameterDefinition(name, def_id) = var_origin.kind - && !var_origin.span.from_expansion() + && name != kw::SelfUpper && !var_origin.span.from_expansion() { let generics = infcx.tcx.generics_of(infcx.tcx.parent(def_id)); let idx = generics.param_def_id_to_index(infcx.tcx, def_id).unwrap(); diff --git a/compiler/rustc_infer/src/infer/opaque_types.rs b/compiler/rustc_infer/src/infer/opaque_types.rs index 90f0b4ce401..945136fbff2 100644 --- a/compiler/rustc_infer/src/infer/opaque_types.rs +++ b/compiler/rustc_infer/src/infer/opaque_types.rs @@ -591,7 +591,7 @@ impl<'tcx> InferCtxt<'tcx> { let tcx = self.tcx; let item_bounds = tcx.explicit_item_bounds(def_id); - for (predicate, _) in item_bounds.arg_iter_copied(tcx, args) { + for (predicate, _) in item_bounds.iter_instantiated_copied(tcx, args) { let predicate = predicate.fold_with(&mut BottomUpFolder { tcx, ty_op: |ty| match *ty.kind() { diff --git a/compiler/rustc_infer/src/infer/outlives/verify.rs b/compiler/rustc_infer/src/infer/outlives/verify.rs index 1e1ecd3fb94..2bc6546ba28 100644 --- 
a/compiler/rustc_infer/src/infer/outlives/verify.rs +++ b/compiler/rustc_infer/src/infer/outlives/verify.rs @@ -295,7 +295,7 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> { let bounds = tcx.item_bounds(alias_ty.def_id); trace!("{:#?}", bounds.skip_binder()); bounds - .arg_iter(tcx, alias_ty.args) + .iter_instantiated(tcx, alias_ty.args) .filter_map(|p| p.as_type_outlives_clause()) .filter_map(|p| p.no_bound_vars()) .map(|OutlivesPredicate(_, r)| r) diff --git a/compiler/rustc_interface/src/interface.rs b/compiler/rustc_interface/src/interface.rs index 6471dce88c2..3f420f19efe 100644 --- a/compiler/rustc_interface/src/interface.rs +++ b/compiler/rustc_interface/src/interface.rs @@ -59,11 +59,6 @@ impl Compiler { } } -#[allow(rustc::bad_opt_access)] -pub fn set_thread_safe_mode(sopts: &config::UnstableOptions) { - rustc_data_structures::sync::set_dyn_thread_safe_mode(sopts.threads > 1); -} - /// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`. pub fn parse_cfgspecs( handler: &EarlyErrorHandler, @@ -289,6 +284,10 @@ pub struct Config { #[allow(rustc::bad_opt_access)] pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Send) -> R { trace!("run_compiler"); + + // Set parallel mode before thread pool creation, which will create `Lock`s. + rustc_data_structures::sync::set_dyn_thread_safe_mode(config.opts.unstable_opts.threads > 1); + util::run_in_thread_pool_with_globals( config.opts.edition, config.opts.unstable_opts.threads, diff --git a/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs b/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs index d5f0290767a..79b0b32bef2 100644 --- a/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs +++ b/compiler/rustc_lint/src/opaque_hidden_inferred_bound.rs @@ -117,7 +117,7 @@ impl<'tcx> LateLintPass<'tcx> for OpaqueHiddenInferredBound { for (assoc_pred, assoc_pred_span) in cx .tcx .explicit_item_bounds(proj.projection_ty.def_id) - .arg_iter_copied(cx.tcx, &proj.projection_ty.args) + .iter_instantiated_copied(cx.tcx, &proj.projection_ty.args) { let assoc_pred = assoc_pred.fold_with(proj_replacer); let Ok(assoc_pred) = traits::fully_normalize( diff --git a/compiler/rustc_llvm/build.rs b/compiler/rustc_llvm/build.rs index b0783d75d47..aa1121d6bb3 100644 --- a/compiler/rustc_llvm/build.rs +++ b/compiler/rustc_llvm/build.rs @@ -251,8 +251,11 @@ fn main() { } else if target.contains("windows-gnu") { println!("cargo:rustc-link-lib=shell32"); println!("cargo:rustc-link-lib=uuid"); - } else if target.contains("netbsd") || target.contains("haiku") || target.contains("darwin") { + } else if target.contains("haiku") || target.contains("darwin") { println!("cargo:rustc-link-lib=z"); + } else if target.contains("netbsd") { + println!("cargo:rustc-link-lib=z"); + println!("cargo:rustc-link-lib=execinfo"); } cmd.args(&components); diff --git a/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp index 87906dee4d3..80b6c0fb439 100644 --- a/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/CoverageMappingWrapper.cpp @@ -158,14 +158,9 @@ extern "C" LLVMValueRef LLVMRustCoverageCreatePGOFuncNameVar(LLVMValueRef F, con return wrap(createPGOFuncNameVar(*cast<Function>(unwrap(F)), FuncNameRef)); } -extern "C" uint64_t LLVMRustCoverageHashCString(const char *StrVal) { - StringRef StrRef(StrVal); - return IndexedInstrProf::ComputeHash(StrRef); -} - extern "C" uint64_t LLVMRustCoverageHashByteArray( const char 
*Bytes, - unsigned NumBytes) { + size_t NumBytes) { StringRef StrRef(Bytes, NumBytes); return IndexedInstrProf::ComputeHash(StrRef); } diff --git a/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp b/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp index 0493d6b05d0..bf00d11edf6 100644 --- a/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp +++ b/compiler/rustc_llvm/llvm-wrapper/SymbolWrapper.cpp @@ -7,6 +7,7 @@ // * https://github.com/llvm/llvm-project/blob/8ef3e895ad8ab1724e2b87cabad1dacdc7a397a3/llvm/include/llvm/Object/ArchiveWriter.h // * https://github.com/llvm/llvm-project/blob/8ef3e895ad8ab1724e2b87cabad1dacdc7a397a3/llvm/lib/Object/ArchiveWriter.cpp +#include "llvm/ADT/SmallString.h" #include "llvm/IR/LLVMContext.h" #include "llvm/Object/ObjectFile.h" diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index 0056769cabb..2785732727f 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -608,7 +608,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { trace!("encoding {} further alloc ids", new_n - n); for idx in n..new_n { let id = self.interpret_allocs[idx]; - let pos = self.position() as u32; + let pos = self.position() as u64; interpret_alloc_index.push(pos); interpret::specialized_encode_alloc_id(self, tcx, id); } @@ -1136,7 +1136,7 @@ fn should_encode_type(tcx: TyCtxt<'_>, def_id: LocalDefId, def_kind: DefKind) -> // the default projection predicates in default trait methods // with RPITITs. ty::AssocItemContainer::TraitContainer => { - assoc_item.defaultness(tcx).has_value() || assoc_item.opt_rpitit_info.is_some() + assoc_item.defaultness(tcx).has_value() || assoc_item.is_impl_trait_in_trait() } } } diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index 9cffd96f4a3..f6087fbe8f6 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -264,7 +264,7 @@ pub(crate) struct CrateRoot { traits: LazyArray<DefIndex>, impls: LazyArray<TraitImpls>, incoherent_impls: LazyArray<IncoherentImpls>, - interpret_alloc_index: LazyArray<u32>, + interpret_alloc_index: LazyArray<u64>, proc_macro_data: Option<ProcMacroData>, tables: LazyTables, diff --git a/compiler/rustc_middle/src/hir/map/mod.rs b/compiler/rustc_middle/src/hir/map/mod.rs index 6bb8e632dbe..1fd68dc5cb2 100644 --- a/compiler/rustc_middle/src/hir/map/mod.rs +++ b/compiler/rustc_middle/src/hir/map/mod.rs @@ -1103,6 +1103,33 @@ impl<'hir> Map<'hir> { _ => None, } } + + pub fn maybe_get_struct_pattern_shorthand_field(&self, expr: &Expr<'_>) -> Option<Symbol> { + let local = match expr { + Expr { + kind: + ExprKind::Path(QPath::Resolved( + None, + Path { + res: def::Res::Local(_), segments: [PathSegment { ident, .. }], .. + }, + )), + .. + } => Some(ident), + _ => None, + }?; + + match self.find_parent(expr.hir_id)? { + Node::ExprField(field) => { + if field.ident.name == local.name && field.is_shorthand { + return Some(local.name); + } + } + _ => {} + } + + None + } } impl<'hir> intravisit::Map<'hir> for Map<'hir> { diff --git a/compiler/rustc_middle/src/mir/interpret/mod.rs b/compiler/rustc_middle/src/mir/interpret/mod.rs index fbd667a87fe..69c15e9cc06 100644 --- a/compiler/rustc_middle/src/mir/interpret/mod.rs +++ b/compiler/rustc_middle/src/mir/interpret/mod.rs @@ -274,7 +274,7 @@ pub struct AllocDecodingState { // For each `AllocId`, we keep track of which decoding state it's currently in. 
decoding_state: Vec<Lock<State>>, // The offsets of each allocation in the data stream. - data_offsets: Vec<u32>, + data_offsets: Vec<u64>, } impl AllocDecodingState { @@ -289,7 +289,7 @@ impl AllocDecodingState { AllocDecodingSession { state: self, session_id } } - pub fn new(data_offsets: Vec<u32>) -> Self { + pub fn new(data_offsets: Vec<u64>) -> Self { let decoding_state = std::iter::repeat_with(|| Lock::new(State::Empty)).take(data_offsets.len()).collect(); diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index ca3cd943d3d..16addc2dc1e 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -59,12 +59,19 @@ impl<'tcx> MonoItem<'tcx> { pub fn size_estimate(&self, tcx: TyCtxt<'tcx>) -> usize { match *self { MonoItem::Fn(instance) => { - // Estimate the size of a function based on how many statements - // it contains. - tcx.instance_def_size_estimate(instance.def) + match instance.def { + // "Normal" functions size estimate: the number of + // statements, plus one for the terminator. + InstanceDef::Item(..) | InstanceDef::DropGlue(..) => { + let mir = tcx.instance_mir(instance.def); + mir.basic_blocks.iter().map(|bb| bb.statements.len() + 1).sum() + } + // Other compiler-generated shims size estimate: 1 + _ => 1, + } } - // Conservatively estimate the size of a static declaration - // or assembly to be 1. + // Conservatively estimate the size of a static declaration or + // assembly item to be 1. MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1, } } @@ -230,7 +237,7 @@ pub struct CodegenUnit<'tcx> { /// contain something unique to this crate (e.g., a module path) /// as well as the crate name and disambiguator. name: Symbol, - items: FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)>, + items: FxHashMap<MonoItem<'tcx>, MonoItemData>, size_estimate: usize, primary: bool, /// True if this is CGU is used to hold code coverage information for dead code, @@ -238,6 +245,14 @@ pub struct CodegenUnit<'tcx> { is_code_coverage_dead_code_cgu: bool, } +/// Auxiliary info about a `MonoItem`. +#[derive(Copy, Clone, PartialEq, Debug, HashStable)] +pub struct MonoItemData { + pub linkage: Linkage, + pub visibility: Visibility, + pub size_estimate: usize, +} + /// Specifies the linkage type for a `MonoItem`. /// /// See <https://llvm.org/docs/LangRef.html#linkage-types> for more details about these variants. @@ -292,12 +307,12 @@ impl<'tcx> CodegenUnit<'tcx> { } /// The order of these items is non-determinstic. - pub fn items(&self) -> &FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)> { + pub fn items(&self) -> &FxHashMap<MonoItem<'tcx>, MonoItemData> { &self.items } /// The order of these items is non-determinstic. - pub fn items_mut(&mut self) -> &mut FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)> { + pub fn items_mut(&mut self) -> &mut FxHashMap<MonoItem<'tcx>, MonoItemData> { &mut self.items } @@ -320,16 +335,16 @@ impl<'tcx> CodegenUnit<'tcx> { base_n::encode(hash, base_n::CASE_INSENSITIVE) } - pub fn compute_size_estimate(&mut self, tcx: TyCtxt<'tcx>) { - // Estimate the size of a codegen unit as (approximately) the number of MIR - // statements it corresponds to. - self.size_estimate = self.items.keys().map(|mi| mi.size_estimate(tcx)).sum(); + pub fn compute_size_estimate(&mut self) { + // The size of a codegen unit as the sum of the sizes of the items + // within it. 
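// Illustrative aside (simplified stand-in types, not rustc's own): each placed
// item now carries its own cached size estimate in a `MonoItemData`-style
// record, and the CGU total is just the sum of those cached values. The
// `Toy*` names below are invented for the sketch.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
struct ToyMonoItem(&'static str);

#[derive(Clone, Copy)]
struct ToyMonoItemData {
    size_estimate: usize,
}

#[derive(Default)]
struct ToyCodegenUnit {
    items: HashMap<ToyMonoItem, ToyMonoItemData>,
    size_estimate: usize,
}

impl ToyCodegenUnit {
    // Mirrors the new `compute_size_estimate`: no `TyCtxt` is needed, since
    // the per-item sizes were computed once at placement time.
    fn compute_size_estimate(&mut self) {
        self.size_estimate = self.items.values().map(|d| d.size_estimate).sum();
    }
}

fn main() {
    let mut cgu = ToyCodegenUnit::default();
    // "Normal" functions are estimated as statements plus one per basic
    // block; compiler-generated shims count as 1.
    cgu.items.insert(ToyMonoItem("foo"), ToyMonoItemData { size_estimate: 12 });
    cgu.items.insert(ToyMonoItem("shim"), ToyMonoItemData { size_estimate: 1 });
    cgu.compute_size_estimate();
    assert_eq!(cgu.size_estimate, 13);
}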
+ self.size_estimate = self.items.values().map(|data| data.size_estimate).sum(); } - #[inline] /// Should only be called if [`compute_size_estimate`] has previously been called. /// /// [`compute_size_estimate`]: Self::compute_size_estimate + #[inline] pub fn size_estimate(&self) -> usize { // Items are never zero-sized, so if we have items the estimate must be // non-zero, unless we forgot to call `compute_size_estimate` first. @@ -355,7 +370,7 @@ impl<'tcx> CodegenUnit<'tcx> { pub fn items_in_deterministic_order( &self, tcx: TyCtxt<'tcx>, - ) -> Vec<(MonoItem<'tcx>, (Linkage, Visibility))> { + ) -> Vec<(MonoItem<'tcx>, MonoItemData)> { // The codegen tests rely on items being process in the same order as // they appear in the file, so for local items, we sort by node_id first #[derive(PartialEq, Eq, PartialOrd, Ord)] @@ -390,7 +405,7 @@ impl<'tcx> CodegenUnit<'tcx> { ) } - let mut items: Vec<_> = self.items().iter().map(|(&i, &l)| (i, l)).collect(); + let mut items: Vec<_> = self.items().iter().map(|(&i, &data)| (i, data)).collect(); items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i)); items } diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 45fa82ba68a..c304245ca39 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -2080,12 +2080,6 @@ rustc_queries! { desc { "looking up supported target features" } } - /// Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning. - query instance_def_size_estimate(def: ty::InstanceDef<'tcx>) - -> usize { - desc { |tcx| "estimating size for `{}`", tcx.def_path_str(def.def_id()) } - } - query features_query(_: ()) -> &'tcx rustc_feature::Features { feedable desc { "looking up enabled feature gates" } diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index 8751d3b7890..995b2140f61 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -104,7 +104,9 @@ struct Footer { query_result_index: EncodedDepNodeIndex, side_effects_index: EncodedDepNodeIndex, // The location of all allocations. - interpret_alloc_index: Vec<u32>, + // Most uses only need values up to u32::MAX, but benchmarking indicates that we can use a u64 + // without measurable overhead. This permits larger const allocations without ICEing. 
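// Illustrative aside (standalone sketch, not the real encoder types): with
// the offsets widened to u64, the conversion below cannot truncate, so
// allocation data past the 4 GiB mark in a metadata or incremental-cache
// file still round-trips. `record_alloc_offset` is an invented name.
fn record_alloc_offset(interpret_alloc_index: &mut Vec<u64>, stream_position: usize) {
    let pos: u64 = stream_position.try_into().expect("usize always fits in u64");
    interpret_alloc_index.push(pos);
}

fn main() {
    let mut index: Vec<u64> = Vec::new();
    record_alloc_offset(&mut index, 1_024);
    assert_eq!(index, vec![1_024u64]);
}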
+ interpret_alloc_index: Vec<u64>, // See `OnDiskCache.syntax_contexts` syntax_contexts: FxHashMap<u32, AbsoluteBytePos>, // See `OnDiskCache.expn_data` @@ -301,7 +303,7 @@ impl<'sess> OnDiskCache<'sess> { interpret_alloc_index.reserve(new_n - n); for idx in n..new_n { let id = encoder.interpret_allocs[idx]; - let pos: u32 = encoder.position().try_into().unwrap(); + let pos: u64 = encoder.position().try_into().unwrap(); interpret_alloc_index.push(pos); interpret::specialized_encode_alloc_id(&mut encoder, tcx, id); } diff --git a/compiler/rustc_middle/src/ty/assoc.rs b/compiler/rustc_middle/src/ty/assoc.rs index 4e746a49d84..f77a8c6712e 100644 --- a/compiler/rustc_middle/src/ty/assoc.rs +++ b/compiler/rustc_middle/src/ty/assoc.rs @@ -96,6 +96,10 @@ impl AssocItem { } } } + + pub fn is_impl_trait_in_trait(&self) -> bool { + self.opt_rpitit_info.is_some() + } } #[derive(Copy, Clone, PartialEq, Debug, HashStable, Eq, Hash, Encodable, Decodable)] diff --git a/compiler/rustc_middle/src/ty/consts.rs b/compiler/rustc_middle/src/ty/consts.rs index bf9f5846ed9..4ef70107f19 100644 --- a/compiler/rustc_middle/src/ty/consts.rs +++ b/compiler/rustc_middle/src/ty/consts.rs @@ -294,6 +294,14 @@ impl<'tcx> Const<'tcx> { Self::from_bits(tcx, n as u128, ParamEnv::empty().and(tcx.types.usize)) } + /// Attempts to convert to a `ValTree` + pub fn try_to_valtree(self) -> Option<ty::ValTree<'tcx>> { + match self.kind() { + ty::ConstKind::Value(valtree) => Some(valtree), + _ => None, + } + } + #[inline] /// Attempts to evaluate the given constant to bits. Can fail to evaluate in the presence of /// generics (or erroneous code) or if the value can't be represented as bits (e.g. because it diff --git a/compiler/rustc_middle/src/ty/consts/valtree.rs b/compiler/rustc_middle/src/ty/consts/valtree.rs index 8b96864ddd7..fb7bf78bafe 100644 --- a/compiler/rustc_middle/src/ty/consts/valtree.rs +++ b/compiler/rustc_middle/src/ty/consts/valtree.rs @@ -24,7 +24,7 @@ pub enum ValTree<'tcx> { Leaf(ScalarInt), //SliceOrStr(ValSlice<'tcx>), - // dont use SliceOrStr for now + // don't use SliceOrStr for now /// The fields of any kind of aggregate. Structs, tuples and arrays are represented by /// listing their fields' values in order. 
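// Illustrative aside (simplified stand-in types, not the real `ConstKind` or
// `ValTree`): the new `try_to_valtree` accessor yields the valtree only when
// the constant is already a concrete value, and `None` otherwise, without
// triggering any evaluation. The `Toy*` names are invented for the sketch.
#[derive(Clone, Copy, Debug, PartialEq)]
struct ToyValTree(u128);

enum ToyConstKind {
    Value(ToyValTree),
    Unevaluated,
}

struct ToyConst {
    kind: ToyConstKind,
}

impl ToyConst {
    fn try_to_valtree(&self) -> Option<ToyValTree> {
        match self.kind {
            ToyConstKind::Value(v) => Some(v),
            _ => None,
        }
    }
}

fn main() {
    let evaluated = ToyConst { kind: ToyConstKind::Value(ToyValTree(7)) };
    assert_eq!(evaluated.try_to_valtree(), Some(ToyValTree(7)));

    let pending = ToyConst { kind: ToyConstKind::Unevaluated };
    assert!(pending.try_to_valtree().is_none());
}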
/// diff --git a/compiler/rustc_middle/src/ty/generic_args.rs b/compiler/rustc_middle/src/ty/generic_args.rs index 12af8456494..97dab5cb47e 100644 --- a/compiler/rustc_middle/src/ty/generic_args.rs +++ b/compiler/rustc_middle/src/ty/generic_args.rs @@ -610,8 +610,12 @@ impl<'tcx, 's, I: IntoIterator> EarlyBinder<I> where I::Item: TypeFoldable<TyCtxt<'tcx>>, { - pub fn arg_iter(self, tcx: TyCtxt<'tcx>, args: &'s [GenericArg<'tcx>]) -> ArgIter<'s, 'tcx, I> { - ArgIter { it: self.value.into_iter(), tcx, args } + pub fn iter_instantiated( + self, + tcx: TyCtxt<'tcx>, + args: &'s [GenericArg<'tcx>], + ) -> IterInstantiated<'s, 'tcx, I> { + IterInstantiated { it: self.value.into_iter(), tcx, args } } /// Similar to [`instantiate_identity`](EarlyBinder::instantiate_identity), @@ -621,13 +625,13 @@ where } } -pub struct ArgIter<'s, 'tcx, I: IntoIterator> { +pub struct IterInstantiated<'s, 'tcx, I: IntoIterator> { it: I::IntoIter, tcx: TyCtxt<'tcx>, args: &'s [GenericArg<'tcx>], } -impl<'tcx, I: IntoIterator> Iterator for ArgIter<'_, 'tcx, I> +impl<'tcx, I: IntoIterator> Iterator for IterInstantiated<'_, 'tcx, I> where I::Item: TypeFoldable<TyCtxt<'tcx>>, { @@ -642,7 +646,7 @@ where } } -impl<'tcx, I: IntoIterator> DoubleEndedIterator for ArgIter<'_, 'tcx, I> +impl<'tcx, I: IntoIterator> DoubleEndedIterator for IterInstantiated<'_, 'tcx, I> where I::IntoIter: DoubleEndedIterator, I::Item: TypeFoldable<TyCtxt<'tcx>>, @@ -652,7 +656,7 @@ where } } -impl<'tcx, I: IntoIterator> ExactSizeIterator for ArgIter<'_, 'tcx, I> +impl<'tcx, I: IntoIterator> ExactSizeIterator for IterInstantiated<'_, 'tcx, I> where I::IntoIter: ExactSizeIterator, I::Item: TypeFoldable<TyCtxt<'tcx>>, @@ -664,12 +668,12 @@ where I::Item: Deref, <I::Item as Deref>::Target: Copy + TypeFoldable<TyCtxt<'tcx>>, { - pub fn arg_iter_copied( + pub fn iter_instantiated_copied( self, tcx: TyCtxt<'tcx>, args: &'s [GenericArg<'tcx>], - ) -> ArgIterCopied<'s, 'tcx, I> { - ArgIterCopied { it: self.value.into_iter(), tcx, args } + ) -> IterInstantiatedCopied<'s, 'tcx, I> { + IterInstantiatedCopied { it: self.value.into_iter(), tcx, args } } /// Similar to [`instantiate_identity`](EarlyBinder::instantiate_identity), @@ -681,13 +685,13 @@ where } } -pub struct ArgIterCopied<'a, 'tcx, I: IntoIterator> { +pub struct IterInstantiatedCopied<'a, 'tcx, I: IntoIterator> { it: I::IntoIter, tcx: TyCtxt<'tcx>, args: &'a [GenericArg<'tcx>], } -impl<'tcx, I: IntoIterator> Iterator for ArgIterCopied<'_, 'tcx, I> +impl<'tcx, I: IntoIterator> Iterator for IterInstantiatedCopied<'_, 'tcx, I> where I::Item: Deref, <I::Item as Deref>::Target: Copy + TypeFoldable<TyCtxt<'tcx>>, @@ -703,7 +707,7 @@ where } } -impl<'tcx, I: IntoIterator> DoubleEndedIterator for ArgIterCopied<'_, 'tcx, I> +impl<'tcx, I: IntoIterator> DoubleEndedIterator for IterInstantiatedCopied<'_, 'tcx, I> where I::IntoIter: DoubleEndedIterator, I::Item: Deref, @@ -716,7 +720,7 @@ where } } -impl<'tcx, I: IntoIterator> ExactSizeIterator for ArgIterCopied<'_, 'tcx, I> +impl<'tcx, I: IntoIterator> ExactSizeIterator for IterInstantiatedCopied<'_, 'tcx, I> where I::IntoIter: ExactSizeIterator, I::Item: Deref, diff --git a/compiler/rustc_middle/src/ty/generics.rs b/compiler/rustc_middle/src/ty/generics.rs index 338590717d0..70a35f137d8 100644 --- a/compiler/rustc_middle/src/ty/generics.rs +++ b/compiler/rustc_middle/src/ty/generics.rs @@ -347,7 +347,7 @@ impl<'tcx> GenericPredicates<'tcx> { tcx: TyCtxt<'tcx>, args: GenericArgsRef<'tcx>, ) -> impl Iterator<Item = (Clause<'tcx>, Span)> + 
DoubleEndedIterator + ExactSizeIterator { - EarlyBinder::bind(self.predicates).arg_iter_copied(tcx, args) + EarlyBinder::bind(self.predicates).iter_instantiated_copied(tcx, args) } #[instrument(level = "debug", skip(self, tcx))] diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs index 61d95e64c9c..62805d1e8b5 100644 --- a/compiler/rustc_middle/src/ty/layout.rs +++ b/compiler/rustc_middle/src/ty/layout.rs @@ -755,6 +755,8 @@ where largest_niche: None, align: tcx.data_layout.i8_align, size: Size::ZERO, + max_repr_align: None, + unadjusted_abi_align: tcx.data_layout.i8_align.abi, }) } diff --git a/compiler/rustc_middle/src/ty/parameterized.rs b/compiler/rustc_middle/src/ty/parameterized.rs index cc2b26a5e14..f1c38984296 100644 --- a/compiler/rustc_middle/src/ty/parameterized.rs +++ b/compiler/rustc_middle/src/ty/parameterized.rs @@ -52,6 +52,7 @@ trivially_parameterized_over_tcx! { usize, (), u32, + u64, bool, std::string::String, crate::metadata::ModChild, diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index 3591acdea56..70eb389b406 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ -928,7 +928,7 @@ pub trait PrettyPrinter<'tcx>: let mut is_sized = false; let mut lifetimes = SmallVec::<[ty::Region<'tcx>; 1]>::new(); - for (predicate, _) in bounds.arg_iter_copied(tcx, args) { + for (predicate, _) in bounds.iter_instantiated_copied(tcx, args) { let bound_predicate = predicate.kind(); match bound_predicate.skip_binder() { diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 66d8a79de42..cdb0b2240a4 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -1313,7 +1313,7 @@ impl<'tcx> AliasTy<'tcx> { /// I_i impl subst /// P_j GAT subst /// ``` - pub fn rebase_args_onto_impl( + pub fn rebase_inherent_args_onto_impl( self, impl_args: ty::GenericArgsRef<'tcx>, tcx: TyCtxt<'tcx>, diff --git a/compiler/rustc_mir_build/messages.ftl b/compiler/rustc_mir_build/messages.ftl index 98156840dd8..938f3edd31b 100644 --- a/compiler/rustc_mir_build/messages.ftl +++ b/compiler/rustc_mir_build/messages.ftl @@ -312,6 +312,8 @@ mir_build_unreachable_pattern = unreachable pattern .label = unreachable pattern .catchall_label = matches any value +mir_build_unsafe_not_inherited = items do not inherit unsafety from separate enclosing items + mir_build_unsafe_op_in_unsafe_fn_borrow_of_layout_constrained_field_requires_unsafe = borrow of layout constrained field with interior mutability is unsafe and requires unsafe block (error E0133) .note = references to fields of layout constrained fields lose the constraints. 
Coupled with interior mutability, the field can be changed to invalid values diff --git a/compiler/rustc_mir_build/src/check_unsafety.rs b/compiler/rustc_mir_build/src/check_unsafety.rs index 8b4ff9bb868..192bd4a83e3 100644 --- a/compiler/rustc_mir_build/src/check_unsafety.rs +++ b/compiler/rustc_mir_build/src/check_unsafety.rs @@ -91,7 +91,12 @@ impl<'tcx> UnsafetyVisitor<'_, 'tcx> { kind.emit_unsafe_op_in_unsafe_fn_lint(self.tcx, self.hir_context, span); } SafetyContext::Safe => { - kind.emit_requires_unsafe_err(self.tcx, span, unsafe_op_in_unsafe_fn_allowed); + kind.emit_requires_unsafe_err( + self.tcx, + span, + self.hir_context, + unsafe_op_in_unsafe_fn_allowed, + ); } } } @@ -602,98 +607,164 @@ impl UnsafeOpKind { &self, tcx: TyCtxt<'_>, span: Span, + hir_context: hir::HirId, unsafe_op_in_unsafe_fn_allowed: bool, ) { + let note_non_inherited = tcx.hir().parent_iter(hir_context).find(|(id, node)| { + if let hir::Node::Expr(block) = node + && let hir::ExprKind::Block(block, _) = block.kind + && let hir::BlockCheckMode::UnsafeBlock(_) = block.rules + { + true + } + else if let Some(sig) = tcx.hir().fn_sig_by_hir_id(*id) + && sig.header.is_unsafe() + { + true + } else { + false + } + }); + let unsafe_not_inherited_note = if let Some((id, _)) = note_non_inherited { + let span = tcx.hir().span(id); + let span = tcx.sess.source_map().guess_head_span(span); + Some(UnsafeNotInheritedNote { span }) + } else { + None + }; + match self { CallToUnsafeFunction(Some(did)) if unsafe_op_in_unsafe_fn_allowed => { tcx.sess.emit_err(CallToUnsafeFunctionRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span, + unsafe_not_inherited_note, function: &tcx.def_path_str(*did), }); } CallToUnsafeFunction(Some(did)) => { tcx.sess.emit_err(CallToUnsafeFunctionRequiresUnsafe { span, + unsafe_not_inherited_note, function: &tcx.def_path_str(*did), }); } CallToUnsafeFunction(None) if unsafe_op_in_unsafe_fn_allowed => { tcx.sess.emit_err( - CallToUnsafeFunctionRequiresUnsafeNamelessUnsafeOpInUnsafeFnAllowed { span }, + CallToUnsafeFunctionRequiresUnsafeNamelessUnsafeOpInUnsafeFnAllowed { + span, + unsafe_not_inherited_note, + }, ); } CallToUnsafeFunction(None) => { - tcx.sess.emit_err(CallToUnsafeFunctionRequiresUnsafeNameless { span }); + tcx.sess.emit_err(CallToUnsafeFunctionRequiresUnsafeNameless { + span, + unsafe_not_inherited_note, + }); } UseOfInlineAssembly if unsafe_op_in_unsafe_fn_allowed => { - tcx.sess - .emit_err(UseOfInlineAssemblyRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span }); + tcx.sess.emit_err(UseOfInlineAssemblyRequiresUnsafeUnsafeOpInUnsafeFnAllowed { + span, + unsafe_not_inherited_note, + }); } UseOfInlineAssembly => { - tcx.sess.emit_err(UseOfInlineAssemblyRequiresUnsafe { span }); + tcx.sess.emit_err(UseOfInlineAssemblyRequiresUnsafe { + span, + unsafe_not_inherited_note, + }); } InitializingTypeWith if unsafe_op_in_unsafe_fn_allowed => { - tcx.sess - .emit_err(InitializingTypeWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span }); + tcx.sess.emit_err(InitializingTypeWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed { + span, + unsafe_not_inherited_note, + }); } InitializingTypeWith => { - tcx.sess.emit_err(InitializingTypeWithRequiresUnsafe { span }); + tcx.sess.emit_err(InitializingTypeWithRequiresUnsafe { + span, + unsafe_not_inherited_note, + }); } UseOfMutableStatic if unsafe_op_in_unsafe_fn_allowed => { - tcx.sess - .emit_err(UseOfMutableStaticRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span }); + tcx.sess.emit_err(UseOfMutableStaticRequiresUnsafeUnsafeOpInUnsafeFnAllowed { + span, + 
unsafe_not_inherited_note, + }); } UseOfMutableStatic => { - tcx.sess.emit_err(UseOfMutableStaticRequiresUnsafe { span }); + tcx.sess + .emit_err(UseOfMutableStaticRequiresUnsafe { span, unsafe_not_inherited_note }); } UseOfExternStatic if unsafe_op_in_unsafe_fn_allowed => { - tcx.sess - .emit_err(UseOfExternStaticRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span }); + tcx.sess.emit_err(UseOfExternStaticRequiresUnsafeUnsafeOpInUnsafeFnAllowed { + span, + unsafe_not_inherited_note, + }); } UseOfExternStatic => { - tcx.sess.emit_err(UseOfExternStaticRequiresUnsafe { span }); + tcx.sess + .emit_err(UseOfExternStaticRequiresUnsafe { span, unsafe_not_inherited_note }); } DerefOfRawPointer if unsafe_op_in_unsafe_fn_allowed => { - tcx.sess - .emit_err(DerefOfRawPointerRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span }); + tcx.sess.emit_err(DerefOfRawPointerRequiresUnsafeUnsafeOpInUnsafeFnAllowed { + span, + unsafe_not_inherited_note, + }); } DerefOfRawPointer => { - tcx.sess.emit_err(DerefOfRawPointerRequiresUnsafe { span }); + tcx.sess + .emit_err(DerefOfRawPointerRequiresUnsafe { span, unsafe_not_inherited_note }); } AccessToUnionField if unsafe_op_in_unsafe_fn_allowed => { - tcx.sess - .emit_err(AccessToUnionFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span }); + tcx.sess.emit_err(AccessToUnionFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed { + span, + unsafe_not_inherited_note, + }); } AccessToUnionField => { - tcx.sess.emit_err(AccessToUnionFieldRequiresUnsafe { span }); + tcx.sess + .emit_err(AccessToUnionFieldRequiresUnsafe { span, unsafe_not_inherited_note }); } MutationOfLayoutConstrainedField if unsafe_op_in_unsafe_fn_allowed => { tcx.sess.emit_err( MutationOfLayoutConstrainedFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span, + unsafe_not_inherited_note, }, ); } MutationOfLayoutConstrainedField => { - tcx.sess.emit_err(MutationOfLayoutConstrainedFieldRequiresUnsafe { span }); + tcx.sess.emit_err(MutationOfLayoutConstrainedFieldRequiresUnsafe { + span, + unsafe_not_inherited_note, + }); } BorrowOfLayoutConstrainedField if unsafe_op_in_unsafe_fn_allowed => { tcx.sess.emit_err( - BorrowOfLayoutConstrainedFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span }, + BorrowOfLayoutConstrainedFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed { + span, + unsafe_not_inherited_note, + }, ); } BorrowOfLayoutConstrainedField => { - tcx.sess.emit_err(BorrowOfLayoutConstrainedFieldRequiresUnsafe { span }); + tcx.sess.emit_err(BorrowOfLayoutConstrainedFieldRequiresUnsafe { + span, + unsafe_not_inherited_note, + }); } CallToFunctionWith(did) if unsafe_op_in_unsafe_fn_allowed => { tcx.sess.emit_err(CallToFunctionWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed { span, + unsafe_not_inherited_note, function: &tcx.def_path_str(*did), }); } CallToFunctionWith(did) => { tcx.sess.emit_err(CallToFunctionWithRequiresUnsafe { span, + unsafe_not_inherited_note, function: &tcx.def_path_str(*did), }); } diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index df00cc75cc7..820ab93851e 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -119,6 +119,8 @@ pub struct CallToUnsafeFunctionRequiresUnsafe<'a> { #[label] pub span: Span, pub function: &'a str, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -128,6 +130,8 @@ pub struct CallToUnsafeFunctionRequiresUnsafeNameless { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: 
Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -138,6 +142,8 @@ pub struct CallToUnsafeFunctionRequiresUnsafeUnsafeOpInUnsafeFnAllowed<'a> { #[label] pub span: Span, pub function: &'a str, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -150,6 +156,8 @@ pub struct CallToUnsafeFunctionRequiresUnsafeNamelessUnsafeOpInUnsafeFnAllowed { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -159,6 +167,8 @@ pub struct UseOfInlineAssemblyRequiresUnsafe { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -168,6 +178,8 @@ pub struct UseOfInlineAssemblyRequiresUnsafeUnsafeOpInUnsafeFnAllowed { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -177,6 +189,8 @@ pub struct InitializingTypeWithRequiresUnsafe { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -189,6 +203,8 @@ pub struct InitializingTypeWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -198,6 +214,8 @@ pub struct UseOfMutableStaticRequiresUnsafe { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -207,6 +225,8 @@ pub struct UseOfMutableStaticRequiresUnsafeUnsafeOpInUnsafeFnAllowed { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -216,6 +236,8 @@ pub struct UseOfExternStaticRequiresUnsafe { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -225,6 +247,8 @@ pub struct UseOfExternStaticRequiresUnsafeUnsafeOpInUnsafeFnAllowed { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -234,6 +258,8 @@ pub struct DerefOfRawPointerRequiresUnsafe { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -243,6 +269,8 @@ pub struct DerefOfRawPointerRequiresUnsafeUnsafeOpInUnsafeFnAllowed { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -252,6 +280,8 @@ pub struct AccessToUnionFieldRequiresUnsafe { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -261,6 +291,8 @@ pub struct AccessToUnionFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -270,6 +302,8 @@ pub struct MutationOfLayoutConstrainedFieldRequiresUnsafe { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -282,6 +316,8 @@ pub struct 
MutationOfLayoutConstrainedFieldRequiresUnsafeUnsafeOpInUnsafeFnAllow #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -291,6 +327,8 @@ pub struct BorrowOfLayoutConstrainedFieldRequiresUnsafe { #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -303,6 +341,8 @@ pub struct BorrowOfLayoutConstrainedFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed #[primary_span] #[label] pub span: Span, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -313,6 +353,8 @@ pub struct CallToFunctionWithRequiresUnsafe<'a> { #[label] pub span: Span, pub function: &'a str, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, } #[derive(Diagnostic)] @@ -323,6 +365,15 @@ pub struct CallToFunctionWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed<'a> { #[label] pub span: Span, pub function: &'a str, + #[subdiagnostic] + pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>, +} + +#[derive(Subdiagnostic)] +#[label(mir_build_unsafe_not_inherited)] +pub struct UnsafeNotInheritedNote { + #[primary_span] + pub span: Span, } #[derive(LintDiagnostic)] diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs index 391666554bb..54096abb2e0 100644 --- a/compiler/rustc_monomorphize/src/partitioning.rs +++ b/compiler/rustc_monomorphize/src/partitioning.rs @@ -107,7 +107,8 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel}; use rustc_middle::mir; use rustc_middle::mir::mono::{ - CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, Linkage, MonoItem, Visibility, + CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, Linkage, MonoItem, MonoItemData, + Visibility, }; use rustc_middle::query::Providers; use rustc_middle::ty::print::{characteristic_def_id_of_type, with_no_trimmed_paths}; @@ -130,11 +131,6 @@ struct PlacedMonoItems<'tcx> { codegen_units: Vec<CodegenUnit<'tcx>>, internalization_candidates: FxHashSet<MonoItem<'tcx>>, - - /// These must be obtained when the iterator in `partition` runs. They - /// can't be obtained later because some inlined functions might not be - /// reachable. - unique_inlined_stats: (usize, usize), } // The output CGUs are sorted by name. @@ -152,11 +148,11 @@ where // Place all mono items into a codegen unit. `place_mono_items` is // responsible for initializing the CGU size estimates. 
- let PlacedMonoItems { mut codegen_units, internalization_candidates, unique_inlined_stats } = { + let PlacedMonoItems { mut codegen_units, internalization_candidates } = { let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_place_items"); let placed = place_mono_items(cx, mono_items); - debug_dump(tcx, "PLACE", &placed.codegen_units, placed.unique_inlined_stats); + debug_dump(tcx, "PLACE", &placed.codegen_units); placed }; @@ -167,7 +163,7 @@ where { let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_merge_cgus"); merge_codegen_units(cx, &mut codegen_units); - debug_dump(tcx, "MERGE", &codegen_units, unique_inlined_stats); + debug_dump(tcx, "MERGE", &codegen_units); } // Make as many symbols "internal" as possible, so LLVM has more freedom to @@ -176,7 +172,7 @@ where let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_internalize_symbols"); internalize_symbols(cx, &mut codegen_units, internalization_candidates); - debug_dump(tcx, "INTERNALIZE", &codegen_units, unique_inlined_stats); + debug_dump(tcx, "INTERNALIZE", &codegen_units); } // Mark one CGU for dead code, if necessary. @@ -216,18 +212,12 @@ where let cgu_name_builder = &mut CodegenUnitNameBuilder::new(cx.tcx); let cgu_name_cache = &mut FxHashMap::default(); - let mut num_unique_inlined_items = 0; - let mut unique_inlined_items_size = 0; for mono_item in mono_items { // Handle only root items directly here. Inlined items are handled at // the bottom of the loop based on reachability. match mono_item.instantiation_mode(cx.tcx) { InstantiationMode::GloballyShared { .. } => {} - InstantiationMode::LocalCopy => { - num_unique_inlined_items += 1; - unique_inlined_items_size += mono_item.size_estimate(cx.tcx); - continue; - } + InstantiationMode::LocalCopy => continue, } let characteristic_def_id = characteristic_def_id_of_mono_item(cx.tcx, mono_item); @@ -256,8 +246,9 @@ where if visibility == Visibility::Hidden && can_be_internalized { internalization_candidates.insert(mono_item); } + let size_estimate = mono_item.size_estimate(cx.tcx); - cgu.items_mut().insert(mono_item, (linkage, visibility)); + cgu.items_mut().insert(mono_item, MonoItemData { linkage, visibility, size_estimate }); // Get all inlined items that are reachable from `mono_item` without // going via another root item. This includes drop-glue, functions from @@ -271,7 +262,11 @@ where // the `insert` will be a no-op. for inlined_item in reachable_inlined_items { // This is a CGU-private copy. - cgu.items_mut().insert(inlined_item, (Linkage::Internal, Visibility::Default)); + cgu.items_mut().entry(inlined_item).or_insert_with(|| MonoItemData { + linkage: Linkage::Internal, + visibility: Visibility::Default, + size_estimate: inlined_item.size_estimate(cx.tcx), + }); } } @@ -286,14 +281,10 @@ where codegen_units.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str())); for cgu in codegen_units.iter_mut() { - cgu.compute_size_estimate(cx.tcx); + cgu.compute_size_estimate(); } - return PlacedMonoItems { - codegen_units, - internalization_candidates, - unique_inlined_stats: (num_unique_inlined_items, unique_inlined_items_size), - }; + return PlacedMonoItems { codegen_units, internalization_candidates }; fn get_reachable_inlined_items<'tcx>( tcx: TyCtxt<'tcx>, @@ -349,7 +340,7 @@ fn merge_codegen_units<'tcx>( && codegen_units.iter().any(|cgu| cgu.size_estimate() < NON_INCR_MIN_CGU_SIZE)) { // Sort small cgus to the back. 
- codegen_units.sort_by_cached_key(|cgu| cmp::Reverse(cgu.size_estimate())); + codegen_units.sort_by_key(|cgu| cmp::Reverse(cgu.size_estimate())); let mut smallest = codegen_units.pop().unwrap(); let second_smallest = codegen_units.last_mut().unwrap(); @@ -358,7 +349,7 @@ fn merge_codegen_units<'tcx>( // may be duplicate inlined items, in which case the destination CGU is // unaffected. Recalculate size estimates afterwards. second_smallest.items_mut().extend(smallest.items_mut().drain()); - second_smallest.compute_size_estimate(cx.tcx); + second_smallest.compute_size_estimate(); // Record that `second_smallest` now contains all the stuff that was // in `smallest` before. @@ -492,7 +483,7 @@ fn internalize_symbols<'tcx>( for cgu in codegen_units { let home_cgu = MonoItemPlacement::SingleCgu(cgu.name()); - for (item, linkage_and_visibility) in cgu.items_mut() { + for (item, data) in cgu.items_mut() { if !internalization_candidates.contains(item) { // This item is no candidate for internalizing, so skip it. continue; @@ -520,7 +511,8 @@ fn internalize_symbols<'tcx>( // If we got here, we did not find any uses from other CGUs, so // it's fine to make this monomorphization internal. - *linkage_and_visibility = (Linkage::Internal, Visibility::Default); + data.linkage = Linkage::Internal; + data.visibility = Visibility::Default; } } } @@ -537,7 +529,7 @@ fn mark_code_coverage_dead_code_cgu<'tcx>(codegen_units: &mut [CodegenUnit<'tcx> // function symbols to be included via `-u` or `/include` linker args. let dead_code_cgu = codegen_units .iter_mut() - .filter(|cgu| cgu.items().iter().any(|(_, (linkage, _))| *linkage == Linkage::External)) + .filter(|cgu| cgu.items().iter().any(|(_, data)| data.linkage == Linkage::External)) .min_by_key(|cgu| cgu.size_estimate()); // If there are no CGUs that have externally linked items, then we just @@ -851,12 +843,7 @@ fn default_visibility(tcx: TyCtxt<'_>, id: DefId, is_generic: bool) -> Visibilit } } -fn debug_dump<'a, 'tcx: 'a>( - tcx: TyCtxt<'tcx>, - label: &str, - cgus: &[CodegenUnit<'tcx>], - (unique_inlined_items, unique_inlined_size): (usize, usize), -) { +fn debug_dump<'a, 'tcx: 'a>(tcx: TyCtxt<'tcx>, label: &str, cgus: &[CodegenUnit<'tcx>]) { let dump = move || { use std::fmt::Write; @@ -865,28 +852,36 @@ fn debug_dump<'a, 'tcx: 'a>( // Note: every unique root item is placed exactly once, so the number // of unique root items always equals the number of placed root items. + // + // Also, unreached inlined items won't be counted here. This is fine. + + let mut inlined_items = FxHashSet::default(); let mut root_items = 0; - // unique_inlined_items is passed in above. + let mut unique_inlined_items = 0; let mut placed_inlined_items = 0; let mut root_size = 0; - // unique_inlined_size is passed in above. + let mut unique_inlined_size = 0; let mut placed_inlined_size = 0; for cgu in cgus.iter() { num_cgus += 1; all_cgu_sizes.push(cgu.size_estimate()); - for (item, _) in cgu.items() { + for (item, data) in cgu.items() { match item.instantiation_mode(tcx) { InstantiationMode::GloballyShared { .. 
} => { root_items += 1; - root_size += item.size_estimate(tcx); + root_size += data.size_estimate; } InstantiationMode::LocalCopy => { + if inlined_items.insert(item) { + unique_inlined_items += 1; + unique_inlined_size += data.size_estimate; + } placed_inlined_items += 1; - placed_inlined_size += item.size_estimate(tcx); + placed_inlined_size += data.size_estimate; } } } @@ -928,7 +923,7 @@ fn debug_dump<'a, 'tcx: 'a>( let mean_size = size as f64 / num_items as f64; let mut placed_item_sizes: Vec<_> = - cgu.items().iter().map(|(item, _)| item.size_estimate(tcx)).collect(); + cgu.items().values().map(|data| data.size_estimate).collect(); placed_item_sizes.sort_unstable_by_key(|&n| cmp::Reverse(n)); let sizes = list(&placed_item_sizes); @@ -937,15 +932,16 @@ fn debug_dump<'a, 'tcx: 'a>( let _ = writeln!(s, " - items: {num_items}, mean size: {mean_size:.1}, sizes: {sizes}",); - for (item, linkage) in cgu.items_in_deterministic_order(tcx) { + for (item, data) in cgu.items_in_deterministic_order(tcx) { + let linkage = data.linkage; let symbol_name = item.symbol_name(tcx).name; let symbol_hash_start = symbol_name.rfind('h'); let symbol_hash = symbol_hash_start.map_or("<no hash>", |i| &symbol_name[i..]); - let size = item.size_estimate(tcx); let kind = match item.instantiation_mode(tcx) { InstantiationMode::GloballyShared { .. } => "root", InstantiationMode::LocalCopy => "inlined", }; + let size = data.size_estimate; let _ = with_no_trimmed_paths!(writeln!( s, " - {item} [{linkage:?}] [{symbol_hash}] ({kind}, size: {size})" @@ -1100,8 +1096,8 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co let mut item_to_cgus: FxHashMap<_, Vec<_>> = Default::default(); for cgu in codegen_units { - for (&mono_item, &linkage) in cgu.items() { - item_to_cgus.entry(mono_item).or_default().push((cgu.name(), linkage)); + for (&mono_item, &data) in cgu.items() { + item_to_cgus.entry(mono_item).or_default().push((cgu.name(), data.linkage)); } } @@ -1114,7 +1110,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co let cgus = item_to_cgus.get_mut(i).unwrap_or(&mut empty); cgus.sort_by_key(|(name, _)| *name); cgus.dedup(); - for &(ref cgu_name, (linkage, _)) in cgus.iter() { + for &(ref cgu_name, linkage) in cgus.iter() { output.push(' '); output.push_str(cgu_name.as_str()); diff --git a/compiler/rustc_passes/messages.ftl b/compiler/rustc_passes/messages.ftl index a607e483c97..0aa3d6265dc 100644 --- a/compiler/rustc_passes/messages.ftl +++ b/compiler/rustc_passes/messages.ftl @@ -98,6 +98,9 @@ passes_collapse_debuginfo = `collapse_debuginfo` attribute should be applied to macro definitions .label = not a macro definition +passes_confusables = attribute should be applied to an inherent method + .label = not an inherent method + passes_const_impl_const_trait = const `impl`s must be for traits marked with `#[const_trait]` .note = this trait must be annotated with `#[const_trait]` @@ -266,6 +269,9 @@ passes_duplicate_lang_item_crate_depends = .first_definition_path = first definition in `{$orig_crate_name}` loaded from {$orig_path} .second_definition_path = second definition in `{$crate_name}` loaded from {$path} +passes_empty_confusables = + expected at least one confusable name + passes_export_name = attribute should be applied to a free function, impl method or static .label = not a free function, impl method or static @@ -326,6 +332,9 @@ passes_implied_feature_not_exist = passes_incorrect_do_not_recommend_location = `#[do_not_recommend]` can only be 
placed on trait implementations +passes_incorrect_meta_item = expected a quoted string literal +passes_incorrect_meta_item_suggestion = consider surrounding this with quotes + passes_incorrect_target = `{$name}` language item must be applied to a {$kind} with {$at_least -> [true] at least {$num} diff --git a/compiler/rustc_passes/src/check_attr.rs b/compiler/rustc_passes/src/check_attr.rs index ee14ffa6da8..4d7ebe3fefe 100644 --- a/compiler/rustc_passes/src/check_attr.rs +++ b/compiler/rustc_passes/src/check_attr.rs @@ -183,6 +183,7 @@ impl CheckAttrVisitor<'_> { | sym::rustc_allowed_through_unstable_modules | sym::rustc_promotable => self.check_stability_promotable(&attr, span, target), sym::link_ordinal => self.check_link_ordinal(&attr, span, target), + sym::rustc_confusables => self.check_confusables(&attr, target), _ => true, }; @@ -1985,6 +1986,46 @@ impl CheckAttrVisitor<'_> { } } + fn check_confusables(&self, attr: &Attribute, target: Target) -> bool { + match target { + Target::Method(MethodKind::Inherent) => { + let Some(meta) = attr.meta() else { + return false; + }; + let ast::MetaItem { kind: MetaItemKind::List(ref metas), .. } = meta else { + return false; + }; + + let mut candidates = Vec::new(); + + for meta in metas { + let NestedMetaItem::Lit(meta_lit) = meta else { + self.tcx.sess.emit_err(errors::IncorrectMetaItem { + span: meta.span(), + suggestion: errors::IncorrectMetaItemSuggestion { + lo: meta.span().shrink_to_lo(), + hi: meta.span().shrink_to_hi(), + }, + }); + return false; + }; + candidates.push(meta_lit.symbol); + } + + if candidates.is_empty() { + self.tcx.sess.emit_err(errors::EmptyConfusables { span: attr.span }); + return false; + } + + true + } + _ => { + self.tcx.sess.emit_err(errors::Confusables { attr_span: attr.span }); + false + } + } + } + fn check_deprecated(&self, hir_id: HirId, attr: &Attribute, _span: Span, target: Target) { match target { Target::Closure | Target::Expression | Target::Statement | Target::Arm => { diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs index 8cab487dd84..fbe6fc3bee4 100644 --- a/compiler/rustc_passes/src/dead.rs +++ b/compiler/rustc_passes/src/dead.rs @@ -276,7 +276,7 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { } // Avoid accessing the HIR for the synthesized associated type generated for RPITITs. 
- if self.tcx.opt_rpitit_info(id.to_def_id()).is_some() { + if self.tcx.is_impl_trait_in_trait(id.to_def_id()) { self.live_symbols.insert(id); continue; } diff --git a/compiler/rustc_passes/src/errors.rs b/compiler/rustc_passes/src/errors.rs index 3fe7feb9dfa..eae13f86049 100644 --- a/compiler/rustc_passes/src/errors.rs +++ b/compiler/rustc_passes/src/errors.rs @@ -618,6 +618,38 @@ pub struct LinkOrdinal { } #[derive(Diagnostic)] +#[diag(passes_confusables)] +pub struct Confusables { + #[primary_span] + pub attr_span: Span, +} + +#[derive(Diagnostic)] +#[diag(passes_empty_confusables)] +pub(crate) struct EmptyConfusables { + #[primary_span] + pub span: Span, +} + +#[derive(Diagnostic)] +#[diag(passes_incorrect_meta_item, code = "E0539")] +pub(crate) struct IncorrectMetaItem { + #[primary_span] + pub span: Span, + #[subdiagnostic] + pub suggestion: IncorrectMetaItemSuggestion, +} + +#[derive(Subdiagnostic)] +#[multipart_suggestion(passes_incorrect_meta_item_suggestion, applicability = "maybe-incorrect")] +pub(crate) struct IncorrectMetaItemSuggestion { + #[suggestion_part(code = "\"")] + pub lo: Span, + #[suggestion_part(code = "\"")] + pub hi: Span, +} + +#[derive(Diagnostic)] #[diag(passes_stability_promotable)] pub struct StabilityPromotable { #[primary_span] diff --git a/compiler/rustc_smir/src/rustc_smir/mod.rs b/compiler/rustc_smir/src/rustc_smir/mod.rs index 6af6cfe58f1..0e5de1e74d3 100644 --- a/compiler/rustc_smir/src/rustc_smir/mod.rs +++ b/compiler/rustc_smir/src/rustc_smir/mod.rs @@ -119,8 +119,12 @@ impl<'tcx> Tables<'tcx> { TyKind::RigidTy(RigidTy::Array(self.intern_ty(*ty), opaque(constant))) } ty::Slice(ty) => TyKind::RigidTy(RigidTy::Slice(self.intern_ty(*ty))), - ty::RawPtr(_) => todo!(), - ty::Ref(_, _, _) => todo!(), + ty::RawPtr(ty::TypeAndMut { ty, mutbl }) => { + TyKind::RigidTy(RigidTy::RawPtr(self.intern_ty(*ty), mutbl.stable())) + } + ty::Ref(region, ty, mutbl) => { + TyKind::RigidTy(RigidTy::Ref(opaque(region), self.intern_ty(*ty), mutbl.stable())) + } ty::FnDef(_, _) => todo!(), ty::FnPtr(_) => todo!(), ty::Dynamic(_, _, _) => todo!(), diff --git a/compiler/rustc_smir/src/stable_mir/mir/body.rs b/compiler/rustc_smir/src/stable_mir/mir/body.rs index 02ac907f09a..831eb6589e4 100644 --- a/compiler/rustc_smir/src/stable_mir/mir/body.rs +++ b/compiler/rustc_smir/src/stable_mir/mir/body.rs @@ -1,4 +1,4 @@ -use crate::rustc_internal::Opaque; +use crate::stable_mir::ty::Region; use crate::stable_mir::{self, ty::Ty}; #[derive(Clone, Debug)] @@ -137,8 +137,6 @@ pub enum Statement { Nop, } -type Region = Opaque; - // FIXME this is incomplete #[derive(Clone, Debug)] pub enum Rvalue { diff --git a/compiler/rustc_smir/src/stable_mir/ty.rs b/compiler/rustc_smir/src/stable_mir/ty.rs index e9f17f92c04..7ae07efb729 100644 --- a/compiler/rustc_smir/src/stable_mir/ty.rs +++ b/compiler/rustc_smir/src/stable_mir/ty.rs @@ -1,4 +1,4 @@ -use super::{with, DefId}; +use super::{mir::Mutability, with, DefId}; use crate::rustc_internal::Opaque; #[derive(Copy, Clone, Debug)] @@ -11,7 +11,7 @@ impl Ty { } type Const = Opaque; -type Region = Opaque; +pub(crate) type Region = Opaque; #[derive(Clone, Debug)] pub enum TyKind { @@ -29,6 +29,8 @@ pub enum RigidTy { Str, Array(Ty, Const), Slice(Ty), + RawPtr(Ty, Mutability), + Ref(Region, Ty, Mutability), Tuple(Vec<Ty>), } diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 4fc440ef947..08925761b39 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -1265,6 +1265,7 @@ symbols! 
{ rustc_clean, rustc_coherence_is_core, rustc_coinductive, + rustc_confusables, rustc_const_stable, rustc_const_unstable, rustc_conversion_suggestion, diff --git a/compiler/rustc_target/src/abi/call/aarch64.rs b/compiler/rustc_target/src/abi/call/aarch64.rs index a84988fa75c..b4c7b0f120f 100644 --- a/compiler/rustc_target/src/abi/call/aarch64.rs +++ b/compiler/rustc_target/src/abi/call/aarch64.rs @@ -1,25 +1,15 @@ use crate::abi::call::{ArgAbi, FnAbi, Reg, RegKind, Uniform}; use crate::abi::{HasDataLayout, TyAbiInterface}; -/// Given integer-types M and register width N (e.g. M=u16 and N=32 bits), the -/// `ParamExtension` policy specifies how a uM value should be treated when -/// passed via register or stack-slot of width N. See also rust-lang/rust#97463. +/// Indicates the variant of the AArch64 ABI we are compiling for. +/// Used to accommodate Apple and Microsoft's deviations from the usual AAPCS ABI. +/// +/// Corresponds to Clang's `AArch64ABIInfo::ABIKind`. #[derive(Copy, Clone, PartialEq)] -pub enum ParamExtension { - /// Indicates that when passing an i8/i16, either as a function argument or - /// as a return value, it must be sign-extended to 32 bits, and likewise a - /// u8/u16 must be zero-extended to 32-bits. (This variant is here to - /// accommodate Apple's deviation from the usual AArch64 ABI as defined by - /// ARM.) - /// - /// See also: <https://developer.apple.com/documentation/xcode/writing-arm64-code-for-apple-platforms#Pass-Arguments-to-Functions-Correctly> - ExtendTo32Bits, - - /// Indicates that no sign- nor zero-extension is performed: if a value of - /// type with bitwidth M is passed as function argument or return value, - /// then M bits are copied into the least significant M bits, and the - /// remaining bits of the register (or word of memory) are untouched. - NoExtension, +pub enum AbiKind { + AAPCS, + DarwinPCS, + Win64, } fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>) -> Option<Uniform> @@ -45,15 +35,17 @@ where }) } -fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>, param_policy: ParamExtension) +fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>, kind: AbiKind) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout, { if !ret.layout.is_aggregate() { - match param_policy { - ParamExtension::ExtendTo32Bits => ret.extend_integer_width_to(32), - ParamExtension::NoExtension => {} + if kind == AbiKind::DarwinPCS { + // On Darwin, when returning an i8/i16, it must be sign-extended to 32 bits, + // and likewise a u8/u16 must be zero-extended to 32-bits. + // See also: <https://developer.apple.com/documentation/xcode/writing-arm64-code-for-apple-platforms#Pass-Arguments-to-Functions-Correctly> + ret.extend_integer_width_to(32) } return; } @@ -70,15 +62,17 @@ where ret.make_indirect(); } -fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, param_policy: ParamExtension) +fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, kind: AbiKind) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout, { if !arg.layout.is_aggregate() { - match param_policy { - ParamExtension::ExtendTo32Bits => arg.extend_integer_width_to(32), - ParamExtension::NoExtension => {} + if kind == AbiKind::DarwinPCS { + // On Darwin, when passing an i8/i16, it must be sign-extended to 32 bits, + // and likewise a u8/u16 must be zero-extended to 32-bits. 
+ // See also: <https://developer.apple.com/documentation/xcode/writing-arm64-code-for-apple-platforms#Pass-Arguments-to-Functions-Correctly> + arg.extend_integer_width_to(32); } return; } @@ -87,27 +81,39 @@ where return; } let size = arg.layout.size; - let bits = size.bits(); - if bits <= 128 { - arg.cast_to(Uniform { unit: Reg::i64(), total: size }); + let align = if kind == AbiKind::AAPCS { + // When passing small aggregates by value, the AAPCS ABI mandates using the unadjusted + // alignment of the type (not including `repr(align)`). + // This matches behavior of `AArch64ABIInfo::classifyArgumentType` in Clang. + // See: <https://github.com/llvm/llvm-project/blob/5e691a1c9b0ad22689d4a434ddf4fed940e58dec/clang/lib/CodeGen/TargetInfo.cpp#L5816-L5823> + arg.layout.unadjusted_abi_align + } else { + arg.layout.align.abi + }; + if size.bits() <= 128 { + if align.bits() == 128 { + arg.cast_to(Uniform { unit: Reg::i128(), total: size }); + } else { + arg.cast_to(Uniform { unit: Reg::i64(), total: size }); + } return; } arg.make_indirect(); } -pub fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, param_policy: ParamExtension) +pub fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>, kind: AbiKind) where Ty: TyAbiInterface<'a, C> + Copy, C: HasDataLayout, { if !fn_abi.ret.is_ignore() { - classify_ret(cx, &mut fn_abi.ret, param_policy); + classify_ret(cx, &mut fn_abi.ret, kind); } for arg in fn_abi.args.iter_mut() { if arg.is_ignore() { continue; } - classify_arg(cx, arg, param_policy); + classify_arg(cx, arg, kind); } } diff --git a/compiler/rustc_target/src/abi/call/m68k.rs b/compiler/rustc_target/src/abi/call/m68k.rs index c1e0f54af5f..1d4649ed867 100644 --- a/compiler/rustc_target/src/abi/call/m68k.rs +++ b/compiler/rustc_target/src/abi/call/m68k.rs @@ -10,7 +10,7 @@ fn classify_ret<Ty>(ret: &mut ArgAbi<'_, Ty>) { fn classify_arg<Ty>(arg: &mut ArgAbi<'_, Ty>) { if arg.layout.is_aggregate() { - arg.make_indirect_byval(); + arg.make_indirect_byval(None); } else { arg.extend_integer_width_to(32); } diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs index c4abf6f4b5e..03e7b3e7b40 100644 --- a/compiler/rustc_target/src/abi/call/mod.rs +++ b/compiler/rustc_target/src/abi/call/mod.rs @@ -494,9 +494,7 @@ impl<'a, Ty> ArgAbi<'a, Ty> { .set(ArgAttribute::NonNull) .set(ArgAttribute::NoUndef); attrs.pointee_size = layout.size; - // FIXME(eddyb) We should be doing this, but at least on - // i686-pc-windows-msvc, it results in wrong stack offsets. - // attrs.pointee_align = Some(layout.align.abi); + attrs.pointee_align = Some(layout.align.abi); let extra_attrs = layout.is_unsized().then_some(ArgAttributes::new()); @@ -513,11 +511,19 @@ impl<'a, Ty> ArgAbi<'a, Ty> { self.mode = Self::indirect_pass_mode(&self.layout); } - pub fn make_indirect_byval(&mut self) { + pub fn make_indirect_byval(&mut self, byval_align: Option<Align>) { self.make_indirect(); match self.mode { - PassMode::Indirect { attrs: _, extra_attrs: _, ref mut on_stack } => { + PassMode::Indirect { ref mut attrs, extra_attrs: _, ref mut on_stack } => { *on_stack = true; + + // Some platforms, like 32-bit x86, change the alignment of the type when passing + // `byval`. Account for that. + if let Some(byval_align) = byval_align { + // On all targets with byval align this is currently true, so let's assert it. 
+ debug_assert!(byval_align >= Align::from_bytes(4).unwrap()); + attrs.pointee_align = Some(byval_align); + } } _ => unreachable!(), } @@ -644,7 +650,8 @@ impl<'a, Ty> FnAbi<'a, Ty> { { if abi == spec::abi::Abi::X86Interrupt { if let Some(arg) = self.args.first_mut() { - arg.make_indirect_byval(); + // FIXME(pcwalton): This probably should use the x86 `byval` ABI... + arg.make_indirect_byval(None); } return Ok(()); } @@ -672,12 +679,14 @@ impl<'a, Ty> FnAbi<'a, Ty> { } }, "aarch64" => { - let param_policy = if cx.target_spec().is_like_osx { - aarch64::ParamExtension::ExtendTo32Bits + let kind = if cx.target_spec().is_like_osx { + aarch64::AbiKind::DarwinPCS + } else if cx.target_spec().is_like_windows { + aarch64::AbiKind::Win64 } else { - aarch64::ParamExtension::NoExtension + aarch64::AbiKind::AAPCS }; - aarch64::compute_abi_info(cx, self, param_policy) + aarch64::compute_abi_info(cx, self, kind) } "amdgpu" => amdgpu::compute_abi_info(cx, self), "arm" => arm::compute_abi_info(cx, self), diff --git a/compiler/rustc_target/src/abi/call/wasm.rs b/compiler/rustc_target/src/abi/call/wasm.rs index 44427ee5317..0eb2309ecb2 100644 --- a/compiler/rustc_target/src/abi/call/wasm.rs +++ b/compiler/rustc_target/src/abi/call/wasm.rs @@ -36,7 +36,7 @@ where { arg.extend_integer_width_to(32); if arg.layout.is_aggregate() && !unwrap_trivial_aggregate(cx, arg) { - arg.make_indirect_byval(); + arg.make_indirect_byval(None); } } diff --git a/compiler/rustc_target/src/abi/call/x86.rs b/compiler/rustc_target/src/abi/call/x86.rs index 7c26335dcf4..b738c3133d9 100644 --- a/compiler/rustc_target/src/abi/call/x86.rs +++ b/compiler/rustc_target/src/abi/call/x86.rs @@ -1,5 +1,5 @@ use crate::abi::call::{ArgAttribute, FnAbi, PassMode, Reg, RegKind}; -use crate::abi::{HasDataLayout, TyAbiInterface}; +use crate::abi::{Abi, Align, HasDataLayout, TyAbiInterface, TyAndLayout}; use crate::spec::HasTargetSpec; #[derive(PartialEq)] @@ -53,8 +53,75 @@ where if arg.is_ignore() { continue; } - if arg.layout.is_aggregate() { - arg.make_indirect_byval(); + + // FIXME: MSVC 2015+ will pass the first 3 vector arguments in [XYZ]MM0-2 + // See https://reviews.llvm.org/D72114 for Clang behavior + + let t = cx.target_spec(); + let align_4 = Align::from_bytes(4).unwrap(); + let align_16 = Align::from_bytes(16).unwrap(); + + if t.is_like_msvc + && arg.layout.is_adt() + && let Some(max_repr_align) = arg.layout.max_repr_align + && max_repr_align > align_4 + { + // MSVC has special rules for overaligned arguments: https://reviews.llvm.org/D72114. + // Summarized here: + // - Arguments with _requested_ alignment > 4 are passed indirectly. + // - For backwards compatibility, arguments with natural alignment > 4 are still passed + // on stack (via `byval`). For example, this includes `double`, `int64_t`, + // and structs containing them, provided they lack an explicit alignment attribute. + assert!(arg.layout.align.abi >= max_repr_align, + "abi alignment {:?} less than requested alignment {max_repr_align:?}", + arg.layout.align.abi, + ); + arg.make_indirect(); + } else if arg.layout.is_aggregate() { + // We need to compute the alignment of the `byval` argument. The rules can be found in + // `X86_32ABIInfo::getTypeStackAlignInBytes` in Clang's `TargetInfo.cpp`. Summarized + // here, they are: + // + // 1. If the natural alignment of the type is <= 4, the alignment is 4. + // + // 2. Otherwise, on Linux, the alignment of any vector type is the natural alignment. 
+ // This doesn't matter here because we only pass aggregates via `byval`, not vectors. + // + // 3. Otherwise, on Apple platforms, the alignment of anything that contains a vector + // type is 16. + // + // 4. If none of these conditions are true, the alignment is 4. + + fn contains_vector<'a, Ty, C>(cx: &C, layout: TyAndLayout<'a, Ty>) -> bool + where + Ty: TyAbiInterface<'a, C> + Copy, + { + match layout.abi { + Abi::Uninhabited | Abi::Scalar(_) | Abi::ScalarPair(..) => false, + Abi::Vector { .. } => true, + Abi::Aggregate { .. } => { + for i in 0..layout.fields.count() { + if contains_vector(cx, layout.field(cx, i)) { + return true; + } + } + false + } + } + } + + let byval_align = if arg.layout.align.abi < align_4 { + // (1.) + align_4 + } else if t.is_like_osx && contains_vector(cx, arg.layout) { + // (3.) + align_16 + } else { + // (4.) + align_4 + }; + + arg.make_indirect_byval(Some(byval_align)); } else { arg.extend_integer_width_to(32); } diff --git a/compiler/rustc_target/src/abi/call/x86_64.rs b/compiler/rustc_target/src/abi/call/x86_64.rs index b1aefaf0507..d1efe977699 100644 --- a/compiler/rustc_target/src/abi/call/x86_64.rs +++ b/compiler/rustc_target/src/abi/call/x86_64.rs @@ -213,7 +213,7 @@ where match cls_or_mem { Err(Memory) => { if is_arg { - arg.make_indirect_byval(); + arg.make_indirect_byval(None); } else { // `sret` parameter thus one less integer register available arg.make_indirect(); diff --git a/compiler/rustc_target/src/lib.rs b/compiler/rustc_target/src/lib.rs index a7b54766bc6..3307244a217 100644 --- a/compiler/rustc_target/src/lib.rs +++ b/compiler/rustc_target/src/lib.rs @@ -12,6 +12,7 @@ #![feature(associated_type_bounds)] #![feature(exhaustive_patterns)] #![feature(iter_intersperse)] +#![feature(let_chains)] #![feature(min_specialization)] #![feature(never_type)] #![feature(rustc_attrs)] diff --git a/compiler/rustc_target/src/spec/aarch64_unknown_linux_ohos.rs b/compiler/rustc_target/src/spec/aarch64_unknown_linux_ohos.rs index bf1b089f657..c8f3db00e01 100644 --- a/compiler/rustc_target/src/spec/aarch64_unknown_linux_ohos.rs +++ b/compiler/rustc_target/src/spec/aarch64_unknown_linux_ohos.rs @@ -3,9 +3,7 @@ use crate::spec::{Target, TargetOptions}; use super::SanitizerSet; pub fn target() -> Target { - let mut base = super::linux_musl_base::opts(); - base.env = "ohos".into(); - base.crt_static_default = false; + let mut base = super::linux_ohos_base::opts(); base.max_atomic_width = Some(128); Target { @@ -17,8 +15,6 @@ pub fn target() -> Target { options: TargetOptions { features: "+reserve-x18".into(), mcount: "\u{1}_mcount".into(), - force_emulated_tls: true, - has_thread_local: false, supported_sanitizers: SanitizerSet::ADDRESS | SanitizerSet::CFI | SanitizerSet::LEAK diff --git a/compiler/rustc_target/src/spec/armv7_unknown_linux_ohos.rs b/compiler/rustc_target/src/spec/armv7_unknown_linux_ohos.rs index 16da2453367..e9b0bda68ef 100644 --- a/compiler/rustc_target/src/spec/armv7_unknown_linux_ohos.rs +++ b/compiler/rustc_target/src/spec/armv7_unknown_linux_ohos.rs @@ -17,12 +17,8 @@ pub fn target() -> Target { abi: "eabi".into(), features: "+v7,+thumb2,+soft-float,-neon".into(), max_atomic_width: Some(64), - env: "ohos".into(), - crt_static_default: false, mcount: "\u{1}mcount".into(), - force_emulated_tls: true, - has_thread_local: false, - ..super::linux_musl_base::opts() + ..super::linux_ohos_base::opts() }, } } diff --git a/compiler/rustc_target/src/spec/linux_ohos_base.rs b/compiler/rustc_target/src/spec/linux_ohos_base.rs new file mode 100644 
index 00000000000..4ad4c837336 --- /dev/null +++ b/compiler/rustc_target/src/spec/linux_ohos_base.rs @@ -0,0 +1,12 @@ +use crate::spec::TargetOptions; + +pub fn opts() -> TargetOptions { + let mut base = super::linux_base::opts(); + + base.env = "ohos".into(); + base.crt_static_default = false; + base.force_emulated_tls = true; + base.has_thread_local = false; + + base +} diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 2365dfaf1af..6ae07f45f4a 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -74,6 +74,7 @@ mod l4re_base; mod linux_base; mod linux_gnu_base; mod linux_musl_base; +mod linux_ohos_base; mod linux_uclibc_base; mod msvc_base; mod netbsd_base; @@ -1433,6 +1434,8 @@ supported_targets! { ("riscv64gc-unknown-linux-gnu", riscv64gc_unknown_linux_gnu), ("riscv64gc-unknown-linux-musl", riscv64gc_unknown_linux_musl), + ("sparc-unknown-none-elf", sparc_unknown_none_elf), + ("loongarch64-unknown-none", loongarch64_unknown_none), ("loongarch64-unknown-none-softfloat", loongarch64_unknown_none_softfloat), @@ -1493,6 +1496,7 @@ supported_targets! { ("aarch64-unknown-linux-ohos", aarch64_unknown_linux_ohos), ("armv7-unknown-linux-ohos", armv7_unknown_linux_ohos), + ("x86_64-unknown-linux-ohos", x86_64_unknown_linux_ohos), } /// Cow-Vec-Str: Cow<'static, [Cow<'static, str>]> diff --git a/compiler/rustc_target/src/spec/sparc_unknown_none_elf.rs b/compiler/rustc_target/src/spec/sparc_unknown_none_elf.rs new file mode 100644 index 00000000000..7e908a0f365 --- /dev/null +++ b/compiler/rustc_target/src/spec/sparc_unknown_none_elf.rs @@ -0,0 +1,27 @@ +use crate::abi::Endian; +use crate::spec::{Cc, LinkerFlavor, Lld, PanicStrategy, RelocModel, Target, TargetOptions}; + +pub fn target() -> Target { + let options = TargetOptions { + linker_flavor: LinkerFlavor::Gnu(Cc::Yes, Lld::No), + linker: Some("sparc-elf-gcc".into()), + endian: Endian::Big, + cpu: "v7".into(), + abi: "elf".into(), + max_atomic_width: Some(32), + atomic_cas: true, + panic_strategy: PanicStrategy::Abort, + relocation_model: RelocModel::Static, + no_default_libraries: false, + emit_debug_gdb_scripts: false, + eh_frame_header: false, + ..Default::default() + }; + Target { + data_layout: "E-m:e-p:32:32-i64:64-f128:64-n32-S64".into(), + llvm_target: "sparc-unknown-none-elf".into(), + pointer_width: 32, + arch: "sparc".into(), + options, + } +} diff --git a/compiler/rustc_target/src/spec/x86_64_unknown_linux_ohos.rs b/compiler/rustc_target/src/spec/x86_64_unknown_linux_ohos.rs new file mode 100644 index 00000000000..a96be8cd554 --- /dev/null +++ b/compiler/rustc_target/src/spec/x86_64_unknown_linux_ohos.rs @@ -0,0 +1,26 @@ +use crate::spec::{Cc, LinkerFlavor, Lld, SanitizerSet, StackProbeType, Target}; + +pub fn target() -> Target { + let mut base = super::linux_ohos_base::opts(); + base.cpu = "x86-64".into(); + base.max_atomic_width = Some(64); + base.add_pre_link_args(LinkerFlavor::Gnu(Cc::Yes, Lld::No), &["-m64"]); + base.stack_probes = StackProbeType::X86; + base.static_position_independent_executables = true; + base.supported_sanitizers = SanitizerSet::ADDRESS + | SanitizerSet::CFI + | SanitizerSet::LEAK + | SanitizerSet::MEMORY + | SanitizerSet::THREAD; + base.supports_xray = true; + + Target { + // LLVM 15 doesn't support OpenHarmony yet, use a linux target instead. 
+ llvm_target: "x86_64-unknown-linux-musl".into(), + pointer_width: 64, + data_layout: "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128" + .into(), + arch: "x86_64".into(), + options: base, + } +} diff --git a/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs b/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs index 3a1302e46ba..a8ba98bef6d 100644 --- a/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs +++ b/compiler/rustc_trait_selection/src/solve/assembly/structural_traits.rs @@ -148,7 +148,7 @@ pub(in crate::solve) fn instantiate_constituent_tys_for_sized_trait<'tcx>( ty::Adt(def, args) => { let sized_crit = def.sized_constraint(ecx.tcx()); - Ok(sized_crit.arg_iter_copied(ecx.tcx(), args).collect()) + Ok(sized_crit.iter_instantiated_copied(ecx.tcx(), args).collect()) } } } @@ -353,7 +353,8 @@ pub(in crate::solve) fn predicates_for_object_candidate<'tcx>( // FIXME(associated_const_equality): Also add associated consts to // the requirements here. if item.kind == ty::AssocKind::Type { - requirements.extend(tcx.item_bounds(item.def_id).arg_iter(tcx, trait_ref.args)); + requirements + .extend(tcx.item_bounds(item.def_id).iter_instantiated(tcx, trait_ref.args)); } } diff --git a/compiler/rustc_trait_selection/src/solve/eval_ctxt/select.rs b/compiler/rustc_trait_selection/src/solve/eval_ctxt/select.rs index 8af9d090164..a6d118d8cc2 100644 --- a/compiler/rustc_trait_selection/src/solve/eval_ctxt/select.rs +++ b/compiler/rustc_trait_selection/src/solve/eval_ctxt/select.rs @@ -5,7 +5,7 @@ use rustc_hir::def_id::DefId; use rustc_infer::infer::{DefineOpaqueTypes, InferCtxt, InferOk}; use rustc_infer::traits::util::supertraits; use rustc_infer::traits::{ - Obligation, PolyTraitObligation, PredicateObligation, Selection, SelectionResult, + Obligation, PolyTraitObligation, PredicateObligation, Selection, SelectionResult, TraitEngine, }; use rustc_middle::traits::solve::{CanonicalInput, Certainty, Goal}; use rustc_middle::traits::{ @@ -20,6 +20,8 @@ use crate::solve::eval_ctxt::{EvalCtxt, GenerateProofTree}; use crate::solve::inspect::ProofTreeBuilder; use crate::solve::search_graph::OverflowHandler; use crate::traits::vtable::{count_own_vtable_entries, prepare_vtable_segments, VtblSegment}; +use crate::traits::StructurallyNormalizeExt; +use crate::traits::TraitEngineExt; pub trait InferCtxtSelectExt<'tcx> { fn select_in_new_trait_solver( @@ -228,18 +230,24 @@ fn rematch_object<'tcx>( goal: Goal<'tcx, ty::TraitPredicate<'tcx>>, mut nested: Vec<PredicateObligation<'tcx>>, ) -> SelectionResult<'tcx, Selection<'tcx>> { - let self_ty = goal.predicate.self_ty(); - let ty::Dynamic(data, _, source_kind) = *self_ty.kind() else { bug!() }; - let source_trait_ref = data.principal().unwrap().with_self_ty(infcx.tcx, self_ty); + let a_ty = structurally_normalize(goal.predicate.self_ty(), infcx, goal.param_env, &mut nested); + let ty::Dynamic(data, _, source_kind) = *a_ty.kind() else { bug!() }; + let source_trait_ref = data.principal().unwrap().with_self_ty(infcx.tcx, a_ty); let (is_upcasting, target_trait_ref_unnormalized) = if Some(goal.predicate.def_id()) == infcx.tcx.lang_items().unsize_trait() { assert_eq!(source_kind, ty::Dyn, "cannot upcast dyn*"); - if let ty::Dynamic(data, _, ty::Dyn) = goal.predicate.trait_ref.args.type_at(1).kind() { + let b_ty = structurally_normalize( + goal.predicate.trait_ref.args.type_at(1), + infcx, + goal.param_env, + &mut nested, + ); + if let ty::Dynamic(data, _, ty::Dyn) = *b_ty.kind() { // 
FIXME: We also need to ensure that the source lifetime outlives the // target lifetime. This doesn't matter for codegen, though, and only // *really* matters if the goal's certainty is ambiguous. - (true, data.principal().unwrap().with_self_ty(infcx.tcx, self_ty)) + (true, data.principal().unwrap().with_self_ty(infcx.tcx, a_ty)) } else { bug!() } @@ -447,3 +455,22 @@ fn rematch_unsize<'tcx>( Ok(Some(ImplSource::Builtin(nested))) } + +fn structurally_normalize<'tcx>( + ty: Ty<'tcx>, + infcx: &InferCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + nested: &mut Vec<PredicateObligation<'tcx>>, +) -> Ty<'tcx> { + if matches!(ty.kind(), ty::Alias(..)) { + let mut engine = <dyn TraitEngine<'tcx>>::new(infcx); + let normalized_ty = infcx + .at(&ObligationCause::dummy(), param_env) + .structurally_normalize(ty, &mut *engine) + .expect("normalization shouldn't fail if we got to here"); + nested.extend(engine.pending_obligations()); + normalized_ty + } else { + ty + } +} diff --git a/compiler/rustc_trait_selection/src/solve/inherent_projection.rs b/compiler/rustc_trait_selection/src/solve/inherent_projection.rs new file mode 100644 index 00000000000..d10a14ff742 --- /dev/null +++ b/compiler/rustc_trait_selection/src/solve/inherent_projection.rs @@ -0,0 +1,44 @@ +use rustc_middle::traits::solve::{Certainty, Goal, QueryResult}; +use rustc_middle::ty; + +use super::EvalCtxt; + +impl<'tcx> EvalCtxt<'_, 'tcx> { + pub(super) fn normalize_inherent_associated_type( + &mut self, + goal: Goal<'tcx, ty::ProjectionPredicate<'tcx>>, + ) -> QueryResult<'tcx> { + let tcx = self.tcx(); + let inherent = goal.predicate.projection_ty; + let expected = goal.predicate.term.ty().expect("inherent consts are treated separately"); + + let impl_def_id = tcx.parent(inherent.def_id); + let impl_substs = self.fresh_args_for_item(impl_def_id); + + // Equate impl header and add impl where clauses + self.eq( + goal.param_env, + inherent.self_ty(), + tcx.type_of(impl_def_id).instantiate(tcx, impl_substs), + )?; + + // Equate IAT with the RHS of the project goal + let inherent_substs = inherent.rebase_inherent_args_onto_impl(impl_substs, tcx); + self.eq( + goal.param_env, + expected, + tcx.type_of(inherent.def_id).instantiate(tcx, inherent_substs), + ) + .expect("expected goal term to be fully unconstrained"); + + // Check both where clauses on the impl and IAT + self.add_goals( + tcx.predicates_of(inherent.def_id) + .instantiate(tcx, inherent_substs) + .into_iter() + .map(|(pred, _)| goal.with(tcx, pred)), + ); + + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + } +} diff --git a/compiler/rustc_trait_selection/src/solve/mod.rs b/compiler/rustc_trait_selection/src/solve/mod.rs index 94b8af966bf..7c15c3c0e8b 100644 --- a/compiler/rustc_trait_selection/src/solve/mod.rs +++ b/compiler/rustc_trait_selection/src/solve/mod.rs @@ -25,6 +25,7 @@ mod assembly; mod canonicalize; mod eval_ctxt; mod fulfill; +mod inherent_projection; pub mod inspect; mod normalize; mod opaques; @@ -37,7 +38,7 @@ pub use eval_ctxt::{ EvalCtxt, GenerateProofTree, InferCtxtEvalExt, InferCtxtSelectExt, UseGlobalCache, }; pub use fulfill::FulfillmentCtxt; -pub(crate) use normalize::deeply_normalize; +pub(crate) use normalize::{deeply_normalize, deeply_normalize_with_skipped_universes}; #[derive(Debug, Clone, Copy)] enum SolverMode { diff --git a/compiler/rustc_trait_selection/src/solve/normalize.rs b/compiler/rustc_trait_selection/src/solve/normalize.rs index ed60071c2cd..f51f4edb933 100644 --- 
a/compiler/rustc_trait_selection/src/solve/normalize.rs +++ b/compiler/rustc_trait_selection/src/solve/normalize.rs @@ -20,8 +20,22 @@ pub(crate) fn deeply_normalize<'tcx, T: TypeFoldable<TyCtxt<'tcx>>>( at: At<'_, 'tcx>, value: T, ) -> Result<T, Vec<FulfillmentError<'tcx>>> { + assert!(!value.has_escaping_bound_vars()); + deeply_normalize_with_skipped_universes(at, value, vec![]) +} + +/// Deeply normalize all aliases in `value`. This does not handle inference and expects +/// its input to be already fully resolved. +/// +/// Additionally takes a list of universes which represents the binders which have been +/// entered before passing `value` to the function. +pub(crate) fn deeply_normalize_with_skipped_universes<'tcx, T: TypeFoldable<TyCtxt<'tcx>>>( + at: At<'_, 'tcx>, + value: T, + universes: Vec<Option<UniverseIndex>>, +) -> Result<T, Vec<FulfillmentError<'tcx>>> { let fulfill_cx = FulfillmentCtxt::new(at.infcx); - let mut folder = NormalizationFolder { at, fulfill_cx, depth: 0, universes: Vec::new() }; + let mut folder = NormalizationFolder { at, fulfill_cx, depth: 0, universes }; value.try_fold_with(&mut folder) } diff --git a/compiler/rustc_trait_selection/src/solve/project_goals.rs b/compiler/rustc_trait_selection/src/solve/project_goals.rs index 564451a31ed..d677fbdc7f4 100644 --- a/compiler/rustc_trait_selection/src/solve/project_goals.rs +++ b/compiler/rustc_trait_selection/src/solve/project_goals.rs @@ -48,7 +48,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { self.merge_candidates(candidates) } ty::AssocItemContainer::ImplContainer => { - bug!("IATs not supported here yet") + self.normalize_inherent_associated_type(goal) } } } else { @@ -112,6 +112,7 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> { ) -> QueryResult<'tcx> { if let Some(projection_pred) = assumption.as_projection_clause() { if projection_pred.projection_def_id() == goal.predicate.def_id() { + let tcx = ecx.tcx(); ecx.probe_candidate("assumption").enter(|ecx| { let assumption_projection_pred = ecx.instantiate_binder_with_infer(projection_pred); @@ -122,6 +123,14 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> { )?; ecx.eq(goal.param_env, goal.predicate.term, assumption_projection_pred.term) .expect("expected goal term to be fully unconstrained"); + + // Add GAT where clauses from the trait's definition + ecx.add_goals( + tcx.predicates_of(goal.predicate.def_id()) + .instantiate_own(tcx, goal.predicate.projection_ty.args) + .map(|(pred, _)| goal.with(tcx, pred)), + ); + then(ecx) }) } else { @@ -160,6 +169,13 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> { .map(|pred| goal.with(tcx, pred)); ecx.add_goals(where_clause_bounds); + // Add GAT where clauses from the trait's definition + ecx.add_goals( + tcx.predicates_of(goal.predicate.def_id()) + .instantiate_own(tcx, goal.predicate.projection_ty.args) + .map(|(pred, _)| goal.with(tcx, pred)), + ); + // In case the associated item is hidden due to specialization, we have to // return ambiguity this would otherwise be incomplete, resulting in // unsoundness during coherence (#105782). 
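Aside, for context on the project_goals.rs and inherent_projection.rs hunks above: they wire two language features into the new trait solver, normalization of inherent associated types and enforcement of a GAT's own `where` clauses during projection. Below is a minimal sketch of the kind of source these goals are generated for; it is illustrative only, not part of this patch, and the inherent associated type part assumes a nightly compiler with the unstable `inherent_associated_types` feature gate.

    #![feature(inherent_associated_types)]
    #![allow(incomplete_features)]

    struct Wrapper<T>(T);

    impl<T> Wrapper<T> {
        // Inherent associated type: resolving `Wrapper<u8>::Output` means equating the
        // impl header with the self type and proving the impl's where clauses, which is
        // what `normalize_inherent_associated_type` does in the new solver.
        type Output = Vec<T>;
    }

    trait Lender {
        // Generic associated type with a `where` clause; projecting `Self::Item<'a>`
        // now also registers the `Self: 'a` bound from the trait definition as a goal.
        type Item<'a>
        where
            Self: 'a;

        fn lend(&self) -> Self::Item<'_>;
    }

    fn main() {
        let v: Wrapper<u8>::Output = Vec::new();
        assert!(v.is_empty());
    }
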
diff --git a/compiler/rustc_trait_selection/src/solve/weak_types.rs b/compiler/rustc_trait_selection/src/solve/weak_types.rs index 2c176d4cfd6..c7717879a4a 100644 --- a/compiler/rustc_trait_selection/src/solve/weak_types.rs +++ b/compiler/rustc_trait_selection/src/solve/weak_types.rs @@ -14,6 +14,16 @@ impl<'tcx> EvalCtxt<'_, 'tcx> { let actual = tcx.type_of(weak_ty.def_id).instantiate(tcx, weak_ty.args); self.eq(goal.param_env, expected, actual)?; + + // Check where clauses + self.add_goals( + tcx.predicates_of(weak_ty.def_id) + .instantiate(tcx, weak_ty.args) + .predicates + .into_iter() + .map(|pred| goal.with(tcx, pred)), + ); + self.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) } } diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs index a821d1be64b..c14839fe9be 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/mod.rs @@ -2388,14 +2388,11 @@ impl<'tcx> InferCtxtPrivExt<'tcx> for TypeErrCtxt<'_, 'tcx> { // If there is only one implementation of the trait, suggest using it. // Otherwise, use a placeholder comment for the implementation. let (message, impl_suggestion) = if non_blanket_impl_count == 1 {( - "use the fully-qualified path to the only available implementation".to_string(), + "use the fully-qualified path to the only available implementation", format!("<{} as ", self.tcx.type_of(impl_def_id).instantiate_identity()) - )} else {( - format!( - "use a fully-qualified path to a specific available implementation ({} found)", - non_blanket_impl_count - ), - "</* self type */ as ".to_string() + )} else { + ("use a fully-qualified path to a specific available implementation", + "</* self type */ as ".to_string() )}; let mut suggestions = vec![( path.span.shrink_to_lo(), diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs index 2c78ae66907..073a2a2b1a0 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs @@ -435,7 +435,7 @@ fn suggest_restriction<'tcx>( ) { if hir_generics.where_clause_span.from_expansion() || hir_generics.where_clause_span.desugaring_kind().is_some() - || projection.is_some_and(|projection| tcx.opt_rpitit_info(projection.def_id).is_some()) + || projection.is_some_and(|projection| tcx.is_impl_trait_in_trait(projection.def_id)) { return; } diff --git a/compiler/rustc_trait_selection/src/traits/fulfill.rs b/compiler/rustc_trait_selection/src/traits/fulfill.rs index 9ddf8a09b58..3ebf1246a41 100644 --- a/compiler/rustc_trait_selection/src/traits/fulfill.rs +++ b/compiler/rustc_trait_selection/src/traits/fulfill.rs @@ -670,7 +670,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> { stalled_on: &mut Vec<TyOrConstInferVar<'tcx>>, ) -> ProcessResult<PendingPredicateObligation<'tcx>, FulfillmentErrorCode<'tcx>> { let infcx = self.selcx.infcx; - if obligation.predicate.is_global() { + if obligation.predicate.is_global() && !self.selcx.is_intercrate() { // no type variables present, can use evaluation for better caching. // FIXME: consider caching errors too. 
if infcx.predicate_must_hold_considering_regions(obligation) { @@ -724,7 +724,7 @@ impl<'a, 'tcx> FulfillProcessor<'a, 'tcx> { ) -> ProcessResult<PendingPredicateObligation<'tcx>, FulfillmentErrorCode<'tcx>> { let tcx = self.selcx.tcx(); - if obligation.predicate.is_global() { + if obligation.predicate.is_global() && !self.selcx.is_intercrate() { // no type variables present, can use evaluation for better caching. // FIXME: consider caching errors too. if self.selcx.infcx.predicate_must_hold_considering_regions(obligation) { diff --git a/compiler/rustc_trait_selection/src/traits/object_safety.rs b/compiler/rustc_trait_selection/src/traits/object_safety.rs index 265a2ae7621..143e8412967 100644 --- a/compiler/rustc_trait_selection/src/traits/object_safety.rs +++ b/compiler/rustc_trait_selection/src/traits/object_safety.rs @@ -393,7 +393,7 @@ fn object_safety_violation_for_assoc_item( ty::AssocKind::Type => { if !tcx.features().generic_associated_types_extended && !tcx.generics_of(item.def_id).params.is_empty() - && item.opt_rpitit_info.is_none() + && !item.is_impl_trait_in_trait() { Some(ObjectSafetyViolation::GAT(item.name, item.ident(tcx).span)) } else { diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index e137ed9cda8..a39fc1f1771 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -1402,9 +1402,17 @@ pub fn compute_inherent_assoc_ty_args<'a, 'b, 'tcx>( let impl_def_id = tcx.parent(alias_ty.def_id); let impl_args = selcx.infcx.fresh_args_for_item(cause.span, impl_def_id); - let impl_ty = tcx.type_of(impl_def_id).instantiate(tcx, impl_args); - let impl_ty = - normalize_with_depth_to(selcx, param_env, cause.clone(), depth + 1, impl_ty, obligations); + let mut impl_ty = tcx.type_of(impl_def_id).instantiate(tcx, impl_args); + if !selcx.infcx.next_trait_solver() { + impl_ty = normalize_with_depth_to( + selcx, + param_env, + cause.clone(), + depth + 1, + impl_ty, + obligations, + ); + } // Infer the generic parameters of the impl by unifying the // impl type with the self type of the projection. @@ -1421,7 +1429,7 @@ pub fn compute_inherent_assoc_ty_args<'a, 'b, 'tcx>( } } - alias_ty.rebase_args_onto_impl(impl_args, tcx) + alias_ty.rebase_inherent_args_onto_impl(impl_args, tcx) } enum Projected<'tcx> { diff --git a/compiler/rustc_trait_selection/src/traits/query/normalize.rs b/compiler/rustc_trait_selection/src/traits/query/normalize.rs index ab301b593c2..d45cf94f731 100644 --- a/compiler/rustc_trait_selection/src/traits/query/normalize.rs +++ b/compiler/rustc_trait_selection/src/traits/query/normalize.rs @@ -61,8 +61,27 @@ impl<'cx, 'tcx> QueryNormalizeExt<'tcx> for At<'cx, 'tcx> { self.cause, ); + // This is actually a consequence by the way `normalize_erasing_regions` works currently. + // Because it needs to call the `normalize_generic_arg_after_erasing_regions`, it folds + // through tys and consts in a `TypeFoldable`. Importantly, it skips binders, leaving us + // with trying to normalize with escaping bound vars. + // + // Here, we just add the universes that we *would* have created had we passed through the binders. + // + // We *could* replace escaping bound vars eagerly here, but it doesn't seem really necessary. + // The rest of the code is already set up to be lazy about replacing bound vars, + // and only when we actually have to normalize. 
+ let universes = if value.has_escaping_bound_vars() { + let mut max_visitor = + MaxEscapingBoundVarVisitor { outer_index: ty::INNERMOST, escaping: 0 }; + value.visit_with(&mut max_visitor); + vec![None; max_visitor.escaping] + } else { + vec![] + }; + if self.infcx.next_trait_solver() { - match crate::solve::deeply_normalize(self, value) { + match crate::solve::deeply_normalize_with_skipped_universes(self, value, universes) { Ok(value) => return Ok(Normalized { value, obligations: vec![] }), Err(_errors) => { return Err(NoSolution); @@ -81,27 +100,9 @@ impl<'cx, 'tcx> QueryNormalizeExt<'tcx> for At<'cx, 'tcx> { obligations: vec![], cache: SsoHashMap::new(), anon_depth: 0, - universes: vec![], + universes, }; - // This is actually a consequence by the way `normalize_erasing_regions` works currently. - // Because it needs to call the `normalize_generic_arg_after_erasing_regions`, it folds - // through tys and consts in a `TypeFoldable`. Importantly, it skips binders, leaving us - // with trying to normalize with escaping bound vars. - // - // Here, we just add the universes that we *would* have created had we passed through the binders. - // - // We *could* replace escaping bound vars eagerly here, but it doesn't seem really necessary. - // The rest of the code is already set up to be lazy about replacing bound vars, - // and only when we actually have to normalize. - if value.has_escaping_bound_vars() { - let mut max_visitor = - MaxEscapingBoundVarVisitor { outer_index: ty::INNERMOST, escaping: 0 }; - value.visit_with(&mut max_visitor); - if max_visitor.escaping > 0 { - normalizer.universes.extend((0..max_visitor.escaping).map(|_| None)); - } - } let result = value.try_fold_with(&mut normalizer); info!( "normalize::<{}>: result={:?} with {} obligations", diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 56d5d698d90..e086489b1bc 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -2133,7 +2133,7 @@ impl<'tcx> SelectionContext<'_, 'tcx> { Where( obligation .predicate - .rebind(sized_crit.arg_iter_copied(self.tcx(), args).collect()), + .rebind(sized_crit.iter_instantiated_copied(self.tcx(), args).collect()), ) } diff --git a/compiler/rustc_trait_selection/src/traits/wf.rs b/compiler/rustc_trait_selection/src/traits/wf.rs index b327dd2e150..5f6bb04fda4 100644 --- a/compiler/rustc_trait_selection/src/traits/wf.rs +++ b/compiler/rustc_trait_selection/src/traits/wf.rs @@ -570,7 +570,7 @@ impl<'a, 'tcx> WfPredicates<'a, 'tcx> { )); } ty::ConstKind::Expr(_) => { - // FIXME(generic_const_exprs): this doesnt verify that given `Expr(N + 1)` the + // FIXME(generic_const_exprs): this doesn't verify that given `Expr(N + 1)` the // trait bound `typeof(N): Add<typeof(1)>` holds. This is currently unnecessary // as `ConstKind::Expr` is only produced via normalization of `ConstKind::Unevaluated` // which means that the `DefId` would have been typeck'd elsewhere. However in diff --git a/compiler/rustc_transmute/src/lib.rs b/compiler/rustc_transmute/src/lib.rs index 34ad6bd8c69..05ad4a4a12a 100644 --- a/compiler/rustc_transmute/src/lib.rs +++ b/compiler/rustc_transmute/src/lib.rs @@ -78,6 +78,7 @@ mod rustc { use rustc_middle::ty::ParamEnv; use rustc_middle::ty::Ty; use rustc_middle::ty::TyCtxt; + use rustc_middle::ty::ValTree; /// The source and destination types of a transmutation. 
#[derive(TypeVisitable, Debug, Clone, Copy)] @@ -148,7 +149,17 @@ mod rustc { ); let variant = adt_def.non_enum_variant(); - let fields = c.to_valtree().unwrap_branch(); + let fields = match c.try_to_valtree() { + Some(ValTree::Branch(branch)) => branch, + _ => { + return Some(Self { + alignment: true, + lifetimes: true, + safety: true, + validity: true, + }); + } + }; let get_field = |name| { let (field_idx, _) = variant diff --git a/compiler/rustc_ty_utils/src/consts.rs b/compiler/rustc_ty_utils/src/consts.rs index 5f754d90b6d..383cc996b9e 100644 --- a/compiler/rustc_ty_utils/src/consts.rs +++ b/compiler/rustc_ty_utils/src/consts.rs @@ -174,7 +174,7 @@ fn recurse_build<'tcx>( } // `ExprKind::Use` happens when a `hir::ExprKind::Cast` is a // "coercion cast" i.e. using a coercion or is a no-op. - // This is important so that `N as usize as usize` doesnt unify with `N as usize`. (untested) + // This is important so that `N as usize as usize` doesn't unify with `N as usize`. (untested) &ExprKind::Use { source } => { let arg = recurse_build(tcx, body, source, root_span)?; ty::Const::new_expr(tcx, Expr::Cast(CastKind::Use, arg, node.ty), node.ty) diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs index d858fa5a97c..b840ff184e0 100644 --- a/compiler/rustc_ty_utils/src/layout.rs +++ b/compiler/rustc_ty_utils/src/layout.rs @@ -258,6 +258,8 @@ fn layout_of_uncached<'tcx>( largest_niche, align: element.align, size, + max_repr_align: None, + unadjusted_abi_align: element.align.abi, }) } ty::Slice(element) => { @@ -269,6 +271,8 @@ fn layout_of_uncached<'tcx>( largest_niche: None, align: element.align, size: Size::ZERO, + max_repr_align: None, + unadjusted_abi_align: element.align.abi, }) } ty::Str => tcx.mk_layout(LayoutS { @@ -278,6 +282,8 @@ fn layout_of_uncached<'tcx>( largest_niche: None, align: dl.i8_align, size: Size::ZERO, + max_repr_align: None, + unadjusted_abi_align: dl.i8_align.abi, }), // Odd unit types. @@ -431,6 +437,8 @@ fn layout_of_uncached<'tcx>( largest_niche: e_ly.largest_niche, size, align, + max_repr_align: None, + unadjusted_abi_align: align.abi, }) } @@ -884,6 +892,8 @@ fn generator_layout<'tcx>( largest_niche: prefix.largest_niche, size, align, + max_repr_align: None, + unadjusted_abi_align: align.abi, }); debug!("generator layout ({:?}): {:#?}", ty, layout); Ok(layout) diff --git a/compiler/rustc_ty_utils/src/ty.rs b/compiler/rustc_ty_utils/src/ty.rs index c228938126e..505f78d0e5f 100644 --- a/compiler/rustc_ty_utils/src/ty.rs +++ b/compiler/rustc_ty_utils/src/ty.rs @@ -42,7 +42,7 @@ fn sized_constraint_for_ty<'tcx>( let adt_tys = adt.sized_constraint(tcx); debug!("sized_constraint_for_ty({:?}) intermediate = {:?}", ty, adt_tys); adt_tys - .arg_iter_copied(tcx, args) + .iter_instantiated_copied(tcx, args) .flat_map(|ty| sized_constraint_for_ty(tcx, adtdef, ty)) .collect() } @@ -297,7 +297,7 @@ impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for ImplTraitInTraitFinder<'_, 'tcx> { for bound in self .tcx .item_bounds(unshifted_alias_ty.def_id) - .arg_iter(self.tcx, unshifted_alias_ty.args) + .iter_instantiated(self.tcx, unshifted_alias_ty.args) { bound.visit_with(self); } @@ -311,22 +311,6 @@ fn param_env_reveal_all_normalized(tcx: TyCtxt<'_>, def_id: DefId) -> ty::ParamE tcx.param_env(def_id).with_reveal_all_normalized(tcx) } -fn instance_def_size_estimate<'tcx>( - tcx: TyCtxt<'tcx>, - instance_def: ty::InstanceDef<'tcx>, -) -> usize { - use ty::InstanceDef; - - match instance_def { - InstanceDef::Item(..) | InstanceDef::DropGlue(..) 
=> { - let mir = tcx.instance_mir(instance_def); - mir.basic_blocks.iter().map(|bb| bb.statements.len() + 1).sum() - } - // Estimate the size of other compiler-generated shims to be 1. - _ => 1, - } -} - /// If `def_id` is an issue 33140 hack impl, returns its self type; otherwise, returns `None`. /// /// See [`ty::ImplOverlapKind::Issue33140`] for more details. @@ -432,7 +416,6 @@ pub fn provide(providers: &mut Providers) { adt_sized_constraint, param_env, param_env_reveal_all_normalized, - instance_def_size_estimate, issue33140_self_ty, defaultness, unsizing_params_for_adt, diff --git a/library/alloc/src/collections/vec_deque/mod.rs b/library/alloc/src/collections/vec_deque/mod.rs index 896da37f94c..5965ec2affa 100644 --- a/library/alloc/src/collections/vec_deque/mod.rs +++ b/library/alloc/src/collections/vec_deque/mod.rs @@ -2283,21 +2283,21 @@ impl<T, A: Allocator> VecDeque<T, A> { unsafe { slice::from_raw_parts_mut(ptr.add(self.head), self.len) } } - /// Rotates the double-ended queue `mid` places to the left. + /// Rotates the double-ended queue `n` places to the left. /// /// Equivalently, - /// - Rotates item `mid` into the first position. - /// - Pops the first `mid` items and pushes them to the end. - /// - Rotates `len() - mid` places to the right. + /// - Rotates item `n` into the first position. + /// - Pops the first `n` items and pushes them to the end. + /// - Rotates `len() - n` places to the right. /// /// # Panics /// - /// If `mid` is greater than `len()`. Note that `mid == len()` + /// If `n` is greater than `len()`. Note that `n == len()` /// does _not_ panic and is a no-op rotation. /// /// # Complexity /// - /// Takes `*O*(min(mid, len() - mid))` time and no extra space. + /// Takes `*O*(min(n, len() - n))` time and no extra space. /// /// # Examples /// @@ -2316,31 +2316,31 @@ impl<T, A: Allocator> VecDeque<T, A> { /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); /// ``` #[stable(feature = "vecdeque_rotate", since = "1.36.0")] - pub fn rotate_left(&mut self, mid: usize) { - assert!(mid <= self.len()); - let k = self.len - mid; - if mid <= k { - unsafe { self.rotate_left_inner(mid) } + pub fn rotate_left(&mut self, n: usize) { + assert!(n <= self.len()); + let k = self.len - n; + if n <= k { + unsafe { self.rotate_left_inner(n) } } else { unsafe { self.rotate_right_inner(k) } } } - /// Rotates the double-ended queue `k` places to the right. + /// Rotates the double-ended queue `n` places to the right. /// /// Equivalently, - /// - Rotates the first item into position `k`. - /// - Pops the last `k` items and pushes them to the front. - /// - Rotates `len() - k` places to the left. + /// - Rotates the first item into position `n`. + /// - Pops the last `n` items and pushes them to the front. + /// - Rotates `len() - n` places to the left. /// /// # Panics /// - /// If `k` is greater than `len()`. Note that `k == len()` + /// If `n` is greater than `len()`. Note that `n == len()` /// does _not_ panic and is a no-op rotation. /// /// # Complexity /// - /// Takes `*O*(min(k, len() - k))` time and no extra space. + /// Takes `*O*(min(n, len() - n))` time and no extra space. 
/// /// # Examples /// @@ -2359,13 +2359,13 @@ impl<T, A: Allocator> VecDeque<T, A> { /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); /// ``` #[stable(feature = "vecdeque_rotate", since = "1.36.0")] - pub fn rotate_right(&mut self, k: usize) { - assert!(k <= self.len()); - let mid = self.len - k; - if k <= mid { - unsafe { self.rotate_right_inner(k) } + pub fn rotate_right(&mut self, n: usize) { + assert!(n <= self.len()); + let k = self.len - n; + if n <= k { + unsafe { self.rotate_right_inner(n) } } else { - unsafe { self.rotate_left_inner(mid) } + unsafe { self.rotate_left_inner(k) } } } diff --git a/library/alloc/src/collections/vec_deque/tests.rs b/library/alloc/src/collections/vec_deque/tests.rs index 205a8ff3c19..b7fdebfa60b 100644 --- a/library/alloc/src/collections/vec_deque/tests.rs +++ b/library/alloc/src/collections/vec_deque/tests.rs @@ -351,14 +351,14 @@ fn test_rotate_left_right() { } #[test] -#[should_panic = "assertion failed: mid <= self.len()"] +#[should_panic = "assertion failed: n <= self.len()"] fn test_rotate_left_panic() { let mut tester: VecDeque<_> = (1..=10).collect(); tester.rotate_left(tester.len() + 1); } #[test] -#[should_panic = "assertion failed: k <= self.len()"] +#[should_panic = "assertion failed: n <= self.len()"] fn test_rotate_right_panic() { let mut tester: VecDeque<_> = (1..=10).collect(); tester.rotate_right(tester.len() + 1); diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index b3305b8ca6d..b3ec830a7d7 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -313,13 +313,17 @@ fn rcbox_layout_for_value_layout(layout: Layout) -> Layout { #[cfg_attr(not(test), rustc_diagnostic_item = "Rc")] #[stable(feature = "rust1", since = "1.0.0")] #[rustc_insignificant_dtor] -pub struct Rc<T: ?Sized> { +pub struct Rc< + T: ?Sized, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { ptr: NonNull<RcBox<T>>, phantom: PhantomData<RcBox<T>>, + alloc: A, } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> !Send for Rc<T> {} +impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {} // Note that this negative impl isn't strictly necessary for correctness, // as `Rc` transitively contains a `Cell`, which is itself `!Sync`. @@ -327,20 +331,32 @@ impl<T: ?Sized> !Send for Rc<T> {} // having an explicit negative impl is nice for documentation purposes // and results in nicer error messages. 
#[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> !Sync for Rc<T> {} +impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {} #[stable(feature = "catch_unwind", since = "1.9.0")] -impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Rc<T> {} +impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {} #[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")] -impl<T: RefUnwindSafe + ?Sized> RefUnwindSafe for Rc<T> {} +impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {} +impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {} impl<T: ?Sized> Rc<T> { + #[inline] + unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self { + unsafe { Self::from_inner_in(ptr, Global) } + } + + #[inline] + unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self { + unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + } +} + +impl<T: ?Sized, A: Allocator> Rc<T, A> { #[inline(always)] fn inner(&self) -> &RcBox<T> { // This unsafety is ok because while this Rc is alive we're guaranteed @@ -348,12 +364,14 @@ impl<T: ?Sized> Rc<T> { unsafe { self.ptr.as_ref() } } - unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self { - Self { ptr, phantom: PhantomData } + #[inline] + unsafe fn from_inner_in(ptr: NonNull<RcBox<T>>, alloc: A) -> Self { + Self { ptr, phantom: PhantomData, alloc } } - unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + #[inline] + unsafe fn from_ptr_in(ptr: *mut RcBox<T>, alloc: A) -> Self { + unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } } } @@ -450,7 +468,7 @@ impl<T> Rc<T> { let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast(); - let weak = Weak { ptr: init_ptr }; + let weak = Weak { ptr: init_ptr, alloc: Global }; // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -504,7 +522,7 @@ impl<T> Rc<T> { Rc::from_ptr(Rc::allocate_for_layout( Layout::new::<T>(), |layout| Global.allocate(layout), - |mem| mem as *mut RcBox<mem::MaybeUninit<T>>, + <*mut u8>::cast, )) } } @@ -537,7 +555,7 @@ impl<T> Rc<T> { Rc::from_ptr(Rc::allocate_for_layout( Layout::new::<T>(), |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut RcBox<mem::MaybeUninit<T>>, + <*mut u8>::cast, )) } } @@ -594,7 +612,7 @@ impl<T> Rc<T> { Ok(Rc::from_ptr(Rc::try_allocate_for_layout( Layout::new::<T>(), |layout| Global.allocate(layout), - |mem| mem as *mut RcBox<mem::MaybeUninit<T>>, + <*mut u8>::cast, )?)) } } @@ -627,7 +645,7 @@ impl<T> Rc<T> { Ok(Rc::from_ptr(Rc::try_allocate_for_layout( Layout::new::<T>(), |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut RcBox<mem::MaybeUninit<T>>, + <*mut u8>::cast, )?)) } } @@ -639,6 +657,231 @@ impl<T> Rc<T> { pub fn pin(value: T) -> Pin<Rc<T>> { unsafe { Pin::new_unchecked(Rc::new(value)) } } +} + +impl<T, A: Allocator> Rc<T, A> { + /// Returns a reference to the underlying allocator. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn allocator(&self) -> &A { + &self.alloc + } + /// Constructs a new `Rc` in the provided allocator. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let five = Rc::new_in(5, System); + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn new_in(value: T, alloc: A) -> Rc<T, A> { + // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable. + // That would make code size bigger. + match Self::try_new_in(value, alloc) { + Ok(m) => m, + Err(_) => handle_alloc_error(Layout::new::<RcBox<T>>()), + } + } + + /// Constructs a new `Rc` with uninitialized contents in the provided allocator. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(get_mut_unchecked)] + /// #![feature(allocator_api)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let mut five = Rc::<u32, _>::new_uninit_in(System); + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5) + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> { + unsafe { + Rc::from_ptr_in( + Rc::allocate_for_layout( + Layout::new::<T>(), + |layout| alloc.allocate(layout), + <*mut u8>::cast, + ), + alloc, + ) + } + } + + /// Constructs a new `Rc` with uninitialized contents, with the memory + /// being filled with `0` bytes, in the provided allocator. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(allocator_api)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let zero = Rc::<u32, _>::new_zeroed_in(System); + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> { + unsafe { + Rc::from_ptr_in( + Rc::allocate_for_layout( + Layout::new::<T>(), + |layout| alloc.allocate_zeroed(layout), + <*mut u8>::cast, + ), + alloc, + ) + } + } + + /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation + /// fails + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let five = Rc::try_new_in(5, System); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> { + // There is an implicit weak pointer owned by all the strong + // pointers, which ensures that the weak destructor never frees + // the allocation while the strong destructor is running, even + // if the weak pointer is stored inside the strong one. 
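The `new_in`/`try_new_in` constructors introduced here are used like their `Global` counterparts, with the allocator passed explicitly; a minimal sketch, assuming a nightly toolchain with `allocator_api` enabled:

```
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    // The RcBox is allocated in System rather than the default Global.
    let five = Rc::new_in(5, System);
    assert_eq!(*five, 5);

    // The allocator is stored alongside the pointer and can be inspected.
    let _alloc: &System = five.allocator();
}
```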
+ let (ptr, alloc) = Box::into_unique(Box::try_new_in( + RcBox { strong: Cell::new(1), weak: Cell::new(1), value }, + alloc, + )?); + Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) }) + } + + /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an + /// error if the allocation fails + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api, new_uninit)] + /// #![feature(get_mut_unchecked)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?; + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> { + unsafe { + Ok(Rc::from_ptr_in( + Rc::try_allocate_for_layout( + Layout::new::<T>(), + |layout| alloc.allocate(layout), + <*mut u8>::cast, + )?, + alloc, + )) + } + } + + /// Constructs a new `Rc` with uninitialized contents, with the memory + /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation + /// fails + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api, new_uninit)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?; + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[unstable(feature = "allocator_api", issue = "32838")] + //#[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> { + unsafe { + Ok(Rc::from_ptr_in( + Rc::try_allocate_for_layout( + Layout::new::<T>(), + |layout| alloc.allocate_zeroed(layout), + <*mut u8>::cast, + )?, + alloc, + )) + } + } + + /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then + /// `value` will be pinned in memory and unable to be moved. + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn pin_in(value: T, alloc: A) -> Pin<Self> { + unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) } + } /// Returns the inner value, if the `Rc` has exactly one strong reference. /// @@ -665,13 +908,14 @@ impl<T> Rc<T> { if Rc::strong_count(&this) == 1 { unsafe { let val = ptr::read(&*this); // copy the contained object + let alloc = ptr::read(&this.alloc); // copy the allocator // Indicate to Weaks that they can't be promoted by decrementing // the strong count, and then remove the implicit "strong weak" // pointer while also handling drop logic by just crafting a // fake Weak. 
this.inner().dec_strong(); - let _weak = Weak { ptr: this.ptr }; + let _weak = Weak { ptr: this.ptr, alloc }; forget(this); Ok(val) } @@ -758,7 +1002,7 @@ impl<T> Rc<[T]> { Layout::array::<T>(len).unwrap(), |layout| Global.allocate_zeroed(layout), |mem| { - ptr::slice_from_raw_parts_mut(mem as *mut T, len) + ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[mem::MaybeUninit<T>]> }, )) @@ -766,7 +1010,84 @@ impl<T> Rc<[T]> { } } -impl<T> Rc<mem::MaybeUninit<T>> { +impl<T, A: Allocator> Rc<[T], A> { + /// Constructs a new reference-counted slice with uninitialized contents. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(get_mut_unchecked)] + /// #![feature(allocator_api)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System); + /// + /// let values = unsafe { + /// // Deferred initialization: + /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1); + /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2); + /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3); + /// + /// values.assume_init() + /// }; + /// + /// assert_eq!(*values, [1, 2, 3]) + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> { + unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) } + } + + /// Constructs a new reference-counted slice with uninitialized contents, with the memory being + /// filled with `0` bytes. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(allocator_api)] + /// + /// use std::rc::Rc; + /// use std::alloc::System; + /// + /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System); + /// let values = unsafe { values.assume_init() }; + /// + /// assert_eq!(*values, [0, 0, 0]) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> { + unsafe { + Rc::from_ptr_in( + Rc::allocate_for_layout( + Layout::array::<T>(len).unwrap(), + |layout| alloc.allocate_zeroed(layout), + |mem| { + ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) + as *mut RcBox<[mem::MaybeUninit<T>]> + }, + ), + alloc, + ) + } + } +} + +impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> { /// Converts to `Rc<T>`. /// /// # Safety @@ -798,12 +1119,16 @@ impl<T> Rc<mem::MaybeUninit<T>> { /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] - pub unsafe fn assume_init(self) -> Rc<T> { - unsafe { Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) } + pub unsafe fn assume_init(self) -> Rc<T, A> + where + A: Clone, + { + let md_self = mem::ManuallyDrop::new(self); + unsafe { Rc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) } } } -impl<T> Rc<[mem::MaybeUninit<T>]> { +impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> { /// Converts to `Rc<[T]>`. 
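The slice constructors and `assume_init` compose in the same way as in the existing `new_uninit_slice` docs; a sketch assuming nightly `allocator_api`, `new_uninit`, and `get_mut_unchecked`:

```
#![feature(allocator_api, new_uninit, get_mut_unchecked)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);

    let values = unsafe {
        // Deferred initialization of each element before assume_init.
        Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
        Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
        Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);

        values.assume_init()
    };

    assert_eq!(*values, [1, 2, 3]);
}
```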
/// /// # Safety @@ -838,12 +1163,128 @@ impl<T> Rc<[mem::MaybeUninit<T>]> { /// ``` #[unstable(feature = "new_uninit", issue = "63291")] #[inline] - pub unsafe fn assume_init(self) -> Rc<[T]> { - unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } + pub unsafe fn assume_init(self) -> Rc<[T], A> + where + A: Clone, + { + let md_self = mem::ManuallyDrop::new(self); + unsafe { Rc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) } } } impl<T: ?Sized> Rc<T> { + /// Constructs an `Rc<T>` from a raw pointer. + /// + /// The raw pointer must have been previously returned by a call to + /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size + /// and alignment as `T`. This is trivially true if `U` is `T`. + /// Note that if `U` is not `T` but has the same size and alignment, this is + /// basically like transmuting references of different types. See + /// [`mem::transmute`][transmute] for more information on what + /// restrictions apply in this case. + /// + /// The raw pointer must point to a block of memory allocated by the global allocator + /// + /// The user of `from_raw` has to make sure a specific value of `T` is only + /// dropped once. + /// + /// This function is unsafe because improper use may lead to memory unsafety, + /// even if the returned `Rc<T>` is never accessed. + /// + /// [into_raw]: Rc::into_raw + /// [transmute]: core::mem::transmute + /// + /// # Examples + /// + /// ``` + /// use std::rc::Rc; + /// + /// let x = Rc::new("hello".to_owned()); + /// let x_ptr = Rc::into_raw(x); + /// + /// unsafe { + /// // Convert back to an `Rc` to prevent leak. + /// let x = Rc::from_raw(x_ptr); + /// assert_eq!(&*x, "hello"); + /// + /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe. + /// } + /// + /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! + /// ``` + #[inline] + #[stable(feature = "rc_raw", since = "1.17.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { + unsafe { Self::from_raw_in(ptr, Global) } + } + + /// Increments the strong reference count on the `Rc<T>` associated with the + /// provided pointer by one. + /// + /// # Safety + /// + /// The pointer must have been obtained through `Rc::into_raw`, the + /// associated `Rc` instance must be valid (i.e. the strong count must be at + /// least 1) for the duration of this method, and `ptr` must point to a block of memory + /// allocated by the global allocator. + /// + /// # Examples + /// + /// ``` + /// use std::rc::Rc; + /// + /// let five = Rc::new(5); + /// + /// unsafe { + /// let ptr = Rc::into_raw(five); + /// Rc::increment_strong_count(ptr); + /// + /// let five = Rc::from_raw(ptr); + /// assert_eq!(2, Rc::strong_count(&five)); + /// } + /// ``` + #[inline] + #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] + pub unsafe fn increment_strong_count(ptr: *const T) { + unsafe { Self::increment_strong_count_in(ptr, Global) } + } + + /// Decrements the strong reference count on the `Rc<T>` associated with the + /// provided pointer by one. + /// + /// # Safety + /// + /// The pointer must have been obtained through `Rc::into_raw`, the + /// associated `Rc` instance must be valid (i.e. the strong count must be at + /// least 1) when invoking this method, and `ptr` must point to a block of memory + /// allocated by the global allocator. This method can be used to release the final `Rc` and + /// backing storage, but **should not** be called after the final `Rc` has been released. 
+ /// + /// # Examples + /// + /// ``` + /// use std::rc::Rc; + /// + /// let five = Rc::new(5); + /// + /// unsafe { + /// let ptr = Rc::into_raw(five); + /// Rc::increment_strong_count(ptr); + /// + /// let five = Rc::from_raw(ptr); + /// assert_eq!(2, Rc::strong_count(&five)); + /// Rc::decrement_strong_count(ptr); + /// assert_eq!(1, Rc::strong_count(&five)); + /// } + /// ``` + #[inline] + #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] + pub unsafe fn decrement_strong_count(ptr: *const T) { + unsafe { Self::decrement_strong_count_in(ptr, Global) } + } +} + +impl<T: ?Sized, A: Allocator> Rc<T, A> { /// Consumes the `Rc`, returning the wrapped pointer. /// /// To avoid a memory leak the pointer must be converted back to an `Rc` using @@ -891,16 +1332,18 @@ impl<T: ?Sized> Rc<T> { unsafe { ptr::addr_of_mut!((*ptr).value) } } - /// Constructs an `Rc<T>` from a raw pointer. + /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator. /// /// The raw pointer must have been previously returned by a call to - /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size + /// [`Rc<U, A>::into_raw`][into_raw] where `U` must have the same size /// and alignment as `T`. This is trivially true if `U` is `T`. /// Note that if `U` is not `T` but has the same size and alignment, this is /// basically like transmuting references of different types. See /// [`mem::transmute`] for more information on what /// restrictions apply in this case. /// + /// The raw pointer must point to a block of memory allocated by `alloc` + /// /// The user of `from_raw` has to make sure a specific value of `T` is only /// dropped once. /// @@ -912,14 +1355,17 @@ impl<T: ?Sized> Rc<T> { /// # Examples /// /// ``` + /// #![feature(allocator_api)] + /// /// use std::rc::Rc; + /// use std::alloc::System; /// - /// let x = Rc::new("hello".to_owned()); + /// let x = Rc::new_in("hello".to_owned(), System); /// let x_ptr = Rc::into_raw(x); /// /// unsafe { /// // Convert back to an `Rc` to prevent leak. - /// let x = Rc::from_raw(x_ptr); + /// let x = Rc::from_raw_in(x_ptr, System); /// assert_eq!(&*x, "hello"); /// /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe. @@ -927,14 +1373,14 @@ impl<T: ?Sized> Rc<T> { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` - #[stable(feature = "rc_raw", since = "1.17.0")] - pub unsafe fn from_raw(ptr: *const T) -> Self { + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { let offset = unsafe { data_offset(ptr) }; // Reverse the offset to find the original RcBox. let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcBox<T> }; - unsafe { Self::from_ptr(rc_ptr) } + unsafe { Self::from_ptr_in(rc_ptr, alloc) } } /// Creates a new [`Weak`] pointer to this allocation. @@ -951,11 +1397,14 @@ impl<T: ?Sized> Rc<T> { #[must_use = "this returns a new `Weak` pointer, \ without modifying the original `Rc`"] #[stable(feature = "rc_weak", since = "1.4.0")] - pub fn downgrade(this: &Self) -> Weak<T> { + pub fn downgrade(this: &Self) -> Weak<T, A> + where + A: Clone, + { this.inner().inc_weak(); // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); - Weak { ptr: this.ptr } + Weak { ptr: this.ptr, alloc: this.alloc.clone() } } /// Gets the number of [`Weak`] pointers to this allocation. 
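The raw-pointer round trip still works when an allocator is supplied explicitly, as long as the same allocator is handed back to `from_raw_in`; a minimal sketch under nightly `allocator_api`:

```
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Rc;

fn main() {
    let x = Rc::new_in("hello".to_owned(), System);
    let x_ptr = Rc::into_raw(x);

    unsafe {
        // Reconstruct the Rc with the allocator it was created in.
        let x = Rc::from_raw_in(x_ptr, System);
        assert_eq!(&*x, "hello");
        // Dropping `x` here releases the allocation through System.
    }
}
```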
@@ -999,30 +1448,37 @@ impl<T: ?Sized> Rc<T> { /// /// # Safety /// - /// The pointer must have been obtained through `Rc::into_raw`, and the + /// The pointer must have been obtained through `Rc::into_raw`, the /// associated `Rc` instance must be valid (i.e. the strong count must be at - /// least 1) for the duration of this method. + /// least 1) for the duration of this method, and `ptr` must point to a block of memory + /// allocated by `alloc` /// /// # Examples /// /// ``` + /// #![feature(allocator_api)] + /// /// use std::rc::Rc; + /// use std::alloc::System; /// - /// let five = Rc::new(5); + /// let five = Rc::new_in(5, System); /// /// unsafe { /// let ptr = Rc::into_raw(five); - /// Rc::increment_strong_count(ptr); + /// Rc::increment_strong_count_in(ptr, System); /// - /// let five = Rc::from_raw(ptr); + /// let five = Rc::from_raw_in(ptr, System); /// assert_eq!(2, Rc::strong_count(&five)); /// } /// ``` #[inline] - #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] - pub unsafe fn increment_strong_count(ptr: *const T) { + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A) + where + A: Clone, + { // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop - let rc = unsafe { mem::ManuallyDrop::new(Rc::<T>::from_raw(ptr)) }; + let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) }; // Now increase refcount, but don't drop new refcount either let _rc_clone: mem::ManuallyDrop<_> = rc.clone(); } @@ -1032,33 +1488,36 @@ impl<T: ?Sized> Rc<T> { /// /// # Safety /// - /// The pointer must have been obtained through `Rc::into_raw`, and the + /// The pointer must have been obtained through `Rc::into_raw`, the /// associated `Rc` instance must be valid (i.e. the strong count must be at - /// least 1) when invoking this method. This method can be used to release - /// the final `Rc` and backing storage, but **should not** be called after - /// the final `Rc` has been released. + /// least 1) when invoking this method, and `ptr` must point to a block of memory + /// allocated by `alloc`. This method can be used to release the final `Rc` and backing storage, + /// but **should not** be called after the final `Rc` has been released. /// /// # Examples /// /// ``` + /// #![feature(allocator_api)] + /// /// use std::rc::Rc; + /// use std::alloc::System; /// - /// let five = Rc::new(5); + /// let five = Rc::new_in(5, System); /// /// unsafe { /// let ptr = Rc::into_raw(five); - /// Rc::increment_strong_count(ptr); + /// Rc::increment_strong_count_in(ptr, System); /// - /// let five = Rc::from_raw(ptr); + /// let five = Rc::from_raw_in(ptr, System); /// assert_eq!(2, Rc::strong_count(&five)); - /// Rc::decrement_strong_count(ptr); + /// Rc::decrement_strong_count_in(ptr, System); /// assert_eq!(1, Rc::strong_count(&five)); /// } /// ``` #[inline] - #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")] - pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { drop(Rc::from_raw(ptr)) }; + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) { + unsafe { drop(Rc::from_raw_in(ptr, alloc)) }; } /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to @@ -1188,7 +1647,7 @@ impl<T: ?Sized> Rc<T> { } } -impl<T: Clone> Rc<T> { +impl<T: Clone, A: Allocator + Clone> Rc<T, A> { /// Makes a mutable reference into the given `Rc`. 
/// /// If there are other `Rc` pointers to the same allocation, then `make_mut` will @@ -1246,7 +1705,7 @@ impl<T: Clone> Rc<T> { if Rc::strong_count(this) != 1 { // Gotta clone the data, there are other Rcs. // Pre-allocate memory to allow writing the cloned value directly. - let mut rc = Self::new_uninit(); + let mut rc = Self::new_uninit_in(this.alloc.clone()); unsafe { let data = Rc::get_mut_unchecked(&mut rc); (**this).write_clone_into_raw(data.as_mut_ptr()); @@ -1254,7 +1713,7 @@ impl<T: Clone> Rc<T> { } } else if Rc::weak_count(this) != 0 { // Can just steal the data, all that's left is Weaks - let mut rc = Self::new_uninit(); + let mut rc = Self::new_uninit_in(this.alloc.clone()); unsafe { let data = Rc::get_mut_unchecked(&mut rc); data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1); @@ -1310,7 +1769,7 @@ impl<T: Clone> Rc<T> { } } -impl Rc<dyn Any> { +impl<A: Allocator + Clone> Rc<dyn Any, A> { /// Attempt to downcast the `Rc<dyn Any>` to a concrete type. /// /// # Examples @@ -1331,12 +1790,13 @@ impl Rc<dyn Any> { /// ``` #[inline] #[stable(feature = "rc_downcast", since = "1.29.0")] - pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> { + pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> { if (*self).is::<T>() { unsafe { let ptr = self.ptr.cast::<RcBox<T>>(); + let alloc = self.alloc.clone(); forget(self); - Ok(Rc::from_inner(ptr)) + Ok(Rc::from_inner_in(ptr, alloc)) } } else { Err(self) @@ -1371,11 +1831,12 @@ impl Rc<dyn Any> { /// [`downcast`]: Self::downcast #[inline] #[unstable(feature = "downcast_unchecked", issue = "90850")] - pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T> { + pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> { unsafe { let ptr = self.ptr.cast::<RcBox<T>>(); + let alloc = self.alloc.clone(); mem::forget(self); - Rc::from_inner(ptr) + Rc::from_inner_in(ptr, alloc) } } } @@ -1427,25 +1888,27 @@ impl<T: ?Sized> Rc<T> { Ok(inner) } +} +impl<T: ?Sized, A: Allocator> Rc<T, A> { /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> { + unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcBox<T> { // Allocate for the `RcBox<T>` using the given value. 
unsafe { - Self::allocate_for_layout( + Rc::<T>::allocate_for_layout( Layout::for_value(&*ptr), - |layout| Global.allocate(layout), + |layout| alloc.allocate(layout), |mem| mem.with_metadata_of(ptr as *const RcBox<T>), ) } } #[cfg(not(no_global_oom_handling))] - fn from_box(src: Box<T>) -> Rc<T> { + fn from_box_in(src: Box<T, A>) -> Rc<T, A> { unsafe { let value_size = size_of_val(&*src); - let ptr = Self::allocate_for_ptr(&*src); + let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); // Copy value as bytes ptr::copy_nonoverlapping( @@ -1455,10 +1918,11 @@ impl<T: ?Sized> Rc<T> { ); // Free the allocation without dropping its contents - let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop<T>); + let (bptr, alloc) = Box::into_raw_with_allocator(src); + let src = Box::from_raw(bptr as *mut mem::ManuallyDrop<T>); drop(src); - Self::from_ptr(ptr) + Self::from_ptr_in(ptr, alloc) } } } @@ -1471,7 +1935,7 @@ impl<T> Rc<[T]> { Self::allocate_for_layout( Layout::array::<T>(len).unwrap(), |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>, + |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>, ) } } @@ -1538,6 +2002,21 @@ impl<T> Rc<[T]> { } } +impl<T, A: Allocator> Rc<[T], A> { + /// Allocates an `RcBox<[T]>` with the given length. + #[inline] + #[cfg(not(no_global_oom_handling))] + unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcBox<[T]> { + unsafe { + Rc::<[T]>::allocate_for_layout( + Layout::array::<T>(len).unwrap(), + |layout| alloc.allocate(layout), + |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcBox<[T]>, + ) + } + } +} + /// Specialization trait used for `From<&[T]>`. trait RcFromSlice<T> { fn from_slice(slice: &[T]) -> Self; @@ -1560,7 +2039,7 @@ impl<T: Copy> RcFromSlice<T> for Rc<[T]> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> Deref for Rc<T> { +impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> { type Target = T; #[inline(always)] @@ -1573,7 +2052,7 @@ impl<T: ?Sized> Deref for Rc<T> { impl<T: ?Sized> Receiver for Rc<T> {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> { /// Drops the `Rc`. /// /// This will decrement the strong reference count. If the strong reference @@ -1611,7 +2090,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> { self.inner().dec_weak(); if self.inner().weak() == 0 { - Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); + self.alloc.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())); } } } @@ -1619,7 +2098,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> Clone for Rc<T> { +impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> { /// Makes a clone of the `Rc` pointer. 
/// /// This creates another pointer to the same allocation, increasing the @@ -1635,10 +2114,10 @@ impl<T: ?Sized> Clone for Rc<T> { /// let _ = Rc::clone(&five); /// ``` #[inline] - fn clone(&self) -> Rc<T> { + fn clone(&self) -> Self { unsafe { self.inner().inc_strong(); - Self::from_inner(self.ptr) + Self::from_inner_in(self.ptr, self.alloc.clone()) } } } @@ -1663,20 +2142,20 @@ impl<T: Default> Default for Rc<T> { } #[stable(feature = "rust1", since = "1.0.0")] -trait RcEqIdent<T: ?Sized + PartialEq> { - fn eq(&self, other: &Rc<T>) -> bool; - fn ne(&self, other: &Rc<T>) -> bool; +trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> { + fn eq(&self, other: &Rc<T, A>) -> bool; + fn ne(&self, other: &Rc<T, A>) -> bool; } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> { +impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> { #[inline] - default fn eq(&self, other: &Rc<T>) -> bool { + default fn eq(&self, other: &Rc<T, A>) -> bool { **self == **other } #[inline] - default fn ne(&self, other: &Rc<T>) -> bool { + default fn ne(&self, other: &Rc<T, A>) -> bool { **self != **other } } @@ -1695,20 +2174,20 @@ impl<T: Eq> MarkerEq for T {} /// /// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + MarkerEq> RcEqIdent<T> for Rc<T> { +impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> { #[inline] - fn eq(&self, other: &Rc<T>) -> bool { + fn eq(&self, other: &Rc<T, A>) -> bool { Rc::ptr_eq(self, other) || **self == **other } #[inline] - fn ne(&self, other: &Rc<T>) -> bool { + fn ne(&self, other: &Rc<T, A>) -> bool { !Rc::ptr_eq(self, other) && **self != **other } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + PartialEq> PartialEq for Rc<T> { +impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> { /// Equality for two `Rc`s. /// /// Two `Rc`s are equal if their inner values are equal, even if they are @@ -1728,7 +2207,7 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> { /// assert!(five == Rc::new(5)); /// ``` #[inline] - fn eq(&self, other: &Rc<T>) -> bool { + fn eq(&self, other: &Rc<T, A>) -> bool { RcEqIdent::eq(self, other) } @@ -1750,16 +2229,16 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> { /// assert!(five != Rc::new(6)); /// ``` #[inline] - fn ne(&self, other: &Rc<T>) -> bool { + fn ne(&self, other: &Rc<T, A>) -> bool { RcEqIdent::ne(self, other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + Eq> Eq for Rc<T> {} +impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {} #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> { +impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> { /// Partial comparison for two `Rc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. 
@@ -1775,7 +2254,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> { /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6))); /// ``` #[inline(always)] - fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> { + fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> { (**self).partial_cmp(&**other) } @@ -1793,7 +2272,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> { /// assert!(five < Rc::new(6)); /// ``` #[inline(always)] - fn lt(&self, other: &Rc<T>) -> bool { + fn lt(&self, other: &Rc<T, A>) -> bool { **self < **other } @@ -1811,7 +2290,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> { /// assert!(five <= Rc::new(5)); /// ``` #[inline(always)] - fn le(&self, other: &Rc<T>) -> bool { + fn le(&self, other: &Rc<T, A>) -> bool { **self <= **other } @@ -1829,7 +2308,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> { /// assert!(five > Rc::new(4)); /// ``` #[inline(always)] - fn gt(&self, other: &Rc<T>) -> bool { + fn gt(&self, other: &Rc<T, A>) -> bool { **self > **other } @@ -1847,13 +2326,13 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> { /// assert!(five >= Rc::new(5)); /// ``` #[inline(always)] - fn ge(&self, other: &Rc<T>) -> bool { + fn ge(&self, other: &Rc<T, A>) -> bool { **self >= **other } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + Ord> Ord for Rc<T> { +impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> { /// Comparison for two `Rc`s. /// /// The two are compared by calling `cmp()` on their inner values. @@ -1869,34 +2348,34 @@ impl<T: ?Sized + Ord> Ord for Rc<T> { /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6))); /// ``` #[inline] - fn cmp(&self, other: &Rc<T>) -> Ordering { + fn cmp(&self, other: &Rc<T, A>) -> Ordering { (**self).cmp(&**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + Hash> Hash for Rc<T> { +impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> { fn hash<H: Hasher>(&self, state: &mut H) { (**self).hash(state); } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> { +impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> { +impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> fmt::Pointer for Rc<T> { +impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&(&**self as *const T), f) } @@ -1982,7 +2461,7 @@ impl From<String> for Rc<str> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl<T: ?Sized> From<Box<T>> for Rc<T> { +impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> { /// Move a boxed object to a new, reference counted, allocation. 
/// /// # Example @@ -1994,14 +2473,14 @@ impl<T: ?Sized> From<Box<T>> for Rc<T> { /// assert_eq!(1, *shared); /// ``` #[inline] - fn from(v: Box<T>) -> Rc<T> { - Rc::from_box(v) + fn from(v: Box<T, A>) -> Rc<T, A> { + Rc::from_box_in(v) } } #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl<T> From<Vec<T>> for Rc<[T]> { +impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> { /// Allocate a reference-counted slice and move `v`'s items into it. /// /// # Example @@ -2013,12 +2492,18 @@ impl<T> From<Vec<T>> for Rc<[T]> { /// assert_eq!(vec![1, 2, 3], *shared); /// ``` #[inline] - fn from(mut v: Vec<T>) -> Rc<[T]> { + fn from(v: Vec<T, A>) -> Rc<[T], A> { unsafe { - let rc = Rc::copy_from_slice(&v); - // Allow the Vec to free its memory, but not destroy its contents - v.set_len(0); - rc + let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); + + let rc_ptr = Self::allocate_for_slice_in(len, &alloc); + ptr::copy_nonoverlapping(vec_ptr, &mut (*rc_ptr).value as *mut [T] as *mut T, len); + + // Create a `Vec<T, &A>` with length 0, to deallocate the buffer + // without dropping its contents or the allocator + let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); + + Self::from_ptr_in(rc_ptr, alloc) } } } @@ -2189,7 +2674,10 @@ impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I { /// /// [`upgrade`]: Weak::upgrade #[stable(feature = "rc_weak", since = "1.4.0")] -pub struct Weak<T: ?Sized> { +pub struct Weak< + T: ?Sized, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need @@ -2197,15 +2685,16 @@ pub struct Weak<T: ?Sized> { // will ever have because RcBox has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. ptr: NonNull<RcBox<T>>, + alloc: A, } #[stable(feature = "rc_weak", since = "1.4.0")] -impl<T: ?Sized> !Send for Weak<T> {} +impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {} #[stable(feature = "rc_weak", since = "1.4.0")] -impl<T: ?Sized> !Sync for Weak<T> {} +impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {} +impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {} @@ -2224,16 +2713,45 @@ impl<T> Weak<T> { /// let empty: Weak<i64> = Weak::new(); /// assert!(empty.upgrade().is_none()); /// ``` + #[inline] #[stable(feature = "downgraded_weak", since = "1.10.0")] #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")] #[must_use] pub const fn new() -> Weak<T> { - Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) } } + Weak { + ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) }, + alloc: Global, + } + } +} + +impl<T, A: Allocator> Weak<T, A> { + /// Constructs a new `Weak<T>`, without allocating any memory, technically in the provided + /// allocator. + /// Calling [`upgrade`] on the return value always gives [`None`]. 
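A sketch of the allocator-aware `Weak` constructor described above, again assuming nightly `allocator_api`:

```
#![feature(allocator_api)]

use std::alloc::System;
use std::rc::Weak;

fn main() {
    // Nothing is allocated; the allocator is only carried along for later use.
    let empty: Weak<i64, System> = Weak::new_in(System);
    assert!(empty.upgrade().is_none());
}
```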
+ /// + /// [`upgrade`]: Weak::upgrade + /// + /// # Examples + /// + /// ``` + /// use std::rc::Weak; + /// + /// let empty: Weak<i64> = Weak::new(); + /// assert!(empty.upgrade().is_none()); + /// ``` + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(alloc: A) -> Weak<T, A> { + Weak { + ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) }, + alloc, + } } } pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool { - (ptr as *mut ()).addr() == usize::MAX + (ptr.cast::<()>()).addr() == usize::MAX } /// Helper type to allow accessing the reference counts without @@ -2244,6 +2762,56 @@ struct WeakInner<'a> { } impl<T: ?Sized> Weak<T> { + /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`. + /// + /// This can be used to safely get a strong reference (by calling [`upgrade`] + /// later) or to deallocate the weak count by dropping the `Weak<T>`. + /// + /// It takes ownership of one weak reference (with the exception of pointers created by [`new`], + /// as these don't own anything; the method still works on them). + /// + /// # Safety + /// + /// The pointer must have originated from the [`into_raw`] and must still own its potential + /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator. + /// + /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this + /// takes ownership of one weak reference currently represented as a raw pointer (the weak + /// count is not modified by this operation) and therefore it must be paired with a previous + /// call to [`into_raw`]. + /// + /// # Examples + /// + /// ``` + /// use std::rc::{Rc, Weak}; + /// + /// let strong = Rc::new("hello".to_owned()); + /// + /// let raw_1 = Rc::downgrade(&strong).into_raw(); + /// let raw_2 = Rc::downgrade(&strong).into_raw(); + /// + /// assert_eq!(2, Rc::weak_count(&strong)); + /// + /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap()); + /// assert_eq!(1, Rc::weak_count(&strong)); + /// + /// drop(strong); + /// + /// // Decrement the last weak count. + /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none()); + /// ``` + /// + /// [`into_raw`]: Weak::into_raw + /// [`upgrade`]: Weak::upgrade + /// [`new`]: Weak::new + #[inline] + #[stable(feature = "weak_into_raw", since = "1.45.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { + unsafe { Self::from_raw_in(ptr, Global) } + } +} + +impl<T: ?Sized, A: Allocator> Weak<T, A> { /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`. /// /// The pointer is valid only if there are some strong references. The pointer may be dangling, @@ -2321,6 +2889,45 @@ impl<T: ?Sized> Weak<T> { result } + /// Consumes the `Weak<T>` and turns it into a raw pointer. + /// + /// This converts the weak pointer into a raw pointer, while still preserving the ownership of + /// one weak reference (the weak count is not modified by this operation). It can be turned + /// back into the `Weak<T>` with [`from_raw`]. + /// + /// The same restrictions of accessing the target of the pointer as with + /// [`as_ptr`] apply. 
+ /// + /// # Examples + /// + /// ``` + /// use std::rc::{Rc, Weak}; + /// + /// let strong = Rc::new("hello".to_owned()); + /// let weak = Rc::downgrade(&strong); + /// let raw = weak.into_raw(); + /// + /// assert_eq!(1, Rc::weak_count(&strong)); + /// assert_eq!("hello", unsafe { &*raw }); + /// + /// drop(unsafe { Weak::from_raw(raw) }); + /// assert_eq!(0, Rc::weak_count(&strong)); + /// ``` + /// + /// [`from_raw`]: Weak::from_raw + /// [`as_ptr`]: Weak::as_ptr + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn into_raw_and_alloc(self) -> (*const T, A) + where + A: Clone, + { + let result = self.as_ptr(); + let alloc = self.alloc.clone(); + mem::forget(self); + (result, alloc) + } + /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`. /// /// This can be used to safely get a strong reference (by calling [`upgrade`] @@ -2332,7 +2939,7 @@ impl<T: ?Sized> Weak<T> { /// # Safety /// /// The pointer must have originated from the [`into_raw`] and must still own its potential - /// weak reference. + /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`. /// /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this /// takes ownership of one weak reference currently represented as a raw pointer (the weak @@ -2363,8 +2970,9 @@ impl<T: ?Sized> Weak<T> { /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade /// [`new`]: Weak::new - #[stable(feature = "weak_into_raw", since = "1.45.0")] - pub unsafe fn from_raw(ptr: *const T) -> Self { + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { // See Weak::as_ptr for context on how the input pointer is derived. let ptr = if is_dangling(ptr as *mut T) { @@ -2380,7 +2988,7 @@ impl<T: ?Sized> Weak<T> { }; // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } + Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } } /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying @@ -2409,7 +3017,10 @@ impl<T: ?Sized> Weak<T> { #[must_use = "this returns a new `Rc`, \ without modifying the original weak pointer"] #[stable(feature = "rc_weak", since = "1.4.0")] - pub fn upgrade(&self) -> Option<Rc<T>> { + pub fn upgrade(&self) -> Option<Rc<T, A>> + where + A: Clone, + { let inner = self.inner()?; if inner.strong() == 0 { @@ -2417,7 +3028,7 @@ impl<T: ?Sized> Weak<T> { } else { unsafe { inner.inc_strong(); - Some(Rc::from_inner(self.ptr)) + Some(Rc::from_inner_in(self.ptr, self.alloc.clone())) } } } @@ -2437,15 +3048,15 @@ impl<T: ?Sized> Weak<T> { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - self.inner() - .map(|inner| { - if inner.strong() > 0 { - inner.weak() - 1 // subtract the implicit weak ptr - } else { - 0 - } - }) - .unwrap_or(0) + if let Some(inner) = self.inner() { + if inner.strong() > 0 { + inner.weak() - 1 // subtract the implicit weak ptr + } else { + 0 + } + } else { + 0 + } } /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`, @@ -2513,7 +3124,7 @@ impl<T: ?Sized> Weak<T> { } #[stable(feature = "rc_weak", since = "1.4.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> { /// Drops the `Weak` pointer. 
/// /// # Examples @@ -2546,14 +3157,14 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> { // the strong pointers have disappeared. if inner.weak() == 0 { unsafe { - Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); + self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())); } } } } #[stable(feature = "rc_weak", since = "1.4.0")] -impl<T: ?Sized> Clone for Weak<T> { +impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> { /// Makes a clone of the `Weak` pointer that points to the same allocation. /// /// # Examples @@ -2566,16 +3177,16 @@ impl<T: ?Sized> Clone for Weak<T> { /// let _ = Weak::clone(&weak_five); /// ``` #[inline] - fn clone(&self) -> Weak<T> { + fn clone(&self) -> Weak<T, A> { if let Some(inner) = self.inner() { inner.inc_weak() } - Weak { ptr: self.ptr } + Weak { ptr: self.ptr, alloc: self.alloc.clone() } } } #[stable(feature = "rc_weak", since = "1.4.0")] -impl<T: ?Sized> fmt::Debug for Weak<T> { +impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "(Weak)") } @@ -2707,21 +3318,21 @@ impl<'a> RcInnerPtr for WeakInner<'a> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> borrow::Borrow<T> for Rc<T> { +impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> { fn borrow(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl<T: ?Sized> AsRef<T> for Rc<T> { +impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> { fn as_ref(&self) -> &T { &**self } } #[stable(feature = "pin", since = "1.33.0")] -impl<T: ?Sized> Unpin for Rc<T> {} +impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {} /// Get the offset within an `RcBox` for the payload behind a pointer. /// @@ -2822,7 +3433,7 @@ impl<T> UniqueRc<T> { unsafe { this.ptr.as_ref().inc_weak(); } - Weak { ptr: this.ptr } + Weak { ptr: this.ptr, alloc: Global } } /// Converts the `UniqueRc` into a regular [`Rc`] diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 5bb1a93aeaf..e00850eb5d8 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -246,32 +246,48 @@ macro_rules! 
acquire { /// [rc_examples]: crate::rc#examples #[cfg_attr(not(test), rustc_diagnostic_item = "Arc")] #[stable(feature = "rust1", since = "1.0.0")] -pub struct Arc<T: ?Sized> { +pub struct Arc< + T: ?Sized, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { ptr: NonNull<ArcInner<T>>, phantom: PhantomData<ArcInner<T>>, + alloc: A, } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {} +unsafe impl<T: ?Sized + Sync + Send, A: Allocator + Send> Send for Arc<T, A> {} #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {} +unsafe impl<T: ?Sized + Sync + Send, A: Allocator + Sync> Sync for Arc<T, A> {} #[stable(feature = "catch_unwind", since = "1.9.0")] -impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Arc<T> {} +impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Arc<T, A> {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {} +impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Arc<U, A>> for Arc<T, A> {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {} impl<T: ?Sized> Arc<T> { unsafe fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self { - Self { ptr, phantom: PhantomData } + unsafe { Self::from_inner_in(ptr, Global) } } unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self { - unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) } + unsafe { Self::from_ptr_in(ptr, Global) } + } +} + +impl<T: ?Sized, A: Allocator> Arc<T, A> { + #[inline] + unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self { + Self { ptr, phantom: PhantomData, alloc } + } + + #[inline] + unsafe fn from_ptr_in(ptr: *mut ArcInner<T>, alloc: A) -> Self { + unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) } } } @@ -296,7 +312,10 @@ impl<T: ?Sized> Arc<T> { /// /// [`upgrade`]: Weak::upgrade #[stable(feature = "arc_weak", since = "1.4.0")] -pub struct Weak<T: ?Sized> { +pub struct Weak< + T: ?Sized, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, +> { // This is a `NonNull` to allow optimizing the size of this type in enums, // but it is not necessarily a valid pointer. // `Weak::new` sets this to `usize::MAX` so that it doesn’t need @@ -304,15 +323,16 @@ pub struct Weak<T: ?Sized> { // will ever have because RcBox has alignment at least 2. // This is only possible when `T: Sized`; unsized `T` never dangle. 
ptr: NonNull<ArcInner<T>>, + alloc: A, } #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {} +unsafe impl<T: ?Sized + Sync + Send, A: Allocator + Send> Send for Weak<T, A> {} #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {} +unsafe impl<T: ?Sized + Sync + Send, A: Allocator + Sync> Sync for Weak<T, A> {} #[unstable(feature = "coerce_unsized", issue = "18598")] -impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {} +impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {} #[unstable(feature = "dispatch_from_dyn", issue = "none")] impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {} @@ -442,7 +462,7 @@ impl<T> Arc<T> { .into(); let init_ptr: NonNull<ArcInner<T>> = uninit_ptr.cast(); - let weak = Weak { ptr: init_ptr }; + let weak = Weak { ptr: init_ptr, alloc: Global }; // It's important we don't give up ownership of the weak pointer, or // else the memory might be freed by the time `data_fn` returns. If @@ -510,7 +530,7 @@ impl<T> Arc<T> { Arc::from_ptr(Arc::allocate_for_layout( Layout::new::<T>(), |layout| Global.allocate(layout), - |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>, + <*mut u8>::cast, )) } } @@ -544,7 +564,7 @@ impl<T> Arc<T> { Arc::from_ptr(Arc::allocate_for_layout( Layout::new::<T>(), |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>, + <*mut u8>::cast, )) } } @@ -617,7 +637,7 @@ impl<T> Arc<T> { Ok(Arc::from_ptr(Arc::try_allocate_for_layout( Layout::new::<T>(), |layout| Global.allocate(layout), - |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>, + <*mut u8>::cast, )?)) } } @@ -650,10 +670,254 @@ impl<T> Arc<T> { Ok(Arc::from_ptr(Arc::try_allocate_for_layout( Layout::new::<T>(), |layout| Global.allocate_zeroed(layout), - |mem| mem as *mut ArcInner<mem::MaybeUninit<T>>, + <*mut u8>::cast, )?)) } } +} + +impl<T, A: Allocator> Arc<T, A> { + /// Returns a reference to the underlying allocator. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn allocator(&self) -> &A { + &self.alloc + } + /// Constructs a new `Arc<T>` in the provided allocator. + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let five = Arc::new_in(5, System); + /// ``` + #[inline] + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(data: T, alloc: A) -> Arc<T, A> { + // Start the weak pointer count as 1 which is the weak pointer that's + // held by all the strong pointers (kinda), see std/rc.rs for more info + let x = Box::new_in( + ArcInner { + strong: atomic::AtomicUsize::new(1), + weak: atomic::AtomicUsize::new(1), + data, + }, + alloc, + ); + let (ptr, alloc) = Box::into_unique(x); + unsafe { Self::from_inner_in(ptr.into(), alloc) } + } + + /// Constructs a new `Arc` with uninitialized contents in the provided allocator. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(get_mut_unchecked)] + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let mut five = Arc::<u32, _>::new_uninit_in(System); + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5) + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_uninit_in(alloc: A) -> Arc<mem::MaybeUninit<T>, A> { + unsafe { + Arc::from_ptr_in( + Arc::allocate_for_layout( + Layout::new::<T>(), + |layout| alloc.allocate(layout), + <*mut u8>::cast, + ), + alloc, + ) + } + } + + /// Constructs a new `Arc` with uninitialized contents, with the memory + /// being filled with `0` bytes, in the provided allocator. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage + /// of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let zero = Arc::<u32, _>::new_zeroed_in(System); + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_zeroed_in(alloc: A) -> Arc<mem::MaybeUninit<T>, A> { + unsafe { + Arc::from_ptr_in( + Arc::allocate_for_layout( + Layout::new::<T>(), + |layout| alloc.allocate_zeroed(layout), + <*mut u8>::cast, + ), + alloc, + ) + } + } + + /// Constructs a new `Pin<Arc<T, A>>` in the provided allocator. If `T` does not implement `Unpin`, + /// then `data` will be pinned in memory and unable to be moved. + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn pin_in(data: T, alloc: A) -> Pin<Arc<T, A>> { + unsafe { Pin::new_unchecked(Arc::new_in(data, alloc)) } + } + + /// Constructs a new `Pin<Arc<T, A>>` in the provided allocator, return an error if allocation + /// fails. + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn try_pin_in(data: T, alloc: A) -> Result<Pin<Arc<T, A>>, AllocError> { + unsafe { Ok(Pin::new_unchecked(Arc::try_new_in(data, alloc)?)) } + } + + /// Constructs a new `Arc<T, A>` in the provided allocator, returning an error if allocation fails. 
+ /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let five = Arc::try_new_in(5, System)?; + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + #[inline] + pub fn try_new_in(data: T, alloc: A) -> Result<Arc<T, A>, AllocError> { + // Start the weak pointer count as 1 which is the weak pointer that's + // held by all the strong pointers (kinda), see std/rc.rs for more info + let x = Box::try_new_in( + ArcInner { + strong: atomic::AtomicUsize::new(1), + weak: atomic::AtomicUsize::new(1), + data, + }, + alloc, + )?; + let (ptr, alloc) = Box::into_unique(x); + Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) }) + } + + /// Constructs a new `Arc` with uninitialized contents, in the provided allocator, returning an + /// error if allocation fails. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit, allocator_api)] + /// #![feature(get_mut_unchecked)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let mut five = Arc::<u32, _>::try_new_uninit_in(System)?; + /// + /// let five = unsafe { + /// // Deferred initialization: + /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5); + /// + /// five.assume_init() + /// }; + /// + /// assert_eq!(*five, 5); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn try_new_uninit_in(alloc: A) -> Result<Arc<mem::MaybeUninit<T>, A>, AllocError> { + unsafe { + Ok(Arc::from_ptr_in( + Arc::try_allocate_for_layout( + Layout::new::<T>(), + |layout| alloc.allocate(layout), + <*mut u8>::cast, + )?, + alloc, + )) + } + } + + /// Constructs a new `Arc` with uninitialized contents, with the memory + /// being filled with `0` bytes, in the provided allocator, returning an error if allocation + /// fails. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage + /// of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit, allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let zero = Arc::<u32, _>::try_new_zeroed_in(System)?; + /// let zero = unsafe { zero.assume_init() }; + /// + /// assert_eq!(*zero, 0); + /// # Ok::<(), std::alloc::AllocError>(()) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[unstable(feature = "allocator_api", issue = "32838")] + // #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn try_new_zeroed_in(alloc: A) -> Result<Arc<mem::MaybeUninit<T>, A>, AllocError> { + unsafe { + Ok(Arc::from_ptr_in( + Arc::try_allocate_for_layout( + Layout::new::<T>(), + |layout| alloc.allocate_zeroed(layout), + <*mut u8>::cast, + )?, + alloc, + )) + } + } /// Returns the inner value, if the `Arc` has exactly one strong reference. /// /// Otherwise, an [`Err`] is returned with the same `Arc` that was @@ -695,9 +959,10 @@ impl<T> Arc<T> { unsafe { let elem = ptr::read(&this.ptr.as_ref().data); + let alloc = ptr::read(&this.alloc); // copy the allocator // Make a weak pointer to clean up the implicit strong-weak reference - let _weak = Weak { ptr: this.ptr }; + let _weak = Weak { ptr: this.ptr, alloc }; mem::forget(this); Ok(elem) @@ -814,9 +1079,11 @@ impl<T> Arc<T> { // in `drop_slow`. 
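The fallible `Arc` constructors follow the same pattern as their `Rc` counterparts; a minimal sketch, assuming nightly `allocator_api`:

```
#![feature(allocator_api)]

use std::alloc::System;
use std::sync::Arc;

fn main() -> Result<(), std::alloc::AllocError> {
    // Returns Err(AllocError) instead of aborting if allocation fails.
    let five = Arc::try_new_in(5, System)?;
    assert_eq!(*five, 5);
    Ok(())
}
```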
Instead of dropping the value behind the pointer, // it is read and eventually returned; `ptr::read` has the same // safety conditions as `ptr::drop_in_place`. + let inner = unsafe { ptr::read(Self::get_mut_unchecked(&mut this)) }; + let alloc = unsafe { ptr::read(&this.alloc) }; - drop(Weak { ptr: this.ptr }); + drop(Weak { ptr: this.ptr, alloc }); Some(inner) } @@ -891,7 +1158,83 @@ impl<T> Arc<[T]> { } } -impl<T> Arc<mem::MaybeUninit<T>> { +impl<T, A: Allocator> Arc<[T], A> { + /// Constructs a new atomically reference-counted slice with uninitialized contents in the + /// provided allocator. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(get_mut_unchecked)] + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let mut values = Arc::<[u32], _>::new_uninit_slice_in(3, System); + /// + /// let values = unsafe { + /// // Deferred initialization: + /// Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1); + /// Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2); + /// Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3); + /// + /// values.assume_init() + /// }; + /// + /// assert_eq!(*values, [1, 2, 3]) + /// ``` + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_uninit_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit<T>], A> { + unsafe { Arc::from_ptr_in(Arc::allocate_for_slice_in(len, &alloc), alloc) } + } + + /// Constructs a new atomically reference-counted slice with uninitialized contents, with the memory being + /// filled with `0` bytes, in the provided allocator. + /// + /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and + /// incorrect usage of this method. + /// + /// # Examples + /// + /// ``` + /// #![feature(new_uninit)] + /// #![feature(allocator_api)] + /// + /// use std::sync::Arc; + /// use std::alloc::System; + /// + /// let values = Arc::<[u32], _>::new_zeroed_slice_in(3, System); + /// let values = unsafe { values.assume_init() }; + /// + /// assert_eq!(*values, [0, 0, 0]) + /// ``` + /// + /// [zeroed]: mem::MaybeUninit::zeroed + #[cfg(not(no_global_oom_handling))] + #[unstable(feature = "new_uninit", issue = "63291")] + #[inline] + pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Arc<[mem::MaybeUninit<T>], A> { + unsafe { + Arc::from_ptr_in( + Arc::allocate_for_layout( + Layout::array::<T>(len).unwrap(), + |layout| alloc.allocate_zeroed(layout), + |mem| { + ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) + as *mut ArcInner<[mem::MaybeUninit<T>]> + }, + ), + alloc, + ) + } + } +} + +impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> { /// Converts to `Arc<T>`. /// /// # Safety @@ -924,12 +1267,16 @@ impl<T> Arc<mem::MaybeUninit<T>> { #[unstable(feature = "new_uninit", issue = "63291")] #[must_use = "`self` will be dropped if the result is not used"] #[inline] - pub unsafe fn assume_init(self) -> Arc<T> { - unsafe { Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) } + pub unsafe fn assume_init(self) -> Arc<T, A> + where + A: Clone, + { + let md_self = mem::ManuallyDrop::new(self); + unsafe { Arc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) } } } -impl<T> Arc<[mem::MaybeUninit<T>]> { +impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> { /// Converts to `Arc<[T]>`. 
/// /// # Safety @@ -965,12 +1312,129 @@ impl<T> Arc<[mem::MaybeUninit<T>]> { #[unstable(feature = "new_uninit", issue = "63291")] #[must_use = "`self` will be dropped if the result is not used"] #[inline] - pub unsafe fn assume_init(self) -> Arc<[T]> { - unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) } + pub unsafe fn assume_init(self) -> Arc<[T], A> + where + A: Clone, + { + let md_self = mem::ManuallyDrop::new(self); + unsafe { Arc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) } } } impl<T: ?Sized> Arc<T> { + /// Constructs an `Arc<T>` from a raw pointer. + /// + /// The raw pointer must have been previously returned by a call to + /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and + /// alignment as `T`. This is trivially true if `U` is `T`. + /// Note that if `U` is not `T` but has the same size and alignment, this is + /// basically like transmuting references of different types. See + /// [`mem::transmute`][transmute] for more information on what + /// restrictions apply in this case. + /// + /// The user of `from_raw` has to make sure a specific value of `T` is only + /// dropped once. + /// + /// This function is unsafe because improper use may lead to memory unsafety, + /// even if the returned `Arc<T>` is never accessed. + /// + /// [into_raw]: Arc::into_raw + /// [transmute]: core::mem::transmute + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let x = Arc::new("hello".to_owned()); + /// let x_ptr = Arc::into_raw(x); + /// + /// unsafe { + /// // Convert back to an `Arc` to prevent leak. + /// let x = Arc::from_raw(x_ptr); + /// assert_eq!(&*x, "hello"); + /// + /// // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe. + /// } + /// + /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! + /// ``` + #[inline] + #[stable(feature = "rc_raw", since = "1.17.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { + unsafe { Arc::from_raw_in(ptr, Global) } + } + + /// Increments the strong reference count on the `Arc<T>` associated with the + /// provided pointer by one. + /// + /// # Safety + /// + /// The pointer must have been obtained through `Arc::into_raw`, and the + /// associated `Arc` instance must be valid (i.e. the strong count must be at + /// least 1) for the duration of this method. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5); + /// + /// unsafe { + /// let ptr = Arc::into_raw(five); + /// Arc::increment_strong_count(ptr); + /// + /// // This assertion is deterministic because we haven't shared + /// // the `Arc` between threads. + /// let five = Arc::from_raw(ptr); + /// assert_eq!(2, Arc::strong_count(&five)); + /// } + /// ``` + #[inline] + #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] + pub unsafe fn increment_strong_count(ptr: *const T) { + unsafe { Arc::increment_strong_count_in(ptr, Global) } + } + + /// Decrements the strong reference count on the `Arc<T>` associated with the + /// provided pointer by one. + /// + /// # Safety + /// + /// The pointer must have been obtained through `Arc::into_raw`, and the + /// associated `Arc` instance must be valid (i.e. the strong count must be at + /// least 1) when invoking this method. This method can be used to release the final + /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been + /// released. 
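// `from_raw`, `increment_strong_count` and `decrement_strong_count` above are
// now thin wrappers that forward to the allocator-aware `*_in` versions with
// `Global`. A sketch of the raw-pointer round trip they exist for (for example
// when handing an `Arc` across an FFI boundary); stable APIs only:
use std::sync::Arc;

fn main() {
    let ptr = Arc::into_raw(Arc::new(0u64));
    unsafe {
        // Hand out one extra strong reference without reconstructing the Arc.
        Arc::increment_strong_count(ptr);
        let first = Arc::from_raw(ptr);
        assert_eq!(Arc::strong_count(&first), 2);

        // Balance the earlier increment; `first` still keeps the value alive.
        Arc::decrement_strong_count(ptr);
        assert_eq!(Arc::strong_count(&first), 1);
        // `first` drops here, the count reaches zero and the allocation is
        // freed, so `ptr` must not be used again.
    }
}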
+ /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let five = Arc::new(5); + /// + /// unsafe { + /// let ptr = Arc::into_raw(five); + /// Arc::increment_strong_count(ptr); + /// + /// // Those assertions are deterministic because we haven't shared + /// // the `Arc` between threads. + /// let five = Arc::from_raw(ptr); + /// assert_eq!(2, Arc::strong_count(&five)); + /// Arc::decrement_strong_count(ptr); + /// assert_eq!(1, Arc::strong_count(&five)); + /// } + /// ``` + #[inline] + #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] + pub unsafe fn decrement_strong_count(ptr: *const T) { + unsafe { Arc::decrement_strong_count_in(ptr, Global) } + } +} + +impl<T: ?Sized, A: Allocator> Arc<T, A> { /// Consumes the `Arc`, returning the wrapped pointer. /// /// To avoid a memory leak the pointer must be converted back to an `Arc` using @@ -1020,16 +1484,18 @@ impl<T: ?Sized> Arc<T> { unsafe { ptr::addr_of_mut!((*ptr).data) } } - /// Constructs an `Arc<T>` from a raw pointer. + /// Constructs an `Arc<T, A>` from a raw pointer. /// /// The raw pointer must have been previously returned by a call to - /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and + /// [`Arc<U, A>::into_raw`][into_raw] where `U` must have the same size and /// alignment as `T`. This is trivially true if `U` is `T`. /// Note that if `U` is not `T` but has the same size and alignment, this is /// basically like transmuting references of different types. See /// [`mem::transmute`][transmute] for more information on what /// restrictions apply in this case. /// + /// The raw pointer must point to a block of memory allocated by `alloc` + /// /// The user of `from_raw` has to make sure a specific value of `T` is only /// dropped once. /// @@ -1042,14 +1508,17 @@ impl<T: ?Sized> Arc<T> { /// # Examples /// /// ``` + /// #![feature(allocator_api)] + /// /// use std::sync::Arc; + /// use std::alloc::System; /// - /// let x = Arc::new("hello".to_owned()); + /// let x = Arc::new_in("hello".to_owned(), System); /// let x_ptr = Arc::into_raw(x); /// /// unsafe { /// // Convert back to an `Arc` to prevent leak. - /// let x = Arc::from_raw(x_ptr); + /// let x = Arc::from_raw_in(x_ptr, System); /// assert_eq!(&*x, "hello"); /// /// // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe. @@ -1057,15 +1526,16 @@ impl<T: ?Sized> Arc<T> { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` - #[stable(feature = "rc_raw", since = "1.17.0")] - pub unsafe fn from_raw(ptr: *const T) -> Self { + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { unsafe { let offset = data_offset(ptr); // Reverse the offset to find the original ArcInner. let arc_ptr = ptr.byte_sub(offset) as *mut ArcInner<T>; - Self::from_ptr(arc_ptr) + Self::from_ptr_in(arc_ptr, alloc) } } @@ -1083,7 +1553,10 @@ impl<T: ?Sized> Arc<T> { #[must_use = "this returns a new `Weak` pointer, \ without modifying the original `Arc`"] #[stable(feature = "arc_weak", since = "1.4.0")] - pub fn downgrade(this: &Self) -> Weak<T> { + pub fn downgrade(this: &Self) -> Weak<T, A> + where + A: Clone, + { // This Relaxed is OK because we're checking the value in the CAS // below. 
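// `downgrade` now requires `A: Clone` because the returned `Weak` stores its
// own copy of the allocator, which it later uses to free the backing memory
// once both counts reach zero. A sketch on nightly (`allocator_api`), with
// `System` standing in for a cloneable custom allocator:
#![feature(allocator_api)]

use std::alloc::System;
use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new_in(42, System);
    let weak: Weak<i32, System> = Arc::downgrade(&strong);
    assert_eq!(weak.upgrade().as_deref(), Some(&42));

    drop(strong);
    // The value is gone, but `weak` still owns a copy of the allocator and
    // will deallocate the `ArcInner` when it is dropped.
    assert!(weak.upgrade().is_none());
}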
let mut cur = this.inner().weak.load(Relaxed); @@ -1110,7 +1583,7 @@ impl<T: ?Sized> Arc<T> { Ok(_) => { // Make sure we do not create a dangling Weak debug_assert!(!is_dangling(this.ptr.as_ptr())); - return Weak { ptr: this.ptr }; + return Weak { ptr: this.ptr, alloc: this.alloc.clone() }; } Err(old) => cur = old, } @@ -1181,30 +1654,37 @@ impl<T: ?Sized> Arc<T> { /// /// The pointer must have been obtained through `Arc::into_raw`, and the /// associated `Arc` instance must be valid (i.e. the strong count must be at - /// least 1) for the duration of this method. + /// least 1) for the duration of this method,, and `ptr` must point to a block of memory + /// allocated by `alloc`. /// /// # Examples /// /// ``` + /// #![feature(allocator_api)] + /// /// use std::sync::Arc; + /// use std::alloc::System; /// - /// let five = Arc::new(5); + /// let five = Arc::new_in(5, System); /// /// unsafe { /// let ptr = Arc::into_raw(five); - /// Arc::increment_strong_count(ptr); + /// Arc::increment_strong_count_in(ptr, System); /// /// // This assertion is deterministic because we haven't shared /// // the `Arc` between threads. - /// let five = Arc::from_raw(ptr); + /// let five = Arc::from_raw_in(ptr, System); /// assert_eq!(2, Arc::strong_count(&five)); /// } /// ``` #[inline] - #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] - pub unsafe fn increment_strong_count(ptr: *const T) { + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A) + where + A: Clone, + { // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop - let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) }; + let arc = unsafe { mem::ManuallyDrop::new(Arc::from_raw_in(ptr, alloc)) }; // Now increase refcount, but don't drop new refcount either let _arc_clone: mem::ManuallyDrop<_> = arc.clone(); } @@ -1214,35 +1694,39 @@ impl<T: ?Sized> Arc<T> { /// /// # Safety /// - /// The pointer must have been obtained through `Arc::into_raw`, and the + /// The pointer must have been obtained through `Arc::into_raw`, the /// associated `Arc` instance must be valid (i.e. the strong count must be at - /// least 1) when invoking this method. This method can be used to release the final + /// least 1) when invoking this method, and `ptr` must point to a block of memory + /// allocated by `alloc`. This method can be used to release the final /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been /// released. /// /// # Examples /// /// ``` + /// #![feature(allocator_api)] + /// /// use std::sync::Arc; + /// use std::alloc::System; /// - /// let five = Arc::new(5); + /// let five = Arc::new_in(5, System); /// /// unsafe { /// let ptr = Arc::into_raw(five); - /// Arc::increment_strong_count(ptr); + /// Arc::increment_strong_count_in(ptr, System); /// /// // Those assertions are deterministic because we haven't shared /// // the `Arc` between threads. 
- /// let five = Arc::from_raw(ptr); + /// let five = Arc::from_raw_in(ptr, System); /// assert_eq!(2, Arc::strong_count(&five)); - /// Arc::decrement_strong_count(ptr); + /// Arc::decrement_strong_count_in(ptr, System); /// assert_eq!(1, Arc::strong_count(&five)); /// } /// ``` #[inline] - #[stable(feature = "arc_mutate_strong_count", since = "1.51.0")] - pub unsafe fn decrement_strong_count(ptr: *const T) { - unsafe { drop(Arc::from_raw(ptr)) }; + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) { + unsafe { drop(Arc::from_raw_in(ptr, alloc)) }; } #[inline] @@ -1263,7 +1747,10 @@ impl<T: ?Sized> Arc<T> { unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) }; // Drop the weak ref collectively held by all strong references - drop(Weak { ptr: self.ptr }); + // Take a reference to `self.alloc` instead of cloning because 1. it'll + // last long enough, and 2. you should be able to drop `Arc`s with + // unclonable allocators + drop(Weak { ptr: self.ptr, alloc: &self.alloc }); } /// Returns `true` if the two `Arc`s point to the same allocation in a vein similar to @@ -1345,25 +1832,28 @@ impl<T: ?Sized> Arc<T> { inner } +} +impl<T: ?Sized, A: Allocator> Arc<T, A> { /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value. + #[inline] #[cfg(not(no_global_oom_handling))] - unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> { + unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut ArcInner<T> { // Allocate for the `ArcInner<T>` using the given value. unsafe { - Self::allocate_for_layout( + Arc::allocate_for_layout( Layout::for_value(&*ptr), - |layout| Global.allocate(layout), + |layout| alloc.allocate(layout), |mem| mem.with_metadata_of(ptr as *const ArcInner<T>), ) } } #[cfg(not(no_global_oom_handling))] - fn from_box(src: Box<T>) -> Arc<T> { + fn from_box_in(src: Box<T, A>) -> Arc<T, A> { unsafe { let value_size = size_of_val(&*src); - let ptr = Self::allocate_for_ptr(&*src); + let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src)); // Copy value as bytes ptr::copy_nonoverlapping( @@ -1373,10 +1863,11 @@ impl<T: ?Sized> Arc<T> { ); // Free the allocation without dropping its contents - let src = Box::from_raw(Box::into_raw(src) as *mut mem::ManuallyDrop<T>); + let (bptr, alloc) = Box::into_raw_with_allocator(src); + let src = Box::from_raw(bptr as *mut mem::ManuallyDrop<T>); drop(src); - Self::from_ptr(ptr) + Self::from_ptr_in(ptr, alloc) } } } @@ -1389,7 +1880,7 @@ impl<T> Arc<[T]> { Self::allocate_for_layout( Layout::array::<T>(len).unwrap(), |layout| Global.allocate(layout), - |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>, + |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut ArcInner<[T]>, ) } } @@ -1458,6 +1949,21 @@ impl<T> Arc<[T]> { } } +impl<T, A: Allocator> Arc<[T], A> { + /// Allocates an `ArcInner<[T]>` with the given length. + #[inline] + #[cfg(not(no_global_oom_handling))] + unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut ArcInner<[T]> { + unsafe { + Arc::allocate_for_layout( + Layout::array::<T>(len).unwrap(), + |layout| alloc.allocate(layout), + |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut ArcInner<[T]>, + ) + } + } +} + /// Specialization trait used for `From<&[T]>`. 
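// Note the `drop_slow` hunk above: it builds `Weak { ptr, alloc: &self.alloc }`
// instead of cloning the allocator, relying on the blanket `Allocator` impl for
// `&A` so that an `Arc` with a non-`Clone` allocator can still be dropped. A
// sketch of that mechanism in isolation (nightly `allocator_api`; `Logging` is
// a made-up allocator, not part of the standard library):
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Layout, System};
use std::ptr::NonNull;
use std::sync::Arc;

// Deliberately not `Clone`: forwards every request to `System` and logs it.
struct Logging;

unsafe impl Allocator for Logging {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        eprintln!("allocate {} bytes", layout.size());
        System.allocate(layout)
    }
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        eprintln!("deallocate {} bytes", layout.size());
        unsafe { System.deallocate(ptr, layout) }
    }
}

fn main() {
    let logging = Logging;
    // `&Logging` is itself an `Allocator`, so no `Clone` bound is needed.
    let arc = Arc::new_in(7u32, &logging);
    assert_eq!(*arc, 7);
    drop(arc); // deallocates through the borrowed allocator
}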
#[cfg(not(no_global_oom_handling))] trait ArcFromSlice<T> { @@ -1481,7 +1987,7 @@ impl<T: Copy> ArcFromSlice<T> for Arc<[T]> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> Clone for Arc<T> { +impl<T: ?Sized, A: Allocator + Clone> Clone for Arc<T, A> { /// Makes a clone of the `Arc` pointer. /// /// This creates another pointer to the same allocation, increasing the @@ -1497,7 +2003,7 @@ impl<T: ?Sized> Clone for Arc<T> { /// let _ = Arc::clone(&five); /// ``` #[inline] - fn clone(&self) -> Arc<T> { + fn clone(&self) -> Arc<T, A> { // Using a relaxed ordering is alright here, as knowledge of the // original reference prevents other threads from erroneously deleting // the object. @@ -1530,12 +2036,12 @@ impl<T: ?Sized> Clone for Arc<T> { abort(); } - unsafe { Self::from_inner(self.ptr) } + unsafe { Self::from_inner_in(self.ptr, self.alloc.clone()) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> Deref for Arc<T> { +impl<T: ?Sized, A: Allocator> Deref for Arc<T, A> { type Target = T; #[inline] @@ -1547,7 +2053,7 @@ impl<T: ?Sized> Deref for Arc<T> { #[unstable(feature = "receiver_trait", issue = "none")] impl<T: ?Sized> Receiver for Arc<T> {} -impl<T: Clone> Arc<T> { +impl<T: Clone, A: Allocator + Clone> Arc<T, A> { /// Makes a mutable reference into the given `Arc`. /// /// If there are other `Arc` pointers to the same allocation, then `make_mut` will @@ -1613,7 +2119,7 @@ impl<T: Clone> Arc<T> { if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() { // Another strong pointer exists, so we must clone. // Pre-allocate memory to allow writing the cloned value directly. - let mut arc = Self::new_uninit(); + let mut arc = Self::new_uninit_in(this.alloc.clone()); unsafe { let data = Arc::get_mut_unchecked(&mut arc); (**this).write_clone_into_raw(data.as_mut_ptr()); @@ -1634,10 +2140,10 @@ impl<T: Clone> Arc<T> { // Materialize our own implicit weak pointer, so that it can clean // up the ArcInner as needed. - let _weak = Weak { ptr: this.ptr }; + let _weak = Weak { ptr: this.ptr, alloc: this.alloc.clone() }; // Can just steal the data, all that's left is Weaks - let mut arc = Self::new_uninit(); + let mut arc = Self::new_uninit_in(this.alloc.clone()); unsafe { let data = Arc::get_mut_unchecked(&mut arc); data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1); @@ -1690,7 +2196,7 @@ impl<T: Clone> Arc<T> { } } -impl<T: ?Sized> Arc<T> { +impl<T: ?Sized, A: Allocator> Arc<T, A> { /// Returns a mutable reference into the given `Arc`, if there are /// no other `Arc` or [`Weak`] pointers to the same allocation. /// @@ -1828,7 +2334,7 @@ impl<T: ?Sized> Arc<T> { } #[stable(feature = "rust1", since = "1.0.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc<T, A> { /// Drops the `Arc`. /// /// This will decrement the strong reference count. If the strong reference @@ -1899,7 +2405,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> { } } -impl Arc<dyn Any + Send + Sync> { +impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> { /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type. 
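// `make_mut` now pre-allocates the potential copy with
// `Self::new_uninit_in(this.alloc.clone())`, i.e. in the same allocator as the
// original; its clone-on-write behaviour is otherwise unchanged. A quick
// sketch with the global allocator (stable APIs):
use std::sync::Arc;

fn main() {
    let mut data = Arc::new(5);
    *Arc::make_mut(&mut data) += 1; // sole owner: mutates in place
    let other = Arc::clone(&data);
    *Arc::make_mut(&mut data) += 1; // now shared: clones first, then mutates the copy
    assert_eq!(*data, 7);
    assert_eq!(*other, 6);
}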
/// /// # Examples @@ -1920,15 +2426,16 @@ impl Arc<dyn Any + Send + Sync> { /// ``` #[inline] #[stable(feature = "rc_downcast", since = "1.29.0")] - pub fn downcast<T>(self) -> Result<Arc<T>, Self> + pub fn downcast<T>(self) -> Result<Arc<T, A>, Self> where T: Any + Send + Sync, { if (*self).is::<T>() { unsafe { let ptr = self.ptr.cast::<ArcInner<T>>(); + let alloc = self.alloc.clone(); mem::forget(self); - Ok(Arc::from_inner(ptr)) + Ok(Arc::from_inner_in(ptr, alloc)) } } else { Err(self) @@ -1963,14 +2470,15 @@ impl Arc<dyn Any + Send + Sync> { /// [`downcast`]: Self::downcast #[inline] #[unstable(feature = "downcast_unchecked", issue = "90850")] - pub unsafe fn downcast_unchecked<T>(self) -> Arc<T> + pub unsafe fn downcast_unchecked<T>(self) -> Arc<T, A> where T: Any + Send + Sync, { unsafe { let ptr = self.ptr.cast::<ArcInner<T>>(); + let alloc = self.alloc.clone(); mem::forget(self); - Arc::from_inner(ptr) + Arc::from_inner_in(ptr, alloc) } } } @@ -1989,11 +2497,43 @@ impl<T> Weak<T> { /// let empty: Weak<i64> = Weak::new(); /// assert!(empty.upgrade().is_none()); /// ``` + #[inline] #[stable(feature = "downgraded_weak", since = "1.10.0")] #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")] #[must_use] pub const fn new() -> Weak<T> { - Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) } } + Weak { + ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) }, + alloc: Global, + } + } +} + +impl<T, A: Allocator> Weak<T, A> { + /// Constructs a new `Weak<T, A>`, without allocating any memory, technically in the provided + /// allocator. + /// Calling [`upgrade`] on the return value always gives [`None`]. + /// + /// [`upgrade`]: Weak::upgrade + /// + /// # Examples + /// + /// ``` + /// #![feature(allocator_api)] + /// + /// use std::sync::Weak; + /// use std::alloc::System; + /// + /// let empty: Weak<i64, _> = Weak::new_in(System); + /// assert!(empty.upgrade().is_none()); + /// ``` + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub fn new_in(alloc: A) -> Weak<T, A> { + Weak { + ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) }, + alloc, + } } } @@ -2005,6 +2545,55 @@ struct WeakInner<'a> { } impl<T: ?Sized> Weak<T> { + /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`. + /// + /// This can be used to safely get a strong reference (by calling [`upgrade`] + /// later) or to deallocate the weak count by dropping the `Weak<T>`. + /// + /// It takes ownership of one weak reference (with the exception of pointers created by [`new`], + /// as these don't own anything; the method still works on them). + /// + /// # Safety + /// + /// The pointer must have originated from the [`into_raw`] and must still own its potential + /// weak reference. + /// + /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this + /// takes ownership of one weak reference currently represented as a raw pointer (the weak + /// count is not modified by this operation) and therefore it must be paired with a previous + /// call to [`into_raw`]. 
+ /// # Examples + /// + /// ``` + /// use std::sync::{Arc, Weak}; + /// + /// let strong = Arc::new("hello".to_owned()); + /// + /// let raw_1 = Arc::downgrade(&strong).into_raw(); + /// let raw_2 = Arc::downgrade(&strong).into_raw(); + /// + /// assert_eq!(2, Arc::weak_count(&strong)); + /// + /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap()); + /// assert_eq!(1, Arc::weak_count(&strong)); + /// + /// drop(strong); + /// + /// // Decrement the last weak count. + /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none()); + /// ``` + /// + /// [`new`]: Weak::new + /// [`into_raw`]: Weak::into_raw + /// [`upgrade`]: Weak::upgrade + #[inline] + #[stable(feature = "weak_into_raw", since = "1.45.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { + unsafe { Weak::from_raw_in(ptr, Global) } + } +} + +impl<T: ?Sized, A: Allocator> Weak<T, A> { /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`. /// /// The pointer is valid only if there are some strong references. The pointer may be dangling, @@ -2082,7 +2671,8 @@ impl<T: ?Sized> Weak<T> { result } - /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`. + /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>` in the provided + /// allocator. /// /// This can be used to safely get a strong reference (by calling [`upgrade`] /// later) or to deallocate the weak count by dropping the `Weak<T>`. @@ -2093,7 +2683,7 @@ impl<T: ?Sized> Weak<T> { /// # Safety /// /// The pointer must have originated from the [`into_raw`] and must still own its potential - /// weak reference. + /// weak reference, and must point to a block of memory allocated by `alloc`. /// /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this /// takes ownership of one weak reference currently represented as a raw pointer (the weak @@ -2123,8 +2713,9 @@ impl<T: ?Sized> Weak<T> { /// [`new`]: Weak::new /// [`into_raw`]: Weak::into_raw /// [`upgrade`]: Weak::upgrade - #[stable(feature = "weak_into_raw", since = "1.45.0")] - pub unsafe fn from_raw(ptr: *const T) -> Self { + #[inline] + #[unstable(feature = "allocator_api", issue = "32838")] + pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self { // See Weak::as_ptr for context on how the input pointer is derived. let ptr = if is_dangling(ptr as *mut T) { @@ -2140,11 +2731,11 @@ impl<T: ?Sized> Weak<T> { }; // SAFETY: we now have recovered the original Weak pointer, so can create the Weak. - Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } } + Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc } } } -impl<T: ?Sized> Weak<T> { +impl<T: ?Sized, A: Allocator> Weak<T, A> { /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying /// dropping of the inner value if successful. /// @@ -2171,28 +2762,35 @@ impl<T: ?Sized> Weak<T> { #[must_use = "this returns a new `Arc`, \ without modifying the original weak pointer"] #[stable(feature = "arc_weak", since = "1.4.0")] - pub fn upgrade(&self) -> Option<Arc<T>> { + pub fn upgrade(&self) -> Option<Arc<T, A>> + where + A: Clone, + { + #[inline] + fn checked_increment(n: usize) -> Option<usize> { + // Any write of 0 we can observe leaves the field in permanently zero state. + if n == 0 { + return None; + } + // See comments in `Arc::clone` for why we do this (for `mem::forget`). 
+ assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR); + Some(n + 1) + } + // We use a CAS loop to increment the strong count instead of a // fetch_add as this function should never take the reference count // from zero to one. - self.inner()? - .strong - // Relaxed is fine for the failure case because we don't have any expectations about the new state. - // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner - // value can be initialized after `Weak` references have already been created. In that case, we - // expect to observe the fully initialized value. - .fetch_update(Acquire, Relaxed, |n| { - // Any write of 0 we can observe leaves the field in permanently zero state. - if n == 0 { - return None; - } - // See comments in `Arc::clone` for why we do this (for `mem::forget`). - assert!(n <= MAX_REFCOUNT, "{}", INTERNAL_OVERFLOW_ERROR); - Some(n + 1) - }) - .ok() - // null checked above - .map(|_| unsafe { Arc::from_inner(self.ptr) }) + // + // Relaxed is fine for the failure case because we don't have any expectations about the new state. + // Acquire is necessary for the success case to synchronise with `Arc::new_cyclic`, when the inner + // value can be initialized after `Weak` references have already been created. In that case, we + // expect to observe the fully initialized value. + if self.inner()?.strong.fetch_update(Acquire, Relaxed, checked_increment).is_ok() { + // SAFETY: pointer is not null, verified in checked_increment + unsafe { Some(Arc::from_inner_in(self.ptr, self.alloc.clone())) } + } else { + None + } } /// Gets the number of strong (`Arc`) pointers pointing to this allocation. @@ -2218,22 +2816,22 @@ impl<T: ?Sized> Weak<T> { #[must_use] #[stable(feature = "weak_counts", since = "1.41.0")] pub fn weak_count(&self) -> usize { - self.inner() - .map(|inner| { - let weak = inner.weak.load(Acquire); - let strong = inner.strong.load(Acquire); - if strong == 0 { - 0 - } else { - // Since we observed that there was at least one strong pointer - // after reading the weak count, we know that the implicit weak - // reference (present whenever any strong references are alive) - // was still around when we observed the weak count, and can - // therefore safely subtract it. - weak - 1 - } - }) - .unwrap_or(0) + if let Some(inner) = self.inner() { + let weak = inner.weak.load(Acquire); + let strong = inner.strong.load(Acquire); + if strong == 0 { + 0 + } else { + // Since we observed that there was at least one strong pointer + // after reading the weak count, we know that the implicit weak + // reference (present whenever any strong references are alive) + // was still around when we observed the weak count, and can + // therefore safely subtract it. + weak - 1 + } + } else { + 0 + } } /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`, @@ -2303,7 +2901,7 @@ impl<T: ?Sized> Weak<T> { } #[stable(feature = "arc_weak", since = "1.4.0")] -impl<T: ?Sized> Clone for Weak<T> { +impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> { /// Makes a clone of the `Weak` pointer that points to the same allocation. 
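// `weak_count` above was restructured from a `map`/`unwrap_or` chain into an
// `if let`, but still subtracts the implicit weak reference that all strong
// handles share and reports 0 once no strong handle is left. A sketch of the
// counts it produces (stable APIs, global allocator):
use std::sync::Arc;

fn main() {
    let strong = Arc::new("x");
    let weak = Arc::downgrade(&strong);
    assert_eq!(Arc::strong_count(&strong), 1);
    assert_eq!(Arc::weak_count(&strong), 1);
    assert_eq!(weak.strong_count(), 1);
    assert_eq!(weak.weak_count(), 1);

    drop(strong);
    // No strong handles remain: both counts read as zero even though `weak`
    // itself is still alive and keeps the `ArcInner` allocated until it drops.
    assert_eq!(weak.strong_count(), 0);
    assert_eq!(weak.weak_count(), 0);
}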
/// /// # Examples @@ -2316,11 +2914,11 @@ impl<T: ?Sized> Clone for Weak<T> { /// let _ = Weak::clone(&weak_five); /// ``` #[inline] - fn clone(&self) -> Weak<T> { + fn clone(&self) -> Weak<T, A> { let inner = if let Some(inner) = self.inner() { inner } else { - return Weak { ptr: self.ptr }; + return Weak { ptr: self.ptr, alloc: self.alloc.clone() }; }; // See comments in Arc::clone() for why this is relaxed. This can use a // fetch_add (ignoring the lock) because the weak count is only locked @@ -2333,7 +2931,7 @@ impl<T: ?Sized> Clone for Weak<T> { abort(); } - Weak { ptr: self.ptr } + Weak { ptr: self.ptr, alloc: self.alloc.clone() } } } @@ -2359,7 +2957,7 @@ impl<T> Default for Weak<T> { } #[stable(feature = "arc_weak", since = "1.4.0")] -unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> { +unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> { /// Drops the `Weak` pointer. /// /// # Examples @@ -2397,25 +2995,27 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> { if inner.weak.fetch_sub(1, Release) == 1 { acquire!(inner.weak); - unsafe { Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) } + unsafe { + self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr())) + } } } } #[stable(feature = "rust1", since = "1.0.0")] -trait ArcEqIdent<T: ?Sized + PartialEq> { - fn eq(&self, other: &Arc<T>) -> bool; - fn ne(&self, other: &Arc<T>) -> bool; +trait ArcEqIdent<T: ?Sized + PartialEq, A: Allocator> { + fn eq(&self, other: &Arc<T, A>) -> bool; + fn ne(&self, other: &Arc<T, A>) -> bool; } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> { +impl<T: ?Sized + PartialEq, A: Allocator> ArcEqIdent<T, A> for Arc<T, A> { #[inline] - default fn eq(&self, other: &Arc<T>) -> bool { + default fn eq(&self, other: &Arc<T, A>) -> bool { **self == **other } #[inline] - default fn ne(&self, other: &Arc<T>) -> bool { + default fn ne(&self, other: &Arc<T, A>) -> bool { **self != **other } } @@ -2428,20 +3028,20 @@ impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> { /// /// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive. #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + crate::rc::MarkerEq> ArcEqIdent<T> for Arc<T> { +impl<T: ?Sized + crate::rc::MarkerEq, A: Allocator> ArcEqIdent<T, A> for Arc<T, A> { #[inline] - fn eq(&self, other: &Arc<T>) -> bool { + fn eq(&self, other: &Arc<T, A>) -> bool { Arc::ptr_eq(self, other) || **self == **other } #[inline] - fn ne(&self, other: &Arc<T>) -> bool { + fn ne(&self, other: &Arc<T, A>) -> bool { !Arc::ptr_eq(self, other) && **self != **other } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + PartialEq> PartialEq for Arc<T> { +impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Arc<T, A> { /// Equality for two `Arc`s. 
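// The `ArcEqIdent` specialization above short-circuits `==` on pointer
// identity, but only for `T: Eq`; deliberately irreflexive `PartialEq` types
// such as floats keep their usual semantics. A sketch of the observable rule:
use std::sync::Arc;

fn main() {
    // `f32` is not `Eq`, so there is no pointer shortcut: NaN != NaN even when
    // both sides are the very same allocation.
    let nan = Arc::new(f32::NAN);
    assert_ne!(nan, Arc::clone(&nan));

    // `String` is `Eq`, so comparing an `Arc` with its clone may skip reading
    // the contents entirely; the result is the same either way.
    let s = Arc::new(String::from("big"));
    assert_eq!(s, Arc::clone(&s));
}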
/// /// Two `Arc`s are equal if their inner values are equal, even if they are @@ -2460,7 +3060,7 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> { /// assert!(five == Arc::new(5)); /// ``` #[inline] - fn eq(&self, other: &Arc<T>) -> bool { + fn eq(&self, other: &Arc<T, A>) -> bool { ArcEqIdent::eq(self, other) } @@ -2481,13 +3081,13 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> { /// assert!(five != Arc::new(6)); /// ``` #[inline] - fn ne(&self, other: &Arc<T>) -> bool { + fn ne(&self, other: &Arc<T, A>) -> bool { ArcEqIdent::ne(self, other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> { +impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Arc<T, A> { /// Partial comparison for two `Arc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. @@ -2502,7 +3102,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> { /// /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6))); /// ``` - fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> { + fn partial_cmp(&self, other: &Arc<T, A>) -> Option<Ordering> { (**self).partial_cmp(&**other) } @@ -2519,7 +3119,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> { /// /// assert!(five < Arc::new(6)); /// ``` - fn lt(&self, other: &Arc<T>) -> bool { + fn lt(&self, other: &Arc<T, A>) -> bool { *(*self) < *(*other) } @@ -2536,7 +3136,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> { /// /// assert!(five <= Arc::new(5)); /// ``` - fn le(&self, other: &Arc<T>) -> bool { + fn le(&self, other: &Arc<T, A>) -> bool { *(*self) <= *(*other) } @@ -2553,7 +3153,7 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> { /// /// assert!(five > Arc::new(4)); /// ``` - fn gt(&self, other: &Arc<T>) -> bool { + fn gt(&self, other: &Arc<T, A>) -> bool { *(*self) > *(*other) } @@ -2570,12 +3170,12 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> { /// /// assert!(five >= Arc::new(5)); /// ``` - fn ge(&self, other: &Arc<T>) -> bool { + fn ge(&self, other: &Arc<T, A>) -> bool { *(*self) >= *(*other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + Ord> Ord for Arc<T> { +impl<T: ?Sized + Ord, A: Allocator> Ord for Arc<T, A> { /// Comparison for two `Arc`s. /// /// The two are compared by calling `cmp()` on their inner values. 
@@ -2590,29 +3190,29 @@ impl<T: ?Sized + Ord> Ord for Arc<T> { /// /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6))); /// ``` - fn cmp(&self, other: &Arc<T>) -> Ordering { + fn cmp(&self, other: &Arc<T, A>) -> Ordering { (**self).cmp(&**other) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + Eq> Eq for Arc<T> {} +impl<T: ?Sized + Eq, A: Allocator> Eq for Arc<T, A> {} #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> { +impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Arc<T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Display::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> { +impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Arc<T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> fmt::Pointer for Arc<T> { +impl<T: ?Sized, A: Allocator> fmt::Pointer for Arc<T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Pointer::fmt(&(&**self as *const T), f) } @@ -2637,7 +3237,7 @@ impl<T: Default> Default for Arc<T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized + Hash> Hash for Arc<T> { +impl<T: ?Sized + Hash, A: Allocator> Hash for Arc<T, A> { fn hash<H: Hasher>(&self, state: &mut H) { (**self).hash(state) } @@ -2724,7 +3324,7 @@ impl From<String> for Arc<str> { #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl<T: ?Sized> From<Box<T>> for Arc<T> { +impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Arc<T, A> { /// Move a boxed object to a new, reference-counted allocation. /// /// # Example @@ -2736,14 +3336,14 @@ impl<T: ?Sized> From<Box<T>> for Arc<T> { /// assert_eq!("eggplant", &shared[..]); /// ``` #[inline] - fn from(v: Box<T>) -> Arc<T> { - Arc::from_box(v) + fn from(v: Box<T, A>) -> Arc<T, A> { + Arc::from_box_in(v) } } #[cfg(not(no_global_oom_handling))] #[stable(feature = "shared_from_slice", since = "1.21.0")] -impl<T> From<Vec<T>> for Arc<[T]> { +impl<T, A: Allocator + Clone> From<Vec<T, A>> for Arc<[T], A> { /// Allocate a reference-counted slice and move `v`'s items into it. 
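// The `From<Box<T, A>>` and `From<Vec<T, A>>` impls around here now carry the
// source's allocator over into the resulting `Arc`. With the global allocator
// they behave exactly as before (stable APIs):
use std::sync::Arc;

fn main() {
    let boxed: Box<str> = Box::from("eggplant");
    let shared: Arc<str> = Arc::from(boxed);
    assert_eq!(&shared[..], "eggplant");

    let v = vec![1, 2, 3];
    let slice: Arc<[i32]> = Arc::from(v);
    assert_eq!(&slice[..], &[1, 2, 3]);
}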
/// /// # Example @@ -2755,12 +3355,18 @@ impl<T> From<Vec<T>> for Arc<[T]> { /// assert_eq!(&[1, 2, 3], &shared[..]); /// ``` #[inline] - fn from(mut v: Vec<T>) -> Arc<[T]> { + fn from(v: Vec<T, A>) -> Arc<[T], A> { unsafe { - let rc = Arc::copy_from_slice(&v); - // Allow the Vec to free its memory, but not destroy its contents - v.set_len(0); - rc + let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc(); + + let rc_ptr = Self::allocate_for_slice_in(len, &alloc); + ptr::copy_nonoverlapping(vec_ptr, &mut (*rc_ptr).data as *mut [T] as *mut T, len); + + // Create a `Vec<T, &A>` with length 0, to deallocate the buffer + // without dropping its contents or the allocator + let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc); + + Self::from_ptr_in(rc_ptr, alloc) } } } @@ -2812,12 +3418,13 @@ impl From<Arc<str>> for Arc<[u8]> { } #[stable(feature = "boxed_slice_try_from", since = "1.43.0")] -impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]> { - type Error = Arc<[T]>; +impl<T, A: Allocator + Clone, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> { + type Error = Arc<[T], A>; - fn try_from(boxed_slice: Arc<[T]>) -> Result<Self, Self::Error> { + fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> { if boxed_slice.len() == N { - Ok(unsafe { Arc::from_raw(Arc::into_raw(boxed_slice) as *mut [T; N]) }) + let alloc = boxed_slice.alloc.clone(); + Ok(unsafe { Arc::from_raw_in(Arc::into_raw(boxed_slice) as *mut [T; N], alloc) }) } else { Err(boxed_slice) } @@ -2910,21 +3517,21 @@ impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I { } #[stable(feature = "rust1", since = "1.0.0")] -impl<T: ?Sized> borrow::Borrow<T> for Arc<T> { +impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Arc<T, A> { fn borrow(&self) -> &T { &**self } } #[stable(since = "1.5.0", feature = "smart_ptr_as_ref")] -impl<T: ?Sized> AsRef<T> for Arc<T> { +impl<T: ?Sized, A: Allocator> AsRef<T> for Arc<T, A> { fn as_ref(&self) -> &T { &**self } } #[stable(feature = "pin", since = "1.33.0")] -impl<T: ?Sized> Unpin for Arc<T> {} +impl<T: ?Sized, A: Allocator> Unpin for Arc<T, A> {} /// Get the offset within an `ArcInner` for the payload behind a pointer. /// diff --git a/library/core/src/iter/traits/collect.rs b/library/core/src/iter/traits/collect.rs index 0675e56358f..e0ef5071c40 100644 --- a/library/core/src/iter/traits/collect.rs +++ b/library/core/src/iter/traits/collect.rs @@ -138,8 +138,6 @@ pub trait FromIterator<A>: Sized { /// /// # Examples /// - /// Basic usage: - /// /// ``` /// let five_fives = std::iter::repeat(5).take(5); /// @@ -255,8 +253,6 @@ pub trait IntoIterator { /// /// # Examples /// - /// Basic usage: - /// /// ``` /// let v = [1, 2, 3]; /// let mut iter = v.into_iter(); @@ -363,8 +359,6 @@ pub trait Extend<A> { /// /// # Examples /// - /// Basic usage: - /// /// ``` /// // You can extend a String with some chars: /// let mut message = String::from("abc"); diff --git a/library/core/src/result.rs b/library/core/src/result.rs index 1ee270f4c03..51b7616ffec 100644 --- a/library/core/src/result.rs +++ b/library/core/src/result.rs @@ -749,7 +749,7 @@ impl<T, E> Result<T, E> { } /// Returns the provided default (if [`Err`]), or - /// applies a function to the contained value (if [`Ok`]), + /// applies a function to the contained value (if [`Ok`]). 
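// The `TryFrom<Arc<[T], A>> for Arc<[T; N], A>` impl above is a length check
// plus a pointer round trip through `into_raw`/`from_raw_in`, cloning the
// allocator so the array `Arc` can free the same allocation. Its visible
// behaviour with `Global` (stable APIs):
use std::sync::Arc;

fn main() {
    let slice: Arc<[u8]> = Arc::from(vec![1, 2, 3]);
    let array: Arc<[u8; 3]> = Arc::try_from(slice).expect("length matches");
    assert_eq!(*array, [1, 2, 3]);

    // On a length mismatch the original slice is handed back in the `Err`.
    let slice: Arc<[u8]> = Arc::from(vec![1, 2]);
    assert!(Arc::<[u8; 3]>::try_from(slice).is_err());
}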
/// /// Arguments passed to `map_or` are eagerly evaluated; if you are passing /// the result of a function call, it is recommended to use [`map_or_else`], diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index e2a2428fbc2..4f13ea9790d 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -2957,7 +2957,7 @@ impl<T> [T] { /// elements. /// /// This sort is unstable (i.e., may reorder equal elements), in-place - /// (i.e., does not allocate), and *O*(m \* *n* \* log(*n*)) worst-case, where the key function is + /// (i.e., does not allocate), and *O*(*m* \* *n* \* log(*n*)) worst-case, where the key function is /// *O*(*m*). /// /// # Current implementation diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index e5a710c0a96..fe66b1d41bf 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -97,7 +97,7 @@ def _download(path, url, probably_big, verbose, exception): print("downloading {}".format(url), file=sys.stderr) try: - if probably_big or verbose: + if (probably_big or verbose) and "GITHUB_ACTIONS" not in os.environ: option = "-#" else: option = "-s" @@ -914,12 +914,7 @@ class RustBuild(object): # preserve existing RUSTFLAGS env.setdefault("RUSTFLAGS", "") - # we need to explicitly add +xgot here so that we can successfully bootstrap - # a usable stage1 compiler - # FIXME: remove this if condition on the next bootstrap bump - # cfg(bootstrap) - if self.build_triple().startswith('mips'): - env["RUSTFLAGS"] += " -Ctarget-feature=+xgot" + target_features = [] if self.get_toml("crt-static", build_section) == "true": target_features += ["+crt-static"] diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 535a005c396..766c187e8e8 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -395,7 +395,7 @@ impl StepDescription { eprintln!( "note: if you are adding a new Step to bootstrap itself, make sure you register it with `describe!`" ); - crate::detail_exit_macro!(1); + crate::exit!(1); } } } @@ -939,21 +939,6 @@ impl<'a> Builder<'a> { Self::new_internal(build, kind, paths.to_owned()) } - /// Creates a new standalone builder for use outside of the normal process - pub fn new_standalone( - build: &mut Build, - kind: Kind, - paths: Vec<PathBuf>, - stage: Option<u32>, - ) -> Builder<'_> { - // FIXME: don't mutate `build` - if let Some(stage) = stage { - build.config.stage = stage; - } - - Self::new_internal(build, kind, paths.to_owned()) - } - pub fn execute_cli(&self) { self.run_step_descriptions(&Builder::get_step_descriptions(self.kind), &self.paths); } @@ -1375,7 +1360,7 @@ impl<'a> Builder<'a> { "error: `x.py clippy` requires a host `rustc` toolchain with the `clippy` component" ); eprintln!("help: try `rustup component add clippy`"); - crate::detail_exit_macro!(1); + crate::exit!(1); }); if !t!(std::str::from_utf8(&output.stdout)).contains("nightly") { rustflags.arg("--cfg=bootstrap"); diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 691e5ce4eb2..9795f22e2b5 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -135,6 +135,7 @@ impl Step for Std { let hostdir = builder.sysroot_libdir(compiler, compiler.host); add_to_sysroot(&builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target)); } + drop(_guard); // don't run on std twice with x.py clippy // don't check test dependencies if we haven't built libtest diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index 07a07983bd7..f3d95b57a76 100644 --- 
a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -222,13 +222,6 @@ fn copy_third_party_objects( ) -> Vec<(PathBuf, DependencyType)> { let mut target_deps = vec![]; - // FIXME: remove this in 2021 - if target == "x86_64-fortanix-unknown-sgx" { - if env::var_os("X86_FORTANIX_SGX_LIBS").is_some() { - builder.info("Warning: X86_FORTANIX_SGX_LIBS environment variable is ignored, libunwind is now compiled as part of rustbuild"); - } - } - if builder.config.sanitizers_enabled(target) && compiler.stage != 0 { // The sanitizers are only copied in stage1 or above, // to avoid creating dependency on LLVM. @@ -1820,7 +1813,7 @@ pub fn run_cargo( }); if !ok { - crate::detail_exit_macro!(1); + crate::exit!(1); } // Ok now we need to actually find all the files listed in `toplevel`. We've diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 34853743323..856a73a22b5 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -23,7 +23,7 @@ use crate::channel::{self, GitInfo}; pub use crate::flags::Subcommand; use crate::flags::{Color, Flags, Warnings}; use crate::util::{exe, output, t}; -use build_helper::detail_exit_macro; +use build_helper::exit; use once_cell::sync::OnceCell; use semver::Version; use serde::{Deserialize, Deserializer}; @@ -646,7 +646,7 @@ macro_rules! define_config { panic!("overriding existing option") } else { eprintln!("overriding existing option: `{}`", stringify!($field)); - detail_exit_macro!(2); + exit!(2); } } else { self.$field = other.$field; @@ -745,7 +745,7 @@ impl<T> Merge for Option<T> { panic!("overriding existing option") } else { eprintln!("overriding existing option"); - detail_exit_macro!(2); + exit!(2); } } else { *self = other; @@ -1101,7 +1101,7 @@ impl Config { .and_then(|table: toml::Value| TomlConfig::deserialize(table)) .unwrap_or_else(|err| { eprintln!("failed to parse TOML configuration '{}': {err}", file.display()); - detail_exit_macro!(2); + exit!(2); }) } Self::parse_inner(args, get_toml) @@ -1135,7 +1135,7 @@ impl Config { eprintln!( "Cannot use both `llvm_bolt_profile_generate` and `llvm_bolt_profile_use` at the same time" ); - detail_exit_macro!(1); + exit!(1); } // Infer the rest of the configuration. @@ -1259,7 +1259,7 @@ impl Config { } } eprintln!("failed to parse override `{option}`: `{err}"); - detail_exit_macro!(2) + exit!(2) } toml.merge(override_toml, ReplaceOpt::Override); @@ -1742,6 +1742,18 @@ impl Config { } } + /// Runs a command, printing out nice contextual information if it fails. + /// Exits if the command failed to execute at all, otherwise returns its + /// `status.success()`. + #[deprecated = "use `Builder::try_run` instead where possible"] + pub(crate) fn try_run(&self, cmd: &mut Command) -> Result<(), ()> { + if self.dry_run() { + return Ok(()); + } + self.verbose(&format!("running: {:?}", cmd)); + build_helper::util::try_run(cmd, self.is_verbose()) + } + /// A git invocation which runs inside the source directory. /// /// Use this rather than `Command::new("git")` in order to support out-of-tree builds. 
@@ -2007,7 +2019,7 @@ impl Config { "Unexpected rustc version: {}, we should use {}/{} to build source with {}", rustc_version, prev_version, source_version, source_version ); - detail_exit_macro!(1); + exit!(1); } } @@ -2043,7 +2055,7 @@ impl Config { println!("help: maybe your repository history is too shallow?"); println!("help: consider disabling `download-rustc`"); println!("help: or fetch enough history to include one upstream commit"); - crate::detail_exit_macro!(1); + crate::exit!(1); } // Warn if there were changes to the compiler or standard library since the ancestor commit. diff --git a/src/bootstrap/configure.py b/src/bootstrap/configure.py index e8eebdfb5a5..a7beb2a1767 100755 --- a/src/bootstrap/configure.py +++ b/src/bootstrap/configure.py @@ -550,6 +550,8 @@ if __name__ == "__main__": # If 'config.toml' already exists, exit the script at this point quit_if_file_exists('config.toml') + if "GITHUB_ACTIONS" in os.environ: + print("::group::Configure the build") p("processing command line") # Parse all known arguments into a configuration structure that reflects the # TOML we're going to write out @@ -572,3 +574,5 @@ if __name__ == "__main__": p("") p("run `python {}/x.py --help`".format(rust_dir)) + if "GITHUB_ACTIONS" in os.environ: + print("::endgroup::") diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 2141bb0ddd9..8c71b7f7fc2 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -902,7 +902,9 @@ impl Step for Src { /// Creates the `rust-src` installer component fn run(self, builder: &Builder<'_>) -> GeneratedTarball { - builder.update_submodule(&Path::new("src/llvm-project")); + if !builder.config.dry_run() { + builder.update_submodule(&Path::new("src/llvm-project")); + } let tarball = Tarball::new_targetless(builder, "rust-src"); diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index 0fd6b46d562..e58f736d67f 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -220,8 +220,11 @@ impl Step for TheBook { // build the version info page and CSS let shared_assets = builder.ensure(SharedAssets { target }); + // build the command first so we don't nest GHA groups + builder.rustdoc_cmd(compiler); + // build the redirect pages - builder.msg_doc(compiler, "book redirect pages", target); + let _guard = builder.msg_doc(compiler, "book redirect pages", target); for file in t!(fs::read_dir(builder.src.join(&relative_path).join("redirects"))) { let file = t!(file); let path = file.path(); @@ -305,7 +308,7 @@ impl Step for Standalone { fn run(self, builder: &Builder<'_>) { let target = self.target; let compiler = self.compiler; - builder.msg_doc(compiler, "standalone", target); + let _guard = builder.msg_doc(compiler, "standalone", target); let out = builder.doc_out(target); t!(fs::create_dir_all(&out)); @@ -563,10 +566,6 @@ fn doc_std( let compiler = builder.compiler(stage, builder.config.build); - let description = - format!("library{} in {} format", crate_description(&requested_crates), format.as_str()); - let _guard = builder.msg_doc(compiler, &description, target); - let target_doc_dir_name = if format == DocumentationFormat::JSON { "json-doc" } else { "doc" }; let target_dir = builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name); @@ -603,6 +602,10 @@ fn doc_std( cargo.arg("-p").arg(krate); } + let description = + format!("library{} in {} format", crate_description(&requested_crates), format.as_str()); + let _guard = builder.msg_doc(compiler, &description, target); + builder.run(&mut cargo.into()); 
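// Several doc.rs hunks above turn `builder.msg_doc(...)` statements into
// `let _guard = builder.msg_doc(...)`: the call returns a GitHub Actions group
// guard whose `Drop` closes the group, so discarding the return value (or
// binding it to `_`) ends the group immediately instead of around the step. A
// standalone sketch of that drop-timing difference; `Group` here is a made-up
// stand-in for the real guard type:
struct Group(&'static str);

impl Group {
    fn open(name: &'static str) -> Group {
        println!("::group::{name}");
        Group(name)
    }
}

impl Drop for Group {
    fn drop(&mut self) {
        println!("::endgroup:: ({})", self.0);
    }
}

fn main() {
    {
        let _ = Group::open("wrong"); // temporary dropped here: group closes at once
        println!("this line is NOT inside the group");
    }
    {
        let _guard = Group::open("right"); // guard lives until the end of the block
        println!("this line IS inside the group");
    }
}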
builder.cp_r(&out_dir, &out); } @@ -799,8 +802,6 @@ macro_rules! tool_doc { SourceType::Submodule }; - builder.msg_doc(compiler, stringify!($tool).to_lowercase(), target); - // Symlink compiler docs to the output directory of rustdoc documentation. let out_dirs = [ builder.stage_out(compiler, Mode::ToolRustc).join(target.triple).join("doc"), @@ -839,6 +840,8 @@ macro_rules! tool_doc { cargo.rustdocflag("--show-type-layout"); cargo.rustdocflag("--generate-link-to-definition"); cargo.rustdocflag("-Zunstable-options"); + + let _guard = builder.msg_doc(compiler, stringify!($tool).to_lowercase(), target); builder.run(&mut cargo.into()); } } @@ -1060,7 +1063,16 @@ impl Step for RustcBook { // config.toml), then this needs to explicitly update the dylib search // path. builder.add_rustc_lib_path(self.compiler, &mut cmd); + let doc_generator_guard = builder.msg( + Kind::Run, + self.compiler.stage, + "lint-docs", + self.compiler.host, + self.target, + ); builder.run(&mut cmd); + drop(doc_generator_guard); + // Run rustbook/mdbook to generate the HTML pages. builder.ensure(RustbookSrc { target: self.target, diff --git a/src/bootstrap/download.rs b/src/bootstrap/download.rs index eb1941cd889..1316461fde0 100644 --- a/src/bootstrap/download.rs +++ b/src/bootstrap/download.rs @@ -7,7 +7,7 @@ use std::{ process::{Command, Stdio}, }; -use build_helper::util::try_run; +use build_helper::ci::CiEnv; use once_cell::sync::OnceCell; use xz2::bufread::XzDecoder; @@ -21,6 +21,12 @@ use crate::{ static SHOULD_FIX_BINS_AND_DYLIBS: OnceCell<bool> = OnceCell::new(); +/// `Config::try_run` wrapper for this module to avoid warnings on `try_run`, since we don't have access to a `builder` yet. +fn try_run(config: &Config, cmd: &mut Command) -> Result<(), ()> { + #[allow(deprecated)] + config.try_run(cmd) +} + /// Generic helpers that are useful anywhere in bootstrap. impl Config { pub fn is_verbose(&self) -> bool { @@ -52,17 +58,6 @@ impl Config { } /// Runs a command, printing out nice contextual information if it fails. - /// Exits if the command failed to execute at all, otherwise returns its - /// `status.success()`. - pub(crate) fn try_run(&self, cmd: &mut Command) -> Result<(), ()> { - if self.dry_run() { - return Ok(()); - } - self.verbose(&format!("running: {:?}", cmd)); - try_run(cmd, self.is_verbose()) - } - - /// Runs a command, printing out nice contextual information if it fails. /// Returns false if do not execute at all, otherwise returns its /// `status.success()`. pub(crate) fn check_run(&self, cmd: &mut Command) -> bool { @@ -156,14 +151,16 @@ impl Config { ]; } "; - nix_build_succeeded = self - .try_run(Command::new("nix-build").args(&[ + nix_build_succeeded = try_run( + self, + Command::new("nix-build").args(&[ Path::new("-E"), Path::new(NIX_EXPR), Path::new("-o"), &nix_deps_dir, - ])) - .is_ok(); + ]), + ) + .is_ok(); nix_deps_dir }); if !nix_build_succeeded { @@ -188,7 +185,7 @@ impl Config { patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]); } - let _ = self.try_run(patchelf.arg(fname)); + let _ = try_run(self, patchelf.arg(fname)); } fn download_file(&self, url: &str, dest_path: &Path, help_on_error: &str) { @@ -213,7 +210,6 @@ impl Config { // Try curl. If that fails and we are on windows, fallback to PowerShell. let mut curl = Command::new("curl"); curl.args(&[ - "-#", "-y", "30", "-Y", @@ -224,6 +220,12 @@ impl Config { "3", "-SRf", ]); + // Don't print progress in CI; the \r wrapping looks bad and downloads don't take long enough for progress to be useful. 
+ if CiEnv::is_ci() { + curl.arg("-s"); + } else { + curl.arg("--progress-bar"); + } curl.arg(url); let f = File::create(tempfile).unwrap(); curl.stdout(Stdio::from(f)); @@ -231,7 +233,7 @@ impl Config { if self.build.contains("windows-msvc") { eprintln!("Fallback to PowerShell"); for _ in 0..3 { - if self.try_run(Command::new("PowerShell.exe").args(&[ + if try_run(self, Command::new("PowerShell.exe").args(&[ "/nologo", "-Command", "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;", @@ -248,7 +250,7 @@ impl Config { if !help_on_error.is_empty() { eprintln!("{}", help_on_error); } - crate::detail_exit_macro!(1); + crate::exit!(1); } } diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index a882336c3c4..d67aafff841 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -193,7 +193,7 @@ impl Flags { } else { panic!("No paths available for subcommand `{}`", subcommand.as_str()); } - crate::detail_exit_macro!(0); + crate::exit!(0); } Flags::parse_from(it) @@ -538,7 +538,7 @@ pub fn get_completion<G: clap_complete::Generator>(shell: G, path: &Path) -> Opt } else { std::fs::read_to_string(path).unwrap_or_else(|_| { eprintln!("couldn't read {}", path.display()); - crate::detail_exit_macro!(1) + crate::exit!(1) }) }; let mut buf = Vec::new(); diff --git a/src/bootstrap/format.rs b/src/bootstrap/format.rs index 4330e126398..cfc9c4de60f 100644 --- a/src/bootstrap/format.rs +++ b/src/bootstrap/format.rs @@ -40,7 +40,7 @@ fn rustfmt(src: &Path, rustfmt: &Path, paths: &[PathBuf], check: bool) -> impl F code, run `./x.py fmt` instead.", cmd_debug, ); - crate::detail_exit_macro!(1); + crate::exit!(1); } true } @@ -200,7 +200,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) { let rustfmt_path = build.initial_rustfmt().unwrap_or_else(|| { eprintln!("./x.py fmt is not supported on this channel"); - crate::detail_exit_macro!(1); + crate::exit!(1); }); assert!(rustfmt_path.exists(), "{}", rustfmt_path.display()); let src = build.src.clone(); diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index f5ad4f336a7..de43a401926 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -27,7 +27,7 @@ use std::process::{Command, Stdio}; use std::str; use build_helper::ci::{gha, CiEnv}; -use build_helper::detail_exit_macro; +use build_helper::exit; use channel::GitInfo; use config::{DryRun, Target}; use filetime::FileTime; @@ -191,7 +191,7 @@ pub enum GitRepo { /// although most functions are implemented as free functions rather than /// methods specifically on this structure itself (to make it easier to /// organize). -#[cfg_attr(not(feature = "build-metrics"), derive(Clone))] +#[derive(Clone)] pub struct Build { /// User-specified configuration from `config.toml`. config: Config, @@ -333,7 +333,6 @@ forward! { create(path: &Path, s: &str), remove(f: &Path), tempdir() -> PathBuf, - try_run(cmd: &mut Command) -> Result<(), ()>, llvm_link_shared() -> bool, download_rustc() -> bool, initial_rustfmt() -> Option<PathBuf>, @@ -617,7 +616,9 @@ impl Build { } // Save any local changes, but avoid running `git stash pop` if there are none (since it will exit with an error). 
+ #[allow(deprecated)] // diff-index reports the modifications through the exit status let has_local_modifications = self + .config .try_run( Command::new("git") .args(&["diff-index", "--quiet", "HEAD"]) @@ -711,7 +712,7 @@ impl Build { for failure in failures.iter() { eprintln!(" - {}\n", failure); } - detail_exit_macro!(1); + exit!(1); } #[cfg(feature = "build-metrics")] @@ -971,12 +972,36 @@ impl Build { /// Runs a command, printing out nice contextual information if it fails. /// Exits if the command failed to execute at all, otherwise returns its /// `status.success()`. - fn try_run_quiet(&self, cmd: &mut Command) -> bool { + fn run_quiet_delaying_failure(&self, cmd: &mut Command) -> bool { if self.config.dry_run() { return true; } - self.verbose(&format!("running: {:?}", cmd)); - try_run_suppressed(cmd) + if !self.fail_fast { + self.verbose(&format!("running: {:?}", cmd)); + if !try_run_suppressed(cmd) { + let mut failures = self.delayed_failures.borrow_mut(); + failures.push(format!("{:?}", cmd)); + return false; + } + } else { + self.run_quiet(cmd); + } + true + } + + /// Runs a command, printing out contextual info if it fails, and delaying errors until the build finishes. + pub(crate) fn run_delaying_failure(&self, cmd: &mut Command) -> bool { + if !self.fail_fast { + #[allow(deprecated)] // can't use Build::try_run, that's us + if self.config.try_run(cmd).is_err() { + let mut failures = self.delayed_failures.borrow_mut(); + failures.push(format!("{:?}", cmd)); + return false; + } + } else { + self.run(cmd); + } + true } pub fn is_verbose_than(&self, level: usize) -> bool { @@ -999,6 +1024,8 @@ impl Build { } } + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] fn msg_check( &self, what: impl Display, @@ -1007,6 +1034,8 @@ impl Build { self.msg(Kind::Check, self.config.stage, what, self.config.build, target) } + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] fn msg_doc( &self, compiler: Compiler, @@ -1016,6 +1045,8 @@ impl Build { self.msg(Kind::Doc, compiler.stage, what, compiler.host, target.into()) } + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] fn msg_build( &self, compiler: Compiler, @@ -1028,6 +1059,8 @@ impl Build { /// Return a `Group` guard for a [`Step`] that is built for each `--stage`. /// /// [`Step`]: crate::builder::Step + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] fn msg( &self, action: impl Into<Kind>, @@ -1054,6 +1087,8 @@ impl Build { /// Return a `Group` guard for a [`Step`] that is only built once and isn't affected by `--stage`. 
/// /// [`Step`]: crate::builder::Step + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] fn msg_unstaged( &self, action: impl Into<Kind>, @@ -1065,6 +1100,8 @@ impl Build { self.group(&msg) } + #[must_use = "Groups should not be dropped until the Step finishes running"] + #[track_caller] fn msg_sysroot_tool( &self, action: impl Into<Kind>, @@ -1083,6 +1120,7 @@ impl Build { self.group(&msg) } + #[track_caller] fn group(&self, msg: &str) -> Option<gha::Group> { match self.config.dry_run { DryRun::SelfCheck => None, @@ -1516,7 +1554,7 @@ impl Build { "Error: Unable to find the stamp file {}, did you try to keep a nonexistent build stage?", stamp.display() ); - crate::detail_exit_macro!(1); + crate::exit!(1); } let mut paths = Vec::new(); @@ -1708,7 +1746,7 @@ Alternatively, set `download-ci-llvm = true` in that `[llvm]` section to download LLVM rather than building it. " ); - detail_exit_macro!(1); + exit!(1); } } diff --git a/src/bootstrap/llvm.rs b/src/bootstrap/llvm.rs index 7e27960f3e9..07719a71178 100644 --- a/src/bootstrap/llvm.rs +++ b/src/bootstrap/llvm.rs @@ -1056,6 +1056,9 @@ fn supported_sanitizers( "s390x-unknown-linux-musl" => { common_libs("linux", "s390x", &["asan", "lsan", "msan", "tsan"]) } + "x86_64-unknown-linux-ohos" => { + common_libs("linux", "x86_64", &["asan", "lsan", "msan", "tsan"]) + } _ => Vec::new(), } } diff --git a/src/bootstrap/metrics.rs b/src/bootstrap/metrics.rs index b73df7fe822..cf8d33dfcb0 100644 --- a/src/bootstrap/metrics.rs +++ b/src/bootstrap/metrics.rs @@ -40,6 +40,13 @@ pub(crate) struct BuildMetrics { state: RefCell<MetricsState>, } +/// NOTE: this isn't really cloning anything, but `x suggest` doesn't need metrics so this is probably ok. +impl Clone for BuildMetrics { + fn clone(&self) -> Self { + Self::init() + } +} + impl BuildMetrics { pub(crate) fn init() -> Self { let state = RefCell::new(MetricsState { diff --git a/src/bootstrap/render_tests.rs b/src/bootstrap/render_tests.rs index ccd067053ef..6802bf4511b 100644 --- a/src/bootstrap/render_tests.rs +++ b/src/bootstrap/render_tests.rs @@ -30,7 +30,7 @@ pub(crate) fn try_run_tests(builder: &Builder<'_>, cmd: &mut Command, stream: bo if !run_tests(builder, cmd, stream) { if builder.fail_fast { - crate::detail_exit_macro!(1); + crate::exit!(1); } else { let mut failures = builder.delayed_failures.borrow_mut(); failures.push(format!("{cmd:?}")); diff --git a/src/bootstrap/run.rs b/src/bootstrap/run.rs index 70b91700043..4082f5bb9b1 100644 --- a/src/bootstrap/run.rs +++ b/src/bootstrap/run.rs @@ -24,8 +24,7 @@ impl Step for ExpandYamlAnchors { /// anchors in them, since GitHub Actions doesn't support them. 
fn run(self, builder: &Builder<'_>) { builder.info("Expanding YAML anchors in the GitHub Actions configuration"); - try_run( - builder, + builder.run_delaying_failure( &mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src), ); } @@ -39,19 +38,6 @@ impl Step for ExpandYamlAnchors { } } -fn try_run(builder: &Builder<'_>, cmd: &mut Command) -> bool { - if !builder.fail_fast { - if builder.try_run(cmd).is_err() { - let mut failures = builder.delayed_failures.borrow_mut(); - failures.push(format!("{:?}", cmd)); - return false; - } - } else { - builder.run(cmd); - } - true -} - #[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] pub struct BuildManifest; diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs index 8f5ba42736b..9321fc1bcb8 100644 --- a/src/bootstrap/sanity.rs +++ b/src/bootstrap/sanity.rs @@ -104,7 +104,7 @@ You should install cmake, or set `download-ci-llvm = true` in the than building it. " ); - crate::detail_exit_macro!(1); + crate::exit!(1); } } diff --git a/src/bootstrap/setup.rs b/src/bootstrap/setup.rs index 34c6ccf1365..2075c58598d 100644 --- a/src/bootstrap/setup.rs +++ b/src/bootstrap/setup.rs @@ -203,7 +203,7 @@ fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) { "note: this will use the configuration in {}", profile.include_path(&config.src).display() ); - crate::detail_exit_macro!(1); + crate::exit!(1); } let settings = format!( @@ -389,7 +389,7 @@ pub fn interactive_path() -> io::Result<Profile> { io::stdin().read_line(&mut input)?; if input.is_empty() { eprintln!("EOF on stdin, when expecting answer to question. Giving up."); - crate::detail_exit_macro!(1); + crate::exit!(1); } break match parse_with_abbrev(&input) { Ok(profile) => profile, diff --git a/src/bootstrap/suggest.rs b/src/bootstrap/suggest.rs index ff20ebec267..f225104bda9 100644 --- a/src/bootstrap/suggest.rs +++ b/src/bootstrap/suggest.rs @@ -4,18 +4,11 @@ use std::str::FromStr; use std::path::PathBuf; -use crate::{ - builder::{Builder, Kind}, - tool::Tool, -}; +use clap::Parser; -#[cfg(feature = "build-metrics")] -pub fn suggest(builder: &Builder<'_>, run: bool) { - panic!("`x suggest` is not supported with `build-metrics`") -} +use crate::{builder::Builder, tool::Tool}; /// Suggests a list of possible `x.py` commands to run based on modified files in branch. -#[cfg(not(feature = "build-metrics"))] pub fn suggest(builder: &Builder<'_>, run: bool) { let suggestions = builder.tool_cmd(Tool::SuggestTests).output().expect("failed to run `suggest-tests` tool"); @@ -67,12 +60,13 @@ pub fn suggest(builder: &Builder<'_>, run: bool) { if run { for sug in suggestions { - let mut build = builder.build.clone(); - - let builder = - Builder::new_standalone(&mut build, Kind::parse(&sug.0).unwrap(), sug.2, sug.1); - - builder.execute_cli() + let mut build: crate::Build = builder.build.clone(); + build.config.paths = sug.2; + build.config.cmd = crate::flags::Flags::parse_from(["x.py", sug.0]).cmd; + if let Some(stage) = sug.1 { + build.config.stage = stage; + } + build.build(); } } else { println!("help: consider using the `--run` flag to automatically run suggested tests"); diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 284efff348d..44b8c2d8c01 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -48,32 +48,6 @@ const MIR_OPT_BLESS_TARGET_MAPPING: &[(&str, &str)] = &[ // build for, so there is no entry for "aarch64-apple-darwin" here. 
]; -fn try_run(builder: &Builder<'_>, cmd: &mut Command) -> bool { - if !builder.fail_fast { - if builder.try_run(cmd).is_err() { - let mut failures = builder.delayed_failures.borrow_mut(); - failures.push(format!("{:?}", cmd)); - return false; - } - } else { - builder.run(cmd); - } - true -} - -fn try_run_quiet(builder: &Builder<'_>, cmd: &mut Command) -> bool { - if !builder.fail_fast { - if !builder.try_run_quiet(cmd) { - let mut failures = builder.delayed_failures.borrow_mut(); - failures.push(format!("{:?}", cmd)); - return false; - } - } else { - builder.run_quiet(cmd); - } - true -} - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct CrateBootstrap { path: Interned<PathBuf>, @@ -117,14 +91,8 @@ impl Step for CrateBootstrap { SourceType::InTree, &[], ); - builder.info(&format!( - "{} {} stage0 ({})", - builder.kind.description(), - path, - bootstrap_host, - )); let crate_name = path.rsplit_once('/').unwrap().1; - run_cargo_test(cargo, &[], &[], crate_name, compiler, bootstrap_host, builder); + run_cargo_test(cargo, &[], &[], crate_name, crate_name, compiler, bootstrap_host, builder); } } @@ -163,6 +131,7 @@ You can skip linkcheck with --exclude src/tools/linkchecker" // Test the linkchecker itself. let bootstrap_host = builder.config.build; let compiler = builder.compiler(0, bootstrap_host); + let cargo = tool::prepare_tool_cargo( builder, compiler, @@ -173,7 +142,16 @@ You can skip linkcheck with --exclude src/tools/linkchecker" SourceType::InTree, &[], ); - run_cargo_test(cargo, &[], &[], "linkchecker", compiler, bootstrap_host, builder); + run_cargo_test( + cargo, + &[], + &[], + "linkchecker", + "linkchecker self tests", + compiler, + bootstrap_host, + builder, + ); if builder.doc_tests == DocTests::No { return; @@ -182,12 +160,14 @@ You can skip linkcheck with --exclude src/tools/linkchecker" // Build all the default documentation. builder.default_doc(&[]); + // Build the linkchecker before calling `msg`, since GHA doesn't support nested groups. + let mut linkchecker = builder.tool_cmd(Tool::Linkchecker); + // Run the linkchecker. 
+ let _guard = + builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host); let _time = util::timeit(&builder); - try_run( - builder, - builder.tool_cmd(Tool::Linkchecker).arg(builder.out.join(host.triple).join("doc")), - ); + builder.run_delaying_failure(linkchecker.arg(builder.out.join(host.triple).join("doc"))); } fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { @@ -240,7 +220,9 @@ impl Step for HtmlCheck { builder.default_doc(&[]); builder.ensure(crate::doc::Rustc::new(builder.top_stage, self.target, builder)); - try_run(builder, builder.tool_cmd(Tool::HtmlChecker).arg(builder.doc_out(self.target))); + builder.run_delaying_failure( + builder.tool_cmd(Tool::HtmlChecker).arg(builder.doc_out(self.target)), + ); } } @@ -279,8 +261,7 @@ impl Step for Cargotest { let _time = util::timeit(&builder); let mut cmd = builder.tool_cmd(Tool::CargoTest); - try_run( - builder, + builder.run_delaying_failure( cmd.arg(&cargo) .arg(&out_dir) .args(builder.config.test_args()) @@ -406,7 +387,7 @@ impl Step for RustAnalyzer { cargo.env("SKIP_SLOW_TESTS", "1"); cargo.add_rustc_lib_path(builder, compiler); - run_cargo_test(cargo, &[], &[], "rust-analyzer", compiler, host, builder); + run_cargo_test(cargo, &[], &[], "rust-analyzer", "rust-analyzer", compiler, host, builder); } } @@ -455,7 +436,7 @@ impl Step for Rustfmt { cargo.add_rustc_lib_path(builder, compiler); - run_cargo_test(cargo, &[], &[], "rustfmt", compiler, host, builder); + run_cargo_test(cargo, &[], &[], "rustfmt", "rustfmt", compiler, host, builder); } } @@ -503,7 +484,16 @@ impl Step for RustDemangler { cargo.env("RUST_DEMANGLER_DRIVER_PATH", rust_demangler); cargo.add_rustc_lib_path(builder, compiler); - run_cargo_test(cargo, &[], &[], "rust-demangler", compiler, host, builder); + run_cargo_test( + cargo, + &[], + &[], + "rust-demangler", + "rust-demangler", + compiler, + host, + builder, + ); } } @@ -547,6 +537,13 @@ impl Miri { cargo.env("RUST_BACKTRACE", "1"); let mut cargo = Command::from(cargo); + let _guard = builder.msg( + Kind::Build, + compiler.stage + 1, + "miri sysroot", + compiler.host, + compiler.host, + ); builder.run(&mut cargo); // # Determine where Miri put its sysroot. @@ -624,6 +621,8 @@ impl Step for Miri { SourceType::InTree, &[], ); + let _guard = builder.msg_sysroot_tool(Kind::Test, compiler.stage, "miri", host, host); + cargo.add_rustc_lib_path(builder, compiler); // miri tests need to know about the stage sysroot @@ -736,7 +735,16 @@ impl Step for CompiletestTest { &[], ); cargo.allow_features("test"); - run_cargo_test(cargo, &[], &[], "compiletest", compiler, host, builder); + run_cargo_test( + cargo, + &[], + &[], + "compiletest", + "compiletest self test", + compiler, + host, + builder, + ); } } @@ -792,13 +800,16 @@ impl Step for Clippy { cargo.env("BLESS", "Gesundheit"); } - if builder.try_run(&mut cargo).is_ok() { + let _guard = builder.msg_sysroot_tool(Kind::Test, compiler.stage, "clippy", host, host); + + #[allow(deprecated)] // Clippy reports errors if it blessed the outputs + if builder.config.try_run(&mut cargo).is_ok() { // The tests succeeded; nothing to do. 
return; } if !builder.config.cmd.bless() { - crate::detail_exit_macro!(1); + crate::exit!(1); } } } @@ -852,7 +863,7 @@ impl Step for RustdocTheme { util::lld_flag_no_threads(self.compiler.host.contains("windows")), ); } - try_run(builder, &mut cmd); + builder.run_delaying_failure(&mut cmd); } } @@ -904,6 +915,13 @@ impl Step for RustdocJSStd { builder, DocumentationFormat::HTML, )); + let _guard = builder.msg( + Kind::Test, + builder.top_stage, + "rustdoc-js-std", + builder.config.build, + self.target, + ); builder.run(&mut command); } } @@ -1042,6 +1060,13 @@ impl Step for RustdocGUI { } let _time = util::timeit(&builder); + let _guard = builder.msg_sysroot_tool( + Kind::Test, + self.compiler.stage, + "rustdoc-gui", + self.compiler.host, + self.target, + ); crate::render_tests::try_run_tests(builder, &mut cmd, true); } } @@ -1092,13 +1117,13 @@ help: to skip test's attempt to check tidiness, pass `--exclude src/tools/tidy` PATH = inferred_rustfmt_dir.display(), CHAN = builder.config.channel, ); - crate::detail_exit_macro!(1); + crate::exit!(1); } crate::format::format(&builder, !builder.config.cmd.bless(), &[]); } builder.info("tidy check"); - try_run(builder, &mut cmd); + builder.run_delaying_failure(&mut cmd); builder.ensure(ExpandYamlAnchors); @@ -1115,7 +1140,7 @@ help: to skip test's attempt to check tidiness, pass `--exclude src/tools/tidy` eprintln!( "x.py completions were changed; run `x.py run generate-completions` to update them" ); - crate::detail_exit_macro!(1); + crate::exit!(1); } } } @@ -1143,8 +1168,7 @@ impl Step for ExpandYamlAnchors { /// by the user before committing CI changes. fn run(self, builder: &Builder<'_>) { builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded"); - try_run( - builder, + builder.run_delaying_failure( &mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src), ); } @@ -1426,7 +1450,7 @@ help: to test the compiler, use `--stage 1` instead help: to test the standard library, use `--stage 0 library/std` instead note: if you're sure you want to do this, please open an issue as to why. In the meantime, you can override this with `COMPILETEST_FORCE_STAGE0=1`." ); - crate::detail_exit_macro!(1); + crate::exit!(1); } let mut compiler = self.compiler; @@ -1886,14 +1910,6 @@ impl Step for BookTest { /// /// This uses the `rustdoc` that sits next to `compiler`. fn run(self, builder: &Builder<'_>) { - let host = self.compiler.host; - let _guard = builder.msg( - Kind::Test, - self.compiler.stage, - &format!("book {}", self.name), - host, - host, - ); // External docs are different from local because: // - Some books need pre-processing by mdbook before being tested. // - They need to save their state to toolstate. @@ -1936,12 +1952,12 @@ impl BookTest { let _guard = builder.msg( Kind::Test, compiler.stage, - format_args!("rustbook {}", self.path.display()), + format_args!("mdbook {}", self.path.display()), compiler.host, compiler.host, ); let _time = util::timeit(&builder); - let toolstate = if try_run(builder, &mut rustbook_cmd) { + let toolstate = if builder.run_delaying_failure(&mut rustbook_cmd) { ToolState::TestPass } else { ToolState::TestFail @@ -1952,8 +1968,12 @@ impl BookTest { /// This runs `rustdoc --test` on all `.md` files in the path. 
fn run_local_doc(self, builder: &Builder<'_>) { let compiler = self.compiler; + let host = self.compiler.host; - builder.ensure(compile::Std::new(compiler, compiler.host)); + builder.ensure(compile::Std::new(compiler, host)); + + let _guard = + builder.msg(Kind::Test, compiler.stage, &format!("book {}", self.name), host, host); // Do a breadth-first traversal of the `src/doc` directory and just run // tests for all files that end in `*.md` @@ -2099,9 +2119,9 @@ fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) -> cmd.arg("--test-args").arg(test_args); if builder.config.verbose_tests { - try_run(builder, &mut cmd) + builder.run_delaying_failure(&mut cmd) } else { - try_run_quiet(builder, &mut cmd) + builder.run_quiet_delaying_failure(&mut cmd) } } @@ -2127,7 +2147,7 @@ impl Step for RustcGuide { let src = builder.src.join(relative_path); let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook); - let toolstate = if try_run(builder, rustbook_cmd.arg("linkcheck").arg(&src)) { + let toolstate = if builder.run_delaying_failure(rustbook_cmd.arg("linkcheck").arg(&src)) { ToolState::TestPass } else { ToolState::TestFail @@ -2178,11 +2198,12 @@ impl Step for CrateLibrustc { /// Given a `cargo test` subcommand, add the appropriate flags and run it. /// /// Returns whether the test succeeded. -fn run_cargo_test( +fn run_cargo_test<'a>( cargo: impl Into<Command>, libtest_args: &[&str], crates: &[Interned<String>], primary_crate: &str, + description: impl Into<Option<&'a str>>, compiler: Compiler, target: TargetSelection, builder: &Builder<'_>, @@ -2190,6 +2211,9 @@ fn run_cargo_test( let mut cargo = prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder); let _time = util::timeit(&builder); + let _group = description.into().and_then(|what| { + builder.msg_sysroot_tool(Kind::Test, compiler.stage, what, compiler.host, target) + }); #[cfg(feature = "build-metrics")] builder.metrics.begin_test_suite( @@ -2355,14 +2379,16 @@ impl Step for Crate { _ => panic!("can only test libraries"), }; - let _guard = builder.msg( - builder.kind, - compiler.stage, - crate_description(&self.crates), - compiler.host, + run_cargo_test( + cargo, + &[], + &self.crates, + &self.crates[0], + &*crate_description(&self.crates), + compiler, target, + builder, ); - run_cargo_test(cargo, &[], &self.crates, &self.crates[0], compiler, target, builder); } } @@ -2455,18 +2481,12 @@ impl Step for CrateRustdoc { dylib_path.insert(0, PathBuf::from(&*libdir)); cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - let _guard = builder.msg_sysroot_tool( - builder.kind, - compiler.stage, - "rustdoc", - compiler.host, - target, - ); run_cargo_test( cargo, &[], &[INTERNER.intern_str("rustdoc:0.0.0")], "rustdoc", + "rustdoc", compiler, target, builder, @@ -2522,13 +2542,12 @@ impl Step for CrateRustdocJsonTypes { &[] }; - let _guard = - builder.msg(builder.kind, compiler.stage, "rustdoc-json-types", compiler.host, target); run_cargo_test( cargo, libtest_args, &[INTERNER.intern_str("rustdoc-json-types")], "rustdoc-json-types", + "rustdoc-json-types", compiler, target, builder, @@ -2669,6 +2688,10 @@ impl Step for Bootstrap { /// Tests the build system itself. 
fn run(self, builder: &Builder<'_>) { + let host = builder.config.build; + let compiler = builder.compiler(0, host); + let _guard = builder.msg(Kind::Test, 0, "bootstrap", host, host); + let mut check_bootstrap = Command::new(&builder.python()); check_bootstrap .args(["-m", "unittest", "bootstrap_test.py"]) @@ -2677,10 +2700,8 @@ impl Step for Bootstrap { .current_dir(builder.src.join("src/bootstrap/")); // NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible. // Use `python -m unittest` manually if you want to pass arguments. - try_run(builder, &mut check_bootstrap); + builder.run_delaying_failure(&mut check_bootstrap); - let host = builder.config.build; - let compiler = builder.compiler(0, host); let mut cmd = Command::new(&builder.initial_cargo); cmd.arg("test") .current_dir(builder.src.join("src/bootstrap")) @@ -2697,7 +2718,7 @@ impl Step for Bootstrap { } // rustbuild tests are racy on directory creation so just run them one at a time. // Since there's not many this shouldn't be a problem. - run_cargo_test(cmd, &["--test-threads=1"], &[], "bootstrap", compiler, host, builder); + run_cargo_test(cmd, &["--test-threads=1"], &[], "bootstrap", None, compiler, host, builder); } fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { @@ -2748,8 +2769,14 @@ impl Step for TierCheck { cargo.arg("--verbose"); } - builder.info("platform support check"); - try_run(builder, &mut cargo.into()); + let _guard = builder.msg( + Kind::Test, + self.compiler.stage, + "platform support check", + self.compiler.host, + self.compiler.host, + ); + builder.run_delaying_failure(&mut cargo.into()); } } @@ -2796,8 +2823,6 @@ impl Step for RustInstaller { /// Ensure the version placeholder replacement tool builds fn run(self, builder: &Builder<'_>) { - builder.info("test rust-installer"); - let bootstrap_host = builder.config.build; let compiler = builder.compiler(0, bootstrap_host); let cargo = tool::prepare_tool_cargo( @@ -2810,7 +2835,15 @@ impl Step for RustInstaller { SourceType::InTree, &[], ); - run_cargo_test(cargo, &[], &[], "installer", compiler, bootstrap_host, builder); + + let _guard = builder.msg( + Kind::Test, + compiler.stage, + "rust-installer", + bootstrap_host, + bootstrap_host, + ); + run_cargo_test(cargo, &[], &[], "installer", None, compiler, bootstrap_host, builder); // We currently don't support running the test.sh script outside linux(?) environments. 
// Eventually this should likely migrate to #[test]s in rust-installer proper rather than a @@ -2829,7 +2862,7 @@ impl Step for RustInstaller { cmd.env("CARGO", &builder.initial_cargo); cmd.env("RUSTC", &builder.initial_rustc); cmd.env("TMP_DIR", &tmpdir); - try_run(builder, &mut cmd); + builder.run_delaying_failure(&mut cmd); } fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index 915dceae389..725f864b718 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -34,6 +34,7 @@ struct ToolBuild { } impl Builder<'_> { + #[track_caller] fn msg_tool( &self, mode: Mode, @@ -107,7 +108,8 @@ impl Step for ToolBuild { ); let mut cargo = Command::from(cargo); - let is_expected = builder.try_run(&mut cargo).is_ok(); + #[allow(deprecated)] // we check this in `is_optional_tool` in a second + let is_expected = builder.config.try_run(&mut cargo).is_ok(); builder.save_toolstate( tool, @@ -116,7 +118,7 @@ impl Step for ToolBuild { if !is_expected { if !is_optional_tool { - crate::detail_exit_macro!(1); + crate::exit!(1); } else { None } diff --git a/src/bootstrap/toolstate.rs b/src/bootstrap/toolstate.rs index 9c4d0ea265d..eb65c8bee09 100644 --- a/src/bootstrap/toolstate.rs +++ b/src/bootstrap/toolstate.rs @@ -91,7 +91,7 @@ fn print_error(tool: &str, submodule: &str) { eprintln!("If you do NOT intend to update '{}', please ensure you did not accidentally", tool); eprintln!("change the submodule at '{}'. You may ask your reviewer for the", submodule); eprintln!("proper steps."); - crate::detail_exit_macro!(3); + crate::exit!(3); } fn check_changed_files(toolstates: &HashMap<Box<str>, ToolState>) { @@ -106,7 +106,7 @@ fn check_changed_files(toolstates: &HashMap<Box<str>, ToolState>) { Ok(o) => o, Err(e) => { eprintln!("Failed to get changed files: {:?}", e); - crate::detail_exit_macro!(1); + crate::exit!(1); } }; @@ -177,7 +177,7 @@ impl Step for ToolStateCheck { } if did_error { - crate::detail_exit_macro!(1); + crate::exit!(1); } check_changed_files(&toolstates); @@ -223,7 +223,7 @@ impl Step for ToolStateCheck { } if did_error { - crate::detail_exit_macro!(1); + crate::exit!(1); } if builder.config.channel == "nightly" && env::var_os("TOOLSTATE_PUBLISH").is_some() { @@ -262,6 +262,8 @@ impl Builder<'_> { /// `rust.save-toolstates` in `config.toml`. If unspecified, nothing will be /// done. The file is updated immediately after this function completes. pub fn save_toolstate(&self, tool: &str, state: ToolState) { + use std::io::Write; + // If we're in a dry run setting we don't want to save toolstates as // that means if we e.g. panic down the line it'll look like we tested // everything (but we actually haven't). @@ -286,7 +288,8 @@ impl Builder<'_> { current_toolstates.insert(tool.into(), state); t!(file.seek(SeekFrom::Start(0))); t!(file.set_len(0)); - t!(serde_json::to_writer(file, ¤t_toolstates)); + t!(serde_json::to_writer(&file, ¤t_toolstates)); + t!(writeln!(file)); // make sure this ends in a newline } } } diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index b291584b300..2725813aba3 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -153,16 +153,6 @@ pub fn symlink_dir(config: &Config, original: &Path, link: &Path) -> io::Result< } } -/// The CI environment rustbuild is running in. This mainly affects how the logs -/// are printed. -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum CiEnv { - /// Not a CI environment. 
- None, - /// The GitHub Actions environment, for Linux (including Docker), Windows and macOS builds. - GitHubActions, -} - pub fn forcing_clang_based_tests() -> bool { if let Some(var) = env::var_os("RUSTBUILD_FORCE_CLANG_BASED_TESTS") { match &var.to_string_lossy().to_lowercase()[..] { @@ -229,7 +219,7 @@ pub fn is_valid_test_suite_arg<'a, P: AsRef<Path>>( pub fn run(cmd: &mut Command, print_cmd_on_fail: bool) { if try_run(cmd, print_cmd_on_fail).is_err() { - crate::detail_exit_macro!(1); + crate::exit!(1); } } @@ -253,7 +243,7 @@ pub fn check_run(cmd: &mut Command, print_cmd_on_fail: bool) -> bool { pub fn run_suppressed(cmd: &mut Command) { if !try_run_suppressed(cmd) { - crate::detail_exit_macro!(1); + crate::exit!(1); } } diff --git a/src/ci/docker/host-aarch64/aarch64-gnu/Dockerfile b/src/ci/docker/host-aarch64/aarch64-gnu/Dockerfile index 699938c3718..f8701044ec4 100644 --- a/src/ci/docker/host-aarch64/aarch64-gnu/Dockerfile +++ b/src/ci/docker/host-aarch64/aarch64-gnu/Dockerfile @@ -17,6 +17,9 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-ins xz-utils \ && rm -rf /var/lib/apt/lists/* +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/armhf-gnu/Dockerfile b/src/ci/docker/host-x86_64/armhf-gnu/Dockerfile index 57e63cd39d2..2d92d23d7b2 100644 --- a/src/ci/docker/host-x86_64/armhf-gnu/Dockerfile +++ b/src/ci/docker/host-x86_64/armhf-gnu/Dockerfile @@ -13,6 +13,7 @@ RUN apt-get update -y && DEBIAN_FRONTEND=noninteractive apt-get install -y --no- git \ libc6-dev \ libc6-dev-armhf-cross \ + libssl-dev \ make \ ninja-build \ python3 \ @@ -75,6 +76,9 @@ RUN arm-linux-gnueabihf-gcc addentropy.c -o rootfs/addentropy -static # Source of the file: https://github.com/vfdev-5/qemu-rpi2-vexpress/raw/master/vexpress-v2p-ca15-tc1.dtb RUN curl -O https://ci-mirrors.rust-lang.org/rustc/vexpress-v2p-ca15-tc1.dtb +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/disabled/dist-m68k-linux/Dockerfile b/src/ci/docker/host-x86_64/disabled/dist-m68k-linux/Dockerfile index 17203994cdf..cbac2310d0d 100644 --- a/src/ci/docker/host-x86_64/disabled/dist-m68k-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/disabled/dist-m68k-linux/Dockerfile @@ -16,6 +16,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ libssl-dev \ pkg-config +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/disabled/riscv64gc-linux/Dockerfile b/src/ci/docker/host-x86_64/disabled/riscv64gc-linux/Dockerfile index 4377608700b..07260be3587 100644 --- a/src/ci/docker/host-x86_64/disabled/riscv64gc-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/disabled/riscv64gc-linux/Dockerfile @@ -19,6 +19,7 @@ RUN apt-get update -y && apt-get install -y --no-install-recommends \ g++ \ libc6-dev \ libc6-dev-riscv64-cross \ + libssl-dev \ make \ ninja-build \ patch \ @@ -94,6 +95,9 @@ RUN mkdir build && cd build && \ WORKDIR /tmp RUN rm -rf /tmp/riscv-pk +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile index ea4a2a2427d..59e98689c2c 100644 --- a/src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile +++ 
b/src/ci/docker/host-x86_64/dist-i686-linux/Dockerfile @@ -48,6 +48,9 @@ COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/ COPY host-x86_64/dist-x86_64-linux/build-gcc.sh /tmp/ RUN ./build-gcc.sh && yum remove -y gcc gcc-c++ +COPY scripts/cmake.sh /tmp/ +RUN ./cmake.sh + # Now build LLVM+Clang, afterwards configuring further compilations to use the # clang/clang++ compilers. COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/ diff --git a/src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile index 6b7b32a8bc7..2b2871c2ec0 100644 --- a/src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-powerpc64le-linux/Dockerfile @@ -14,6 +14,9 @@ RUN apt-get install -y --no-install-recommends rpm2cpio cpio COPY host-x86_64/dist-powerpc64le-linux/shared.sh host-x86_64/dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /tmp/ RUN ./build-powerpc64le-toolchain.sh +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/dist-various-1/Dockerfile b/src/ci/docker/host-x86_64/dist-various-1/Dockerfile index 4576e6d4fa2..c2abe9f19a3 100644 --- a/src/ci/docker/host-x86_64/dist-various-1/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-various-1/Dockerfile @@ -186,6 +186,9 @@ ENV SCRIPT \ python3 ../x.py --stage 2 test --host='' --target $RUN_MAKE_TARGETS tests/run-make && \ python3 ../x.py dist --host='' --target $TARGETS +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + # sccache COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile index 0f5df95a0dd..6367aacfdde 100644 --- a/src/ci/docker/host-x86_64/dist-various-2/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-various-2/Dockerfile @@ -96,6 +96,9 @@ RUN /tmp/build-wasi-toolchain.sh COPY scripts/freebsd-toolchain.sh /tmp/ RUN /tmp/freebsd-toolchain.sh i686 +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile index 1fcb99f6d5f..afd536a6602 100644 --- a/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-x86_64-linux/Dockerfile @@ -48,6 +48,10 @@ COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/ COPY host-x86_64/dist-x86_64-linux/build-gcc.sh /tmp/ RUN ./build-gcc.sh && yum remove -y gcc gcc-c++ +# LLVM 17 needs cmake 3.20 or higher. +COPY scripts/cmake.sh /tmp/ +RUN ./cmake.sh + # Now build LLVM+Clang, afterwards configuring further compilations to use the # clang/clang++ compilers. 
COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/ diff --git a/src/ci/docker/host-x86_64/dist-x86_64-netbsd/Dockerfile b/src/ci/docker/host-x86_64/dist-x86_64-netbsd/Dockerfile index d03c364547e..041bacf3397 100644 --- a/src/ci/docker/host-x86_64/dist-x86_64-netbsd/Dockerfile +++ b/src/ci/docker/host-x86_64/dist-x86_64-netbsd/Dockerfile @@ -7,6 +7,9 @@ RUN DEBIAN_FRONTEND=noninteractive apt-get install -y zlib1g-dev COPY host-x86_64/dist-x86_64-netbsd/build-netbsd-toolchain.sh /tmp/ RUN /tmp/build-netbsd-toolchain.sh +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/test-various/Dockerfile b/src/ci/docker/host-x86_64/test-various/Dockerfile index a1d06ab1844..002f221b1f3 100644 --- a/src/ci/docker/host-x86_64/test-various/Dockerfile +++ b/src/ci/docker/host-x86_64/test-various/Dockerfile @@ -37,6 +37,9 @@ COPY scripts/musl-toolchain.sh /build/ RUN bash musl-toolchain.sh x86_64 && rm -rf build WORKDIR / +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/wasm32/Dockerfile b/src/ci/docker/host-x86_64/wasm32/Dockerfile index ef1fde1c3b9..0e8989e10f2 100644 --- a/src/ci/docker/host-x86_64/wasm32/Dockerfile +++ b/src/ci/docker/host-x86_64/wasm32/Dockerfile @@ -13,12 +13,16 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-ins sudo \ gdb \ xz-utils \ + libssl-dev \ bzip2 \ && rm -rf /var/lib/apt/lists/* COPY scripts/emscripten.sh /scripts/ RUN bash /scripts/emscripten.sh +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile index 735d4d4dfcf..b404e3b982b 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-debug/Dockerfile @@ -26,6 +26,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ clang \ && rm -rf /var/lib/apt/lists/* +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/x86_64-gnu-nopt/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu-nopt/Dockerfile index 9fdc78406fb..1452c00a5d8 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu-nopt/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu-nopt/Dockerfile @@ -18,6 +18,9 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-ins xz-utils \ && rm -rf /var/lib/apt/lists/* +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile b/src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile index fbec368c9ee..576487821dc 100644 --- a/src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile +++ b/src/ci/docker/host-x86_64/x86_64-gnu/Dockerfile @@ -19,6 +19,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ mingw-w64 \ && rm -rf /var/lib/apt/lists/* +COPY scripts/cmake.sh /scripts/ +RUN /scripts/cmake.sh + COPY scripts/sccache.sh /scripts/ RUN sh /scripts/sccache.sh diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index 4b218d57727..da9d68672c4 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -79,7 +79,7 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then 
loaded_images=$(/usr/bin/timeout -k 720 600 docker load -i /tmp/rustci_docker_cache \ | sed 's/.* sha/sha/') set -e - echo "Downloaded containers:\n$loaded_images" + printf "Downloaded containers:\n$loaded_images\n" fi dockerfile="$docker_dir/$image/Dockerfile" @@ -89,12 +89,14 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then else context="$script_dir" fi + echo "::group::Building docker image for $image" retry docker \ build \ --rm \ -t rust-ci \ -f "$dockerfile" \ "$context" + echo "::endgroup::" if [ "$CI" != "" ]; then s3url="s3://$SCCACHE_BUCKET/docker/$cksum" diff --git a/src/ci/docker/scripts/cmake.sh b/src/ci/docker/scripts/cmake.sh index f124dbdaa6d..822666c8953 100755 --- a/src/ci/docker/scripts/cmake.sh +++ b/src/ci/docker/scripts/cmake.sh @@ -18,9 +18,9 @@ exit 1 set -x } -# LLVM 12 requires CMake 3.13.4 or higher. -# This script is not necessary for images using Ubuntu 20.04 or newer. -CMAKE=3.13.4 +# LLVM 17 requires CMake 3.20 or higher. +# This script is not necessary for images using Ubuntu 22.04 or newer. +CMAKE=3.20.3 curl -L https://github.com/Kitware/CMake/releases/download/v$CMAKE/cmake-$CMAKE.tar.gz | tar xzf - mkdir cmake-build diff --git a/src/ci/run.sh b/src/ci/run.sh index 48fb40d6a6d..da1960fc057 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -154,13 +154,13 @@ fi # check for clock drifts. An HTTP URL is used instead of HTTPS since on Azure # Pipelines it happened that the certificates were marked as expired. datecheck() { - echo "== clock drift check ==" + echo "::group::Clock drift check" echo -n " local time: " date echo -n " network time: " curl -fs --head http://ci-caches.rust-lang.org | grep ^Date: \ | sed 's/Date: //g' || true - echo "== end clock drift check ==" + echo "::endgroup::" } datecheck trap datecheck EXIT @@ -177,6 +177,7 @@ retry make prepare # Display the CPU and memory information. This helps us know why the CI timing # is fluctuating. +echo "::group::Display CPU and Memory information" if isMacOS; then system_profiler SPHardwareDataType || true sysctl hw || true @@ -186,6 +187,7 @@ else cat /proc/meminfo || true ncpus=$(grep processor /proc/cpuinfo | wc -l) fi +echo "::endgroup::" if [ ! 
-z "$SCRIPT" ]; then echo "Executing ${SCRIPT}" @@ -218,4 +220,6 @@ if [ "$RUN_CHECK_WITH_PARALLEL_QUERIES" != "" ]; then CARGO_INCREMENTAL=0 ../x check fi +echo "::group::sccache stats" sccache --show-stats || true +echo "::endgroup::" diff --git a/src/doc/rustc/src/SUMMARY.md b/src/doc/rustc/src/SUMMARY.md index f8af26326a7..96ee9a586de 100644 --- a/src/doc/rustc/src/SUMMARY.md +++ b/src/doc/rustc/src/SUMMARY.md @@ -36,8 +36,10 @@ - [m68k-unknown-linux-gnu](platform-support/m68k-unknown-linux-gnu.md) - [mips64-openwrt-linux-musl](platform-support/mips64-openwrt-linux-musl.md) - [mipsel-sony-psx](platform-support/mipsel-sony-psx.md) + - [mipsisa\*r6\*-unknown-linux-gnu\*](platform-support/mips-release-6.md) - [nvptx64-nvidia-cuda](platform-support/nvptx64-nvidia-cuda.md) - [riscv32imac-unknown-xous-elf](platform-support/riscv32imac-unknown-xous-elf.md) + - [sparc-unknown-none-elf](./platform-support/sparc-unknown-none-elf.md) - [*-pc-windows-gnullvm](platform-support/pc-windows-gnullvm.md) - [\*-nto-qnx-\*](platform-support/nto-qnx.md) - [\*-unknown-netbsd\*](platform-support/netbsd.md) diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index d2a25e612ec..351b03b9337 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -176,6 +176,7 @@ target | std | notes `thumbv8m.base-none-eabi` | * | Bare ARMv8-M Baseline `thumbv8m.main-none-eabi` | * | Bare ARMv8-M Mainline `thumbv8m.main-none-eabihf` | * | Bare ARMv8-M Mainline, hardfloat +[`sparc-unknown-none-elf`](./platform-support/sparc-unknown-none-elf.md) | * | Bare 32-bit SPARC V7+ `wasm32-unknown-emscripten` | ✓ | WebAssembly via Emscripten `wasm32-unknown-unknown` | ✓ | WebAssembly `wasm32-wasi` | ✓ | WebAssembly with WASI @@ -277,10 +278,10 @@ target | std | host | notes [`mipsel-sony-psx`](platform-support/mipsel-sony-psx.md) | * | | MIPS (LE) Sony PlayStation 1 (PSX) `mipsel-unknown-linux-uclibc` | ✓ | | MIPS (LE) Linux with uClibc `mipsel-unknown-none` | * | | Bare MIPS (LE) softfloat -`mipsisa32r6-unknown-linux-gnu` | ? | | -`mipsisa32r6el-unknown-linux-gnu` | ? | | -`mipsisa64r6-unknown-linux-gnuabi64` | ? | | -`mipsisa64r6el-unknown-linux-gnuabi64` | ? | | +[`mipsisa32r6-unknown-linux-gnu`](platform-support/mips-release-6.md) | ? | | 32-bit MIPS Release 6 Big Endian +[`mipsisa32r6el-unknown-linux-gnu`](platform-support/mips-release-6.md) | ? | | 32-bit MIPS Release 6 Little Endian +[`mipsisa64r6-unknown-linux-gnuabi64`](platform-support/mips-release-6.md) | ? | | 64-bit MIPS Release 6 Big Endian +[`mipsisa64r6el-unknown-linux-gnuabi64`](platform-support/mips-release-6.md) | ✓ | ✓ | 64-bit MIPS Release 6 Little Endian `msp430-none-elf` | * | | 16-bit MSP430 microcontrollers `powerpc-unknown-linux-gnuspe` | ✓ | | PowerPC SPE Linux `powerpc-unknown-linux-musl` | ? | | @@ -305,7 +306,7 @@ target | std | host | notes `riscv64gc-unknown-freebsd` | | | RISC-V FreeBSD `riscv64gc-unknown-fuchsia` | | | RISC-V Fuchsia `riscv64gc-unknown-linux-musl` | | | RISC-V Linux (kernel 4.20, musl 1.2.0) -[`riscv64gc-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ? 
| RISC-V NetBSD +[`riscv64gc-unknown-netbsd`](platform-support/netbsd.md) | ✓ | ✓ | RISC-V NetBSD [`riscv64gc-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | OpenBSD/riscv64 `s390x-unknown-linux-musl` | | | S390x Linux (kernel 3.2, MUSL) `sparc-unknown-linux-gnu` | ✓ | | 32-bit SPARC Linux @@ -328,6 +329,7 @@ target | std | host | notes `x86_64-unknown-haiku` | ✓ | ✓ | 64-bit Haiku `x86_64-unknown-hermit` | ✓ | | HermitCore `x86_64-unknown-l4re-uclibc` | ? | | +[`x86_64-unknown-linux-ohos`](platform-support/openharmony.md) | ✓ | | x86_64 OpenHarmony | [`x86_64-unknown-openbsd`](platform-support/openbsd.md) | ✓ | ✓ | 64-bit OpenBSD `x86_64-uwp-windows-gnu` | ✓ | | `x86_64-uwp-windows-msvc` | ✓ | | diff --git a/src/doc/rustc/src/platform-support/mips-release-6.md b/src/doc/rustc/src/platform-support/mips-release-6.md new file mode 100644 index 00000000000..3f1912fc6f9 --- /dev/null +++ b/src/doc/rustc/src/platform-support/mips-release-6.md @@ -0,0 +1,181 @@ +# mipsisa\*r6\*-unknown-linux-gnu\* + +**Tier: 3** + +[MIPS Release 6](https://s3-eu-west-1.amazonaws.com/downloads-mips/documents/MD00083-2B-MIPS64INT-AFP-06.01.pdf), or simply MIPS R6, is the latest iteration of the MIPS instruction set architecture (ISA). + +MIPS R6 is experimental in nature, as there is not yet real hardware. However, Qemu emulation is available and we have two Linux distros maintained for development and evaluation purposes. This documentation describes the Rust support for MIPS R6 targets under `mipsisa*r6*-unknown-linux-gnu*`. + +The target name follow this format: `<machine>-<vendor>-<os><abi_suffix>`, where `<machine>` specifies the CPU family/model, `<vendor>` specifies the vendor and `<os>` the operating system name. The `<abi_suffix>` denotes the base ABI (32/n32/64/o64). + +| ABI suffix | Description | +|------------|------------------------------------| +| abi64 | Uses the 64-bit (64) ABI | +| abin32 | Uses the n32 ABI | +| N/A | Uses the (assumed) 32-bit (32) ABI | + +## Target Maintainers + +- [Xuan Chen](https://github.com/chenx97) <henry.chen@oss.cipunited.com> +- [Walter Ji](https://github.com/709924470) <walter.ji@oss.cipunited.com> +- [Xinhui Yang](https://github.com/Cyanoxygen) <cyan@oss.cipunited.com> +- [Lain Yang](https://github.com/Fearyncess) <lain.yang@oss.cipunited.com> + +## Requirements + +### C/C++ Toolchain + +A GNU toolchain for one of the MIPS R6 target is required. [AOSC OS](https://aosc.io/) provides working native and cross-compiling build environments. You may also supply your own a toolchain consisting of recent versions of GCC and Binutils. + +### Target libraries + +A minimum set of libraries is required to perform dynamic linking: + +- GNU glibc +- OpenSSL +- Zlib +- Linux API Headers + +This set of libraries should be installed to make up minimal target sysroot. + +For AOSC OS, You may install such a sysroot with the following commands: + +```sh +cd /tmp + +# linux+api, glibc, and file system structure are included in the toolchain. +sudo apt install gcc+cross-mips64r6el binutils+cross-mips64r6el + +# Download and extract required libraries. +wget https://repo.aosc.io/debs/pool/stable/main/z/zlib_1.2.13-0_mips64r6el.deb -O zlib.deb +wget https://repo.aosc.io/debs/pool/stable/main/o/openssl_1.1.1q-1_mips64r6el.deb -O openssl.deb + +# Extract them to your desired location. +for i in zlib openssl ; do + sudo dpkg-deb -vx $i.deb /var/ab/cross-root/mips64r6el +done + +# Workaround a possible ld bug when using -Wl,-Bdynamic. 
+sudo sed -i 's|/usr|=/usr|g' /var/ab/cross-root/mips64r6el/usr/lib/libc.so +``` + +For other distros, you may build them manually. + +## Building + +The following procedure outlines the build process for the MIPS64 R6 target with 64-bit (64) ABI (`mipsisa64r6el-unknown-linux-gnuabi64`). + +### Prerequisite: Disable debuginfo + +A LLVM bug makes rustc crash if debug or debug info generation is enabled. You need to edit `config.toml` to disable this: + +```toml +[rust] +debug = false +debug-info-level = 0 +``` + +### Prerequisite: Enable rustix's libc backend + +The crate `rustix` may try to link itself against MIPS R2 assembly, resulting in linkage error. To avoid this, you may force `rustix` to use its fallback `libc` backend by setting relevant `RUSTFLAGS`: + +```sh +export RUSTFLAGS="--cfg rustix_use_libc" +``` + +This will trigger warnings during build, as `-D warnings` is enabled by default. Disable `-D warnings` by editing `config.toml` to append the following: + +```toml +[rust] +deny-warnings = false +``` + +### Prerequisite: Supplying OpenSSL + +As a Tier 3 target, `openssl_sys` lacks the vendored OpenSSL library for this target. You will need to provide a prebuilt OpenSSL library to link `cargo`. Since we have a pre-configured sysroot, we can point to it directly: + +```sh +export MIPSISA64R6EL_UNKNOWN_LINUX_GNUABI64_OPENSSL_NO_VENDOR=y +export MIPSISA64R6EL_UNKNOWN_LINUX_GNUABI64_OPENSSL_DIR="/var/ab/cross-root/mips64r6el/usr" +``` + +On Debian, you may need to provide library path and include path separately: + +```sh +export MIPSISA64R6EL_UNKNOWN_LINUX_GNUABI64_OPENSSL_NO_VENDOR=y +export MIPSISA64R6EL_UNKNOWN_LINUX_GNUABI64_OPENSSL_LIB_DIR="/usr/lib/mipsisa64r6el-linux-gnuabi64/" +export MIPSISA64R6EL_UNKNOWN_LINUX_GNUABI64_OPENSSL_INCLUDE_DIR="/usr/include" +``` + +### Launching `x.py` + +```toml +[build] +target = ["mipsisa64r6el-unknown-linux-gnuabi64"] +``` + +Make sure that `mipsisa64r6el-unknown-linux-gnuabi64-gcc` is available from your executable search path (`$PATH`). + +Alternatively, you can specify the directories to all necessary toolchain executables in `config.toml`: + +```toml +[target.mipsisa64r6el-unknown-linux-gnuabi64] +# Adjust the paths below to point to your toolchain installation prefix. +cc = "/toolchain_prefix/bin/mipsisa64r6el-unknown-linux-gnuabi64-gcc" +cxx = "/toolchain_prefix/bin/mipsisa64r6el-unknown-linux-gnuabi64-g++" +ar = "/toolchain_prefix/bin/mipsisa64r6el-unknown-linux-gnuabi64-gcc-ar" +ranlib = "/toolchain_prefix/bin/mipsisa64r6el-unknown-linux-gnuabi64-ranlib" +linker = "/toolchain_prefix/bin/mipsisa64r6el-unknown-linux-gnuabi64-gcc" +``` + +Or, you can specify your cross compiler toolchain with an environment variable: + +```sh +export CROSS_COMPILE="/opt/abcross/mips64r6el/bin/mipsisa64r6el-aosc-linux-gnuabi64-" +``` + +Finally, launch the build script: + +```sh +./x.py build +``` + +### Tips + +- Avoid setting `cargo-native-static` to `false`, as this will result in a redundant artifact error while building clippy: + ```text + duplicate artifacts found when compiling a tool, this typically means that something was recompiled because a transitive dependency has different features activated than in a previous build: + + the following dependencies have different features: + syn 2.0.8 (registry+https://github.com/rust-lang/crates.io-index) + `clippy-driver` additionally enabled features {"full"} at ... + `cargo` additionally enabled features {} at ... 
+ + to fix this you will probably want to edit the local src/tools/rustc-workspace-hack/Cargo.toml crate, as that will update the dependency graph to ensure that these crates all share the same feature set + thread 'main' panicked at 'tools should not compile multiple copies of the same crate', tool.rs:250:13 + note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace + ``` + +## Building Rust programs + +To build Rust programs for MIPS R6 targets, for instance, the `mipsisa64r6el-unknown-linux-gnuabi64` target: + +```bash +cargo build --target mipsisa64r6el-unknown-linux-gnuabi64 +``` + +## Testing + +To test a cross-compiled binary on your build system, install a Qemu user emulator that supports the MIPS R6 architecture (`qemu-user-mipsel` or `qemu-user-mips64el`). GCC runtime libraries (`libgcc_s`) for the target architecture should be present in the target sysroot to run the program. + +```sh +env \ + CARGO_TARGET_MIPSISA64R6EL_UNKNOWN_LINUX_GNUABI64_LINKER="/opt/abcross/mips64r6el/bin/mipsisa64r6el-aosc-linux-gnuabi64-gcc" \ + CARGO_TARGET_MIPSISA64R6EL_UNKNOWN_LINUX_GNUABI64_RUNNER="qemu-mips64el-static -L /var/ab/cross-root/mips64r6el" \ + cargo run --release \ + --target mipsisa64r6el-unknown-linux-gnuabi64 +``` + +## Tips for building Rust programs for MIPS R6 + +- Until we finalize a fix, please make sure the aforementioned workarounds for the `rustix` crate and LLVM are always applied. This can be achieved by setting the relevant environment variables and editing `Cargo.toml` before building. diff --git a/src/doc/rustc/src/platform-support/openharmony.md b/src/doc/rustc/src/platform-support/openharmony.md index a8dcc644346..89539f3888c 100644 --- a/src/doc/rustc/src/platform-support/openharmony.md +++ b/src/doc/rustc/src/platform-support/openharmony.md @@ -71,6 +71,28 @@ exec /path/to/ohos-sdk/linux/native/llvm/bin/clang++ \ "$@" ``` +`x86_64-unknown-linux-ohos-clang.sh` + +```sh +#!/bin/sh +exec /path/to/ohos-sdk/linux/native/llvm/bin/clang \ + -target x86_64-linux-ohos \ + --sysroot=/path/to/ohos-sdk/linux/native/sysroot \ + -D__MUSL__ \ + "$@" +``` + +`x86_64-unknown-linux-ohos-clang++.sh` + +```sh +#!/bin/sh +exec /path/to/ohos-sdk/linux/native/llvm/bin/clang++ \ + -target x86_64-linux-ohos \ + --sysroot=/path/to/ohos-sdk/linux/native/sysroot \ + -D__MUSL__ \ + "$@" +``` + Future versions of the OpenHarmony SDK will avoid the need for this process.
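Before pointing `config.toml` at these wrappers, it can be worth confirming they behave as intended. This is a minimal sketch, not part of the patch itself, and it assumes the placeholder wrapper paths shown above (adjust them to wherever the scripts actually live): make the scripts executable, then run an empty preprocessor pass and check that the wrapper's `-D__MUSL__` takes effect.

```sh
# Hypothetical paths matching the wrapper scripts above; adjust as needed.
chmod +x /path/to/x86_64-unknown-linux-ohos-clang.sh \
         /path/to/x86_64-unknown-linux-ohos-clang++.sh

# An empty preprocessor run dumps the predefined macros; the wrapper's
# -D__MUSL__ and -target x86_64-linux-ohos should both show up here.
/path/to/x86_64-unknown-linux-ohos-clang.sh -dM -E - </dev/null | grep __MUSL__
```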
## Building the target @@ -98,6 +120,13 @@ cxx = "/path/to/armv7-unknown-linux-ohos-clang++.sh" ar = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ar" ranlib = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ranlib" linker = "/path/to/armv7-unknown-linux-ohos-clang.sh" + +[target.x86_64-unknown-linux-ohos] +cc = "/path/to/x86_64-unknown-linux-ohos-clang.sh" +cxx = "/path/to/x86_64-unknown-linux-ohos-clang++.sh" +ar = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ar" +ranlib = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ranlib" +linker = "/path/to/x86_64-unknown-linux-ohos-clang.sh" ``` ## Building Rust programs @@ -116,6 +145,10 @@ linker = "/path/to/aarch64-unknown-linux-ohos-clang.sh" [target.armv7-unknown-linux-ohos] ar = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ar" linker = "/path/to/armv7-unknown-linux-ohos-clang.sh" + +[target.x86_64-unknown-linux-ohos] +ar = "/path/to/ohos-sdk/linux/native/llvm/bin/llvm-ar" +linker = "/path/to/x86_64-unknown-linux-ohos-clang.sh" ``` ## Testing diff --git a/src/doc/rustc/src/platform-support/sparc-unknown-none-elf.md b/src/doc/rustc/src/platform-support/sparc-unknown-none-elf.md new file mode 100644 index 00000000000..efd58e8302f --- /dev/null +++ b/src/doc/rustc/src/platform-support/sparc-unknown-none-elf.md @@ -0,0 +1,164 @@ +# `sparc-unknown-none-elf` + +**Tier: 3** + +Rust for bare-metal 32-bit SPARC V7 and V8 systems, e.g. the Gaisler LEON3. + +| Target | Descriptions | +| ---------------------- | ----------------------------------------- | +| sparc-unknown-none-elf | SPARC V7 32-bit (freestanding, hardfloat) | + +## Target maintainers + +- Jonathan Pallant, <jonathan.pallant@ferrous-systems.com>, https://ferrous-systems.com + +## Requirements + +This target is cross-compiled. There is no support for `std`. There is no +default allocator, but it's possible to use `alloc` by supplying an allocator. + +This allows the generated code to run in environments, such as kernels, which +may need to avoid the use of such registers or which may have special +considerations about the use of such registers (e.g. saving and restoring them +to avoid breaking userspace code using the same registers). You can change code +generation to use additional CPU features via the `-C target-feature=` codegen +options to rustc, or via the `#[target_feature]` mechanism within Rust code. + +By default, code generated with this target should run on any `SPARC` hardware; +enabling additional target features may raise this baseline. + +- `-Ctarget-cpu=v8` adds the extra SPARC V8 instructions. + +- `-Ctarget-cpu=leon3` adds the SPARC V8 instructions and sets up scheduling to + suit the Gaisler Leon3. + +Functions marked `extern "C"` use the [standard SPARC architecture calling +convention](https://sparc.org/technical-documents/). + +This target generates ELF binaries. Any alternate formats or special +considerations for binary layout will require linker options or linker scripts. + +## Building the target + +You can build Rust with support for the target by adding it to the `target` +list in `config.toml`: + +```toml +[build] +build-stage = 1 +target = ["sparc-unknown-none-elf"] +``` + +## Building Rust programs + +```text +cargo build --target sparc-unknown-none-elf +``` + +This target uses GCC as a linker, and so you will need an appropriate GCC +compatible `sparc-unknown-none` toolchain. + +The default linker name is `sparc-elf-gcc`, but you can override this in your +project configuration. 
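For a quick experiment, the linker can also be overridden per invocation through Cargo's target-specific environment variable rather than a config file. A minimal sketch, assuming a nightly toolchain and the Gaisler BCC2 `sparc-gaisler-elf-gcc` on `$PATH`; the equivalent `.cargo/config.toml` form appears under "Cross-compilation toolchains and C code" below:

```sh
# Tier 3 target: core must be built from source, hence -Zbuild-std on nightly.
CARGO_TARGET_SPARC_UNKNOWN_NONE_ELF_LINKER=sparc-gaisler-elf-gcc \
    cargo +nightly build -Zbuild-std=core --target sparc-unknown-none-elf
```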
+ +## Testing + +As `sparc-unknown-none-elf` supports a variety of different environments and does +not support `std`, this target does not support running the Rust test suite. + +## Cross-compilation toolchains and C code + +This target was initially tested using [BCC2] from Gaisler, along with the TSIM +Leon3 processor simulator. Both [BCC2] GCC and [BCC2] Clang have been shown to +work. To work with these tools, your project configuration should contain +something like: + +[BCC2]: https://www.gaisler.com/index.php/downloads/compilers + +`.cargo/config.toml`: +```toml +[target.sparc-unknown-none-elf] +linker = "sparc-gaisler-elf-gcc" +runner = "tsim-leon3" + +[build] +target = ["sparc-unknown-none-elf"] +rustflags = "-Ctarget-cpu=leon3" + +[unstable] +build-std = ["core"] +``` + +With this configuration, running `cargo run` will compile your code for the +SPARC V8 compatible Gaisler Leon3 processor and then start the `tsim-leon3` +simulator. Once the simulator is running, simply enter the command +`run` to start the code executing in the simulator. + +The default C toolchain libraries are linked in, so with the Gaisler [BCC2] +toolchain, and using its default Leon3 BSP, you can use call the C `putchar` +function and friends to output to the simulator console. + +Here's a complete example: + +```rust,ignore (cannot-test-this-because-it-assumes-special-libc-functions) +#![no_std] +#![no_main] + +extern "C" { + fn putchar(ch: i32); + fn _exit(code: i32) -> !; +} + +#[no_mangle] +extern "C" fn main() -> i32 { + let message = "Hello, this is Rust!"; + for b in message.bytes() { + unsafe { + putchar(b as i32); + } + } + 0 +} + +#[panic_handler] +fn panic(_panic: &core::panic::PanicInfo) -> ! { + unsafe { + _exit(1); + } +} +``` + +```console +$ cargo run --target=sparc-unknown-none-elf + Compiling sparc-demo-rust v0.1.0 (/work/sparc-demo-rust) + Finished dev [unoptimized + debuginfo] target(s) in 3.44s + Running `tsim-leon3 target/sparc-unknown-none-elf/debug/sparc-demo-rust` + + TSIM3 LEON3 SPARC simulator, version 3.1.9 (evaluation version) + + Copyright (C) 2023, Frontgrade Gaisler - all rights reserved. + This software may only be used with a valid license. + For latest updates, go to https://www.gaisler.com/ + Comments or bug-reports to support@gaisler.com + + This TSIM evaluation version will expire 2023-11-28 + +Number of CPUs: 2 +system frequency: 50.000 MHz +icache: 1 * 4 KiB, 16 bytes/line (4 KiB total) +dcache: 1 * 4 KiB, 16 bytes/line (4 KiB total) +Allocated 8192 KiB SRAM memory, in 1 bank at 0x40000000 +Allocated 32 MiB SDRAM memory, in 1 bank at 0x60000000 +Allocated 8192 KiB ROM memory at 0x00000000 +section: .text, addr: 0x40000000, size: 20528 bytes +section: .rodata, addr: 0x40005030, size: 128 bytes +section: .data, addr: 0x400050b0, size: 1176 bytes +read 347 symbols + +tsim> run + Initializing and starting from 0x40000000 +Hello, this is Rust! + + Program exited normally on CPU 0. +tsim> +``` diff --git a/src/doc/rustdoc/src/what-is-rustdoc.md b/src/doc/rustdoc/src/what-is-rustdoc.md index 7a444d77c09..7179ee0cf03 100644 --- a/src/doc/rustdoc/src/what-is-rustdoc.md +++ b/src/doc/rustdoc/src/what-is-rustdoc.md @@ -37,7 +37,7 @@ top, with no contents. ## Configuring rustdoc There are two problems with this: first, why does it -think that our package is named "lib"? Second, why does it not have any +think that our crate is named "lib"? Second, why does it not have any contents? 
The first problem is due to `rustdoc` trying to be helpful; like `rustc`, diff --git a/src/etc/natvis/liballoc.natvis b/src/etc/natvis/liballoc.natvis index c4ad98ec1d3..00c17d83322 100644 --- a/src/etc/natvis/liballoc.natvis +++ b/src/etc/natvis/liballoc.natvis @@ -66,7 +66,10 @@ dyn pointees. Rc<[T]> and Arc<[T]> are handled separately altogether so we can actually show - the slice values. + the slice values. These visualizers have a second wildcard `foo<slice2$<*>, *>` + which accounts for the allocator parameter. This isn't needed for the other visualizers since + their inner `*` eats the type parameter but since the slice ones match part of the type params + it is necessary for them. --> <!-- alloc::rc::Rc<T> --> <Type Name="alloc::rc::Rc<*>"> @@ -84,7 +87,7 @@ </Type> <!-- alloc::rc::Rc<[T]> --> - <Type Name="alloc::rc::Rc<slice2$<*> >"> + <Type Name="alloc::rc::Rc<slice2$<*>,*>"> <DisplayString>{{ len={ptr.pointer.length} }}</DisplayString> <Expand> <Item Name="[Length]" ExcludeView="simple">ptr.pointer.length</Item> @@ -114,7 +117,7 @@ </Type> <!-- alloc::rc::Weak<[T]> --> - <Type Name="alloc::rc::Weak<slice2$<*> >"> + <Type Name="alloc::rc::Weak<slice2$<*>,*>"> <DisplayString>{{ len={ptr.pointer.length} }}</DisplayString> <Expand> <Item Name="[Length]" ExcludeView="simple">ptr.pointer.length</Item> @@ -143,7 +146,7 @@ </Type> <!-- alloc::sync::Arc<[T]> --> - <Type Name="alloc::sync::Arc<slice2$<*> >"> + <Type Name="alloc::sync::Arc<slice2$<*>,*>"> <DisplayString>{{ len={ptr.pointer.length} }}</DisplayString> <Expand> <Item Name="[Length]" ExcludeView="simple">ptr.pointer.length</Item> @@ -172,7 +175,7 @@ </Type> <!-- alloc::sync::Weak<[T]> --> - <Type Name="alloc::sync::Weak<slice2$<*> >"> + <Type Name="alloc::sync::Weak<slice2$<*>,*>"> <DisplayString>{{ len={ptr.pointer.length} }}</DisplayString> <Expand> <Item Name="[Length]" ExcludeView="simple">ptr.pointer.length</Item> diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index a98d466cd81..5e2e2d24950 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -473,7 +473,7 @@ pub(crate) fn build_impl( associated_trait.def_id, ) .unwrap(); // corresponding associated item has to exist - !tcx.is_doc_hidden(trait_item.def_id) + document_hidden || !tcx.is_doc_hidden(trait_item.def_id) } else { item.visibility(tcx).is_public() } @@ -496,7 +496,7 @@ pub(crate) fn build_impl( let mut stack: Vec<&Type> = vec![&for_]; if let Some(did) = trait_.as_ref().map(|t| t.def_id()) { - if tcx.is_doc_hidden(did) { + if !document_hidden && tcx.is_doc_hidden(did) { return; } } @@ -505,7 +505,7 @@ pub(crate) fn build_impl( } while let Some(ty) = stack.pop() { - if let Some(did) = ty.def_id(&cx.cache) && tcx.is_doc_hidden(did) { + if let Some(did) = ty.def_id(&cx.cache) && !document_hidden && tcx.is_doc_hidden(did) { return; } if let Some(generics) = ty.generics() { diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index d62c4b707e1..2d00c53951f 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -53,7 +53,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext< let mut inserted = FxHashSet::default(); items.extend(doc.foreigns.iter().map(|(item, renamed)| { let item = clean_maybe_renamed_foreign_item(cx, item, *renamed); - if let Some(name) = item.name && !item.is_doc_hidden() { + if let Some(name) = item.name && (cx.render_options.document_hidden || !item.is_doc_hidden()) { inserted.insert((item.type_(), name)); } item @@ 
-63,7 +63,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext< return None; } let item = clean_doc_module(x, cx); - if item.is_doc_hidden() { + if !cx.render_options.document_hidden && item.is_doc_hidden() { // Hidden modules are stripped at a later stage. // If a hidden module has the same name as a visible one, we want // to keep both of them around. @@ -84,7 +84,7 @@ pub(crate) fn clean_doc_module<'tcx>(doc: &DocModule<'tcx>, cx: &mut DocContext< } let v = clean_maybe_renamed_item(cx, item, *renamed, *import_id); for item in &v { - if let Some(name) = item.name && !item.is_doc_hidden() { + if let Some(name) = item.name && (cx.render_options.document_hidden || !item.is_doc_hidden()) { inserted.insert((item.type_(), name)); } } @@ -441,7 +441,7 @@ fn clean_projection<'tcx>( let bounds = cx .tcx .explicit_item_bounds(ty.skip_binder().def_id) - .arg_iter_copied(cx.tcx, ty.skip_binder().args) + .iter_instantiated_copied(cx.tcx, ty.skip_binder().args) .map(|(pred, _)| pred) .collect::<Vec<_>>(); return clean_middle_opaque_bounds(cx, bounds); @@ -2071,7 +2071,7 @@ pub(crate) fn clean_middle_ty<'tcx>( let bounds = cx .tcx .explicit_item_bounds(def_id) - .arg_iter_copied(cx.tcx, args) + .iter_instantiated_copied(cx.tcx, args) .map(|(bound, _)| bound) .collect::<Vec<_>>(); let ty = clean_middle_opaque_bounds(cx, bounds); @@ -2326,7 +2326,7 @@ fn get_all_import_attributes<'hir>( attrs = import_attrs.iter().map(|attr| (Cow::Borrowed(attr), Some(def_id))).collect(); first = false; // We don't add attributes of an intermediate re-export if it has `#[doc(hidden)]`. - } else if !cx.tcx.is_doc_hidden(def_id) { + } else if cx.render_options.document_hidden || !cx.tcx.is_doc_hidden(def_id) { add_without_unwanted_attributes(&mut attrs, import_attrs, is_inline, Some(def_id)); } } diff --git a/src/librustdoc/clean/types/tests.rs b/src/librustdoc/clean/types/tests.rs index 394954208a4..4907a55270b 100644 --- a/src/librustdoc/clean/types/tests.rs +++ b/src/librustdoc/clean/types/tests.rs @@ -73,7 +73,7 @@ fn should_not_trim() { fn is_same_generic() { use crate::clean::types::{PrimitiveType, Type}; use crate::formats::cache::Cache; - let cache = Cache::new(false); + let cache = Cache::new(false, false); let generic = Type::Generic(rustc_span::symbol::sym::Any); let unit = Type::Primitive(PrimitiveType::Unit); assert!(!generic.is_doc_subtype_of(&unit, &cache)); diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 1bd40aea823..82a1fe310b3 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -345,7 +345,7 @@ pub(crate) fn run_global_ctxt( impl_trait_bounds: Default::default(), generated_synthetics: Default::default(), auto_traits, - cache: Cache::new(render_options.document_private), + cache: Cache::new(render_options.document_private, render_options.document_hidden), inlined: FxHashSet::default(), output_format, render_options, diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs index 6b48936cc59..d1deda0c716 100644 --- a/src/librustdoc/formats/cache.rs +++ b/src/librustdoc/formats/cache.rs @@ -86,6 +86,9 @@ pub(crate) struct Cache { /// Whether to document private items. /// This is stored in `Cache` so it doesn't need to be passed through all rustdoc functions. pub(crate) document_private: bool, + /// Whether to document hidden items. + /// This is stored in `Cache` so it doesn't need to be passed through all rustdoc functions. + pub(crate) document_hidden: bool, /// Crates marked with [`#[doc(masked)]`][doc_masked]. 
/// @@ -137,8 +140,8 @@ struct CacheBuilder<'a, 'tcx> { } impl Cache { - pub(crate) fn new(document_private: bool) -> Self { - Cache { document_private, ..Cache::default() } + pub(crate) fn new(document_private: bool, document_hidden: bool) -> Self { + Cache { document_private, document_hidden, ..Cache::default() } } /// Populates the `Cache` with more data. The returned `Crate` will be missing some data that was diff --git a/src/librustdoc/html/render/context.rs b/src/librustdoc/html/render/context.rs index d0a1e223eb8..037c88cb85d 100644 --- a/src/librustdoc/html/render/context.rs +++ b/src/librustdoc/html/render/context.rs @@ -349,12 +349,7 @@ impl<'tcx> Context<'tcx> { let e = ExternalCrate { crate_num: cnum }; (e.name(self.tcx()), e.src_root(self.tcx())) } - ExternalLocation::Unknown => { - let e = ExternalCrate { crate_num: cnum }; - let name = e.name(self.tcx()); - root = name.to_string(); - (name, e.src_root(self.tcx())) - } + ExternalLocation::Unknown => return None, }; let href = RefCell::new(PathBuf::new()); @@ -803,7 +798,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> { if let Some(def_id) = item.def_id() && self.cache().inlined_items.contains(&def_id) { self.is_inside_inlined_module = true; } - } else if item.is_doc_hidden() { + } else if !self.cache().document_hidden && item.is_doc_hidden() { // We're not inside an inlined module anymore since this one cannot be re-exported. self.is_inside_inlined_module = false; } diff --git a/src/librustdoc/html/render/mod.rs b/src/librustdoc/html/render/mod.rs index 8a1ce9fb135..575ce6aa99a 100644 --- a/src/librustdoc/html/render/mod.rs +++ b/src/librustdoc/html/render/mod.rs @@ -1821,9 +1821,9 @@ fn render_rightside( ); if let Some(l) = src_href { if has_stability { - write!(rightside, " · <a class=\"srclink\" href=\"{}\">source</a>", l) + write!(rightside, " · <a class=\"src\" href=\"{}\">source</a>", l) } else { - write!(rightside, "<a class=\"srclink rightside\" href=\"{}\">source</a>", l) + write!(rightside, "<a class=\"src rightside\" href=\"{}\">source</a>", l) } } if has_stability && has_src_ref { diff --git a/src/librustdoc/html/render/write_shared.rs b/src/librustdoc/html/render/write_shared.rs index 54749e9a317..3f41765a5af 100644 --- a/src/librustdoc/html/render/write_shared.rs +++ b/src/librustdoc/html/render/write_shared.rs @@ -270,7 +270,7 @@ pub(super) fn write_shared( hierarchy.add_path(source); } let hierarchy = Rc::try_unwrap(hierarchy).unwrap(); - let dst = cx.dst.join(&format!("source-files{}.js", cx.shared.resource_suffix)); + let dst = cx.dst.join(&format!("src-files{}.js", cx.shared.resource_suffix)); let make_sources = || { let (mut all_sources, _krates) = try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst); @@ -286,12 +286,12 @@ pub(super) fn write_shared( .replace("\\\"", "\\\\\"") )); all_sources.sort(); - let mut v = String::from("var sourcesIndex = JSON.parse('{\\\n"); + let mut v = String::from("var srcIndex = JSON.parse('{\\\n"); v.push_str(&all_sources.join(",\\\n")); - v.push_str("\\\n}');\ncreateSourceSidebar();\n"); + v.push_str("\\\n}');\ncreateSrcSidebar();\n"); Ok(v.into_bytes()) }; - write_invocation_specific("source-files.js", &make_sources)?; + write_invocation_specific("src-files.js", &make_sources)?; } // Update the search index and crate list. 
diff --git a/src/librustdoc/html/sources.rs b/src/librustdoc/html/sources.rs index a26fa374912..3e3fc8a0e72 100644 --- a/src/librustdoc/html/sources.rs +++ b/src/librustdoc/html/sources.rs @@ -227,7 +227,7 @@ impl SourceCollector<'_, '_> { let desc = format!("Source of the Rust file `{}`.", filename.prefer_remapped()); let page = layout::Page { title: &title, - css_class: "source", + css_class: "src", root_path: &root_path, static_root_path: shared.static_root_path.as_deref(), description: &desc, diff --git a/src/librustdoc/html/static/css/noscript.css b/src/librustdoc/html/static/css/noscript.css index 54e8b6561f3..93aa11a5852 100644 --- a/src/librustdoc/html/static/css/noscript.css +++ b/src/librustdoc/html/static/css/noscript.css @@ -19,7 +19,7 @@ nav.sub { display: none; } -.source .sidebar { +.src .sidebar { display: none; } diff --git a/src/librustdoc/html/static/css/rustdoc.css b/src/librustdoc/html/static/css/rustdoc.css index b7f455259ef..9209915895a 100644 --- a/src/librustdoc/html/static/css/rustdoc.css +++ b/src/librustdoc/html/static/css/rustdoc.css @@ -194,7 +194,7 @@ h1, h2, h3, h4, h5, h6, .item-name > a, .out-of-band, span.since, -a.srclink, +a.src, #help-button > a, summary.hideme, .scraped-example-list, @@ -206,7 +206,7 @@ ul.all-items { #toggle-all-docs, a.anchor, .small-section-header a, -#source-sidebar a, +#src-sidebar a, .rust a, .sidebar h2 a, .sidebar h3 a, @@ -315,7 +315,7 @@ main { min-width: 0; /* avoid growing beyond the size limit */ } -.source main { +.src main { padding: 15px; } @@ -350,10 +350,10 @@ pre.item-decl { contain: initial; } -.source .content pre { +.src .content pre { padding: 20px; } -.rustdoc.source .example-wrap pre.src-line-numbers { +.rustdoc.src .example-wrap pre.src-line-numbers { padding: 20px 0 20px 4px; } @@ -392,7 +392,7 @@ img { left: 0; } -.rustdoc.source .sidebar { +.rustdoc.src .sidebar { flex-basis: 50px; border-right: 1px solid; overflow-x: hidden; @@ -402,7 +402,7 @@ img { } .sidebar, .mobile-topbar, .sidebar-menu-toggle, -#src-sidebar-toggle, #source-sidebar { +#src-sidebar-toggle, #src-sidebar { background-color: var(--sidebar-background-color); } @@ -410,16 +410,16 @@ img { background-color: var(--sidebar-background-color-hover); } -.source .sidebar > *:not(#src-sidebar-toggle) { +.src .sidebar > *:not(#src-sidebar-toggle) { visibility: hidden; } -.source-sidebar-expanded .source .sidebar { +.src-sidebar-expanded .src .sidebar { overflow-y: auto; flex-basis: 300px; } -.source-sidebar-expanded .source .sidebar > *:not(#src-sidebar-toggle) { +.src-sidebar-expanded .src .sidebar > *:not(#src-sidebar-toggle) { visibility: visible; } @@ -544,7 +544,7 @@ ul.block, .block li { flex-grow: 1; } -.rustdoc:not(.source) .example-wrap pre { +.rustdoc:not(.src) .example-wrap pre { overflow: auto hidden; } @@ -619,7 +619,7 @@ ul.block, .block li { } .docblock code, .docblock-short code, -pre, .rustdoc.source .example-wrap { +pre, .rustdoc.src .example-wrap { background-color: var(--code-block-background-color); } @@ -676,7 +676,7 @@ nav.sub { height: 34px; flex-grow: 1; } -.source nav.sub { +.src nav.sub { margin: 0 0 15px 0; } @@ -1074,7 +1074,7 @@ pre.rust .doccomment { color: var(--code-highlight-doc-comment-color); } -.rustdoc.source .example-wrap pre.rust a { +.rustdoc.src .example-wrap pre.rust a { background: var(--codeblock-link-background); } @@ -1301,22 +1301,22 @@ a.tooltip:hover::after { align-items: stretch; z-index: 10; } -#source-sidebar { +#src-sidebar { width: 100%; overflow: auto; } -#source-sidebar > .title { 
+#src-sidebar > .title { font-size: 1.5rem; text-align: center; border-bottom: 1px solid var(--border-color); margin-bottom: 6px; } -#source-sidebar div.files > a:hover, details.dir-entry summary:hover, -#source-sidebar div.files > a:focus, details.dir-entry summary:focus { - background-color: var(--source-sidebar-background-hover); +#src-sidebar div.files > a:hover, details.dir-entry summary:hover, +#src-sidebar div.files > a:focus, details.dir-entry summary:focus { + background-color: var(--src-sidebar-background-hover); } -#source-sidebar div.files > a.selected { - background-color: var(--source-sidebar-background-selected); +#src-sidebar div.files > a.selected { + background-color: var(--src-sidebar-background-selected); } #src-sidebar-toggle > button { font-size: inherit; @@ -1562,7 +1562,7 @@ However, it's not needed with smaller screen width because the doc/code block is /* WARNING: RUSTDOC_MOBILE_BREAKPOINT MEDIA QUERY If you update this line, then you also need to update the line with the same warning -in source-script.js +in src-script.js */ @media (max-width: 700px) { /* When linking to an item with an `id` (for instance, by clicking a link in the sidebar, @@ -1619,8 +1619,8 @@ in source-script.js /* The source view uses a different design for the sidebar toggle, and doesn't have a topbar, so don't bump down the main content or the sidebar. */ - .source main, - .rustdoc.source .sidebar { + .src main, + .rustdoc.src .sidebar { top: 0; padding: 0; height: 100vh; @@ -1628,8 +1628,8 @@ in source-script.js } .sidebar.shown, - .source-sidebar-expanded .source .sidebar, - .rustdoc:not(.source) .sidebar:focus-within { + .src-sidebar-expanded .src .sidebar, + .rustdoc:not(.src) .sidebar:focus-within { left: 0; } @@ -1709,7 +1709,7 @@ in source-script.js border-left: 0; } - .source-sidebar-expanded #src-sidebar-toggle { + .src-sidebar-expanded #src-sidebar-toggle { left: unset; top: unset; width: unset; @@ -1749,7 +1749,7 @@ in source-script.js display: inline; } - .source-sidebar-expanded .source .sidebar { + .src-sidebar-expanded .src .sidebar { max-width: 100vw; width: 100vw; } @@ -1769,7 +1769,7 @@ in source-script.js margin-left: 34px; } - .source nav.sub { + .src nav.sub { margin: 0; padding: var(--nav-sub-mobile-padding); } @@ -1792,7 +1792,7 @@ in source-script.js } @media print { - nav.sidebar, nav.sub, .out-of-band, a.srclink, #copy-path, + nav.sidebar, nav.sub, .out-of-band, a.src, #copy-path, details.toggle[open] > summary::before, details.toggle > summary::before, details.toggle.top-doc > summary { display: none; diff --git a/src/librustdoc/html/static/css/themes/ayu.css b/src/librustdoc/html/static/css/themes/ayu.css index 7145baad256..d8dae51eb1b 100644 --- a/src/librustdoc/html/static/css/themes/ayu.css +++ b/src/librustdoc/html/static/css/themes/ayu.css @@ -89,8 +89,8 @@ Original by Dempfi (https://github.com/dempfi/ayu) --crate-search-div-hover-filter: invert(98%) sepia(12%) saturate(81%) hue-rotate(343deg) brightness(113%) contrast(76%); --crate-search-hover-border: #e0e0e0; - --source-sidebar-background-selected: #14191f; - --source-sidebar-background-hover: #14191f; + --src-sidebar-background-selected: #14191f; + --src-sidebar-background-hover: #14191f; --table-alt-row-background-color: #191f26; --codeblock-link-background: #333; --scrape-example-toggle-line-background: #999; @@ -107,7 +107,7 @@ Original by Dempfi (https://github.com/dempfi/ayu) h1, h2, h3, h4, h1 a, .sidebar h2 a, .sidebar h3 a, -#source-sidebar > .title { +#src-sidebar > .title { color: #fff; } h4 { 
@@ -124,15 +124,15 @@ h4 { .docblock pre > code, pre, pre > code, .item-info code, -.rustdoc.source .example-wrap { +.rustdoc.src .example-wrap { color: #e6e1cf; } .sidebar .current, .sidebar a:hover, -#source-sidebar div.files > a:hover, details.dir-entry summary:hover, -#source-sidebar div.files > a:focus, details.dir-entry summary:focus, -#source-sidebar div.files > a.selected { +#src-sidebar div.files > a:hover, details.dir-entry summary:hover, +#src-sidebar div.files > a:focus, details.dir-entry summary:focus, +#src-sidebar div.files > a.selected { color: #ffb44c; } diff --git a/src/librustdoc/html/static/css/themes/dark.css b/src/librustdoc/html/static/css/themes/dark.css index 3c1186a5649..b653f61d536 100644 --- a/src/librustdoc/html/static/css/themes/dark.css +++ b/src/librustdoc/html/static/css/themes/dark.css @@ -84,8 +84,8 @@ --crate-search-div-hover-filter: invert(69%) sepia(60%) saturate(6613%) hue-rotate(184deg) brightness(100%) contrast(91%); --crate-search-hover-border: #2196f3; - --source-sidebar-background-selected: #333; - --source-sidebar-background-hover: #444; + --src-sidebar-background-selected: #333; + --src-sidebar-background-hover: #444; --table-alt-row-background-color: #2A2A2A; --codeblock-link-background: #333; --scrape-example-toggle-line-background: #999; diff --git a/src/librustdoc/html/static/css/themes/light.css b/src/librustdoc/html/static/css/themes/light.css index f8c287137de..6be25fc0544 100644 --- a/src/librustdoc/html/static/css/themes/light.css +++ b/src/librustdoc/html/static/css/themes/light.css @@ -81,8 +81,8 @@ --crate-search-div-hover-filter: invert(44%) sepia(18%) saturate(23%) hue-rotate(317deg) brightness(96%) contrast(93%); --crate-search-hover-border: #717171; - --source-sidebar-background-selected: #fff; - --source-sidebar-background-hover: #e0e0e0; + --src-sidebar-background-selected: #fff; + --src-sidebar-background-hover: #e0e0e0; --table-alt-row-background-color: #F5F5F5; --codeblock-link-background: #eee; --scrape-example-toggle-line-background: #ccc; diff --git a/src/librustdoc/html/static/js/source-script.js b/src/librustdoc/html/static/js/src-script.js index 6eb99136040..679c2341f02 100644 --- a/src/librustdoc/html/static/js/source-script.js +++ b/src/librustdoc/html/static/js/src-script.js @@ -1,5 +1,5 @@ // From rust: -/* global sourcesIndex */ +/* global srcIndex */ // Local js definitions: /* global addClass, getCurrentValue, onEachLazy, removeClass, browserSupportsHistoryApi */ @@ -74,11 +74,11 @@ function createDirEntry(elem, parent, fullPath, hasFoundFile) { function toggleSidebar() { const child = this.parentNode.children[0]; if (child.innerText === ">") { - addClass(document.documentElement, "source-sidebar-expanded"); + addClass(document.documentElement, "src-sidebar-expanded"); child.innerText = "<"; updateLocalStorage("source-sidebar-show", "true"); } else { - removeClass(document.documentElement, "source-sidebar-expanded"); + removeClass(document.documentElement, "src-sidebar-expanded"); child.innerText = ">"; updateLocalStorage("source-sidebar-show", "false"); } @@ -101,16 +101,16 @@ function createSidebarToggle() { return sidebarToggle; } -// This function is called from "source-files.js", generated in `html/render/write_shared.rs`. +// This function is called from "src-files.js", generated in `html/render/write_shared.rs`. 
// eslint-disable-next-line no-unused-vars -function createSourceSidebar() { +function createSrcSidebar() { const container = document.querySelector("nav.sidebar"); const sidebarToggle = createSidebarToggle(); container.insertBefore(sidebarToggle, container.firstChild); const sidebar = document.createElement("div"); - sidebar.id = "source-sidebar"; + sidebar.id = "src-sidebar"; let hasFoundFile = false; @@ -118,9 +118,9 @@ function createSourceSidebar() { title.className = "title"; title.innerText = "Files"; sidebar.appendChild(title); - Object.keys(sourcesIndex).forEach(key => { - sourcesIndex[key][NAME_OFFSET] = key; - hasFoundFile = createDirEntry(sourcesIndex[key], sidebar, "", hasFoundFile); + Object.keys(srcIndex).forEach(key => { + srcIndex[key][NAME_OFFSET] = key; + hasFoundFile = createDirEntry(srcIndex[key], sidebar, "", hasFoundFile); }); container.appendChild(sidebar); @@ -133,7 +133,7 @@ function createSourceSidebar() { const lineNumbersRegex = /^#?(\d+)(?:-(\d+))?$/; -function highlightSourceLines(match) { +function highlightSrcLines(match) { if (typeof match === "undefined") { match = window.location.hash.match(lineNumbersRegex); } @@ -172,7 +172,7 @@ function highlightSourceLines(match) { } } -const handleSourceHighlight = (function() { +const handleSrcHighlight = (function() { let prev_line_id = 0; const set_fragment = name => { @@ -180,7 +180,7 @@ const handleSourceHighlight = (function() { y = window.scrollY; if (browserSupportsHistoryApi()) { history.replaceState(null, null, "#" + name); - highlightSourceLines(); + highlightSrcLines(); } else { location.replace("#" + name); } @@ -221,15 +221,15 @@ const handleSourceHighlight = (function() { window.addEventListener("hashchange", () => { const match = window.location.hash.match(lineNumbersRegex); if (match) { - return highlightSourceLines(match); + return highlightSrcLines(match); } }); onEachLazy(document.getElementsByClassName("src-line-numbers"), el => { - el.addEventListener("click", handleSourceHighlight); + el.addEventListener("click", handleSrcHighlight); }); -highlightSourceLines(); +highlightSrcLines(); -window.createSourceSidebar = createSourceSidebar; +window.createSrcSidebar = createSrcSidebar; })(); diff --git a/src/librustdoc/html/static/js/storage.js b/src/librustdoc/html/static/js/storage.js index 71961f6f2a9..af3ca42a6c0 100644 --- a/src/librustdoc/html/static/js/storage.js +++ b/src/librustdoc/html/static/js/storage.js @@ -185,7 +185,7 @@ updateTheme(); if (getSettingValue("source-sidebar-show") === "true") { // At this point in page load, `document.body` is not available yet. // Set a class on the `<html>` element instead. - addClass(document.documentElement, "source-sidebar-expanded"); + addClass(document.documentElement, "src-sidebar-expanded"); } // If we navigate away (for example to a settings page), and then use the back or diff --git a/src/librustdoc/html/static_files.rs b/src/librustdoc/html/static_files.rs index 767b974cc91..5d2e4b073c1 100644 --- a/src/librustdoc/html/static_files.rs +++ b/src/librustdoc/html/static_files.rs @@ -97,7 +97,7 @@ static_files! 
{ main_js => "static/js/main.js", search_js => "static/js/search.js", settings_js => "static/js/settings.js", - source_script_js => "static/js/source-script.js", + src_script_js => "static/js/src-script.js", storage_js => "static/js/storage.js", scrape_examples_js => "static/js/scrape-examples.js", wheel_svg => "static/images/wheel.svg", diff --git a/src/librustdoc/html/templates/page.html b/src/librustdoc/html/templates/page.html index d4ec9c34b6f..60ccfe4da44 100644 --- a/src/librustdoc/html/templates/page.html +++ b/src/librustdoc/html/templates/page.html @@ -42,9 +42,9 @@ <script src="{{static_root_path|safe}}{{files.storage_js}}"></script> {# #} {% if page.css_class.contains("crate") %} <script defer src="{{page.root_path|safe}}crates{{page.resource_suffix}}.js"></script> {# #} - {% else if page.css_class == "source" %} - <script defer src="{{static_root_path|safe}}{{files.source_script_js}}"></script> {# #} - <script defer src="{{page.root_path|safe}}source-files{{page.resource_suffix}}.js"></script> {# #} + {% else if page.css_class == "src" %} + <script defer src="{{static_root_path|safe}}{{files.src_script_js}}"></script> {# #} + <script defer src="{{page.root_path|safe}}src-files{{page.resource_suffix}}.js"></script> {# #} {% else if !page.css_class.contains("mod") %} <script defer src="sidebar-items{{page.resource_suffix}}.js"></script> {# #} {% endif %} @@ -85,7 +85,7 @@ </div> {# #} <![endif]--> {# #} {{ layout.external_html.before_content|safe }} - {% if page.css_class != "source" %} + {% if page.css_class != "src" %} <nav class="mobile-topbar"> {# #} <button class="sidebar-menu-toggle">☰</button> {# #} <a class="logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {# #} @@ -99,7 +99,7 @@ </nav> {# #} {% endif %} <nav class="sidebar"> {# #} - {% if page.css_class != "source" %} + {% if page.css_class != "src" %} <a class="logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {# #} {% if !layout.logo.is_empty() %} <img src="{{layout.logo}}" alt="logo"> {# #} @@ -111,9 +111,9 @@ {{ sidebar|safe }} </nav> {# #} <main> {# #} - {% if page.css_class != "source" %}<div class="width-limiter">{% endif %} + {% if page.css_class != "src" %}<div class="width-limiter">{% endif %} <nav class="sub"> {# #} - {% if page.css_class == "source" %} + {% if page.css_class == "src" %} <a class="sub-logo-container" href="{{page.root_path|safe}}{{krate_with_trailing_slash|safe}}index.html"> {# #} {% if !layout.logo.is_empty() %} <img src="{{layout.logo}}" alt="logo"> {# #} @@ -144,7 +144,7 @@ </form> {# #} </nav> {# #} <section id="main-content" class="content">{{ content|safe }}</section> {# #} - {% if page.css_class != "source" %}</div>{% endif %} + {% if page.css_class != "src" %}</div>{% endif %} </main> {# #} {{ layout.external_html.after_content|safe }} </body> {# #} diff --git a/src/librustdoc/html/templates/print_item.html b/src/librustdoc/html/templates/print_item.html index 68a295ae095..1d215c26968 100644 --- a/src/librustdoc/html/templates/print_item.html +++ b/src/librustdoc/html/templates/print_item.html @@ -18,7 +18,7 @@ {% endif %} {% match src_href %} {% when Some with (href) %} - <a class="srclink" href="{{href|safe}}">source</a> · {#+ #} + <a class="src" href="{{href|safe}}">source</a> · {#+ #} {% else %} {% endmatch %} <button id="toggle-all-docs" title="collapse all docs"> {# #} diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index f28deae791a..36d087a7d5b 100644 --- a/src/librustdoc/lib.rs +++ 
b/src/librustdoc/lib.rs @@ -744,9 +744,6 @@ fn main_args(handler: &mut EarlyErrorHandler, at_args: &[String]) -> MainResult } }; - // Set parallel mode before error handler creation, which will create `Lock`s. - interface::set_thread_safe_mode(&options.unstable_opts); - let diag = core::new_handler( options.error_format, None, diff --git a/src/librustdoc/passes/check_doc_test_visibility.rs b/src/librustdoc/passes/check_doc_test_visibility.rs index 1aa12e3ced2..011ca9a4961 100644 --- a/src/librustdoc/passes/check_doc_test_visibility.rs +++ b/src/librustdoc/passes/check_doc_test_visibility.rs @@ -92,8 +92,8 @@ pub(crate) fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) - return false; } - if cx.tcx.is_doc_hidden(def_id.to_def_id()) - || inherits_doc_hidden(cx.tcx, def_id, None) + if (!cx.render_options.document_hidden + && (cx.tcx.is_doc_hidden(def_id.to_def_id()) || inherits_doc_hidden(cx.tcx, def_id, None))) || cx.tcx.def_span(def_id.to_def_id()).in_derive_expansion() { return false; diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs index e2e38d3e79f..7b990cd4348 100644 --- a/src/librustdoc/passes/strip_hidden.rs +++ b/src/librustdoc/passes/strip_hidden.rs @@ -42,6 +42,7 @@ pub(crate) fn strip_hidden(krate: clean::Crate, cx: &mut DocContext<'_>) -> clea cache: &cx.cache, is_json_output, document_private: cx.render_options.document_private, + document_hidden: cx.render_options.document_hidden, }; stripper.fold_crate(krate) } diff --git a/src/librustdoc/passes/strip_priv_imports.rs b/src/librustdoc/passes/strip_priv_imports.rs index 4c992e94833..468712ba3d0 100644 --- a/src/librustdoc/passes/strip_priv_imports.rs +++ b/src/librustdoc/passes/strip_priv_imports.rs @@ -13,5 +13,10 @@ pub(crate) const STRIP_PRIV_IMPORTS: Pass = Pass { pub(crate) fn strip_priv_imports(krate: clean::Crate, cx: &mut DocContext<'_>) -> clean::Crate { let is_json_output = cx.output_format.is_json() && !cx.show_coverage; - ImportStripper { tcx: cx.tcx, is_json_output }.fold_crate(krate) + ImportStripper { + tcx: cx.tcx, + is_json_output, + document_hidden: cx.render_options.document_hidden, + } + .fold_crate(krate) } diff --git a/src/librustdoc/passes/strip_private.rs b/src/librustdoc/passes/strip_private.rs index bb6dccb7c94..3b6f484fde6 100644 --- a/src/librustdoc/passes/strip_private.rs +++ b/src/librustdoc/passes/strip_private.rs @@ -28,8 +28,12 @@ pub(crate) fn strip_private(mut krate: clean::Crate, cx: &mut DocContext<'_>) -> is_json_output, tcx: cx.tcx, }; - krate = - ImportStripper { tcx: cx.tcx, is_json_output }.fold_crate(stripper.fold_crate(krate)); + krate = ImportStripper { + tcx: cx.tcx, + is_json_output, + document_hidden: cx.render_options.document_hidden, + } + .fold_crate(stripper.fold_crate(krate)); } // strip all impls referencing private items @@ -39,6 +43,7 @@ pub(crate) fn strip_private(mut krate: clean::Crate, cx: &mut DocContext<'_>) -> cache: &cx.cache, is_json_output, document_private: cx.render_options.document_private, + document_hidden: cx.render_options.document_hidden, }; stripper.fold_crate(krate) } diff --git a/src/librustdoc/passes/stripper.rs b/src/librustdoc/passes/stripper.rs index 90c361d9d28..a6d31534f1d 100644 --- a/src/librustdoc/passes/stripper.rs +++ b/src/librustdoc/passes/stripper.rs @@ -6,6 +6,7 @@ use std::mem; use crate::clean::{self, Item, ItemId, ItemIdSet}; use crate::fold::{strip_item, DocFolder}; use crate::formats::cache::Cache; +use crate::visit_ast::inherits_doc_hidden; use 
crate::visit_lib::RustdocEffectiveVisibilities; pub(crate) struct Stripper<'a, 'tcx> { @@ -151,6 +152,7 @@ pub(crate) struct ImplStripper<'a, 'tcx> { pub(crate) cache: &'a Cache, pub(crate) is_json_output: bool, pub(crate) document_private: bool, + pub(crate) document_hidden: bool, } impl<'a> ImplStripper<'a, '_> { @@ -162,7 +164,13 @@ impl<'a> ImplStripper<'a, '_> { // If the "for" item is exported and the impl block isn't `#[doc(hidden)]`, then we // need to keep it. self.cache.effective_visibilities.is_exported(self.tcx, for_def_id) - && !item.is_doc_hidden() + && (self.document_hidden + || ((!item.is_doc_hidden() + && for_def_id + .as_local() + .map(|def_id| !inherits_doc_hidden(self.tcx, def_id, None)) + .unwrap_or(true)) + || self.cache.inlined_items.contains(&for_def_id))) } else { false } @@ -231,6 +239,7 @@ impl<'a> DocFolder for ImplStripper<'a, '_> { pub(crate) struct ImportStripper<'tcx> { pub(crate) tcx: TyCtxt<'tcx>, pub(crate) is_json_output: bool, + pub(crate) document_hidden: bool, } impl<'tcx> ImportStripper<'tcx> { @@ -247,8 +256,12 @@ impl<'tcx> ImportStripper<'tcx> { impl<'tcx> DocFolder for ImportStripper<'tcx> { fn fold_item(&mut self, i: Item) -> Option<Item> { match *i.kind { - clean::ImportItem(imp) if self.import_should_be_hidden(&i, &imp) => None, - clean::ImportItem(_) if i.is_doc_hidden() => None, + clean::ImportItem(imp) + if !self.document_hidden && self.import_should_be_hidden(&i, &imp) => + { + None + } + // clean::ImportItem(_) if !self.document_hidden && i.is_doc_hidden() => None, clean::ExternCrateItem { .. } | clean::ImportItem(..) if i.visibility(self.tcx) != Some(Visibility::Public) => { diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index fcf591a9328..265123ddf6c 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -262,10 +262,11 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { return false; }; + let document_hidden = self.cx.render_options.document_hidden; let use_attrs = tcx.hir().attrs(tcx.hir().local_def_id_to_hir_id(def_id)); // Don't inline `doc(hidden)` imports so they can be stripped at a later stage. let is_no_inline = use_attrs.lists(sym::doc).has_word(sym::no_inline) - || use_attrs.lists(sym::doc).has_word(sym::hidden); + || (document_hidden && use_attrs.lists(sym::doc).has_word(sym::hidden)); if is_no_inline { return false; @@ -285,11 +286,11 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { }; let is_private = !self.cx.cache.effective_visibilities.is_directly_public(tcx, ori_res_did); - let is_hidden = tcx.is_doc_hidden(ori_res_did); + let is_hidden = !document_hidden && tcx.is_doc_hidden(ori_res_did); let item = tcx.hir().get_by_def_id(res_did); if !please_inline { - let inherits_hidden = inherits_doc_hidden(tcx, res_did, None); + let inherits_hidden = !document_hidden && inherits_doc_hidden(tcx, res_did, None); // Only inline if requested or if the item would otherwise be stripped. 
if (!is_private && !inherits_hidden) || ( is_hidden && @@ -359,6 +360,9 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { import_def_id: LocalDefId, target_def_id: LocalDefId, ) -> bool { + if self.cx.render_options.document_hidden { + return true; + } let tcx = self.cx.tcx; let item_def_id = reexport_chain(tcx, import_def_id, target_def_id) .iter() diff --git a/src/librustdoc/visit_lib.rs b/src/librustdoc/visit_lib.rs index fd4f9254107..82c97466111 100644 --- a/src/librustdoc/visit_lib.rs +++ b/src/librustdoc/visit_lib.rs @@ -33,6 +33,7 @@ pub(crate) fn lib_embargo_visit_item(cx: &mut DocContext<'_>, def_id: DefId) { tcx: cx.tcx, extern_public: &mut cx.cache.effective_visibilities.extern_public, visited_mods: Default::default(), + document_hidden: cx.render_options.document_hidden, } .visit_item(def_id) } @@ -45,6 +46,7 @@ struct LibEmbargoVisitor<'a, 'tcx> { extern_public: &'a mut DefIdSet, // Keeps track of already visited modules, in case a module re-exports its parent visited_mods: DefIdSet, + document_hidden: bool, } impl LibEmbargoVisitor<'_, '_> { @@ -63,7 +65,7 @@ impl LibEmbargoVisitor<'_, '_> { } fn visit_item(&mut self, def_id: DefId) { - if !self.tcx.is_doc_hidden(def_id) { + if self.document_hidden || !self.tcx.is_doc_hidden(def_id) { self.extern_public.insert(def_id); if self.tcx.def_kind(def_id) == DefKind::Mod { self.visit_mod(def_id); diff --git a/src/tools/build-manifest/src/main.rs b/src/tools/build-manifest/src/main.rs index 8b28c68e04f..14618f89aed 100644 --- a/src/tools/build-manifest/src/main.rs +++ b/src/tools/build-manifest/src/main.rs @@ -127,6 +127,7 @@ static TARGETS: &[&str] = &[ "s390x-unknown-linux-gnu", "sparc64-unknown-linux-gnu", "sparcv9-sun-solaris", + "sparc-unknown-none-elf", "thumbv6m-none-eabi", "thumbv7em-none-eabi", "thumbv7em-none-eabihf", diff --git a/src/tools/build_helper/src/ci.rs b/src/tools/build_helper/src/ci.rs index 893195b69c2..b70ea5ec113 100644 --- a/src/tools/build_helper/src/ci.rs +++ b/src/tools/build_helper/src/ci.rs @@ -36,15 +36,25 @@ impl CiEnv { } pub mod gha { + use std::sync::atomic::{AtomicBool, Ordering}; + + static GROUP_ACTIVE: AtomicBool = AtomicBool::new(false); + /// All github actions log messages from this call to the Drop of the return value /// will be grouped and hidden by default in logs. Note that nesting these does /// not really work. + #[track_caller] pub fn group(name: impl std::fmt::Display) -> Group { if std::env::var_os("GITHUB_ACTIONS").is_some() { eprintln!("::group::{name}"); } else { eprintln!("{name}") } + // https://github.com/actions/toolkit/issues/1001 + assert!( + !GROUP_ACTIVE.swap(true, Ordering::Relaxed), + "nested groups are not supported by GHA!" + ); Group(()) } @@ -57,6 +67,10 @@ pub mod gha { if std::env::var_os("GITHUB_ACTIONS").is_some() { eprintln!("::endgroup::"); } + assert!( + GROUP_ACTIVE.swap(false, Ordering::Relaxed), + "group dropped but no group active!" + ); } } } diff --git a/src/tools/build_helper/src/util.rs b/src/tools/build_helper/src/util.rs index 11b8a228b8a..5801a8648f2 100644 --- a/src/tools/build_helper/src/util.rs +++ b/src/tools/build_helper/src/util.rs @@ -1,10 +1,12 @@ use std::process::Command; /// Invokes `build_helper::util::detail_exit` with `cfg!(test)` +/// +/// This is a macro instead of a function so that it uses `cfg(test)` in the *calling* crate, not in build helper. #[macro_export] -macro_rules! detail_exit_macro { +macro_rules! 
exit { ($code:expr) => { - build_helper::util::detail_exit($code, cfg!(test)); + $crate::util::detail_exit($code, cfg!(test)); }; } diff --git a/src/tools/clippy/.github/workflows/clippy.yml b/src/tools/clippy/.github/workflows/clippy.yml index c582c28cd3c..410ff53a251 100644 --- a/src/tools/clippy/.github/workflows/clippy.yml +++ b/src/tools/clippy/.github/workflows/clippy.yml @@ -50,7 +50,7 @@ jobs: echo "LD_LIBRARY_PATH=${SYSROOT}/lib${LD_LIBRARY_PATH+:${LD_LIBRARY_PATH}}" >> $GITHUB_ENV - name: Build - run: cargo build --features deny-warnings,internal + run: cargo build --tests --features deny-warnings,internal - name: Test run: cargo test --features deny-warnings,internal diff --git a/src/tools/clippy/.github/workflows/clippy_bors.yml b/src/tools/clippy/.github/workflows/clippy_bors.yml index d5ab313ba0e..4eb11a3ac85 100644 --- a/src/tools/clippy/.github/workflows/clippy_bors.yml +++ b/src/tools/clippy/.github/workflows/clippy_bors.yml @@ -106,7 +106,7 @@ jobs: echo "$SYSROOT/bin" >> $GITHUB_PATH - name: Build - run: cargo build --features deny-warnings,internal + run: cargo build --tests --features deny-warnings,internal - name: Test if: runner.os == 'Linux' diff --git a/src/tools/clippy/CHANGELOG.md b/src/tools/clippy/CHANGELOG.md index 14d822083d8..b3b6e3b865f 100644 --- a/src/tools/clippy/CHANGELOG.md +++ b/src/tools/clippy/CHANGELOG.md @@ -6,13 +6,74 @@ document. ## Unreleased / Beta / In Rust Nightly -[83e42a23...master](https://github.com/rust-lang/rust-clippy/compare/83e42a23...master) +[435a8ad8...master](https://github.com/rust-lang/rust-clippy/compare/435a8ad8...master) + +## Rust 1.71 + +Current stable, released 2023-07-13 + +<!-- FIXME: Remove the request for feedback, with the next changelog --> + +We're trying out a new shorter changelog format, that only contains significant changes. +You can check out the list of merged pull requests for a list of all changes. +If you have any feedback related to the new format, please share it in +[#10847](https://github.com/rust-lang/rust-clippy/issues/10847) + +[View all 78 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-04-11T20%3A05%3A26Z..2023-05-20T13%3A48%3A17Z+base%3Amaster) + +### New Lints + +* [`non_minimal_cfg`] + [#10763](https://github.com/rust-lang/rust-clippy/pull/10763) +* [`manual_next_back`] + [#10769](https://github.com/rust-lang/rust-clippy/pull/10769) +* [`ref_patterns`] + [#10736](https://github.com/rust-lang/rust-clippy/pull/10736) +* [`default_constructed_unit_structs`] + [#10716](https://github.com/rust-lang/rust-clippy/pull/10716) +* [`manual_while_let_some`] + [#10647](https://github.com/rust-lang/rust-clippy/pull/10647) +* [`needless_bool_assign`] + [#10432](https://github.com/rust-lang/rust-clippy/pull/10432) +* [`items_after_test_module`] + [#10578](https://github.com/rust-lang/rust-clippy/pull/10578) + +### Moves and Deprecations + +* Rename `integer_arithmetic` to `arithmetic_side_effects` + [#10674](https://github.com/rust-lang/rust-clippy/pull/10674) +* Moved [`redundant_clone`] to `nursery` (Now allow-by-default) + [#10873](https://github.com/rust-lang/rust-clippy/pull/10873) + +### Enhancements + +* [`invalid_regex`]: Now supports the new syntax introduced after regex v1.8.0 + [#10682](https://github.com/rust-lang/rust-clippy/pull/10682) +* [`semicolon_outside_block`]: Added [`semicolon-outside-block-ignore-multiline`] as a new config value. 
+ [#10656](https://github.com/rust-lang/rust-clippy/pull/10656) +* [`semicolon_inside_block`]: Added [`semicolon-inside-block-ignore-singleline`] as a new config value. + [#10656](https://github.com/rust-lang/rust-clippy/pull/10656) +* [`unnecessary_box_returns`]: Added [`unnecessary-box-size`] as a new config value to set the maximum + size of `T` in `Box<T>` to be linted. + [#10651](https://github.com/rust-lang/rust-clippy/pull/10651) + +### Documentation Improvements + +* `cargo clippy --explain LINT` now shows possible configuration options for the explained lint + [#10751](https://github.com/rust-lang/rust-clippy/pull/10751) +* New config values mentioned in this changelog will now be linked. + [#10889](https://github.com/rust-lang/rust-clippy/pull/10889) +* Several sections of [Clippy's book] have been reworked + [#10652](https://github.com/rust-lang/rust-clippy/pull/10652) + [#10622](https://github.com/rust-lang/rust-clippy/pull/10622) + +[Clippy's book]: https://doc.rust-lang.org/clippy/ ## Rust 1.70 -Current stable, released 2023-06-01 +Released 2023-06-01 -[**View 85 PRs merged since 1.69**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-04-20..2023-06-01+base%3Amaster+sort%3Amerged-desc+) +[View all 91 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-02-26T01%3A05%3A43Z..2023-04-11T13%3A27%3A30Z+base%3Amaster) ### New Lints @@ -137,7 +198,7 @@ Current stable, released 2023-06-01 Released 2023-04-20 -[**View 86 PRs merged since 1.68**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-03-09..2023-04-20+base%3Amaster+sort%3Amerged-desc+) +[View all 72 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-01-13T06%3A12%3A46Z..2023-02-25T23%3A48%3A10Z+base%3Amaster) ### New Lints @@ -252,7 +313,7 @@ Released 2023-04-20 Released 2023-03-09 -[**View 85 PRs merged since 1.67**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-01-26..2023-03-09+base%3Amaster+sort%3Amerged-desc+) +[View all 76 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-12-01T20%3A40%3A04Z..2023-01-12T18%3A58%3A59Z+base%3Amaster) ### New Lints @@ -399,7 +460,7 @@ Released 2023-03-09 Released 2023-01-26 -[**View 68 PRs merged since 1.66**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-12-15..2023-01-26+base%3Amaster+sort%3Amerged-desc+) +[View all 104 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-10-23T13%3A35%3A19Z..2022-12-01T13%3A34%3A39Z+base%3Amaster) ### New Lints @@ -590,8 +651,7 @@ Released 2023-01-26 Released 2022-12-15 -[**View 93 PRs merged since 1.65**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-11-03..2022-12-15+base%3Amaster+sort%3Amerged-desc+) - +[View all 116 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-09-09T17%3A32%3A39Z..2022-10-23T11%3A27%3A24Z+base%3Amaster) ### New Lints @@ -762,8 +822,7 @@ Released 2022-12-15 Released 2022-11-03 -[**View 129 PRs merged since 1.64**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-09-22..2022-11-03+base%3Amaster+sort%3Amerged-desc+) - +[View all 86 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-07-29T01%3A09%3A31Z..2022-09-09T00%3A01%3A54Z+base%3Amaster) ### Important Changes @@ -907,8 +966,7 @@ Released 2022-11-03 Released 2022-09-22 
-[**View 92 PRs merged since 1.63**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-08-11..2022-09-22+base%3Amaster+sort%3Amerged-desc+) - +[View all 110 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-06-17T21%3A25%3A31Z..2022-07-28T17%3A11%3A18Z+base%3Amaster) ### New Lints @@ -1058,8 +1116,7 @@ Released 2022-09-22 Released 2022-08-11 -[**View 100 PRs merged since 1.62**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-06-30..2022-08-11+base%3Amaster+sort%3Amerged-desc+) - +[View all 91 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-05-05T17%3A24%3A22Z..2022-06-16T14%3A24%3A48Z+base%3Amaster) ### New Lints @@ -1205,8 +1262,7 @@ Released 2022-08-11 Released 2022-06-30 -[**View 104 PRs merged since 1.61**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-05-19..2022-06-30+base%3Amaster+sort%3Amerged-desc+) - +[View all 90 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-03-25T17%3A22%3A30Z..2022-05-05T13%3A29%3A44Z+base%3Amaster) ### New Lints @@ -1363,8 +1419,7 @@ Released 2022-06-30 Released 2022-05-19 -[**View 93 PRs merged since 1.60**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-04-07..2022-05-19+base%3Amaster+sort%3Amerged-desc+) - +[View all 60 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2022-02-11T16%3A54%3A41Z..2022-03-24T13%3A42%3A25Z+base%3Amaster) ### New Lints @@ -1465,8 +1520,7 @@ Released 2022-05-19 Released 2022-04-07 -[**View 75 PRs merged since 1.59**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-02-24..2022-04-07+base%3Amaster+sort%3Amerged-desc+) - +[View all 73 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-12-31T17%3A53%3A37Z..2022-02-10T17%3A31%3A37Z+base%3Amaster) ### New Lints @@ -1598,8 +1652,7 @@ Released 2022-04-07 Released 2022-02-24 -[**View 63 PRs merged since 1.58**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2022-01-13..2022-02-24+base%3Amaster+sort%3Amerged-desc+) - +[View all 94 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-11-04T12%3A40%3A18Z..2021-12-30T13%3A36%3A20Z+base%3Amaster) ### New Lints @@ -1763,8 +1816,7 @@ Released 2022-02-24 Released 2022-01-13 -[**View 73 PRs merged since 1.57**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-12-02..2022-01-13+base%3Amaster+sort%3Amerged-desc+) - +[View all 68 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-10-07T09%3A49%3A18Z..2021-11-04T12%3A20%3A12Z+base%3Amaster) ### Rust 1.58.1 @@ -1885,8 +1937,7 @@ Released 2022-01-13 Released 2021-12-02 -[**View 92 PRs merged since 1.56**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-10-21..2021-12-02+base%3Amaster+sort%3Amerged-desc+) - +[View all 148 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-08-12T20%3A36%3A04Z..2021-11-03T17%3A57%3A59Z+base%3Amaster) ### New Lints @@ -2037,7 +2088,7 @@ Released 2021-12-02 Released 2021-10-21 -[**View 92 PRs merged since 1.55**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-09-09..2021-10-21+base%3Amaster+sort%3Amerged-desc+) +[View all 38 merged pull 
requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-07-19T14%3A33%3A33Z..2021-08-12T09%3A28%3A38Z+base%3Amaster) ### New Lints @@ -2103,7 +2154,7 @@ Released 2021-10-21 Released 2021-09-09 -[**View 61 PRs merged since 1.54**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-07-29..2021-09-09+base%3Amaster+sort%3Amerged-desc+) +[View all 83 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-06-03T07%3A23%3A59Z..2021-07-29T11%3A47%3A32Z+base%3Amaster) ### Important Changes @@ -2221,8 +2272,7 @@ Released 2021-09-09 Released 2021-07-29 -[**View 68 PRs merged since 1.53**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-06-17..2021-07-29+base%3Amaster+sort%3Amerged-desc+) - +[View all 74 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-04-27T23%3A51%3A18Z..2021-06-03T06%3A54%3A07Z+base%3Amaster) ### New Lints @@ -2350,7 +2400,7 @@ Released 2021-07-29 Released 2021-06-17 -[**View 80 PRs merged since 1.52**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-05-06..2021-06-17+base%3Amaster+sort%3Amerged-desc+) +[View all 126 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-03-12T22%3A49%3A20Z..2021-04-27T14%3A38%3A20Z+base%3Amaster) ### New Lints @@ -2534,8 +2584,7 @@ Released 2021-06-17 Released 2021-05-06 -[**View 113 PRs merged since 1.51**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-03-25..2021-05-06+base%3Amaster+sort%3Amerged-desc+) - +[View all 102 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2021-02-03T15%3A59%3A06Z..2021-03-11T20%3A06%3A43Z+base%3Amaster) ### New Lints @@ -2670,8 +2719,7 @@ Released 2021-05-06 Released 2021-03-25 -[**View 117 PRs merged since 1.50**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2021-02-11..2021-03-25+base%3Amaster+sort%3Amerged-desc+) - +[View all 78 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-12-21T15%3A43%3A04Z..2021-02-03T04%3A21%3A10Z+base%3Amaster) ### New Lints @@ -2786,8 +2834,7 @@ Released 2021-03-25 Released 2021-02-11 -[**View 90 PRs merged since 1.49**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-12-31..2021-02-11+base%3Amaster+sort%3Amerged-desc+) - +[View all 119 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-11-06T18%3A32%3A40Z..2021-01-03T14%3A51%3A18Z+base%3Amaster) ### New Lints @@ -2916,8 +2963,7 @@ Released 2021-02-11 Released 2020-12-31 -[**View 85 PRs merged since 1.48**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-11-19..2020-12-31+base%3Amaster+sort%3Amerged-desc+) - +[View all 107 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-09-24T14%3A05%3A12Z..2020-11-05T13%3A35%3A44Z+base%3Amaster) ### New Lints @@ -3023,7 +3069,7 @@ Released 2020-12-31 Released 2020-11-19 -[**View 112 PRs merged since 1.47**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-10-08..2020-11-19+base%3Amaster+sort%3Amerged-desc+) +[View all 99 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-08-11T13%3A14%3A38Z..2020-09-23T18%3A55%3A22Z+base%3Amaster) ### New lints @@ -3141,8 +3187,7 @@ Released 2020-11-19 Released 2020-10-08 -[**View 80 PRs merged since 
1.46**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-08-27..2020-10-08+base%3Amaster+sort%3Amerged-desc+) - +[View all 76 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-06-23T16%3A27%3A11Z..2020-08-11T12%3A52%3A41Z+base%3Amaster) ### New lints @@ -3244,8 +3289,7 @@ Released 2020-10-08 Released 2020-08-27 -[**View 93 PRs merged since 1.45**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-07-16..2020-08-27+base%3Amaster+sort%3Amerged-desc+) - +[View all 48 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-05-31T12%3A50%3A53Z..2020-06-23T15%3A00%3A32Z+base%3Amaster) ### New lints @@ -3307,8 +3351,7 @@ Released 2020-08-27 Released 2020-07-16 -[**View 65 PRs merged since 1.44**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-06-04..2020-07-16+base%3Amaster+sort%3Amerged-desc+) - +[View all 81 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-04-18T20%3A18%3A04Z..2020-05-27T19%3A25%3A04Z+base%3Amaster) ### New lints @@ -3385,8 +3428,7 @@ and [`similar_names`]. [#5651](https://github.com/rust-lang/rust-clippy/pull/565 Released 2020-06-04 -[**View 88 PRs merged since 1.43**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-04-23..2020-06-04+base%3Amaster+sort%3Amerged-desc+) - +[View all 124 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-03-05T17%3A30%3A53Z..2020-04-18T09%3A20%3A51Z+base%3Amaster) ### New lints @@ -3469,8 +3511,7 @@ Released 2020-06-04 Released 2020-04-23 -[**View 121 PRs merged since 1.42**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-03-12..2020-04-23+base%3Amaster+sort%3Amerged-desc+) - +[View all 91 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2020-01-26T16%3A01%3A11Z..2020-03-04T16%3A45%3A37Z+base%3Amaster) ### New lints @@ -3528,7 +3569,7 @@ Released 2020-04-23 Released 2020-03-12 -[**View 106 PRs merged since 1.41**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2020-01-30..2020-03-12+base%3Amaster+sort%3Amerged-desc+) +[View all 101 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-12-15T01%3A40%3A34Z..2020-01-26T11%3A22%3A13Z+base%3Amaster) ### New lints @@ -3595,7 +3636,7 @@ Released 2020-03-12 Released 2020-01-30 -[**View 107 PRs merged since 1.40**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-12-19..2020-01-30+base%3Amaster+sort%3Amerged-desc+) +[View all 74 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-10-28T20%3A50%3A24Z..2019-12-12T00%3A53%3A03Z+base%3Amaster) * New Lints: * [`exit`] [#4697](https://github.com/rust-lang/rust-clippy/pull/4697) @@ -3640,8 +3681,7 @@ Released 2020-01-30 Released 2019-12-19 -[**View 69 😺 PRs merged since 1.39**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-11-07..2019-12-19+base%3Amaster+sort%3Amerged-desc+) - +[View all 76 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-09-23T06%3A18%3A04Z..2019-10-28T17%3A34%3A55Z+base%3Amaster) * New Lints: * [`unneeded_wildcard_pattern`] [#4537](https://github.com/rust-lang/rust-clippy/pull/4537) @@ -3683,7 +3723,7 @@ Released 2019-12-19 Released 2019-11-07 -[**View 84 PRs merged since 
1.38**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-09-26..2019-11-07+base%3Amaster+sort%3Amerged-desc+) +[View all 100 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-08-11T19%3A21%3A38Z..2019-09-22T12%3A07%3A39Z+base%3Amaster) * New Lints: * [`uninit_assumed_init`] [#4479](https://github.com/rust-lang/rust-clippy/pull/4479) @@ -3727,7 +3767,7 @@ Released 2019-11-07 Released 2019-09-26 -[**View 102 PRs merged since 1.37**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-08-15..2019-09-26+base%3Amaster+sort%3Amerged-desc+) +[View all 76 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-06-30T13%3A40%3A26Z..2019-08-11T09%3A47%3A27Z+base%3Amaster) * New Lints: * [`main_recursion`] [#4203](https://github.com/rust-lang/rust-clippy/pull/4203) @@ -3757,7 +3797,7 @@ Released 2019-09-26 Released 2019-08-15 -[**View 83 PRs merged since 1.36**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-07-04..2019-08-15+base%3Amaster+sort%3Amerged-desc+) +[View all 72 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-05-19T08%3A11%3A23Z..2019-06-25T23%3A22%3A22Z+base%3Amaster) * New Lints: * [`checked_conversions`] [#4088](https://github.com/rust-lang/rust-clippy/pull/4088) @@ -3781,8 +3821,7 @@ Released 2019-08-15 Released 2019-07-04 -[**View 75 PRs merged since 1.35**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-05-20..2019-07-04+base%3Amaster+sort%3Amerged-desc+) - +[View all 81 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-04-10T09%3A41%3A56Z..2019-05-18T00%3A29%3A40Z+base%3Amaster) * New lints: [`find_map`], [`filter_map_next`] [#4039](https://github.com/rust-lang/rust-clippy/pull/4039) * New lint: [`path_buf_push_overwrite`] [#3954](https://github.com/rust-lang/rust-clippy/pull/3954) @@ -3813,8 +3852,7 @@ Released 2019-07-04 Released 2019-05-20 -[**View 90 PRs merged since 1.34**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-04-10..2019-05-20+base%3Amaster+sort%3Amerged-desc+) - +[1fac380..37f5c1e](https://github.com/rust-lang/rust-clippy/compare/1fac380...37f5c1e) * New lint: `drop_bounds` to detect `T: Drop` bounds * Split [`redundant_closure`] into [`redundant_closure`] and [`redundant_closure_for_method_calls`] [#4110](https://github.com/rust-lang/rust-clippy/pull/4101) @@ -3842,8 +3880,7 @@ Released 2019-05-20 Released 2019-04-10 -[**View 66 PRs merged since 1.33**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-02-26..2019-04-10+base%3Amaster+sort%3Amerged-desc+) - +[View all 61 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2019-01-17T17%3A45%3A39Z..2019-02-19T08%3A24%3A05Z+base%3Amaster) * New lint: [`assertions_on_constants`] to detect for example `assert!(true)` * New lint: [`dbg_macro`] to detect uses of the `dbg!` macro @@ -3873,7 +3910,7 @@ Released 2019-04-10 Released 2019-02-26 -[**View 83 PRs merged since 1.32**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2019-01-17..2019-02-26+base%3Amaster+sort%3Amerged-desc+) +[View all 120 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2018-11-28T06%3A19%3A50Z..2019-01-15T09%3A27%3A02Z+base%3Amaster) * New lints: [`implicit_return`], [`vec_box`], [`cast_ref_to_mut`] * The `rust-clippy` 
repository is now part of the `rust-lang` org. @@ -3906,7 +3943,7 @@ Released 2019-02-26 Released 2019-01-17 -[**View 106 PRs merged since 1.31**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2018-12-06..2019-01-17+base%3Amaster+sort%3Amerged-desc+) +[View all 71 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2018-10-24T05%3A02%3A21Z..2018-11-27T17%3A29%3A34Z+base%3Amaster) * New lints: [`slow_vector_initialization`], `mem_discriminant_non_enum`, [`redundant_clone`], [`wildcard_dependencies`], @@ -3936,8 +3973,7 @@ Released 2019-01-17 Released 2018-12-06 -[**View 85 PRs merged since 1.30**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2018-10-25..2018-12-06+base%3Amaster+sort%3Amerged-desc+) - +[125907ad..2e26fdc2](https://github.com/rust-lang/rust-clippy/compare/125907ad..2e26fdc2) * Clippy has been relicensed under a dual MIT / Apache license. See [#3093](https://github.com/rust-lang/rust-clippy/issues/3093) for more @@ -3977,9 +4013,7 @@ Released 2018-12-06 Released 2018-10-25 -[**View 106 PRs merged since 1.29**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2018-09-13..2018-10-25+base%3Amaster+sort%3Amerged-desc+) - - +[View all 88 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2018-08-02T16%3A54%3A12Z..2018-09-17T09%3A44%3A06Z+base%3Amaster) * Deprecate `assign_ops` lint * New lints: [`mistyped_literal_suffixes`], [`ptr_offset_with_cast`], [`needless_collect`], [`copy_iterator`] @@ -4861,6 +4895,7 @@ Released 2018-09-13 [`inconsistent_digit_grouping`]: https://rust-lang.github.io/rust-clippy/master/index.html#inconsistent_digit_grouping [`inconsistent_struct_constructor`]: https://rust-lang.github.io/rust-clippy/master/index.html#inconsistent_struct_constructor [`incorrect_clone_impl_on_copy_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#incorrect_clone_impl_on_copy_type +[`incorrect_partial_ord_impl_on_ord_type`]: https://rust-lang.github.io/rust-clippy/master/index.html#incorrect_partial_ord_impl_on_ord_type [`index_refutable_slice`]: https://rust-lang.github.io/rust-clippy/master/index.html#index_refutable_slice [`indexing_slicing`]: https://rust-lang.github.io/rust-clippy/master/index.html#indexing_slicing [`ineffective_bit_mask`]: https://rust-lang.github.io/rust-clippy/master/index.html#ineffective_bit_mask @@ -4941,6 +4976,8 @@ Released 2018-09-13 [`manual_flatten`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_flatten [`manual_instant_elapsed`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_instant_elapsed [`manual_is_ascii_check`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_ascii_check +[`manual_is_finite`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_finite +[`manual_is_infinite`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_infinite [`manual_let_else`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_let_else [`manual_main_separator_str`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_main_separator_str [`manual_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_map @@ -5047,6 +5084,7 @@ Released 2018-09-13 [`needless_option_as_deref`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_option_as_deref [`needless_option_take`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_option_take 
[`needless_parens_on_range_literals`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_parens_on_range_literals +[`needless_pass_by_ref_mut`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_pass_by_ref_mut [`needless_pass_by_value`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_pass_by_value [`needless_pub_self`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_pub_self [`needless_question_mark`]: https://rust-lang.github.io/rust-clippy/master/index.html#needless_question_mark @@ -5134,6 +5172,7 @@ Released 2018-09-13 [`rc_buffer`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_buffer [`rc_clone_in_vec_init`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_clone_in_vec_init [`rc_mutex`]: https://rust-lang.github.io/rust-clippy/master/index.html#rc_mutex +[`read_line_without_trim`]: https://rust-lang.github.io/rust-clippy/master/index.html#read_line_without_trim [`read_zero_byte_vec`]: https://rust-lang.github.io/rust-clippy/master/index.html#read_zero_byte_vec [`recursive_format_impl`]: https://rust-lang.github.io/rust-clippy/master/index.html#recursive_format_impl [`redundant_allocation`]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_allocation @@ -5266,6 +5305,7 @@ Released 2018-09-13 [`try_err`]: https://rust-lang.github.io/rust-clippy/master/index.html#try_err [`tuple_array_conversions`]: https://rust-lang.github.io/rust-clippy/master/index.html#tuple_array_conversions [`type_complexity`]: https://rust-lang.github.io/rust-clippy/master/index.html#type_complexity +[`type_id_on_box`]: https://rust-lang.github.io/rust-clippy/master/index.html#type_id_on_box [`type_repetition_in_bounds`]: https://rust-lang.github.io/rust-clippy/master/index.html#type_repetition_in_bounds [`unchecked_duration_subtraction`]: https://rust-lang.github.io/rust-clippy/master/index.html#unchecked_duration_subtraction [`undocumented_unsafe_blocks`]: https://rust-lang.github.io/rust-clippy/master/index.html#undocumented_unsafe_blocks diff --git a/src/tools/clippy/Cargo.toml b/src/tools/clippy/Cargo.toml index 76c804f935e..0fb3a3a984b 100644 --- a/src/tools/clippy/Cargo.toml +++ b/src/tools/clippy/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "clippy" -version = "0.1.72" +version = "0.1.73" description = "A bunch of helpful lints to avoid common pitfalls in Rust" repository = "https://github.com/rust-lang/rust-clippy" readme = "README.md" @@ -36,6 +36,17 @@ walkdir = "2.3" filetime = "0.2" itertools = "0.10.1" +# UI test dependencies +clippy_utils = { path = "clippy_utils" } +derive-new = "0.5" +if_chain = "1.0" +quote = "1.0" +serde = { version = "1.0.125", features = ["derive"] } +syn = { version = "2.0", features = ["full"] } +futures = "0.3" +parking_lot = "0.12" +tokio = { version = "1", features = ["io-util"] } + [build-dependencies] rustc_tools_util = "0.3.0" diff --git a/src/tools/clippy/README.md b/src/tools/clippy/README.md index d712d3e6750..5d490645d89 100644 --- a/src/tools/clippy/README.md +++ b/src/tools/clippy/README.md @@ -5,7 +5,7 @@ A collection of lints to catch common mistakes and improve your [Rust](https://github.com/rust-lang/rust) code. 
-[There are over 600 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html) +[There are over 650 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html) Lints are divided into categories, each with a default [lint level](https://doc.rust-lang.org/rustc/lints/levels.html). You can choose how much Clippy is supposed to ~~annoy~~ help you by changing the lint level by category. diff --git a/src/tools/clippy/book/src/README.md b/src/tools/clippy/book/src/README.md index 3b627096268..486ea3df704 100644 --- a/src/tools/clippy/book/src/README.md +++ b/src/tools/clippy/book/src/README.md @@ -6,7 +6,7 @@ A collection of lints to catch common mistakes and improve your [Rust](https://github.com/rust-lang/rust) code. -[There are over 600 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html) +[There are over 650 lints included in this crate!](https://rust-lang.github.io/rust-clippy/master/index.html) Lints are divided into categories, each with a default [lint level](https://doc.rust-lang.org/rustc/lints/levels.html). You can choose how diff --git a/src/tools/clippy/book/src/development/infrastructure/changelog_update.md b/src/tools/clippy/book/src/development/infrastructure/changelog_update.md index 52445494436..df9b1bbe18f 100644 --- a/src/tools/clippy/book/src/development/infrastructure/changelog_update.md +++ b/src/tools/clippy/book/src/development/infrastructure/changelog_update.md @@ -56,28 +56,6 @@ and open that file in your editor of choice. When updating the changelog it's also a good idea to make sure that `commit1` is already correct in the current changelog. -#### PR ranges - -We developed the concept of PR ranges to help the user understand the size of a new update. To create a PR range, -get the current release date and the date that the last version was released (YYYY-MM-DD) and use the following link: - -``` -[**View <NUMBER OF PRs> PRs merged since 1.<LAST VERSION NUM>**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A<LAST VERSION DATE>..<CURRENT VERSION DATE>+base%3Amaster+sort%3Amerged-desc+) -``` - -> Note: Be sure to check click the link and check how many PRs got merged between - -Example: - -``` -[**View 85 PRs merged since 1.69**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-04-20..2023-06-01+base%3Amaster+sort%3Amerged-desc+) -``` - -Which renders to: -[**View 85 PRs merged since 1.69**](https://github.com/rust-lang/rust-clippy/pulls?q=is%3Apr+is%3Aclosed+merged%3A2023-04-20..2023-06-01+base%3Amaster+sort%3Amerged-desc+) - -Note that **commit ranges should not be included**, only PR ranges. - ### 3. Authoring the final changelog The above script should have dumped all the relevant PRs to the file you diff --git a/src/tools/clippy/book/src/development/speedtest.md b/src/tools/clippy/book/src/development/speedtest.md new file mode 100644 index 00000000000..0db718e6ad6 --- /dev/null +++ b/src/tools/clippy/book/src/development/speedtest.md @@ -0,0 +1,24 @@ +# Speedtest +`SPEEDTEST` is the tool we use to measure a lint's performance. It works by executing the same test several times. + +It's useful for measuring changes to current lints and deciding if their performance changes too much. `SPEEDTEST` is +controlled through the `SPEEDTEST` (and `SPEEDTEST_*`) environment variables. + +## Checking Speedtest + +To do a simple speed test of a lint (e.g. `allow_attributes`), use this command. 
+ +```sh +$ SPEEDTEST=ui TESTNAME="allow_attributes" cargo uitest -- --nocapture +``` + +This will test all `ui` tests (`SPEEDTEST=ui`) whose names start with `allow_attributes`. By default, `SPEEDTEST` will +iterate your test 1000 times. But you can change this with `SPEEDTEST_ITERATIONS`. + +```sh +$ SPEEDTEST=toml SPEEDTEST_ITERATIONS=100 TESTNAME="semicolon_block" cargo uitest -- --nocapture +``` + +> **WARNING**: Be sure to use `-- --nocapture` at the end of the command to see the average test time. If you don't +> use `-- --nocapture` (e.g. `SPEEDTEST=ui TESTNAME="let_underscore_untyped" cargo uitest`), the timing output +> will not show up. diff --git a/src/tools/clippy/book/src/lint_configuration.md b/src/tools/clippy/book/src/lint_configuration.md index 60d7ce6e615..f8073dac330 100644 --- a/src/tools/clippy/book/src/lint_configuration.md +++ b/src/tools/clippy/book/src/lint_configuration.md @@ -175,7 +175,7 @@ The maximum amount of nesting a block can reside in ## `disallowed-names` The list of disallowed names to lint about. NB: `bar` is not here since it has legitimate uses. The value -`".."` can be used as part of the list to indicate, that the configured values should be appended to the +`".."` can be used as part of the list to indicate that the configured values should be appended to the default configuration of Clippy. By default, any configuration will replace the default value. **Default Value:** `["foo", "baz", "quux"]` (`Vec<String>`) diff --git a/src/tools/clippy/clippy_dev/src/new_lint.rs b/src/tools/clippy/clippy_dev/src/new_lint.rs index f11aa547bd7..f39bc06e6d7 100644 --- a/src/tools/clippy/clippy_dev/src/new_lint.rs +++ b/src/tools/clippy/clippy_dev/src/new_lint.rs @@ -358,6 +358,10 @@ fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::R let mod_file_path = ty_dir.join("mod.rs"); let context_import = setup_mod_file(&mod_file_path, lint)?; + let pass_lifetimes = match context_import { + "LateContext" => "<'_>", + _ => "", + }; let name_upper = lint.name.to_uppercase(); let mut lint_file_contents = String::new(); @@ -372,7 +376,7 @@ fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::R use super::{name_upper}; // TODO: Adjust the parameters as necessary - pub(super) fn check(cx: &{context_import}, msrv: &Msrv) {{ + pub(super) fn check(cx: &{context_import}{pass_lifetimes}, msrv: &Msrv) {{ if !msrv.meets(todo!("Add a new entry in `clippy_utils/src/msrvs`")) {{ return; }} @@ -389,7 +393,7 @@ fn create_lint_for_ty(lint: &LintData<'_>, enable_msrv: bool, ty: &str) -> io::R use super::{name_upper}; // TODO: Adjust the parameters as necessary - pub(super) fn check(cx: &{context_import}) {{ + pub(super) fn check(cx: &{context_import}{pass_lifetimes}) {{ todo!(); }} "# diff --git a/src/tools/clippy/clippy_dev/src/setup/intellij.rs b/src/tools/clippy/clippy_dev/src/setup/intellij.rs index efdb158c21e..a7138f36a4e 100644 --- a/src/tools/clippy/clippy_dev/src/setup/intellij.rs +++ b/src/tools/clippy/clippy_dev/src/setup/intellij.rs @@ -37,7 +37,7 @@ impl ClippyProjectInfo { pub fn setup_rustc_src(rustc_path: &str) { let Ok(rustc_source_dir) = check_and_get_rustc_dir(rustc_path) else { - return + return; }; for project in CLIPPY_PROJECTS { diff --git a/src/tools/clippy/clippy_dev/src/update_lints.rs b/src/tools/clippy/clippy_dev/src/update_lints.rs index 7213c9dfede..7c2e06ea69a 100644 --- a/src/tools/clippy/clippy_dev/src/update_lints.rs +++ b/src/tools/clippy/clippy_dev/src/update_lints.rs @@ -340,7 +340,10 @@ pub fn 
deprecate(name: &str, reason: Option<&String>) { let name_upper = name.to_uppercase(); let (mut lints, deprecated_lints, renamed_lints) = gather_all(); - let Some(lint) = lints.iter().find(|l| l.name == name_lower) else { eprintln!("error: failed to find lint `{name}`"); return; }; + let Some(lint) = lints.iter().find(|l| l.name == name_lower) else { + eprintln!("error: failed to find lint `{name}`"); + return; + }; let mod_path = { let mut mod_path = PathBuf::from(format!("clippy_lints/src/{}", lint.module)); diff --git a/src/tools/clippy/clippy_lints/Cargo.toml b/src/tools/clippy/clippy_lints/Cargo.toml index c23054443bb..11136867ff0 100644 --- a/src/tools/clippy/clippy_lints/Cargo.toml +++ b/src/tools/clippy/clippy_lints/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "clippy_lints" -version = "0.1.72" +version = "0.1.73" description = "A bunch of helpful lints to avoid common pitfalls in Rust" repository = "https://github.com/rust-lang/rust-clippy" readme = "README.md" diff --git a/src/tools/clippy/clippy_lints/src/allow_attributes.rs b/src/tools/clippy/clippy_lints/src/allow_attributes.rs index eb21184713e..e1ef514edfd 100644 --- a/src/tools/clippy/clippy_lints/src/allow_attributes.rs +++ b/src/tools/clippy/clippy_lints/src/allow_attributes.rs @@ -1,5 +1,6 @@ use ast::{AttrStyle, Attribute}; -use clippy_utils::{diagnostics::span_lint_and_sugg, is_from_proc_macro}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::is_from_proc_macro; use rustc_ast as ast; use rustc_errors::Applicability; use rustc_lint::{LateContext, LateLintPass, LintContext}; diff --git a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs index a1e44668e1a..7adcd9ad055 100644 --- a/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs +++ b/src/tools/clippy/clippy_lints/src/arc_with_non_send_sync.rs @@ -1,12 +1,11 @@ -use clippy_utils::diagnostics::span_lint_and_help; +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::last_path_segment; use clippy_utils::ty::{implements_trait, is_type_diagnostic_item}; -use if_chain::if_chain; - use rustc_hir::{Expr, ExprKind}; -use rustc_lint::LateContext; -use rustc_lint::LateLintPass; +use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty; +use rustc_middle::ty::print::with_forced_trimmed_paths; +use rustc_middle::ty::GenericArgKind; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::symbol::sym; @@ -15,8 +14,8 @@ declare_clippy_lint! { /// This lint warns when you use `Arc` with a type that does not implement `Send` or `Sync`. /// /// ### Why is this bad? - /// Wrapping a type in Arc doesn't add thread safety to the underlying data, so data races - /// could occur when touching the underlying data. + /// `Arc<T>` is only `Send`/`Sync` when `T` is [both `Send` and `Sync`](https://doc.rust-lang.org/std/sync/struct.Arc.html#impl-Send-for-Arc%3CT%3E), + /// either `T` should be made `Send + Sync` or an `Rc` should be used instead of an `Arc` /// /// ### Example /// ```rust @@ -24,16 +23,17 @@ declare_clippy_lint! { /// # use std::sync::Arc; /// /// fn main() { - /// // This is safe, as `i32` implements `Send` and `Sync`. + /// // This is fine, as `i32` implements `Send` and `Sync`. /// let a = Arc::new(42); /// - /// // This is not safe, as `RefCell` does not implement `Sync`. 
+ /// // `RefCell` is `!Sync`, so either the `Arc` should be replaced with an `Rc` + /// // or the `RefCell` replaced with something like a `RwLock` /// let b = Arc::new(RefCell::new(42)); /// } /// ``` #[clippy::version = "1.72.0"] pub ARC_WITH_NON_SEND_SYNC, - correctness, + suspicious, "using `Arc` with a type that does not implement `Send` or `Sync`" } declare_lint_pass!(ArcWithNonSendSync => [ARC_WITH_NON_SEND_SYNC]); @@ -41,32 +41,38 @@ declare_lint_pass!(ArcWithNonSendSync => [ARC_WITH_NON_SEND_SYNC]); impl LateLintPass<'_> for ArcWithNonSendSync { fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { let ty = cx.typeck_results().expr_ty(expr); - if_chain! { - if is_type_diagnostic_item(cx, ty, sym::Arc); - if let ExprKind::Call(func, [arg]) = expr.kind; - if let ExprKind::Path(func_path) = func.kind; - if last_path_segment(&func_path).ident.name == sym::new; - if let arg_ty = cx.typeck_results().expr_ty(arg); - if !matches!(arg_ty.kind(), ty::Param(_)); - if !cx.tcx - .lang_items() - .sync_trait() - .map_or(false, |id| implements_trait(cx, arg_ty, id, &[])) || - !cx.tcx - .get_diagnostic_item(sym::Send) - .map_or(false, |id| implements_trait(cx, arg_ty, id, &[])); + if is_type_diagnostic_item(cx, ty, sym::Arc) + && let ExprKind::Call(func, [arg]) = expr.kind + && let ExprKind::Path(func_path) = func.kind + && last_path_segment(&func_path).ident.name == sym::new + && let arg_ty = cx.typeck_results().expr_ty(arg) + // make sure that the type is not and does not contain any type parameters + && arg_ty.walk().all(|arg| { + !matches!(arg.unpack(), GenericArgKind::Type(ty) if matches!(ty.kind(), ty::Param(_))) + }) + && let Some(send) = cx.tcx.get_diagnostic_item(sym::Send) + && let Some(sync) = cx.tcx.lang_items().sync_trait() + && let [is_send, is_sync] = [send, sync].map(|id| implements_trait(cx, arg_ty, id, &[])) + && !(is_send && is_sync) + { + span_lint_and_then( + cx, + ARC_WITH_NON_SEND_SYNC, + expr.span, + "usage of an `Arc` that is not `Send` or `Sync`", + |diag| with_forced_trimmed_paths!({ + if !is_send { + diag.note(format!("the trait `Send` is not implemented for `{arg_ty}`")); + } + if !is_sync { + diag.note(format!("the trait `Sync` is not implemented for `{arg_ty}`")); + } + + diag.note(format!("required for `{ty}` to implement `Send` and `Sync`")); - then { - span_lint_and_help( - cx, - ARC_WITH_NON_SEND_SYNC, - expr.span, - "usage of `Arc<T>` where `T` is not `Send` or `Sync`", - None, - "consider using `Rc<T>` instead or wrapping `T` in a std::sync type like \ - `Mutex<T>`", - ); - } + diag.help("consider using an `Rc` instead or wrapping the inner type with a `Mutex`"); + } + )); } } } diff --git a/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs b/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs index a8dc0cb3b58..b90914e936a 100644 --- a/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs +++ b/src/tools/clippy/clippy_lints/src/assertions_on_constants.rs @@ -31,14 +31,20 @@ declare_lint_pass!(AssertionsOnConstants => [ASSERTIONS_ON_CONSTANTS]); impl<'tcx> LateLintPass<'tcx> for AssertionsOnConstants { fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, e) else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, e) else { + return; + }; let is_debug = match cx.tcx.get_diagnostic_name(macro_call.def_id) { Some(sym::debug_assert_macro) => true, Some(sym::assert_macro) => false, _ => return, }; - let Some((condition, panic_expn)) = 
find_assert_args(cx, e, macro_call.expn) else { return }; - let Some(Constant::Bool(val)) = constant(cx, cx.typeck_results(), condition) else { return }; + let Some((condition, panic_expn)) = find_assert_args(cx, e, macro_call.expn) else { + return; + }; + let Some(Constant::Bool(val)) = constant(cx, cx.typeck_results(), condition) else { + return; + }; if val { span_lint_and_help( cx, diff --git a/src/tools/clippy/clippy_lints/src/attrs.rs b/src/tools/clippy/clippy_lints/src/attrs.rs index 2ba78f99569..2a5be275615 100644 --- a/src/tools/clippy/clippy_lints/src/attrs.rs +++ b/src/tools/clippy/clippy_lints/src/attrs.rs @@ -1,12 +1,10 @@ //! checks for attributes +use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg, span_lint_and_then}; +use clippy_utils::is_from_proc_macro; use clippy_utils::macros::{is_panic, macro_backtrace}; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::{first_line_of_span, is_present_in_source, snippet_opt, without_block_comments}; -use clippy_utils::{ - diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg, span_lint_and_then}, - is_from_proc_macro, -}; use if_chain::if_chain; use rustc_ast::{AttrKind, AttrStyle, Attribute, LitKind, MetaItemKind, MetaItemLit, NestedMetaItem}; use rustc_errors::Applicability; diff --git a/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs b/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs index 9c053247402..1593d7b0fb3 100644 --- a/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs +++ b/src/tools/clippy/clippy_lints/src/blocks_in_if_conditions.rs @@ -1,9 +1,8 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg}; -use clippy_utils::get_parent_expr; -use clippy_utils::higher; use clippy_utils::source::snippet_block_with_applicability; use clippy_utils::ty::implements_trait; use clippy_utils::visitors::{for_each_expr, Descend}; +use clippy_utils::{get_parent_expr, higher}; use core::ops::ControlFlow; use if_chain::if_chain; use rustc_errors::Applicability; @@ -85,8 +84,7 @@ impl<'tcx> LateLintPass<'tcx> for BlocksInIfConditions { ); } } else { - let span = - block.expr.as_ref().map_or_else(|| block.stmts[0].span, |e| e.span); + let span = block.expr.as_ref().map_or_else(|| block.stmts[0].span, |e| e.span); if span.from_expansion() || expr.span.from_expansion() { return; } diff --git a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs index d984fddc57a..4503597713a 100644 --- a/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs +++ b/src/tools/clippy/clippy_lints/src/bool_assert_comparison.rs @@ -61,7 +61,7 @@ fn is_impl_not_trait_with_bool_out<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) - ) }) .map_or(false, |assoc_item| { - let proj = Ty::new_projection(cx.tcx,assoc_item.def_id, cx.tcx.mk_args_trait(ty, [])); + let proj = Ty::new_projection(cx.tcx, assoc_item.def_id, cx.tcx.mk_args_trait(ty, [])); let nty = cx.tcx.normalize_erasing_regions(cx.param_env, proj); nty.is_bool() @@ -70,14 +70,18 @@ fn is_impl_not_trait_with_bool_out<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) - impl<'tcx> LateLintPass<'tcx> for BoolAssertComparison { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, expr) else { + return; + }; let macro_name = cx.tcx.item_name(macro_call.def_id); let eq_macro = match macro_name.as_str() { 
"assert_eq" | "debug_assert_eq" => true, "assert_ne" | "debug_assert_ne" => false, _ => return, }; - let Some ((a, b, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return }; + let Some((a, b, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { + return; + }; let a_span = a.span.source_callsite(); let b_span = b.span.source_callsite(); @@ -126,7 +130,9 @@ impl<'tcx> LateLintPass<'tcx> for BoolAssertComparison { let mut suggestions = vec![(name_span, non_eq_mac.to_string()), (lit_span, String::new())]; if bool_value ^ eq_macro { - let Some(sugg) = Sugg::hir_opt(cx, non_lit_expr) else { return }; + let Some(sugg) = Sugg::hir_opt(cx, non_lit_expr) else { + return; + }; suggestions.push((non_lit_expr.span, (!sugg).to_string())); } diff --git a/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs b/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs index bdb3a011602..1828dd65152 100644 --- a/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs +++ b/src/tools/clippy/clippy_lints/src/bool_to_int_with_if.rs @@ -4,7 +4,9 @@ use rustc_hir::{Block, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use clippy_utils::{diagnostics::span_lint_and_then, in_constant, is_else_clause, is_integer_literal, sugg::Sugg}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::sugg::Sugg; +use clippy_utils::{in_constant, is_else_clause, is_integer_literal}; use rustc_errors::Applicability; declare_clippy_lint! { diff --git a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs index 814108ed8a7..b3dbbb08f8e 100644 --- a/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs +++ b/src/tools/clippy/clippy_lints/src/borrow_deref_ref.rs @@ -1,9 +1,8 @@ use crate::reference::DEREF_ADDROF; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::is_from_proc_macro; use clippy_utils::source::snippet_opt; use clippy_utils::ty::implements_trait; -use clippy_utils::{get_parent_expr, is_lint_allowed}; +use clippy_utils::{get_parent_expr, is_from_proc_macro, is_lint_allowed}; use rustc_errors::Applicability; use rustc_hir::{ExprKind, UnOp}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/box_default.rs b/src/tools/clippy/clippy_lints/src/box_default.rs index e42c3fe2432..fa9c525fc08 100644 --- a/src/tools/clippy/clippy_lints/src/box_default.rs +++ b/src/tools/clippy/clippy_lints/src/box_default.rs @@ -1,12 +1,10 @@ -use clippy_utils::{ - diagnostics::span_lint_and_sugg, get_parent_node, is_default_equivalent, macros::macro_backtrace, match_path, - path_def_id, paths, ty::expr_sig, -}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::macros::macro_backtrace; +use clippy_utils::ty::expr_sig; +use clippy_utils::{get_parent_node, is_default_equivalent, match_path, path_def_id, paths}; use rustc_errors::Applicability; -use rustc_hir::{ - intravisit::{walk_ty, Visitor}, - Block, Expr, ExprKind, Local, Node, QPath, TyKind, -}; +use rustc_hir::intravisit::{walk_ty, Visitor}; +use rustc_hir::{Block, Expr, ExprKind, Local, Node, QPath, TyKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_middle::ty::print::with_forced_trimmed_paths; diff --git a/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs b/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs index fa1550a0ef9..1e56ed5f450 100644 --- 
a/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs +++ b/src/tools/clippy/clippy_lints/src/casts/as_ptr_cast_mut.rs @@ -3,10 +3,8 @@ use clippy_utils::source::snippet_opt; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; -use rustc_middle::{ - mir::Mutability, - ty::{self, Ty, TypeAndMut}, -}; +use rustc_middle::mir::Mutability; +use rustc_middle::ty::{self, Ty, TypeAndMut}; use super::AS_PTR_CAST_MUT; diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs index 27cc5a1c3f0..4d9cc4cacc3 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_different_sizes.rs @@ -1,10 +1,12 @@ +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::msrvs::{self, Msrv}; -use clippy_utils::{diagnostics::span_lint_and_then, source}; +use clippy_utils::source; use if_chain::if_chain; use rustc_ast::Mutability; use rustc_hir::{Expr, ExprKind, Node}; use rustc_lint::LateContext; -use rustc_middle::ty::{self, layout::LayoutOf, Ty, TypeAndMut}; +use rustc_middle::ty::layout::LayoutOf; +use rustc_middle::ty::{self, Ty, TypeAndMut}; use super::CAST_SLICE_DIFFERENT_SIZES; diff --git a/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs b/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs index 1233c632a79..5e0123842b0 100644 --- a/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs +++ b/src/tools/clippy/clippy_lints/src/casts/cast_slice_from_raw_parts.rs @@ -4,7 +4,8 @@ use clippy_utils::source::snippet_with_context; use clippy_utils::{match_def_path, paths}; use if_chain::if_chain; use rustc_errors::Applicability; -use rustc_hir::{def_id::DefId, Expr, ExprKind}; +use rustc_hir::def_id::DefId; +use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; use rustc_middle::ty::{self, Ty}; diff --git a/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs b/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs index f0c1df01430..ce1ab10910c 100644 --- a/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs +++ b/src/tools/clippy/clippy_lints/src/casts/ptr_cast_constness.rs @@ -1,6 +1,6 @@ -use clippy_utils::msrvs::POINTER_CAST_CONSTNESS; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::msrvs::{Msrv, POINTER_CAST_CONSTNESS}; use clippy_utils::sugg::Sugg; -use clippy_utils::{diagnostics::span_lint_and_sugg, msrvs::Msrv}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::{Expr, Mutability}; diff --git a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs index 71cf2aea0f8..ae56f38d9ad 100644 --- a/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs +++ b/src/tools/clippy/clippy_lints/src/casts/unnecessary_cast.rs @@ -56,7 +56,7 @@ pub(super) fn check<'tcx>( &format!("casting raw pointers to the same type and constness is unnecessary (`{cast_from}` -> `{cast_to}`)"), "try", cast_str.clone(), - Applicability::MachineApplicable, + Applicability::MaybeIncorrect, ); } } diff --git a/src/tools/clippy/clippy_lints/src/copies.rs b/src/tools/clippy/clippy_lints/src/copies.rs index 1c321f46e2d..e3a09636e24 100644 --- a/src/tools/clippy/clippy_lints/src/copies.rs +++ b/src/tools/clippy/clippy_lints/src/copies.rs @@ -10,8 +10,7 @@ use core::iter; use core::ops::ControlFlow; use 
rustc_errors::Applicability; use rustc_hir::def_id::DefIdSet; -use rustc_hir::intravisit; -use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, Stmt, StmtKind}; +use rustc_hir::{intravisit, BinOpKind, Block, Expr, ExprKind, HirId, HirIdSet, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::query::Key; use rustc_session::{declare_tool_lint, impl_lint_pass}; diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs index 7436e9ce811..726674d88f1 100644 --- a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs +++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs @@ -5,7 +5,8 @@ use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use rustc_span::{symbol::sym, Span}; +use rustc_span::symbol::sym; +use rustc_span::Span; declare_clippy_lint! { /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/dbg_macro.rs b/src/tools/clippy/clippy_lints/src/dbg_macro.rs index ea17e7a6071..49452136d6f 100644 --- a/src/tools/clippy/clippy_lints/src/dbg_macro.rs +++ b/src/tools/clippy/clippy_lints/src/dbg_macro.rs @@ -71,7 +71,9 @@ impl DbgMacro { impl LateLintPass<'_> for DbgMacro { fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, expr) else { + return; + }; if cx.tcx.is_diagnostic_item(sym::dbg_macro, macro_call.def_id) { // allows `dbg!` in test code if allow-dbg-in-test is set to true in clippy.toml if self.allow_dbg_in_tests diff --git a/src/tools/clippy/clippy_lints/src/declared_lints.rs b/src/tools/clippy/clippy_lints/src/declared_lints.rs index b60f0738f64..498d657b31f 100644 --- a/src/tools/clippy/clippy_lints/src/declared_lints.rs +++ b/src/tools/clippy/clippy_lints/src/declared_lints.rs @@ -206,6 +206,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[ crate::implicit_saturating_sub::IMPLICIT_SATURATING_SUB_INFO, crate::inconsistent_struct_constructor::INCONSISTENT_STRUCT_CONSTRUCTOR_INFO, crate::incorrect_impls::INCORRECT_CLONE_IMPL_ON_COPY_TYPE_INFO, + crate::incorrect_impls::INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE_INFO, crate::index_refutable_slice::INDEX_REFUTABLE_SLICE_INFO, crate::indexing_slicing::INDEXING_SLICING_INFO, crate::indexing_slicing::OUT_OF_BOUNDS_INDEXING_INFO, @@ -272,6 +273,8 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[ crate::manual_async_fn::MANUAL_ASYNC_FN_INFO, crate::manual_bits::MANUAL_BITS_INFO, crate::manual_clamp::MANUAL_CLAMP_INFO, + crate::manual_float_methods::MANUAL_IS_FINITE_INFO, + crate::manual_float_methods::MANUAL_IS_INFINITE_INFO, crate::manual_is_ascii_check::MANUAL_IS_ASCII_CHECK_INFO, crate::manual_let_else::MANUAL_LET_ELSE_INFO, crate::manual_main_separator_str::MANUAL_MAIN_SEPARATOR_STR_INFO, @@ -388,6 +391,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[ crate::methods::OR_THEN_UNWRAP_INFO, crate::methods::PATH_BUF_PUSH_OVERWRITE_INFO, crate::methods::RANGE_ZIP_WITH_LEN_INFO, + crate::methods::READ_LINE_WITHOUT_TRIM_INFO, crate::methods::REPEAT_ONCE_INFO, crate::methods::RESULT_MAP_OR_INTO_OPTION_INFO, crate::methods::SEARCH_IS_SOME_INFO, @@ -403,6 +407,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[ crate::methods::SUSPICIOUS_MAP_INFO, crate::methods::SUSPICIOUS_SPLITN_INFO, 
crate::methods::SUSPICIOUS_TO_OWNED_INFO, + crate::methods::TYPE_ID_ON_BOX_INFO, crate::methods::UNINIT_ASSUMED_INIT_INFO, crate::methods::UNIT_HASH_INFO, crate::methods::UNNECESSARY_FILTER_MAP_INFO, @@ -468,6 +473,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[ crate::needless_if::NEEDLESS_IF_INFO, crate::needless_late_init::NEEDLESS_LATE_INIT_INFO, crate::needless_parens_on_range_literals::NEEDLESS_PARENS_ON_RANGE_LITERALS_INFO, + crate::needless_pass_by_ref_mut::NEEDLESS_PASS_BY_REF_MUT_INFO, crate::needless_pass_by_value::NEEDLESS_PASS_BY_VALUE_INFO, crate::needless_question_mark::NEEDLESS_QUESTION_MARK_INFO, crate::needless_update::NEEDLESS_UPDATE_INFO, diff --git a/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs b/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs index ca9514ccc7d..a294c693787 100644 --- a/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs +++ b/src/tools/clippy/clippy_lints/src/default_constructed_unit_structs.rs @@ -1,5 +1,7 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, is_ty_alias, match_def_path, paths}; -use hir::{def::Res, ExprKind}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::{is_ty_alias, match_def_path, paths}; +use hir::def::Res; +use hir::ExprKind; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs index f296b80d283..572990aaba1 100644 --- a/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs +++ b/src/tools/clippy/clippy_lints/src/default_instead_of_iter_empty.rs @@ -1,7 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::last_path_segment; use clippy_utils::source::snippet_with_context; -use clippy_utils::{match_def_path, paths}; +use clippy_utils::{last_path_segment, match_def_path, paths}; use rustc_errors::Applicability; use rustc_hir::{def, Expr, ExprKind, GenericArg, QPath, TyKind}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs index 9217edcef07..d09428dbc1f 100644 --- a/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs +++ b/src/tools/clippy/clippy_lints/src/default_numeric_fallback.rs @@ -4,15 +4,11 @@ use clippy_utils::{get_parent_node, numeric_literal}; use if_chain::if_chain; use rustc_ast::ast::{LitFloatType, LitIntType, LitKind}; use rustc_errors::Applicability; -use rustc_hir::{ - intravisit::{walk_expr, walk_stmt, Visitor}, - Body, Expr, ExprKind, HirId, ItemKind, Lit, Node, Stmt, StmtKind, -}; +use rustc_hir::intravisit::{walk_expr, walk_stmt, Visitor}; +use rustc_hir::{Body, Expr, ExprKind, HirId, ItemKind, Lit, Node, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; -use rustc_middle::{ - lint::in_external_macro, - ty::{self, FloatTy, IntTy, PolyFnSig, Ty}, -}; +use rustc_middle::lint::in_external_macro; +use rustc_middle::ty::{self, FloatTy, IntTy, PolyFnSig, Ty}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use std::iter; diff --git a/src/tools/clippy/clippy_lints/src/dereference.rs b/src/tools/clippy/clippy_lints/src/dereference.rs index 0e7efd53390..8a9d978a106 100644 --- a/src/tools/clippy/clippy_lints/src/dereference.rs +++ b/src/tools/clippy/clippy_lints/src/dereference.rs @@ -12,12 +12,11 @@ use 
rustc_ast::util::parser::{PREC_POSTFIX, PREC_PREFIX}; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::graph::iterate::{CycleDetector, TriColorDepthFirstSearch}; use rustc_errors::Applicability; +use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_hir::intravisit::{walk_ty, Visitor}; use rustc_hir::{ - self as hir, - def_id::{DefId, LocalDefId}, - BindingAnnotation, Body, BodyId, BorrowKind, Closure, Expr, ExprKind, FnRetTy, GenericArg, HirId, ImplItem, - ImplItemKind, Item, ItemKind, Local, MatchSource, Mutability, Node, Pat, PatKind, Path, QPath, TraitItem, + self as hir, BindingAnnotation, Body, BodyId, BorrowKind, Closure, Expr, ExprKind, FnRetTy, GenericArg, HirId, + ImplItem, ImplItemKind, Item, ItemKind, Local, MatchSource, Mutability, Node, Pat, PatKind, Path, QPath, TraitItem, TraitItemKind, TyKind, UnOp, }; use rustc_index::bit_set::BitSet; @@ -30,9 +29,11 @@ use rustc_middle::ty::{ ProjectionPredicate, Ty, TyCtxt, TypeVisitableExt, TypeckResults, }; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{symbol::sym, Span, Symbol}; +use rustc_span::symbol::sym; +use rustc_span::{Span, Symbol}; use rustc_trait_selection::infer::InferCtxtExt as _; -use rustc_trait_selection::traits::{query::evaluate_obligation::InferCtxtExt as _, Obligation, ObligationCause}; +use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; +use rustc_trait_selection::traits::{Obligation, ObligationCause}; use std::collections::VecDeque; declare_clippy_lint! { @@ -77,6 +78,11 @@ declare_clippy_lint! { /// Suggests that the receiver of the expression borrows /// the expression. /// + /// ### Known problems + /// The lint cannot tell when the implementations of a trait + /// for `&T` and `T` do different things. Removing a borrow + /// in such a case can change the semantics of the code. 
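The caveat in that "Known problems" note is easiest to see with a small, self-contained sketch. The `Describe` trait and all names below are assumptions for illustration only (they are not taken from this diff, and whether the lint would fire on this exact code depends on the surrounding context); the point is simply that when `T` and `&T` implement a trait differently, removing a borrow changes which impl is selected and therefore what the program does.

```rust
// Illustrative sketch: a trait implemented differently for `i32` and `&i32`.
trait Describe {
    fn describe(&self) -> &'static str;
}

impl Describe for i32 {
    fn describe(&self) -> &'static str {
        "owned i32"
    }
}

impl Describe for &i32 {
    fn describe(&self) -> &'static str {
        "borrowed i32"
    }
}

fn print_it(value: impl Describe) {
    println!("{}", value.describe());
}

fn main() {
    let n = 1;
    print_it(&n); // prints "borrowed i32"
    print_it(n);  // prints "owned i32": dropping the `&` changed the behaviour
}
```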
+ /// /// ### Example /// ```rust /// fn fun(_a: &i32) {} @@ -589,7 +595,7 @@ impl<'tcx> LateLintPass<'tcx> for Dereferencing<'tcx> { pat.spans, "this pattern creates a reference to a reference", |diag| { - diag.multipart_suggestion("try this", replacements, app); + diag.multipart_suggestion("try", replacements, app); }, ); } @@ -1123,7 +1129,9 @@ fn needless_borrow_impl_arg_position<'tcx>( let destruct_trait_def_id = cx.tcx.lang_items().destruct_trait(); let sized_trait_def_id = cx.tcx.lang_items().sized_trait(); - let Some(callee_def_id) = fn_def_id(cx, parent) else { return Position::Other(precedence) }; + let Some(callee_def_id) = fn_def_id(cx, parent) else { + return Position::Other(precedence); + }; let fn_sig = cx.tcx.fn_sig(callee_def_id).instantiate_identity().skip_binder(); let args_with_expr_ty = cx .typeck_results() @@ -1252,7 +1260,12 @@ fn has_ref_mut_self_method(cx: &LateContext<'_>, trait_def_id: DefId) -> bool { .in_definition_order() .any(|assoc_item| { if assoc_item.fn_has_self_parameter { - let self_ty = cx.tcx.fn_sig(assoc_item.def_id).instantiate_identity().skip_binder().inputs()[0]; + let self_ty = cx + .tcx + .fn_sig(assoc_item.def_id) + .instantiate_identity() + .skip_binder() + .inputs()[0]; matches!(self_ty.kind(), ty::Ref(_, _, Mutability::Mut)) } else { false @@ -1296,8 +1309,8 @@ fn referent_used_exactly_once<'tcx>( possible_borrowers: &mut Vec<(LocalDefId, PossibleBorrowerMap<'tcx, 'tcx>)>, reference: &Expr<'tcx>, ) -> bool { - let mir = enclosing_mir(cx.tcx, reference.hir_id); - if let Some(local) = expr_local(cx.tcx, reference) + if let Some(mir) = enclosing_mir(cx.tcx, reference.hir_id) + && let Some(local) = expr_local(cx.tcx, reference) && let [location] = *local_assignments(mir, local).as_slice() && let Some(statement) = mir.basic_blocks[location.block].statements.get(location.statement_index) && let StatementKind::Assign(box (_, Rvalue::Ref(_, _, place))) = statement.kind @@ -1442,9 +1455,7 @@ fn ty_auto_deref_stability<'tcx>( ty::Adt(..) 
if ty.has_placeholders() || ty.has_opaque_types() => { Position::ReborrowStable(precedence).into() }, - ty::Adt(_, args) if args.has_non_region_param() => { - TyPosition::new_deref_stable_for_result(precedence, ty) - }, + ty::Adt(_, args) if args.has_non_region_param() => TyPosition::new_deref_stable_for_result(precedence, ty), ty::Bool | ty::Char | ty::Int(_) @@ -1531,7 +1542,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data Mutability::Not => "explicit `deref` method call", Mutability::Mut => "explicit `deref_mut` method call", }, - "try this", + "try", format!("{addr_of_str}{deref_str}{expr_str}"), app, ); @@ -1593,7 +1604,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data } else { format!("{prefix}{snip}") }; - diag.span_suggestion(data.span, "try this", sugg, app); + diag.span_suggestion(data.span, "try", sugg, app); }, ); }, @@ -1620,7 +1631,7 @@ fn report<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, state: State, data |diag| { let mut app = Applicability::MachineApplicable; let snip = snippet_with_context(cx, expr.span, data.span.ctxt(), "..", &mut app).0; - diag.span_suggestion(data.span, "try this", snip.into_owned(), app); + diag.span_suggestion(data.span, "try", snip.into_owned(), app); }, ); }, diff --git a/src/tools/clippy/clippy_lints/src/derivable_impls.rs b/src/tools/clippy/clippy_lints/src/derivable_impls.rs index 71b5104bed8..9a85cc4ce2d 100644 --- a/src/tools/clippy/clippy_lints/src/derivable_impls.rs +++ b/src/tools/clippy/clippy_lints/src/derivable_impls.rs @@ -3,10 +3,9 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::indent_of; use clippy_utils::{is_default_equivalent, peel_blocks}; use rustc_errors::Applicability; +use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res}; use rustc_hir::{ - self as hir, - def::{CtorKind, CtorOf, DefKind, Res}, - Body, Expr, ExprKind, GenericArg, Impl, ImplItemKind, Item, ItemKind, Node, PathSegment, QPath, TyKind, + self as hir, Body, Expr, ExprKind, GenericArg, Impl, ImplItemKind, Item, ItemKind, Node, PathSegment, QPath, TyKind, }; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::adjustment::{Adjust, PointerCoercion}; @@ -99,10 +98,11 @@ fn check_struct<'tcx>( if let Some(PathSegment { args, .. }) = p.segments.last() { let args = args.map(|a| a.args).unwrap_or(&[]); - // ty_args contains the generic parameters of the type declaration, while args contains the arguments - // used at instantiation time. If both len are not equal, it means that some parameters were not - // provided (which means that the default values were used); in this case we will not risk - // suggesting too broad a rewrite. We won't either if any argument is a type or a const. + // ty_args contains the generic parameters of the type declaration, while args contains the + // arguments used at instantiation time. If both len are not equal, it means that some + // parameters were not provided (which means that the default values were used); in this + // case we will not risk suggesting too broad a rewrite. We won't either if any argument + // is a type or a const. 
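To make the rewrapped comment above concrete, here is a minimal sketch of the elided-default-parameter case it describes; the `WithDefault` type and its impl are hypothetical names invented for illustration, not code from this diff. The declaration has one generic parameter, while the path in `impl Default for WithDefault` supplies none, so the two lengths differ and the lint deliberately stays silent rather than suggesting `#[derive(Default)]` too broadly.

```rust
// Hypothetical example: one declared generic parameter (with a default),
// zero generic arguments written in the impl's self-type path.
struct WithDefault<T = String> {
    value: Option<T>,
}

impl Default for WithDefault {
    fn default() -> Self {
        Self { value: None }
    }
}

fn main() {
    let w = WithDefault::default();
    assert!(w.value.is_none());
}
```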
if ty_args.len() != args.len() || args.iter().any(|arg| !matches!(arg, GenericArg::Lifetime(_))) { return; } diff --git a/src/tools/clippy/clippy_lints/src/derive.rs b/src/tools/clippy/clippy_lints/src/derive.rs index 78e7f93e2bf..c343f248d06 100644 --- a/src/tools/clippy/clippy_lints/src/derive.rs +++ b/src/tools/clippy/clippy_lints/src/derive.rs @@ -1,7 +1,6 @@ use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_note, span_lint_and_sugg, span_lint_and_then}; -use clippy_utils::paths; use clippy_utils::ty::{implements_trait, implements_trait_with_env, is_copy}; -use clippy_utils::{is_lint_allowed, match_def_path}; +use clippy_utils::{is_lint_allowed, match_def_path, paths}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::def_id::DefId; @@ -334,7 +333,9 @@ fn check_copy_clone<'tcx>(cx: &LateContext<'tcx>, item: &Item<'_>, trait_ref: &h Some(id) if trait_ref.trait_def_id() == Some(id) => id, _ => return, }; - let Some(copy_id) = cx.tcx.lang_items().copy_trait() else { return }; + let Some(copy_id) = cx.tcx.lang_items().copy_trait() else { + return; + }; let (ty_adt, ty_subs) = match *ty.kind() { // Unions can't derive clone. ty::Adt(adt, subs) if !adt.is_union() => (adt, subs), diff --git a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs index ca8671c8f1a..95d3f7547b4 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_methods.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_methods.rs @@ -94,7 +94,7 @@ impl<'tcx> LateLintPass<'tcx> for DisallowedMethods { path_def_id(cx, expr) }; let Some(def_id) = uncalled_path.or_else(|| fn_def_id(cx, expr)) else { - return + return; }; let conf = match self.disallowed.get(&def_id) { Some(&index) => &self.conf_disallowed[index], diff --git a/src/tools/clippy/clippy_lints/src/disallowed_names.rs b/src/tools/clippy/clippy_lints/src/disallowed_names.rs index 6e6615f08ee..04c2d44137a 100644 --- a/src/tools/clippy/clippy_lints/src/disallowed_names.rs +++ b/src/tools/clippy/clippy_lints/src/disallowed_names.rs @@ -1,4 +1,5 @@ -use clippy_utils::{diagnostics::span_lint, is_test_module_or_function}; +use clippy_utils::diagnostics::span_lint; +use clippy_utils::is_test_module_or_function; use rustc_data_structures::fx::FxHashSet; use rustc_hir::{Item, Pat, PatKind}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/doc.rs b/src/tools/clippy/clippy_lints/src/doc.rs index 87d88f70752..e5f39d102cd 100644 --- a/src/tools/clippy/clippy_lints/src/doc.rs +++ b/src/tools/clippy/clippy_lints/src/doc.rs @@ -31,9 +31,8 @@ use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::edition::Edition; use rustc_span::source_map::{BytePos, FilePathMapping, SourceMap, Span}; use rustc_span::{sym, FileName, Pos}; -use std::io; use std::ops::Range; -use std::thread; +use std::{io, thread}; use url::Url; declare_clippy_lint! 
{ @@ -295,7 +294,9 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown { fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'_>) { let attrs = cx.tcx.hir().attrs(item.hir_id()); - let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { return }; + let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { + return; + }; match item.kind { hir::ItemKind::Fn(ref sig, _, body_id) => { if !(is_entrypoint_fn(cx, item.owner_id.to_def_id()) || in_external_macro(cx.tcx.sess, item.span)) { @@ -339,7 +340,9 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown { fn check_trait_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::TraitItem<'_>) { let attrs = cx.tcx.hir().attrs(item.hir_id()); - let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { return }; + let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { + return; + }; if let hir::TraitItemKind::Fn(ref sig, ..) = item.kind { if !in_external_macro(cx.tcx.sess, item.span) { lint_for_missing_headers(cx, item.owner_id, sig, headers, None, None); @@ -349,7 +352,9 @@ impl<'tcx> LateLintPass<'tcx> for DocMarkdown { fn check_impl_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::ImplItem<'_>) { let attrs = cx.tcx.hir().attrs(item.hir_id()); - let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { return }; + let Some(headers) = check_attrs(cx, &self.valid_idents, attrs) else { + return; + }; if self.in_trait_impl || in_external_macro(cx.tcx.sess, item.span) { return; } diff --git a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs index 976ce47e869..14122abbf2c 100644 --- a/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs +++ b/src/tools/clippy/clippy_lints/src/drop_forget_ref.rs @@ -1,7 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_note; -use clippy_utils::get_parent_node; -use clippy_utils::is_must_use_func_call; use clippy_utils::ty::{is_copy, is_must_use_ty, is_type_lang_item}; +use clippy_utils::{get_parent_node, is_must_use_func_call}; use rustc_hir::{Arm, Expr, ExprKind, LangItem, Node}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; diff --git a/src/tools/clippy/clippy_lints/src/empty_drop.rs b/src/tools/clippy/clippy_lints/src/empty_drop.rs index ec063c0f777..209fb66fa40 100644 --- a/src/tools/clippy/clippy_lints/src/empty_drop.rs +++ b/src/tools/clippy/clippy_lints/src/empty_drop.rs @@ -1,4 +1,5 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, peel_blocks}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::peel_blocks; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::{Body, ExprKind, Impl, ImplItemKind, Item, ItemKind, Node}; diff --git a/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs b/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs index c3a020433de..282157181ab 100644 --- a/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs +++ b/src/tools/clippy/clippy_lints/src/empty_structs_with_brackets.rs @@ -1,4 +1,5 @@ -use clippy_utils::{diagnostics::span_lint_and_then, source::snippet_opt}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::source::snippet_opt; use rustc_ast::ast::{Item, ItemKind, VariantData}; use rustc_errors::Applicability; use rustc_lexer::TokenKind; diff --git a/src/tools/clippy/clippy_lints/src/endian_bytes.rs b/src/tools/clippy/clippy_lints/src/endian_bytes.rs index 
f470987833e..dda14b4df53 100644 --- a/src/tools/clippy/clippy_lints/src/endian_bytes.rs +++ b/src/tools/clippy/clippy_lints/src/endian_bytes.rs @@ -1,8 +1,10 @@ use crate::Lint; -use clippy_utils::{diagnostics::span_lint_and_then, is_lint_allowed}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::is_lint_allowed; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; -use rustc_middle::{lint::in_external_macro, ty::Ty}; +use rustc_middle::lint::in_external_macro; +use rustc_middle::ty::Ty; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::Symbol; use std::borrow::Cow; diff --git a/src/tools/clippy/clippy_lints/src/entry.rs b/src/tools/clippy/clippy_lints/src/entry.rs index ee5a875ade7..6197b5b19eb 100644 --- a/src/tools/clippy/clippy_lints/src/entry.rs +++ b/src/tools/clippy/clippy_lints/src/entry.rs @@ -1,18 +1,14 @@ -use clippy_utils::higher; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::{reindent_multiline, snippet_indent, snippet_with_applicability, snippet_with_context}; use clippy_utils::{ - can_move_expr_to_closure_no_visit, - diagnostics::span_lint_and_sugg, - is_expr_final_block_expr, is_expr_used_or_unified, match_def_path, paths, peel_hir_expr_while, - source::{reindent_multiline, snippet_indent, snippet_with_applicability, snippet_with_context}, - SpanlessEq, + can_move_expr_to_closure_no_visit, higher, is_expr_final_block_expr, is_expr_used_or_unified, match_def_path, + paths, peel_hir_expr_while, SpanlessEq, }; use core::fmt::{self, Write}; use rustc_errors::Applicability; -use rustc_hir::{ - hir_id::HirIdSet, - intravisit::{walk_expr, Visitor}, - Block, Expr, ExprKind, Guard, HirId, Let, Pat, Stmt, StmtKind, UnOp, -}; +use rustc_hir::hir_id::HirIdSet; +use rustc_hir::intravisit::{walk_expr, Visitor}; +use rustc_hir::{Block, Expr, ExprKind, Guard, HirId, Let, Pat, Stmt, StmtKind, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::{Span, SyntaxContext, DUMMY_SP}; @@ -69,16 +65,21 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass { return; } - let Some(higher::If { cond: cond_expr, then: then_expr, r#else: else_expr }) = higher::If::hir(expr) else { - return + let Some(higher::If { + cond: cond_expr, + then: then_expr, + r#else: else_expr, + }) = higher::If::hir(expr) + else { + return; }; let Some((map_ty, contains_expr)) = try_parse_contains(cx, cond_expr) else { - return + return; }; let Some(then_search) = find_insert_calls(cx, &contains_expr, then_expr) else { - return + return; }; let mut app = Applicability::MachineApplicable; @@ -186,7 +187,7 @@ impl<'tcx> LateLintPass<'tcx> for HashMapPass { MAP_ENTRY, expr.span, &format!("usage of `contains_key` followed by `insert` on a `{}`", map_ty.name()), - "try this", + "try", sugg, app, ); diff --git a/src/tools/clippy/clippy_lints/src/escape.rs b/src/tools/clippy/clippy_lints/src/escape.rs index a51a8ee09f6..dbe3453e7bf 100644 --- a/src/tools/clippy/clippy_lints/src/escape.rs +++ b/src/tools/clippy/clippy_lints/src/escape.rs @@ -1,6 +1,5 @@ use clippy_utils::diagnostics::span_lint_hir; -use rustc_hir::intravisit; -use rustc_hir::{self, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node, Pat, PatKind}; +use rustc_hir::{self, intravisit, AssocItemKind, Body, FnDecl, HirId, HirIdSet, Impl, ItemKind, Node, Pat, PatKind}; use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId}; use 
rustc_infer::infer::TyCtxtInferExt; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/excessive_nesting.rs b/src/tools/clippy/clippy_lints/src/excessive_nesting.rs index d04d833e630..8911f1872c2 100644 --- a/src/tools/clippy/clippy_lints/src/excessive_nesting.rs +++ b/src/tools/clippy/clippy_lints/src/excessive_nesting.rs @@ -1,9 +1,8 @@ -use clippy_utils::{diagnostics::span_lint_and_help, source::snippet}; -use rustc_ast::{ - node_id::NodeSet, - visit::{walk_block, walk_item, Visitor}, - Block, Crate, Inline, Item, ItemKind, ModKind, NodeId, -}; +use clippy_utils::diagnostics::span_lint_and_help; +use clippy_utils::source::snippet; +use rustc_ast::node_id::NodeSet; +use rustc_ast::visit::{walk_block, walk_item, Visitor}; +use rustc_ast::{Block, Crate, Inline, Item, ItemKind, ModKind, NodeId}; use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_tool_lint, impl_lint_pass}; diff --git a/src/tools/clippy/clippy_lints/src/explicit_write.rs b/src/tools/clippy/clippy_lints/src/explicit_write.rs index 315df6c714f..4b9ca8c917e 100644 --- a/src/tools/clippy/clippy_lints/src/explicit_write.rs +++ b/src/tools/clippy/clippy_lints/src/explicit_write.rs @@ -100,7 +100,7 @@ impl<'tcx> LateLintPass<'tcx> for ExplicitWrite { EXPLICIT_WRITE, expr.span, &format!("use of `{used}.unwrap()`"), - "try this", + "try", format!("{prefix}{sugg_mac}!({inputs_snippet})"), applicability, ); diff --git a/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs b/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs index 126bed6789c..c18006a71c2 100644 --- a/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs +++ b/src/tools/clippy/clippy_lints/src/extra_unused_type_parameters.rs @@ -1,6 +1,5 @@ use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_then}; -use clippy_utils::is_from_proc_macro; -use clippy_utils::trait_ref_of_method; +use clippy_utils::{is_from_proc_macro, trait_ref_of_method}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_errors::Applicability; use rustc_hir::intravisit::{walk_impl_item, walk_item, walk_param_bound, walk_ty, Visitor}; @@ -12,10 +11,8 @@ use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::hir::nested_filter; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{ - def_id::{DefId, LocalDefId}, - Span, -}; +use rustc_span::def_id::{DefId, LocalDefId}; +use rustc_span::Span; declare_clippy_lint! 
{ /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs index 5e0fcd74339..29e5315f88b 100644 --- a/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs +++ b/src/tools/clippy/clippy_lints/src/floating_point_arithmetic.rs @@ -1,10 +1,8 @@ -use clippy_utils::consts::{ - constant, constant_simple, Constant, - Constant::{Int, F32, F64}, -}; +use clippy_utils::consts::Constant::{Int, F32, F64}; +use clippy_utils::consts::{constant, constant_simple, Constant}; +use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::{ - diagnostics::span_lint_and_sugg, eq_expr_value, get_parent_expr, higher, in_constant, is_no_std_crate, - numeric_literal, peel_blocks, sugg, + eq_expr_value, get_parent_expr, higher, in_constant, is_no_std_crate, numeric_literal, peel_blocks, sugg, }; use if_chain::if_chain; use rustc_errors::Applicability; diff --git a/src/tools/clippy/clippy_lints/src/format.rs b/src/tools/clippy/clippy_lints/src/format.rs index d34d6e9279e..f4f8bdc2c44 100644 --- a/src/tools/clippy/clippy_lints/src/format.rs +++ b/src/tools/clippy/clippy_lints/src/format.rs @@ -43,7 +43,9 @@ declare_lint_pass!(UselessFormat => [USELESS_FORMAT]); impl<'tcx> LateLintPass<'tcx> for UselessFormat { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, expr) else { + return; + }; if !cx.tcx.is_diagnostic_item(sym::format_macro, macro_call.def_id) { return; } diff --git a/src/tools/clippy/clippy_lints/src/format_args.rs b/src/tools/clippy/clippy_lints/src/format_args.rs index 08e45ed7d0e..01c714c414b 100644 --- a/src/tools/clippy/clippy_lints/src/format_args.rs +++ b/src/tools/clippy/clippy_lints/src/format_args.rs @@ -14,10 +14,8 @@ use rustc_ast::{ FormatArgPosition, FormatArgPositionKind, FormatArgsPiece, FormatArgumentKind, FormatCount, FormatOptions, FormatPlaceholder, FormatTrait, }; -use rustc_errors::{ - Applicability, - SuggestionStyle::{CompletelyHidden, ShowCode}, -}; +use rustc_errors::Applicability; +use rustc_errors::SuggestionStyle::{CompletelyHidden, ShowCode}; use rustc_hir::{Expr, ExprKind, LangItem}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::ty::adjustment::{Adjust, Adjustment}; @@ -188,7 +186,9 @@ impl FormatArgs { impl<'tcx> LateLintPass<'tcx> for FormatArgs { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { - let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, expr) else { + return; + }; if !is_format_macro(cx, macro_call.def_id) { return; } diff --git a/src/tools/clippy/clippy_lints/src/format_impl.rs b/src/tools/clippy/clippy_lints/src/format_impl.rs index 3ddee1842a3..76369bccf9e 100644 --- a/src/tools/clippy/clippy_lints/src/format_impl.rs +++ b/src/tools/clippy/clippy_lints/src/format_impl.rs @@ -7,8 +7,8 @@ use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, Impl, ImplItem, ImplItemKind, QPath}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::Span; -use rustc_span::{sym, symbol::kw, Symbol}; +use rustc_span::symbol::kw; +use rustc_span::{sym, Span, Symbol}; declare_clippy_lint! 
{ /// ### What it does @@ -127,7 +127,9 @@ impl<'tcx> LateLintPass<'tcx> for FormatImpl { } fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - let Some(format_trait_impl) = self.format_trait_impl else { return }; + let Some(format_trait_impl) = self.format_trait_impl else { + return; + }; if format_trait_impl.name == sym::Display { check_to_string_in_display(cx, expr); diff --git a/src/tools/clippy/clippy_lints/src/from_over_into.rs b/src/tools/clippy/clippy_lints/src/from_over_into.rs index 35061fc64c9..2b899e21ef5 100644 --- a/src/tools/clippy/clippy_lints/src/from_over_into.rs +++ b/src/tools/clippy/clippy_lints/src/from_over_into.rs @@ -10,7 +10,8 @@ use rustc_hir::{ TyKind, }; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::{hir::nested_filter::OnlyBodies, ty}; +use rustc_middle::hir::nested_filter::OnlyBodies; +use rustc_middle::ty; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::symbol::{kw, sym}; use rustc_span::{Span, Symbol}; @@ -76,7 +77,8 @@ impl<'tcx> LateLintPass<'tcx> for FromOverInto { && let Some(into_trait_seg) = hir_trait_ref.path.segments.last() // `impl Into<target_ty> for self_ty` && let Some(GenericArgs { args: [GenericArg::Type(target_ty)], .. }) = into_trait_seg.args - && let Some(middle_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id).map(ty::EarlyBinder::instantiate_identity) + && let Some(middle_trait_ref) = cx.tcx.impl_trait_ref(item.owner_id) + .map(ty::EarlyBinder::instantiate_identity) && cx.tcx.is_diagnostic_item(sym::Into, middle_trait_ref.def_id) && !matches!(middle_trait_ref.args.type_at(1).kind(), ty::Alias(ty::Opaque, _)) { @@ -163,10 +165,14 @@ fn convert_to_from( return None; } let impl_item = cx.tcx.hir().impl_item(impl_item_ref.id); - let ImplItemKind::Fn(ref sig, body_id) = impl_item.kind else { return None }; + let ImplItemKind::Fn(ref sig, body_id) = impl_item.kind else { + return None; + }; let body = cx.tcx.hir().body(body_id); let [input] = body.params else { return None }; - let PatKind::Binding(.., self_ident, None) = input.pat.kind else { return None }; + let PatKind::Binding(.., self_ident, None) = input.pat.kind else { + return None; + }; let from = snippet_opt(cx, self_ty.span)?; let into = snippet_opt(cx, target_ty.span)?; diff --git a/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs b/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs index 096508dc4f1..5e859d97c62 100644 --- a/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/from_raw_with_void_ptr.rs @@ -4,8 +4,7 @@ use clippy_utils::{match_def_path, path_def_id, paths}; use rustc_hir::def_id::DefId; use rustc_hir::{Expr, ExprKind, QPath}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::RawPtr; -use rustc_middle::ty::TypeAndMut; +use rustc_middle::ty::{RawPtr, TypeAndMut}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::sym; diff --git a/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs b/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs index d3d0d91c1be..597fca88885 100644 --- a/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs +++ b/src/tools/clippy/clippy_lints/src/functions/impl_trait_in_params.rs @@ -1,6 +1,8 @@ -use clippy_utils::{diagnostics::span_lint_and_then, is_in_test_function}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::is_in_test_function; -use rustc_hir::{intravisit::FnKind, Body, HirId}; +use 
rustc_hir::intravisit::FnKind; +use rustc_hir::{Body, HirId}; use rustc_lint::LateContext; use rustc_span::Span; diff --git a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs index b244b913314..18f7368dafb 100644 --- a/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs +++ b/src/tools/clippy/clippy_lints/src/functions/misnamed_getters.rs @@ -1,7 +1,8 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet; use rustc_errors::Applicability; -use rustc_hir::{intravisit::FnKind, Body, ExprKind, FnDecl, ImplicitSelfKind, Unsafety}; +use rustc_hir::intravisit::FnKind; +use rustc_hir::{Body, ExprKind, FnDecl, ImplicitSelfKind, Unsafety}; use rustc_lint::LateContext; use rustc_middle::ty; use rustc_span::Span; @@ -12,8 +13,8 @@ use super::MISNAMED_GETTERS; pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body: &Body<'_>, span: Span) { let FnKind::Method(ref ident, sig) = kind else { - return; - }; + return; + }; // Takes only &(mut) self if decl.inputs.len() != 1 { @@ -25,8 +26,8 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body: let name = match decl.implicit_self { ImplicitSelfKind::MutRef => { let Some(name) = name.strip_suffix("_mut") else { - return; - }; + return; + }; name }, ImplicitSelfKind::Imm | ImplicitSelfKind::Mut | ImplicitSelfKind::ImmRef => name, @@ -76,7 +77,7 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body: for adjusted_type in iter::once(typeck_results.expr_ty(self_data)) .chain(typeck_results.expr_adjustments(self_data).iter().map(|adj| adj.target)) { - let ty::Adt(def,_) = adjusted_type.kind() else { + let ty::Adt(def, _) = adjusted_type.kind() else { continue; }; @@ -91,13 +92,15 @@ pub fn check_fn(cx: &LateContext<'_>, kind: FnKind<'_>, decl: &FnDecl<'_>, body: } let Some(used_field) = used_field else { - // Can happen if the field access is a tuple. We don't lint those because the getter name could not start with a number. + // Can happen if the field access is a tuple. We don't lint those because the getter name could not + // start with a number. return; }; let Some(correct_field) = correct_field else { // There is no field corresponding to the getter name. 
- // FIXME: This can be a false positive if the correct field is reachable through deeper autodereferences than used_field is + // FIXME: This can be a false positive if the correct field is reachable through deeper + // autodereferences than used_field is return; }; diff --git a/src/tools/clippy/clippy_lints/src/functions/must_use.rs b/src/tools/clippy/clippy_lints/src/functions/must_use.rs index 1b173de856b..83445c7a0ca 100644 --- a/src/tools/clippy/clippy_lints/src/functions/must_use.rs +++ b/src/tools/clippy/clippy_lints/src/functions/must_use.rs @@ -1,14 +1,13 @@ use hir::FnSig; use rustc_ast::ast::Attribute; use rustc_errors::Applicability; +use rustc_hir::def::Res; use rustc_hir::def_id::DefIdSet; -use rustc_hir::{self as hir, def::Res, QPath}; +use rustc_hir::{self as hir, QPath}; use rustc_infer::infer::TyCtxtInferExt; use rustc_lint::{LateContext, LintContext}; -use rustc_middle::{ - lint::in_external_macro, - ty::{self, Ty}, -}; +use rustc_middle::lint::in_external_macro; +use rustc_middle::ty::{self, Ty}; use rustc_span::{sym, Span, Symbol}; use clippy_utils::attrs::is_proc_macro; diff --git a/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs b/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs index bd473ac7e51..34f1bf3b2b1 100644 --- a/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs +++ b/src/tools/clippy/clippy_lints/src/functions/too_many_lines.rs @@ -23,7 +23,7 @@ pub(super) fn check_fn( } let Some(code_snippet) = snippet_opt(cx, body.value.span) else { - return + return; }; let mut line_count: u64 = 0; let mut in_comment = false; diff --git a/src/tools/clippy/clippy_lints/src/future_not_send.rs b/src/tools/clippy/clippy_lints/src/future_not_send.rs index e54429aee8e..621415c881c 100644 --- a/src/tools/clippy/clippy_lints/src/future_not_send.rs +++ b/src/tools/clippy/clippy_lints/src/future_not_send.rs @@ -66,7 +66,7 @@ impl<'tcx> LateLintPass<'tcx> for FutureNotSend { if let ty::Alias(ty::Opaque, AliasTy { def_id, args, .. }) = *ret_ty.kind() { let preds = cx.tcx.explicit_item_bounds(def_id); let mut is_future = false; - for (p, _span) in preds.arg_iter_copied(cx.tcx, args) { + for (p, _span) in preds.iter_instantiated_copied(cx.tcx, args) { if let Some(trait_pred) = p.as_trait_clause() { if Some(trait_pred.skip_binder().trait_ref.def_id) == cx.tcx.lang_items().future_trait() { is_future = true; diff --git a/src/tools/clippy/clippy_lints/src/if_let_mutex.rs b/src/tools/clippy/clippy_lints/src/if_let_mutex.rs index 9ea8c494cfc..e614a8f694f 100644 --- a/src/tools/clippy/clippy_lints/src/if_let_mutex.rs +++ b/src/tools/clippy/clippy_lints/src/if_let_mutex.rs @@ -1,7 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::higher; use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::SpanlessEq; +use clippy_utils::{higher, SpanlessEq}; use if_chain::if_chain; use rustc_errors::Diagnostic; use rustc_hir::intravisit::{self as visit, Visitor}; diff --git a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs index 725bd3d54bc..ab6ad3f3b3a 100644 --- a/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs +++ b/src/tools/clippy/clippy_lints/src/if_then_some_else_none.rs @@ -119,7 +119,13 @@ impl<'tcx> LateLintPass<'tcx> for IfThenSomeElseNone { fn stmts_contains_early_return(stmts: &[Stmt<'_>]) -> bool { stmts.iter().any(|stmt| { - let Stmt { kind: StmtKind::Semi(e), .. 
} = stmt else { return false }; + let Stmt { + kind: StmtKind::Semi(e), + .. + } = stmt + else { + return false; + }; contains_return(e) }) diff --git a/src/tools/clippy/clippy_lints/src/implicit_return.rs b/src/tools/clippy/clippy_lints/src/implicit_return.rs index 372b6ead3fe..a6b035d5106 100644 --- a/src/tools/clippy/clippy_lints/src/implicit_return.rs +++ b/src/tools/clippy/clippy_lints/src/implicit_return.rs @@ -1,9 +1,7 @@ -use clippy_utils::{ - diagnostics::span_lint_hir_and_then, - get_async_fn_body, is_async_fn, - source::{snippet_with_applicability, snippet_with_context, walk_span_to_context}, - visitors::for_each_expr, -}; +use clippy_utils::diagnostics::span_lint_hir_and_then; +use clippy_utils::source::{snippet_with_applicability, snippet_with_context, walk_span_to_context}; +use clippy_utils::visitors::for_each_expr; +use clippy_utils::{get_async_fn_body, is_async_fn}; use core::ops::ControlFlow; use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; diff --git a/src/tools/clippy/clippy_lints/src/incorrect_impls.rs b/src/tools/clippy/clippy_lints/src/incorrect_impls.rs index 7b95116ee4e..e6c42ebb832 100644 --- a/src/tools/clippy/clippy_lints/src/incorrect_impls.rs +++ b/src/tools/clippy/clippy_lints/src/incorrect_impls.rs @@ -1,11 +1,14 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, get_parent_node, last_path_segment, ty::implements_trait}; +use clippy_utils::diagnostics::{span_lint_and_sugg, span_lint_and_then}; +use clippy_utils::ty::implements_trait; +use clippy_utils::{get_parent_node, is_res_lang_ctor, last_path_segment, path_res}; use rustc_errors::Applicability; -use rustc_hir::{ExprKind, ImplItem, ImplItemKind, ItemKind, Node, UnOp}; -use rustc_hir_analysis::hir_ty_to_ty; +use rustc_hir::def::Res; +use rustc_hir::{Expr, ExprKind, ImplItem, ImplItemKind, ItemKind, LangItem, Node, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::EarlyBinder; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use rustc_span::{sym, symbol}; +use rustc_span::sym; +use rustc_span::symbol::kw; declare_clippy_lint! { /// ### What it does @@ -46,25 +49,80 @@ declare_clippy_lint! { correctness, "manual implementation of `Clone` on a `Copy` type" } -declare_lint_pass!(IncorrectImpls => [INCORRECT_CLONE_IMPL_ON_COPY_TYPE]); +declare_clippy_lint! { + /// ### What it does + /// Checks for manual implementations of both `PartialOrd` and `Ord` when only `Ord` is + /// necessary. + /// + /// ### Why is this bad? + /// If both `PartialOrd` and `Ord` are implemented, they must agree. This is commonly done by + /// wrapping the result of `cmp` in `Some` for `partial_cmp`. Not doing this may silently + /// introduce an error upon refactoring. + /// + /// ### Limitations + /// Will not lint if `Self` and `Rhs` do not have the same type. + /// + /// ### Example + /// ```rust + /// # use std::cmp::Ordering; + /// #[derive(Eq, PartialEq)] + /// struct A(u32); + /// + /// impl Ord for A { + /// fn cmp(&self, other: &Self) -> Ordering { + /// // ... + /// # todo!(); + /// } + /// } + /// + /// impl PartialOrd for A { + /// fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + /// // ... + /// # todo!(); + /// } + /// } + /// ``` + /// Use instead: + /// ```rust + /// # use std::cmp::Ordering; + /// #[derive(Eq, PartialEq)] + /// struct A(u32); + /// + /// impl Ord for A { + /// fn cmp(&self, other: &Self) -> Ordering { + /// // ... 
+ /// # todo!(); + /// } + /// } + /// + /// impl PartialOrd for A { + /// fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + /// Some(self.cmp(other)) + /// } + /// } + /// ``` + #[clippy::version = "1.72.0"] + pub INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE, + correctness, + "manual implementation of `PartialOrd` when `Ord` is already implemented" +} +declare_lint_pass!(IncorrectImpls => [INCORRECT_CLONE_IMPL_ON_COPY_TYPE, INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE]); impl LateLintPass<'_> for IncorrectImpls { - #[expect(clippy::needless_return)] + #[expect(clippy::too_many_lines)] fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &ImplItem<'_>) { - let node = get_parent_node(cx.tcx, impl_item.hir_id()); - let Some(Node::Item(item)) = node else { - return; - }; - let ItemKind::Impl(imp) = item.kind else { + let Some(Node::Item(item)) = get_parent_node(cx.tcx, impl_item.hir_id()) else { return; }; let Some(trait_impl) = cx.tcx.impl_trait_ref(item.owner_id).map(EarlyBinder::skip_binder) else { return; }; - let trait_impl_def_id = trait_impl.def_id; if cx.tcx.is_automatically_derived(item.owner_id.to_def_id()) { return; } + let ItemKind::Impl(_) = item.kind else { + return; + }; let ImplItemKind::Fn(_, impl_item_id) = cx.tcx.hir().impl_item(impl_item.impl_item_id()).kind else { return; }; @@ -72,15 +130,12 @@ impl LateLintPass<'_> for IncorrectImpls { let ExprKind::Block(block, ..) = body.value.kind else { return; }; - // Above is duplicated from the `duplicate_manual_partial_ord_impl` branch. - // Remove it while solving conflicts once that PR is merged. - // Actual implementation; remove this comment once aforementioned PR is merged - if cx.tcx.is_diagnostic_item(sym::Clone, trait_impl_def_id) + if cx.tcx.is_diagnostic_item(sym::Clone, trait_impl.def_id) && let Some(copy_def_id) = cx.tcx.get_diagnostic_item(sym::Copy) && implements_trait( cx, - hir_ty_to_ty(cx.tcx, imp.self_ty), + trait_impl.self_ty(), copy_def_id, &[], ) @@ -88,9 +143,9 @@ impl LateLintPass<'_> for IncorrectImpls { if impl_item.ident.name == sym::clone { if block.stmts.is_empty() && let Some(expr) = block.expr - && let ExprKind::Unary(UnOp::Deref, inner) = expr.kind - && let ExprKind::Path(qpath) = inner.kind - && last_path_segment(&qpath).ident.name == symbol::kw::SelfLower + && let ExprKind::Unary(UnOp::Deref, deref) = expr.kind + && let ExprKind::Path(qpath) = deref.kind + && last_path_segment(&qpath).ident.name == kw::SelfLower {} else { span_lint_and_sugg( cx, @@ -112,7 +167,7 @@ impl LateLintPass<'_> for IncorrectImpls { INCORRECT_CLONE_IMPL_ON_COPY_TYPE, impl_item.span, "incorrect implementation of `clone_from` on a `Copy` type", - "remove this", + "remove it", String::new(), Applicability::MaybeIncorrect, ); @@ -120,5 +175,69 @@ impl LateLintPass<'_> for IncorrectImpls { return; } } + + if cx.tcx.is_diagnostic_item(sym::PartialOrd, trait_impl.def_id) + && impl_item.ident.name == sym::partial_cmp + && let Some(ord_def_id) = cx + .tcx + .diagnostic_items(trait_impl.def_id.krate) + .name_to_id + .get(&sym::Ord) + && implements_trait( + cx, + trait_impl.self_ty(), + *ord_def_id, + &[], + ) + { + if block.stmts.is_empty() + && let Some(expr) = block.expr + && let ExprKind::Call( + Expr { + kind: ExprKind::Path(some_path), + hir_id: some_hir_id, + .. + }, + [cmp_expr], + ) = expr.kind + && is_res_lang_ctor(cx, cx.qpath_res(some_path, *some_hir_id), LangItem::OptionSome) + && let ExprKind::MethodCall(cmp_path, _, [other_expr], ..) = cmp_expr.kind + && cmp_path.ident.name == sym::cmp + && let Res::Local(..) 
= path_res(cx, other_expr) + {} else { + // If `Self` and `Rhs` are not the same type, bail. This makes creating a valid + // suggestion tons more complex. + if let [lhs, rhs, ..] = trait_impl.args.as_slice() && lhs != rhs { + return; + } + + span_lint_and_then( + cx, + INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE, + item.span, + "incorrect implementation of `partial_cmp` on an `Ord` type", + |diag| { + let [_, other] = body.params else { + return; + }; + + let suggs = if let Some(other_ident) = other.pat.simple_ident() { + vec![(block.span, format!("{{ Some(self.cmp({})) }}", other_ident.name))] + } else { + vec![ + (block.span, "{ Some(self.cmp(other)) }".to_owned()), + (other.pat.span, "other".to_owned()), + ] + }; + + diag.multipart_suggestion( + "change this to", + suggs, + Applicability::Unspecified, + ); + } + ); + } + } } } diff --git a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs index 7a269e98ff1..01a7c497cbe 100644 --- a/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs +++ b/src/tools/clippy/clippy_lints/src/index_refutable_slice.rs @@ -13,7 +13,8 @@ use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_middle::ty; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{symbol::Ident, Span}; +use rustc_span::symbol::Ident; +use rustc_span::Span; declare_clippy_lint! { /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/inherent_impl.rs b/src/tools/clippy/clippy_lints/src/inherent_impl.rs index 3ac40401e89..3d1113ff9cc 100644 --- a/src/tools/clippy/clippy_lints/src/inherent_impl.rs +++ b/src/tools/clippy/clippy_lints/src/inherent_impl.rs @@ -3,7 +3,8 @@ use clippy_utils::diagnostics::span_lint_and_note; use clippy_utils::is_lint_allowed; use rustc_data_structures::fx::FxHashMap; -use rustc_hir::{def_id::LocalDefId, Item, ItemKind, Node}; +use rustc_hir::def_id::LocalDefId; +use rustc_hir::{Item, ItemKind, Node}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::Span; diff --git a/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs b/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs index 7e1548531f1..f95d2c2edb1 100644 --- a/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs +++ b/src/tools/clippy/clippy_lints/src/init_numbered_fields.rs @@ -71,7 +71,7 @@ impl<'tcx> LateLintPass<'tcx> for NumberedFields { INIT_NUMBERED_FIELDS, e.span, "used a field initializer for a tuple struct", - "try this instead", + "try", snippet, appl, ); diff --git a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs index 34e9991582c..8df7dfb8b9e 100644 --- a/src/tools/clippy/clippy_lints/src/instant_subtraction.rs +++ b/src/tools/clippy/clippy_lints/src/instant_subtraction.rs @@ -7,7 +7,8 @@ use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{source_map::Spanned, sym}; +use rustc_span::source_map::Spanned; +use rustc_span::sym; declare_clippy_lint! 
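The new `INCORRECT_PARTIAL_ORD_IMPL_ON_ORD_TYPE` pass above rests on the requirement that `PartialOrd` and `Ord` agree. A small self-contained sketch (type, fields, and values invented here) of how a hand-written `partial_cmp` can silently diverge from `cmp`, which is exactly what delegating via `Some(self.cmp(other))` rules out:

```rust
use std::cmp::Ordering;

#[derive(Eq, PartialEq)]
struct Version {
    major: u32,
    minor: u32,
}

impl Ord for Version {
    fn cmp(&self, other: &Self) -> Ordering {
        (self.major, self.minor).cmp(&(other.major, other.minor))
    }
}

impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Only `major` is compared: `<`/`>` now disagree with `cmp` and `sort`.
        Some(self.major.cmp(&other.major))
    }
}

fn main() {
    let a = Version { major: 1, minor: 0 };
    let b = Version { major: 1, minor: 5 };
    assert!(!(a < b));                      // PartialOrd: neither less nor greater
    assert_eq!(a.cmp(&b), Ordering::Less);  // Ord: `a` sorts before `b`
}
```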
{ /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/items_after_test_module.rs b/src/tools/clippy/clippy_lints/src/items_after_test_module.rs index 40378ee8205..55a43e91562 100644 --- a/src/tools/clippy/clippy_lints/src/items_after_test_module.rs +++ b/src/tools/clippy/clippy_lints/src/items_after_test_module.rs @@ -1,4 +1,5 @@ -use clippy_utils::{diagnostics::span_lint_and_help, is_from_proc_macro, is_in_cfg_test}; +use clippy_utils::diagnostics::span_lint_and_help; +use clippy_utils::{is_from_proc_macro, is_in_cfg_test}; use rustc_hir::{HirId, ItemId, ItemKind, Mod}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; @@ -32,7 +33,7 @@ declare_clippy_lint! { /// // [...] /// } /// ``` - #[clippy::version = "1.70.0"] + #[clippy::version = "1.71.0"] pub ITEMS_AFTER_TEST_MODULE, style, "An item was found after the testing module `tests`" diff --git a/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs b/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs index bb100ec632e..066d2c4b787 100644 --- a/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs +++ b/src/tools/clippy/clippy_lints/src/iter_not_returning_iterator.rs @@ -1,5 +1,8 @@ -use clippy_utils::{diagnostics::span_lint, get_parent_node, ty::implements_trait}; -use rustc_hir::{def_id::LocalDefId, FnSig, ImplItem, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind}; +use clippy_utils::diagnostics::span_lint; +use clippy_utils::get_parent_node; +use clippy_utils::ty::implements_trait; +use rustc_hir::def_id::LocalDefId; +use rustc_hir::{FnSig, ImplItem, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::symbol::sym; diff --git a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs index 693218f8a9c..b22b57a3006 100644 --- a/src/tools/clippy/clippy_lints/src/large_enum_variant.rs +++ b/src/tools/clippy/clippy_lints/src/large_enum_variant.rs @@ -1,10 +1,8 @@ //! 
lint when there is a large size difference between variants on an enum +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet_with_applicability; -use clippy_utils::{ - diagnostics::span_lint_and_then, - ty::{approx_ty_size, is_copy, AdtVariantInfo}, -}; +use clippy_utils::ty::{approx_ty_size, is_copy, AdtVariantInfo}; use rustc_errors::Applicability; use rustc_hir::{Item, ItemKind}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/large_futures.rs b/src/tools/clippy/clippy_lints/src/large_futures.rs index 087c4a65250..d67d5899350 100644 --- a/src/tools/clippy/clippy_lints/src/large_futures.rs +++ b/src/tools/clippy/clippy_lints/src/large_futures.rs @@ -1,5 +1,6 @@ +use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet; -use clippy_utils::{diagnostics::span_lint_and_sugg, ty::implements_trait}; +use clippy_utils::ty::implements_trait; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, LangItem, MatchSource, QPath}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/large_include_file.rs b/src/tools/clippy/clippy_lints/src/large_include_file.rs index 424c0d9e798..566901de347 100644 --- a/src/tools/clippy/clippy_lints/src/large_include_file.rs +++ b/src/tools/clippy/clippy_lints/src/large_include_file.rs @@ -2,8 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_note; use clippy_utils::is_lint_allowed; use clippy_utils::macros::root_macro_call_first_node; use rustc_ast::LitKind; -use rustc_hir::Expr; -use rustc_hir::ExprKind; +use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::sym; diff --git a/src/tools/clippy/clippy_lints/src/large_stack_frames.rs b/src/tools/clippy/clippy_lints/src/large_stack_frames.rs index 9c0cc978a39..7aa1446d53d 100644 --- a/src/tools/clippy/clippy_lints/src/large_stack_frames.rs +++ b/src/tools/clippy/clippy_lints/src/large_stack_frames.rs @@ -4,11 +4,9 @@ use clippy_utils::diagnostics::span_lint_and_note; use clippy_utils::fn_has_unsatisfiable_preds; use rustc_hir::def_id::LocalDefId; use rustc_hir::intravisit::FnKind; -use rustc_hir::Body; -use rustc_hir::FnDecl; +use rustc_hir::{Body, FnDecl}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_session::declare_tool_lint; -use rustc_session::impl_lint_pass; +use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::Span; declare_clippy_lint! 
{ diff --git a/src/tools/clippy/clippy_lints/src/len_zero.rs b/src/tools/clippy/clippy_lints/src/len_zero.rs index 83fa25c795a..50c433d3161 100644 --- a/src/tools/clippy/clippy_lints/src/len_zero.rs +++ b/src/tools/clippy/clippy_lints/src/len_zero.rs @@ -1,22 +1,23 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then}; use clippy_utils::source::snippet_with_context; -use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed, peel_ref_operators, sugg::Sugg}; +use clippy_utils::sugg::Sugg; +use clippy_utils::{get_item_name, get_parent_as_impl, is_lint_allowed, peel_ref_operators}; use if_chain::if_chain; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; -use rustc_hir::def_id::DefIdSet; +use rustc_hir::def::Res; +use rustc_hir::def_id::{DefId, DefIdSet}; +use rustc_hir::lang_items::LangItem; use rustc_hir::{ - def::Res, def_id::DefId, lang_items::LangItem, AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, - GenericBound, ImplItem, ImplItemKind, ImplicitSelfKind, Item, ItemKind, Mutability, Node, PathSegment, PrimTy, - QPath, TraitItemRef, TyKind, TypeBindingKind, + AssocItemKind, BinOpKind, Expr, ExprKind, FnRetTy, GenericArg, GenericBound, ImplItem, ImplItemKind, + ImplicitSelfKind, Item, ItemKind, Mutability, Node, PathSegment, PrimTy, QPath, TraitItemRef, TyKind, + TypeBindingKind, }; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, AssocKind, FnSig, Ty}; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use rustc_span::{ - source_map::{Span, Spanned, Symbol}, - symbol::sym, -}; +use rustc_span::source_map::{Span, Spanned, Symbol}; +use rustc_span::symbol::sym; declare_clippy_lint! { /// ### What it does @@ -145,7 +146,10 @@ impl<'tcx> LateLintPass<'tcx> for LenZero { if let Some(local_id) = ty_id.as_local(); let ty_hir_id = cx.tcx.hir().local_def_id_to_hir_id(local_id); if !is_lint_allowed(cx, LEN_WITHOUT_IS_EMPTY, ty_hir_id); - if let Some(output) = parse_len_output(cx, cx.tcx.fn_sig(item.owner_id).instantiate_identity().skip_binder()); + if let Some(output) = parse_len_output( + cx, + cx.tcx.fn_sig(item.owner_id).instantiate_identity().skip_binder() + ); then { let (name, kind) = match cx.tcx.hir().find(ty_hir_id) { Some(Node::ForeignItem(x)) => (x.ident.name, "extern type"), diff --git a/src/tools/clippy/clippy_lints/src/let_if_seq.rs b/src/tools/clippy/clippy_lints/src/let_if_seq.rs index db41bc67da1..2f6f36c3960 100644 --- a/src/tools/clippy/clippy_lints/src/let_if_seq.rs +++ b/src/tools/clippy/clippy_lints/src/let_if_seq.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::path_to_local_id; use clippy_utils::source::snippet; -use clippy_utils::{path_to_local_id, visitors::is_local_used}; +use clippy_utils::visitors::is_local_used; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir as hir; diff --git a/src/tools/clippy/clippy_lints/src/let_underscore.rs b/src/tools/clippy/clippy_lints/src/let_underscore.rs index b7a47002080..e7c875ab3a9 100644 --- a/src/tools/clippy/clippy_lints/src/let_underscore.rs +++ b/src/tools/clippy/clippy_lints/src/let_underscore.rs @@ -1,12 +1,10 @@ use clippy_utils::diagnostics::span_lint_and_help; -use clippy_utils::is_from_proc_macro; use clippy_utils::ty::{implements_trait, is_must_use_ty, match_type}; -use clippy_utils::{is_must_use_func_call, paths}; +use clippy_utils::{is_from_proc_macro, is_must_use_func_call, paths}; use rustc_hir::{Local, PatKind}; use rustc_lint::{LateContext, 
LateLintPass}; use rustc_middle::lint::in_external_macro; -use rustc_middle::ty::GenericArgKind; -use rustc_middle::ty::IsSuggestable; +use rustc_middle::ty::{GenericArgKind, IsSuggestable}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::{BytePos, Span}; diff --git a/src/tools/clippy/clippy_lints/src/lib.rs b/src/tools/clippy/clippy_lints/src/lib.rs index 365cc4c59ad..5e62cfd70ec 100644 --- a/src/tools/clippy/clippy_lints/src/lib.rs +++ b/src/tools/clippy/clippy_lints/src/lib.rs @@ -183,6 +183,7 @@ mod manual_assert; mod manual_async_fn; mod manual_bits; mod manual_clamp; +mod manual_float_methods; mod manual_is_ascii_check; mod manual_let_else; mod manual_main_separator_str; @@ -228,6 +229,7 @@ mod needless_for_each; mod needless_if; mod needless_late_init; mod needless_parens_on_range_literals; +mod needless_pass_by_ref_mut; mod needless_pass_by_value; mod needless_question_mark; mod needless_update; @@ -345,11 +347,10 @@ mod zero_div_zero; mod zero_sized_map_values; // end lints modules, do not remove this comment, it’s used in `update_lints` +use crate::utils::conf::metadata::get_configuration_metadata; +use crate::utils::conf::TryConf; pub use crate::utils::conf::{lookup_conf_file, Conf}; -use crate::utils::{ - conf::{metadata::get_configuration_metadata, TryConf}, - FindAll, -}; +use crate::utils::FindAll; /// Register all pre expansion lints /// @@ -662,7 +663,6 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: }); store.register_late_pass(move |_| Box::new(matches::Matches::new(msrv()))); let matches_for_let_else = conf.matches_for_let_else; - store.register_late_pass(move |_| Box::new(manual_let_else::ManualLetElse::new(msrv(), matches_for_let_else))); store.register_early_pass(move || Box::new(manual_non_exhaustive::ManualNonExhaustiveStruct::new(msrv()))); store.register_late_pass(move |_| Box::new(manual_non_exhaustive::ManualNonExhaustiveEnum::new(msrv()))); store.register_late_pass(move |_| Box::new(manual_strip::ManualStrip::new(msrv()))); @@ -722,7 +722,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: store.register_late_pass(|_| Box::new(drop_forget_ref::DropForgetRef)); store.register_late_pass(|_| Box::new(empty_enum::EmptyEnum)); store.register_late_pass(|_| Box::new(invalid_upcast_comparisons::InvalidUpcastComparisons)); - store.register_late_pass(|_| Box::new(regex::Regex)); + store.register_late_pass(|_| Box::<regex::Regex>::default()); let ignore_interior_mutability = conf.ignore_interior_mutability.clone(); store.register_late_pass(move |_| Box::new(copies::CopyAndPaste::new(ignore_interior_mutability.clone()))); store.register_late_pass(|_| Box::new(copy_iterator::CopyIterator)); @@ -771,7 +771,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: store.register_late_pass(|_| Box::<useless_conversion::UselessConversion>::default()); store.register_late_pass(|_| Box::new(implicit_hasher::ImplicitHasher)); store.register_late_pass(|_| Box::new(fallible_impl_from::FallibleImplFrom)); - store.register_late_pass(|_| Box::<question_mark::QuestionMark>::default()); + store.register_late_pass(move |_| Box::new(question_mark::QuestionMark::new(msrv(), matches_for_let_else))); store.register_late_pass(|_| Box::new(question_mark_used::QuestionMarkUsed)); store.register_early_pass(|| Box::new(suspicious_operation_groupings::SuspiciousOperationGroupings)); store.register_late_pass(|_| Box::new(suspicious_trait_impl::SuspiciousImpl)); @@ -1056,6 
+1056,11 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: let stack_size_threshold = conf.stack_size_threshold; store.register_late_pass(move |_| Box::new(large_stack_frames::LargeStackFrames::new(stack_size_threshold))); store.register_late_pass(|_| Box::new(single_range_in_vec_init::SingleRangeInVecInit)); + store.register_late_pass(move |_| { + Box::new(needless_pass_by_ref_mut::NeedlessPassByRefMut::new( + avoid_breaking_exported_api, + )) + }); store.register_late_pass(|_| Box::new(incorrect_impls::IncorrectImpls)); store.register_late_pass(move |_| { Box::new(single_call_fn::SingleCallFn { @@ -1072,6 +1077,7 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf: store.register_late_pass(|_| Box::new(manual_range_patterns::ManualRangePatterns)); store.register_early_pass(|| Box::new(visibility::Visibility)); store.register_late_pass(move |_| Box::new(tuple_array_conversions::TupleArrayConversions { msrv: msrv() })); + store.register_late_pass(|_| Box::new(manual_float_methods::ManualFloatMethods)); // add lints here, do not remove this comment, it's used in `new_lint` } diff --git a/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs b/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs index 09b2032e20f..49425ff0a8e 100644 --- a/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs +++ b/src/tools/clippy/clippy_lints/src/lines_filter_map_ok.rs @@ -1,7 +1,6 @@ -use clippy_utils::{ - diagnostics::span_lint_and_then, is_diag_item_method, is_trait_method, match_def_path, path_to_local_id, paths, - ty::match_type, -}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::ty::match_type; +use clippy_utils::{is_diag_item_method, is_trait_method, match_def_path, path_to_local_id, paths}; use rustc_errors::Applicability; use rustc_hir::{Body, Closure, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/literal_representation.rs b/src/tools/clippy/clippy_lints/src/literal_representation.rs index dadcd9c5135..09ca0317337 100644 --- a/src/tools/clippy/clippy_lints/src/literal_representation.rs +++ b/src/tools/clippy/clippy_lints/src/literal_representation.rs @@ -264,7 +264,7 @@ impl LiteralDigitGrouping { return; } - if Self::is_literal_uuid_formatted(&mut num_lit) { + if Self::is_literal_uuid_formatted(&num_lit) { return; } @@ -376,7 +376,7 @@ impl LiteralDigitGrouping { /// /// Returns `true` if the radix is hexadecimal, and the groups match the /// UUID format of 8-4-4-4-12. 
- fn is_literal_uuid_formatted(num_lit: &mut NumericLiteral<'_>) -> bool { + fn is_literal_uuid_formatted(num_lit: &NumericLiteral<'_>) -> bool { if num_lit.radix != Radix::Hexadecimal { return false; } diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs index 4bb9936e9cd..0aaa66e6bce 100644 --- a/src/tools/clippy/clippy_lints/src/loops/manual_find.rs +++ b/src/tools/clippy/clippy_lints/src/loops/manual_find.rs @@ -1,14 +1,14 @@ use super::utils::make_iterator_snippet; use super::MANUAL_FIND; -use clippy_utils::{ - diagnostics::span_lint_and_then, higher, is_res_lang_ctor, path_res, peel_blocks_with_stmt, - source::snippet_with_applicability, ty::implements_trait, -}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::source::snippet_with_applicability; +use clippy_utils::ty::implements_trait; +use clippy_utils::{higher, is_res_lang_ctor, path_res, peel_blocks_with_stmt}; use if_chain::if_chain; use rustc_errors::Applicability; -use rustc_hir::{ - def::Res, lang_items::LangItem, BindingAnnotation, Block, Expr, ExprKind, HirId, Node, Pat, PatKind, Stmt, StmtKind, -}; +use rustc_hir::def::Res; +use rustc_hir::lang_items::LangItem; +use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, Node, Pat, PatKind, Stmt, StmtKind}; use rustc_lint::LateContext; use rustc_span::source_map::Span; diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs index 1e02a30e35f..559a2c03f14 100644 --- a/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs +++ b/src/tools/clippy/clippy_lints/src/loops/manual_flatten.rs @@ -1,9 +1,8 @@ use super::utils::make_iterator_snippet; use super::MANUAL_FLATTEN; use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::higher; use clippy_utils::visitors::is_local_used; -use clippy_utils::{path_to_local_id, peel_blocks_with_stmt}; +use clippy_utils::{higher, path_to_local_id, peel_blocks_with_stmt}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; diff --git a/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs b/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs index cb9c84be4c7..ca584a454d0 100644 --- a/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs +++ b/src/tools/clippy/clippy_lints/src/loops/manual_while_let_some.rs @@ -1,9 +1,6 @@ -use clippy_utils::{ - diagnostics::{multispan_sugg_with_applicability, span_lint_and_then}, - match_def_path, paths, - source::snippet, - SpanlessEq, -}; +use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_then}; +use clippy_utils::source::snippet; +use clippy_utils::{match_def_path, paths, SpanlessEq}; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, Pat, Stmt, StmtKind, UnOp}; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs b/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs index 0e49f08d898..7b7d19c753f 100644 --- a/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/missing_spin_loop.rs @@ -43,7 +43,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, body: &' MISSING_SPIN_LOOP, body.span, "busy-waiting loop should at least have a spin loop hint", - "try this", + "try", (if is_no_std_crate(cx) { "{ core::hint::spin_loop() }" } else { diff --git 
a/src/tools/clippy/clippy_lints/src/loops/mod.rs b/src/tools/clippy/clippy_lints/src/loops/mod.rs index 529189b52ac..ffd29ab7630 100644 --- a/src/tools/clippy/clippy_lints/src/loops/mod.rs +++ b/src/tools/clippy/clippy_lints/src/loops/mod.rs @@ -601,7 +601,7 @@ declare_clippy_lint! { /// // use `number` /// } /// ``` - #[clippy::version = "1.70.0"] + #[clippy::version = "1.71.0"] pub MANUAL_WHILE_LET_SOME, style, "checking for emptiness of a `Vec` in the loop condition and popping an element in the body" diff --git a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs index 4dae93f6028..b83d148b5f2 100644 --- a/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs +++ b/src/tools/clippy/clippy_lints/src/loops/mut_range_bound.rs @@ -7,7 +7,8 @@ use rustc_hir::{BindingAnnotation, Expr, ExprKind, HirId, Node, PatKind}; use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId}; use rustc_infer::infer::TyCtxtInferExt; use rustc_lint::LateContext; -use rustc_middle::{mir::FakeReadCause, ty}; +use rustc_middle::mir::FakeReadCause; +use rustc_middle::ty; use rustc_span::source_map::Span; pub(super) fn check(cx: &LateContext<'_>, arg: &Expr<'_>, body: &Expr<'_>) { diff --git a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs index ee338c6beb0..7da9121fbb3 100644 --- a/src/tools/clippy/clippy_lints/src/loops/never_loop.rs +++ b/src/tools/clippy/clippy_lints/src/loops/never_loop.rs @@ -1,9 +1,9 @@ use super::utils::make_iterator_snippet; use super::NEVER_LOOP; -use clippy_utils::consts::constant; +use clippy_utils::consts::{constant, Constant}; +use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::higher::ForLoop; use clippy_utils::source::snippet; -use clippy_utils::{consts::Constant, diagnostics::span_lint_and_then}; use rustc_errors::Applicability; use rustc_hir::{Block, Destination, Expr, ExprKind, HirId, InlineAsmOperand, Pat, Stmt, StmtKind}; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs b/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs index d1a1f773f87..7f24f3c5dc2 100644 --- a/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs +++ b/src/tools/clippy/clippy_lints/src/loops/while_immutable_condition.rs @@ -6,8 +6,7 @@ use if_chain::if_chain; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::DefIdMap; use rustc_hir::intravisit::{walk_expr, Visitor}; -use rustc_hir::HirIdSet; -use rustc_hir::{Expr, ExprKind, QPath}; +use rustc_hir::{Expr, ExprKind, HirIdSet, QPath}; use rustc_lint::LateContext; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, expr: &'tcx Expr<'_>) { diff --git a/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs b/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs index 55989f8a446..8019f906aca 100644 --- a/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs +++ b/src/tools/clippy/clippy_lints/src/loops/while_let_on_iterator.rs @@ -1,18 +1,18 @@ use super::WHILE_LET_ON_ITERATOR; use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::higher; use clippy_utils::source::snippet_with_applicability; -use clippy_utils::{ - get_enclosing_loop_or_multi_call_closure, is_refutable, is_res_lang_ctor, is_trait_method, visitors::is_res_used, -}; +use clippy_utils::visitors::is_res_used; +use 
clippy_utils::{get_enclosing_loop_or_multi_call_closure, higher, is_refutable, is_res_lang_ctor, is_trait_method}; use if_chain::if_chain; use rustc_errors::Applicability; +use rustc_hir::def::Res; use rustc_hir::intravisit::{walk_expr, Visitor}; -use rustc_hir::{def::Res, Closure, Expr, ExprKind, HirId, LangItem, Local, Mutability, PatKind, UnOp}; +use rustc_hir::{Closure, Expr, ExprKind, HirId, LangItem, Local, Mutability, PatKind, UnOp}; use rustc_lint::LateContext; use rustc_middle::hir::nested_filter::OnlyBodies; use rustc_middle::ty::adjustment::Adjust; -use rustc_span::{symbol::sym, Symbol}; +use rustc_span::symbol::sym; +use rustc_span::Symbol; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { let (scrutinee_expr, iter_expr_struct, iter_expr, some_pat, loop_expr) = if_chain! { @@ -332,7 +332,7 @@ fn needs_mutable_borrow(cx: &LateContext<'_>, iter_expr: &IterExpr, loop_expr: & if let Some(e) = get_enclosing_loop_or_multi_call_closure(cx, loop_expr) { let Res::Local(local_id) = iter_expr.path else { - return true + return true; }; let mut v = NestedLoopVisitor { cx, diff --git a/src/tools/clippy/clippy_lints/src/macro_use.rs b/src/tools/clippy/clippy_lints/src/macro_use.rs index 8e322a97907..9b158f18f62 100644 --- a/src/tools/clippy/clippy_lints/src/macro_use.rs +++ b/src/tools/clippy/clippy_lints/src/macro_use.rs @@ -8,7 +8,8 @@ use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{edition::Edition, sym, Span}; +use rustc_span::edition::Edition; +use rustc_span::{sym, Span}; declare_clippy_lint! { /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/manual_clamp.rs b/src/tools/clippy/clippy_lints/src/manual_clamp.rs index 440362b96b4..e75666e61f5 100644 --- a/src/tools/clippy/clippy_lints/src/manual_clamp.rs +++ b/src/tools/clippy/clippy_lints/src/manual_clamp.rs @@ -4,21 +4,19 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::sugg::Sugg; use clippy_utils::ty::implements_trait; use clippy_utils::visitors::is_const_evaluatable; -use clippy_utils::MaybePath; use clippy_utils::{ eq_expr_value, in_constant, is_diag_trait_item, is_trait_method, path_res, path_to_local_id, peel_blocks, - peel_blocks_with_stmt, + peel_blocks_with_stmt, MaybePath, }; use itertools::Itertools; -use rustc_errors::Applicability; -use rustc_errors::Diagnostic; -use rustc_hir::{ - def::Res, Arm, BinOpKind, Block, Expr, ExprKind, Guard, HirId, PatKind, PathSegment, PrimTy, QPath, StmtKind, -}; +use rustc_errors::{Applicability, Diagnostic}; +use rustc_hir::def::Res; +use rustc_hir::{Arm, BinOpKind, Block, Expr, ExprKind, Guard, HirId, PatKind, PathSegment, PrimTy, QPath, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::Ty; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{symbol::sym, Span}; +use rustc_span::symbol::sym; +use rustc_span::Span; use std::ops::Deref; declare_clippy_lint! 
{ diff --git a/src/tools/clippy/clippy_lints/src/manual_float_methods.rs b/src/tools/clippy/clippy_lints/src/manual_float_methods.rs new file mode 100644 index 00000000000..085c73a5f9f --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/manual_float_methods.rs @@ -0,0 +1,173 @@ +use clippy_utils::consts::{constant, Constant}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::source::snippet_opt; +use clippy_utils::{is_from_proc_macro, path_to_local}; +use rustc_errors::Applicability; +use rustc_hir::{BinOpKind, Expr, ExprKind}; +use rustc_lint::{LateContext, LateLintPass, Lint, LintContext}; +use rustc_middle::lint::in_external_macro; +use rustc_session::{declare_lint_pass, declare_tool_lint}; + +declare_clippy_lint! { + /// ### What it does + /// Checks for manual `is_infinite` reimplementations + /// (i.e., `x == <float>::INFINITY || x == <float>::NEG_INFINITY`). + /// + /// ### Why is this bad? + /// The method `is_infinite` is shorter and more readable. + /// + /// ### Example + /// ```rust + /// # let x = 1.0f32; + /// if x == f32::INFINITY || x == f32::NEG_INFINITY {} + /// ``` + /// Use instead: + /// ```rust + /// # let x = 1.0f32; + /// if x.is_infinite() {} + /// ``` + #[clippy::version = "1.72.0"] + pub MANUAL_IS_INFINITE, + style, + "use dedicated method to check if a float is infinite" +} +declare_clippy_lint! { + /// ### What it does + /// Checks for manual `is_finite` reimplementations + /// (i.e., `x != <float>::INFINITY && x != <float>::NEG_INFINITY`). + /// + /// ### Why is this bad? + /// The method `is_finite` is shorter and more readable. + /// + /// ### Example + /// ```rust + /// # let x = 1.0f32; + /// if x != f32::INFINITY && x != f32::NEG_INFINITY {} + /// if x.abs() < f32::INFINITY {} + /// ``` + /// Use instead: + /// ```rust + /// # let x = 1.0f32; + /// if x.is_finite() {} + /// if x.is_finite() {} + /// ``` + #[clippy::version = "1.72.0"] + pub MANUAL_IS_FINITE, + style, + "use dedicated method to check if a float is finite" +} +declare_lint_pass!(ManualFloatMethods => [MANUAL_IS_INFINITE, MANUAL_IS_FINITE]); + +#[derive(Clone, Copy)] +enum Variant { + ManualIsInfinite, + ManualIsFinite, +} + +impl Variant { + pub fn lint(self) -> &'static Lint { + match self { + Self::ManualIsInfinite => MANUAL_IS_INFINITE, + Self::ManualIsFinite => MANUAL_IS_FINITE, + } + } + + pub fn msg(self) -> &'static str { + match self { + Self::ManualIsInfinite => "manually checking if a float is infinite", + Self::ManualIsFinite => "manually checking if a float is finite", + } + } +} + +impl<'tcx> LateLintPass<'tcx> for ManualFloatMethods { + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { + if !in_external_macro(cx.sess(), expr.span) + && (!cx.param_env.is_const() || cx.tcx.features().active(sym!(const_float_classify))) + && let ExprKind::Binary(kind, lhs, rhs) = expr.kind + && let ExprKind::Binary(lhs_kind, lhs_lhs, lhs_rhs) = lhs.kind + && let ExprKind::Binary(rhs_kind, rhs_lhs, rhs_rhs) = rhs.kind + // Checking all possible scenarios using a function would be a hopeless task, as we have + // 16 possible alignments of constants/operands. For now, let's use `partition`. 
+ && let (operands, constants) = [lhs_lhs, lhs_rhs, rhs_lhs, rhs_rhs] + .into_iter() + .partition::<Vec<&Expr<'_>>, _>(|i| path_to_local(i).is_some()) + && let [first, second] = &*operands + && let Some([const_1, const_2]) = constants + .into_iter() + .map(|i| constant(cx, cx.typeck_results(), i)) + .collect::<Option<Vec<_>>>() + .as_deref() + && path_to_local(first).is_some_and(|f| path_to_local(second).is_some_and(|s| f == s)) + // The actual infinity check, we also allow `NEG_INFINITY` before` INFINITY` just in + // case somebody does that for some reason + && (is_infinity(const_1) && is_neg_infinity(const_2) + || is_neg_infinity(const_1) && is_infinity(const_2)) + && !is_from_proc_macro(cx, expr) + && let Some(local_snippet) = snippet_opt(cx, first.span) + { + let variant = match (kind.node, lhs_kind.node, rhs_kind.node) { + (BinOpKind::Or, BinOpKind::Eq, BinOpKind::Eq) => Variant::ManualIsInfinite, + (BinOpKind::And, BinOpKind::Ne, BinOpKind::Ne) => Variant::ManualIsFinite, + _ => return, + }; + + span_lint_and_then( + cx, + variant.lint(), + expr.span, + variant.msg(), + |diag| { + match variant { + Variant::ManualIsInfinite => { + diag.span_suggestion( + expr.span, + "use the dedicated method instead", + format!("{local_snippet}.is_infinite()"), + Applicability::MachineApplicable, + ); + }, + Variant::ManualIsFinite => { + // TODO: There's probably some better way to do this, i.e., create + // multiple suggestions with notes between each of them + diag.span_suggestion_verbose( + expr.span, + "use the dedicated method instead", + format!("{local_snippet}.is_finite()"), + Applicability::MaybeIncorrect, + ) + .span_suggestion_verbose( + expr.span, + "this will alter how it handles NaN; if that is a problem, use instead", + format!("{local_snippet}.is_finite() || {local_snippet}.is_nan()"), + Applicability::MaybeIncorrect, + ) + .span_suggestion_verbose( + expr.span, + "or, for conciseness", + format!("!{local_snippet}.is_infinite()"), + Applicability::MaybeIncorrect, + ); + }, + } + }, + ); + } + } +} + +fn is_infinity(constant: &Constant<'_>) -> bool { + match constant { + Constant::F32(float) => *float == f32::INFINITY, + Constant::F64(float) => *float == f64::INFINITY, + _ => false, + } +} + +fn is_neg_infinity(constant: &Constant<'_>) -> bool { + match constant { + Constant::F32(float) => *float == f32::NEG_INFINITY, + Constant::F64(float) => *float == f64::NEG_INFINITY, + _ => false, + } +} diff --git a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs index 31264261f5d..f264424470d 100644 --- a/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs +++ b/src/tools/clippy/clippy_lints/src/manual_is_ascii_check.rs @@ -1,12 +1,16 @@ +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::macros::root_macro_call; use clippy_utils::msrvs::{self, Msrv}; -use clippy_utils::{diagnostics::span_lint_and_sugg, higher, in_constant, macros::root_macro_call, sugg::Sugg}; +use clippy_utils::sugg::Sugg; +use clippy_utils::{higher, in_constant}; use rustc_ast::ast::RangeLimits; use rustc_ast::LitKind::{Byte, Char}; use rustc_errors::Applicability; use rustc_hir::{BorrowKind, Expr, ExprKind, PatKind, RangeEnd}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{def_id::DefId, sym, Span}; +use rustc_span::def_id::DefId; +use rustc_span::{sym, Span}; declare_clippy_lint! 
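The `MaybeIncorrect` applicability and the extra suggestions in the new `manual_float_methods` pass exist because the manual comparison chain and `is_finite` disagree on NaN; a small sketch of that difference (the bindings are illustrative, the expressions mirror the diff's suggestion strings):

```rust
// Why the `is_finite` rewrite is only `MaybeIncorrect`: the original
// comparison chain treats NaN as "finite", while `f32::is_finite` does not.
fn main() {
    let x = f32::NAN;

    let manual = x != f32::INFINITY && x != f32::NEG_INFINITY; // true for NaN
    let method = x.is_finite();                                // false for NaN
    let preserving = x.is_finite() || x.is_nan();              // matches the manual check
    let concise = !x.is_infinite();                            // also matches it

    assert!(manual);
    assert!(!method);
    assert!(preserving);
    assert!(concise);
}
```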
{ /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/manual_let_else.rs b/src/tools/clippy/clippy_lints/src/manual_let_else.rs index 59e421c1622..c531137b785 100644 --- a/src/tools/clippy/clippy_lints/src/manual_let_else.rs +++ b/src/tools/clippy/clippy_lints/src/manual_let_else.rs @@ -1,18 +1,17 @@ +use crate::question_mark::{QuestionMark, QUESTION_MARK}; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::higher::IfLetOrMatch; -use clippy_utils::msrvs::{self, Msrv}; -use clippy_utils::peel_blocks; use clippy_utils::source::snippet_with_context; use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::visitors::{Descend, Visitable}; -use if_chain::if_chain; +use clippy_utils::{is_lint_allowed, msrvs, pat_and_expr_can_be_question_mark, peel_blocks}; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_errors::Applicability; use rustc_hir::intravisit::{walk_expr, Visitor}; use rustc_hir::{Expr, ExprKind, HirId, ItemId, Local, MatchSource, Pat, PatKind, QPath, Stmt, StmtKind, Ty}; -use rustc_lint::{LateContext, LateLintPass, LintContext}; +use rustc_lint::{LateContext, LintContext}; use rustc_middle::lint::in_external_macro; -use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_session::declare_tool_lint; use rustc_span::symbol::{sym, Symbol}; use rustc_span::Span; use serde::Deserialize; @@ -50,25 +49,8 @@ declare_clippy_lint! { "manual implementation of a let...else statement" } -pub struct ManualLetElse { - msrv: Msrv, - matches_behaviour: MatchLintBehaviour, -} - -impl ManualLetElse { - #[must_use] - pub fn new(msrv: Msrv, matches_behaviour: MatchLintBehaviour) -> Self { - Self { - msrv, - matches_behaviour, - } - } -} - -impl_lint_pass!(ManualLetElse => [MANUAL_LET_ELSE]); - -impl<'tcx> LateLintPass<'tcx> for ManualLetElse { - fn check_stmt(&mut self, cx: &LateContext<'_>, stmt: &'tcx Stmt<'tcx>) { +impl<'tcx> QuestionMark { + pub(crate) fn check_manual_let_else(&mut self, cx: &LateContext<'_>, stmt: &'tcx Stmt<'tcx>) { if !self.msrv.meets(msrvs::LET_ELSE) || in_external_macro(cx.sess(), stmt.span) { return; } @@ -81,11 +63,14 @@ impl<'tcx> LateLintPass<'tcx> for ManualLetElse { let Some(if_let_or_match) = IfLetOrMatch::parse(cx, init) { match if_let_or_match { - IfLetOrMatch::IfLet(if_let_expr, let_pat, if_then, if_else) => if_chain! 
{ - if let Some(ident_map) = expr_simple_identity_map(local.pat, let_pat, if_then); - if let Some(if_else) = if_else; - if expr_diverges(cx, if_else); - then { + IfLetOrMatch::IfLet(if_let_expr, let_pat, if_then, if_else) => { + if + let Some(ident_map) = expr_simple_identity_map(local.pat, let_pat, if_then) && + let Some(if_else) = if_else && + expr_diverges(cx, if_else) && + let qm_allowed = is_lint_allowed(cx, QUESTION_MARK, stmt.hir_id) && + (qm_allowed || pat_and_expr_can_be_question_mark(cx, let_pat, if_else).is_none()) + { emit_manual_let_else(cx, stmt.span, if_let_expr, &ident_map, let_pat, if_else); } }, @@ -128,8 +113,6 @@ impl<'tcx> LateLintPass<'tcx> for ManualLetElse { } }; } - - extract_msrv_attr!(LateContext); } fn emit_manual_let_else( @@ -208,7 +191,9 @@ fn replace_in_pattern( match pat.kind { PatKind::Binding(_ann, _id, binding_name, opt_subpt) => { - let Some(pat_to_put) = ident_map.get(&binding_name.name) else { break 'a }; + let Some(pat_to_put) = ident_map.get(&binding_name.name) else { + break 'a; + }; let (sn_ptp, _) = snippet_with_context(cx, pat_to_put.span, span.ctxt(), "", app); if let Some(subpt) = opt_subpt { let subpt = replace_in_pattern(cx, span, ident_map, subpt, app, false); diff --git a/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs b/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs index 65ff555209a..39d8b20d38d 100644 --- a/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs +++ b/src/tools/clippy/clippy_lints/src/manual_range_patterns.rs @@ -2,12 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use rustc_ast::LitKind; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; -use rustc_hir::Expr; -use rustc_hir::ExprKind; -use rustc_hir::PatKind; -use rustc_hir::RangeEnd; -use rustc_lint::LintContext; -use rustc_lint::{LateContext, LateLintPass}; +use rustc_hir::{Expr, ExprKind, PatKind, RangeEnd, UnOp}; +use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_lint_pass, declare_tool_lint}; @@ -19,6 +15,10 @@ declare_clippy_lint! { /// ### Why is this bad? /// Using an explicit range is more concise and easier to read. /// + /// ### Known issues + /// This lint intentionally does not handle numbers greater than `i128::MAX` for `u128` literals + /// in order to support negative numbers. + /// /// ### Example /// ```rust /// let x = 6; @@ -36,11 +36,14 @@ declare_clippy_lint! { } declare_lint_pass!(ManualRangePatterns => [MANUAL_RANGE_PATTERNS]); -fn expr_as_u128(expr: &Expr<'_>) -> Option<u128> { - if let ExprKind::Lit(lit) = expr.kind +fn expr_as_i128(expr: &Expr<'_>) -> Option<i128> { + if let ExprKind::Unary(UnOp::Neg, expr) = expr.kind { + expr_as_i128(expr).map(|num| -num) + } else if let ExprKind::Lit(lit) = expr.kind && let LitKind::Int(num, _) = lit.node { - Some(num) + // Intentionally not handling numbers greater than i128::MAX (for u128 literals) for now. 
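The `manual_range_patterns` change here replaces `expr_as_u128` with a signed `expr_as_i128` so that negative literals can participate, at the documented cost of `u128` values above `i128::MAX`; a sketch of a match the updated lint is meant to fold into a range (values invented):

```rust
// With negative literals folded through the signed helper, a contiguous set of
// alternatives like this can be suggested as a range pattern (`-2..=2`).
fn is_small(x: i32) -> bool {
    match x {
        -2 | -1 | 0 | 1 | 2 => true,
        _ => false,
    }
}

fn main() {
    assert!(is_small(-2));
    assert!(!is_small(3));
}
```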
+ num.try_into().ok() } else { None } @@ -56,22 +59,22 @@ impl LateLintPass<'_> for ManualRangePatterns { if let PatKind::Or(pats) = pat.kind && pats.len() >= 3 { - let mut min = u128::MAX; - let mut max = 0; + let mut min = i128::MAX; + let mut max = i128::MIN; let mut numbers_found = FxHashSet::default(); let mut ranges_found = Vec::new(); for pat in pats { if let PatKind::Lit(lit) = pat.kind - && let Some(num) = expr_as_u128(lit) + && let Some(num) = expr_as_i128(lit) { numbers_found.insert(num); min = min.min(num); max = max.max(num); } else if let PatKind::Range(Some(left), Some(right), end) = pat.kind - && let Some(left) = expr_as_u128(left) - && let Some(right) = expr_as_u128(right) + && let Some(left) = expr_as_i128(left) + && let Some(right) = expr_as_i128(right) && right >= left { min = min.min(left); diff --git a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs index aafee92713f..0e89ca132ef 100644 --- a/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs +++ b/src/tools/clippy/clippy_lints/src/manual_rem_euclid.rs @@ -119,7 +119,7 @@ fn check_for_either_unsigned_int_constant<'a>( } fn check_for_unsigned_int_constant<'a>(cx: &'a LateContext<'_>, expr: &'a Expr<'_>) -> Option<u128> { - let Some(int_const) = constant_full_int(cx, cx.typeck_results(), expr) else { return None }; + let int_const = constant_full_int(cx, cx.typeck_results(), expr)?; match int_const { FullInt::S(s) => s.try_into().ok(), FullInt::U(u) => Some(u), diff --git a/src/tools/clippy/clippy_lints/src/manual_strip.rs b/src/tools/clippy/clippy_lints/src/manual_strip.rs index 93d977a5c96..2b9def1a688 100644 --- a/src/tools/clippy/clippy_lints/src/manual_strip.rs +++ b/src/tools/clippy/clippy_lints/src/manual_strip.rs @@ -8,8 +8,7 @@ use if_chain::if_chain; use rustc_ast::ast::LitKind; use rustc_hir::def::Res; use rustc_hir::intravisit::{walk_expr, Visitor}; -use rustc_hir::BinOpKind; -use rustc_hir::{BorrowKind, Expr, ExprKind}; +use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty; use rustc_session::{declare_tool_lint, impl_lint_pass}; diff --git a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs index 75605fb3091..f0a0f482af2 100644 --- a/src/tools/clippy/clippy_lints/src/map_unit_fn.rs +++ b/src/tools/clippy/clippy_lints/src/map_unit_fn.rs @@ -226,7 +226,7 @@ fn lint_map_unit_fn( ); span_lint_and_then(cx, lint, expr.span, &msg, |diag| { - diag.span_suggestion(stmt.span, "try this", suggestion, applicability); + diag.span_suggestion(stmt.span, "try", suggestion, applicability); }); } else if let Some((binding, closure_expr)) = unit_closure(cx, fn_arg) { let msg = suggestion_msg("closure", map_type); @@ -241,7 +241,7 @@ fn lint_map_unit_fn( snippet_with_applicability(cx, var_arg.span, "_", &mut applicability), snippet_with_context(cx, reduced_expr_span, var_arg.span.ctxt(), "_", &mut applicability).0, ); - diag.span_suggestion(stmt.span, "try this", suggestion, applicability); + diag.span_suggestion(stmt.span, "try", suggestion, applicability); } else { let suggestion = format!( "if let {0}({1}) = {2} {{ ... 
}}", @@ -249,7 +249,7 @@ fn lint_map_unit_fn( snippet(cx, binding.pat.span, "_"), snippet(cx, var_arg.span, "_"), ); - diag.span_suggestion(stmt.span, "try this", suggestion, Applicability::HasPlaceholders); + diag.span_suggestion(stmt.span, "try", suggestion, Applicability::HasPlaceholders); } }); } diff --git a/src/tools/clippy/clippy_lints/src/match_result_ok.rs b/src/tools/clippy/clippy_lints/src/match_result_ok.rs index 6ec9784038c..841c020f2d3 100644 --- a/src/tools/clippy/clippy_lints/src/match_result_ok.rs +++ b/src/tools/clippy/clippy_lints/src/match_result_ok.rs @@ -1,8 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::higher; -use clippy_utils::is_res_lang_ctor; use clippy_utils::source::snippet_with_context; use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::{higher, is_res_lang_ctor}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, LangItem, PatKind}; diff --git a/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs b/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs index d18c92caba2..3329f93b73c 100644 --- a/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/infallible_destructuring_match.rs @@ -28,7 +28,7 @@ pub(crate) fn check(cx: &LateContext<'_>, local: &Local<'_>) -> bool { local.span, "you seem to be trying to use `match` to destructure a single infallible pattern. \ Consider using `let`", - "try this", + "try", format!( "let {}({}{}) = {};", snippet_with_applicability(cx, variant_name.span, "..", &mut applicability), diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs b/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs index f6bf0e7aa1a..e0181a4757c 100644 --- a/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs +++ b/src/tools/clippy/clippy_lints/src/matches/manual_filter.rs @@ -143,7 +143,7 @@ fn check<'tcx>( MANUAL_FILTER, expr.span, "manual implementation of `Option::filter`", - "try this", + "try", if sugg_info.needs_brackets { format!( "{{ {}{}.filter({body_str}) }}", diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_map.rs b/src/tools/clippy/clippy_lints/src/matches/manual_map.rs index aaba239677f..ed3d8b09fdc 100644 --- a/src/tools/clippy/clippy_lints/src/matches/manual_map.rs +++ b/src/tools/clippy/clippy_lints/src/matches/manual_map.rs @@ -58,7 +58,7 @@ fn check<'tcx>( MANUAL_MAP, expr.span, "manual implementation of `Option::map`", - "try this", + "try", if sugg_info.needs_brackets { format!( "{{ {}{}.map({}) }}", diff --git a/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs b/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs index 5b7644a5383..6b611f567ae 100644 --- a/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs +++ b/src/tools/clippy/clippy_lints/src/matches/manual_utils.rs @@ -1,14 +1,17 @@ -use crate::{map_unit_fn::OPTION_MAP_UNIT_FN, matches::MATCH_AS_REF}; +use crate::map_unit_fn::OPTION_MAP_UNIT_FN; +use crate::matches::MATCH_AS_REF; use clippy_utils::source::{snippet_with_applicability, snippet_with_context}; +use clippy_utils::sugg::Sugg; use clippy_utils::ty::{is_copy, is_type_diagnostic_item, peel_mid_ty_refs_is_mutable, type_is_unsafe_function}; use clippy_utils::{ can_move_expr_to_closure, is_else_clause, is_lint_allowed, is_res_lang_ctor, path_res, path_to_local_id, - peel_blocks, peel_hir_expr_refs, peel_hir_expr_while, sugg::Sugg, CaptureKind, + peel_blocks, 
peel_hir_expr_refs, peel_hir_expr_while, CaptureKind, }; use rustc_ast::util::parser::PREC_POSTFIX; use rustc_errors::Applicability; +use rustc_hir::def::Res; use rustc_hir::LangItem::{OptionNone, OptionSome}; -use rustc_hir::{def::Res, BindingAnnotation, Expr, ExprKind, HirId, Mutability, Pat, PatKind, Path, QPath}; +use rustc_hir::{BindingAnnotation, Expr, ExprKind, HirId, Mutability, Pat, PatKind, Path, QPath}; use rustc_lint::LateContext; use rustc_span::{sym, SyntaxContext}; diff --git a/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs b/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs index 29c6e11134f..d51cca040a2 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_as_ref.rs @@ -46,7 +46,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: MATCH_AS_REF, expr.span, &format!("use `{suggestion}()` instead"), - "try this", + "try", format!( "{}.{suggestion}(){cast}", snippet_with_applicability(cx, ex.span, "_", &mut applicability), diff --git a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs index 0064619ef89..e2ddf11abe2 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_like_matches.rs @@ -1,9 +1,7 @@ use super::REDUNDANT_PATTERN_MATCHING; use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::is_lint_allowed; -use clippy_utils::is_wild; use clippy_utils::source::snippet_with_applicability; -use clippy_utils::span_contains_comment; +use clippy_utils::{is_lint_allowed, is_wild, span_contains_comment}; use rustc_ast::{Attribute, LitKind}; use rustc_errors::Applicability; use rustc_hir::{Arm, BorrowKind, Expr, ExprKind, Guard, Pat, PatKind, QPath}; @@ -139,7 +137,7 @@ where MATCH_LIKE_MATCHES_MACRO, expr.span, &format!("{} expression looks like `matches!` macro", if is_if_let { "if let .. else" } else { "match" }), - "try this", + "try", format!( "{}matches!({}, {pat_and_guard})", if b0 { "" } else { "!" 
}, diff --git a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs index 2917f85c45f..89dcac4d849 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_on_vec_items.rs @@ -22,7 +22,7 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, scrutinee: &'tcx Expr<'_>) { MATCH_ON_VEC_ITEMS, scrutinee.span, "indexing into a vector may panic", - "try this", + "try", format!( "{}.get({})", snippet(cx, vec.span, ".."), diff --git a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs index 3d2fbea63f5..6fc79faddbe 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_same_arms.rs @@ -2,8 +2,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet; use clippy_utils::{is_lint_allowed, path_to_local, search_same, SpanlessEq, SpanlessHash}; use core::cmp::Ordering; -use core::iter; -use core::slice; +use core::{iter, slice}; use rustc_arena::DroplessArena; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; @@ -240,7 +239,7 @@ impl<'a> NormalizedPat<'a> { }, PatKind::TupleStruct(ref path, pats, wild_idx) => { let Some(adt) = cx.typeck_results().pat_ty(pat).ty_adt_def() else { - return Self::Wild + return Self::Wild; }; let (var_id, variant) = if adt.is_enum() { match cx.qpath_res(path, pat.hir_id).opt_def_id() { diff --git a/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs b/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs index 3126b590180..8d22ceb47f8 100644 --- a/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs +++ b/src/tools/clippy/clippy_lints/src/matches/match_wild_enum.rs @@ -143,7 +143,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { MATCH_WILDCARD_FOR_SINGLE_VARIANTS, wildcard_span, "wildcard matches only a single variant and will also match any future added variants", - "try this", + "try", format_suggestion(x), Applicability::MaybeIncorrect, ), @@ -161,7 +161,7 @@ pub(crate) fn check(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { WILDCARD_ENUM_MATCH_ARM, wildcard_span, message, - "try this", + "try", suggestions.join(" | "), Applicability::MaybeIncorrect, ); diff --git a/src/tools/clippy/clippy_lints/src/matches/mod.rs b/src/tools/clippy/clippy_lints/src/matches/mod.rs index 00fa3eb9bfe..d1061171e4d 100644 --- a/src/tools/clippy/clippy_lints/src/matches/mod.rs +++ b/src/tools/clippy/clippy_lints/src/matches/mod.rs @@ -1125,8 +1125,8 @@ fn contains_cfg_arm(cx: &LateContext<'_>, e: &Expr<'_>, scrutinee: &Expr<'_>, ar //|^ let found = arm_spans.try_fold(start, |start, range| { let Some((end, next_start)) = range else { - // Shouldn't happen as macros can't expand to match arms, but treat this as though a `cfg` attribute were - // found. + // Shouldn't happen as macros can't expand to match arms, but treat this as though a `cfg` attribute + // were found. 
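As a reminder of what the `match_like_matches` hunk above rewrites (the change here only shortens the help label to "try"), here is a hedged, user-level sketch; the function name is invented and the suggested form appears in the comment.

```rust
// Illustrative only: a two-arm bool match that MATCH_LIKE_MATCHES_MACRO reports as
// "match expression looks like `matches!` macro".
fn is_single_digit(x: i32) -> bool {
    match x {
        0..=9 => true,
        _ => false,
    }
    // suggested: matches!(x, 0..=9)
}

fn main() {
    assert!(is_single_digit(7));
    assert!(!is_single_digit(42));
}
```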
return Err(()); }; let span = SpanData { diff --git a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs index 039c2134cf8..ad38f1394b4 100644 --- a/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/redundant_pattern_match.rs @@ -45,49 +45,39 @@ fn try_get_generic_ty(ty: Ty<'_>, index: usize) -> Option<Ty<'_>> { } } -fn find_sugg_for_if_let<'tcx>( +fn find_method_and_type<'tcx>( cx: &LateContext<'tcx>, - expr: &'tcx Expr<'_>, - let_pat: &Pat<'_>, - let_expr: &'tcx Expr<'_>, - keyword: &'static str, - has_else: bool, -) { - // also look inside refs - // if we have &None for example, peel it so we can detect "if let None = x" - let check_pat = match let_pat.kind { - PatKind::Ref(inner, _mutability) => inner, - _ => let_pat, - }; - let op_ty = cx.typeck_results().expr_ty(let_expr); - // Determine which function should be used, and the type contained by the corresponding - // variant. - let (good_method, inner_ty) = match check_pat.kind { + check_pat: &Pat<'_>, + op_ty: Ty<'tcx>, +) -> Option<(&'static str, Ty<'tcx>)> { + match check_pat.kind { PatKind::TupleStruct(ref qpath, args, rest) => { let is_wildcard = matches!(args.first().map(|p| &p.kind), Some(PatKind::Wild)); let is_rest = matches!((args, rest.as_opt_usize()), ([], Some(_))); if is_wildcard || is_rest { let res = cx.typeck_results().qpath_res(qpath, check_pat.hir_id); - let Some(id) = res.opt_def_id().map(|ctor_id| cx.tcx.parent(ctor_id)) else { return }; + let Some(id) = res.opt_def_id().map(|ctor_id| cx.tcx.parent(ctor_id)) else { + return None; + }; let lang_items = cx.tcx.lang_items(); if Some(id) == lang_items.result_ok_variant() { - ("is_ok()", try_get_generic_ty(op_ty, 0).unwrap_or(op_ty)) + Some(("is_ok()", try_get_generic_ty(op_ty, 0).unwrap_or(op_ty))) } else if Some(id) == lang_items.result_err_variant() { - ("is_err()", try_get_generic_ty(op_ty, 1).unwrap_or(op_ty)) + Some(("is_err()", try_get_generic_ty(op_ty, 1).unwrap_or(op_ty))) } else if Some(id) == lang_items.option_some_variant() { - ("is_some()", op_ty) + Some(("is_some()", op_ty)) } else if Some(id) == lang_items.poll_ready_variant() { - ("is_ready()", op_ty) + Some(("is_ready()", op_ty)) } else if is_pat_variant(cx, check_pat, qpath, Item::Diag(sym::IpAddr, sym!(V4))) { - ("is_ipv4()", op_ty) + Some(("is_ipv4()", op_ty)) } else if is_pat_variant(cx, check_pat, qpath, Item::Diag(sym::IpAddr, sym!(V6))) { - ("is_ipv6()", op_ty) + Some(("is_ipv6()", op_ty)) } else { - return; + None } } else { - return; + None } }, PatKind::Path(ref path) => { @@ -99,15 +89,37 @@ fn find_sugg_for_if_let<'tcx>( } else if cx.tcx.lang_items().poll_pending_variant() == Some(variant_id) { "is_pending()" } else { - return; + return None; }; // `None` and `Pending` don't have an inner type. 
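The `redundant_pattern_match` change above is a pure extraction: the pattern-to-method mapping now lives in `find_method_and_type`, with `find_sugg_for_if_let` calling it, and behaviour is meant to stay the same. A small illustrative sketch of what that mapping produces at the user level (names invented, suggestions in comments):

```rust
// Illustrative only: REDUNDANT_PATTERN_MATCHING replaces these `if let` checks with
// the methods the extracted mapping returns (is_some, is_ok, is_err, is_ready, ...).
fn count_hits(opt: Option<u8>, res: Result<u8, ()>) -> u32 {
    let mut hits = 0;
    if let Some(_) = opt { hits += 1 } // suggested: if opt.is_some() { ... }
    if let Err(_) = res { hits += 1 }  // suggested: if res.is_err() { ... }
    hits
}

fn main() {
    assert_eq!(count_hits(Some(1), Ok(2)), 1);
}
```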
- (method, cx.tcx.types.unit) + Some((method, cx.tcx.types.unit)) } else { - return; + None } }, - _ => return, + _ => None, + } +} + +fn find_sugg_for_if_let<'tcx>( + cx: &LateContext<'tcx>, + expr: &'tcx Expr<'_>, + let_pat: &Pat<'_>, + let_expr: &'tcx Expr<'_>, + keyword: &'static str, + has_else: bool, +) { + // also look inside refs + // if we have &None for example, peel it so we can detect "if let None = x" + let check_pat = match let_pat.kind { + PatKind::Ref(inner, _mutability) => inner, + _ => let_pat, + }; + let op_ty = cx.typeck_results().expr_ty(let_expr); + // Determine which function should be used, and the type contained by the corresponding + // variant. + let Some((good_method, inner_ty)) = find_method_and_type(cx, check_pat, op_ty) else { + return; }; // If this is the last expression in a block or there is an else clause then the whole @@ -175,7 +187,7 @@ fn find_sugg_for_if_let<'tcx>( .maybe_par() .to_string(); - diag.span_suggestion(span, "try this", format!("{keyword} {sugg}.{good_method}"), app); + diag.span_suggestion(span, "try", format!("{keyword} {sugg}.{good_method}"), app); if needs_drop { diag.note("this will change drop order of the result, as well as all temporaries"); @@ -200,7 +212,7 @@ pub(super) fn check_match<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, op REDUNDANT_PATTERN_MATCHING, span, &format!("redundant pattern matching, consider using `{good_method}`"), - "try this", + "try", format!("{}.{good_method}", snippet(cx, result_expr.span, "_")), Applicability::MachineApplicable, ); @@ -336,7 +348,9 @@ enum Item { } fn is_pat_variant(cx: &LateContext<'_>, pat: &Pat<'_>, path: &QPath<'_>, expected_item: Item) -> bool { - let Some(id) = cx.typeck_results().qpath_res(path, pat.hir_id).opt_def_id() else { return false }; + let Some(id) = cx.typeck_results().qpath_res(path, pat.hir_id).opt_def_id() else { + return false; + }; match expected_item { Item::Lang(expected_lang_item) => cx diff --git a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs index 02500fac293..ee0fdb35313 100644 --- a/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs +++ b/src/tools/clippy/clippy_lints/src/matches/significant_drop_in_scrutinee.rs @@ -6,8 +6,7 @@ use rustc_errors::{Applicability, Diagnostic}; use rustc_hir::intravisit::{walk_expr, Visitor}; use rustc_hir::{Arm, Expr, ExprKind, MatchSource}; use rustc_lint::{LateContext, LintContext}; -use rustc_middle::ty::GenericArgKind; -use rustc_middle::ty::{Ty, TypeAndMut}; +use rustc_middle::ty::{GenericArgKind, Ty, TypeAndMut}; use rustc_span::Span; use super::SIGNIFICANT_DROP_IN_SCRUTINEE; diff --git a/src/tools/clippy/clippy_lints/src/matches/single_match.rs b/src/tools/clippy/clippy_lints/src/matches/single_match.rs index 35627d6c649..6b05c6bfffd 100644 --- a/src/tools/clippy/clippy_lints/src/matches/single_match.rs +++ b/src/tools/clippy/clippy_lints/src/matches/single_match.rs @@ -136,7 +136,7 @@ fn report_single_pattern( } }; - span_lint_and_sugg(cx, lint, expr.span, msg, "try this", sugg, app); + span_lint_and_sugg(cx, lint, expr.span, msg, "try", sugg, app); } fn check_opt_like<'a>( diff --git a/src/tools/clippy/clippy_lints/src/matches/try_err.rs b/src/tools/clippy/clippy_lints/src/matches/try_err.rs index 3a7f1e034f3..99a748489b4 100644 --- a/src/tools/clippy/clippy_lints/src/matches/try_err.rs +++ b/src/tools/clippy/clippy_lints/src/matches/try_err.rs @@ -70,7 +70,7 @@ pub(super) fn 
check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, scrutine TRY_ERR, expr.span, "returning an `Err(_)` with the `?` operator", - "try this", + "try", suggestion, applicability, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs index 00853348840..3a8cc41748e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/bind_instead_of_map.rs @@ -1,7 +1,8 @@ use super::{contains_return, BIND_INSTEAD_OF_MAP}; use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_sugg, span_lint_and_then}; +use clippy_utils::peel_blocks; use clippy_utils::source::{snippet, snippet_with_context}; -use clippy_utils::{peel_blocks, visitors::find_all_ret_expressions}; +use clippy_utils::visitors::find_all_ret_expressions; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir as hir; @@ -87,7 +88,7 @@ pub(crate) trait BindInsteadOfMap { BIND_INSTEAD_OF_MAP, expr.span, &msg, - "try this", + "try", note, app, ); @@ -124,7 +125,7 @@ pub(crate) trait BindInsteadOfMap { span_lint_and_then(cx, BIND_INSTEAD_OF_MAP, expr.span, &msg, |diag| { multispan_sugg_with_applicability( diag, - "try this", + "try", Applicability::MachineApplicable, std::iter::once((span, Self::GOOD_METHOD_NAME.into())).chain( suggs diff --git a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs index fb10f782f7a..d5897822eda 100644 --- a/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs +++ b/src/tools/clippy/clippy_lints/src/methods/case_sensitive_file_extension_comparisons.rs @@ -1,13 +1,13 @@ use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::source::snippet_opt; -use clippy_utils::source::{indent_of, reindent_multiline}; +use clippy_utils::source::{indent_of, reindent_multiline, snippet_opt}; use clippy_utils::ty::is_type_lang_item; use if_chain::if_chain; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, LangItem}; use rustc_lint::LateContext; -use rustc_span::{source_map::Spanned, Span}; +use rustc_span::source_map::Spanned; +use rustc_span::Span; use super::CASE_SENSITIVE_FILE_EXTENSION_COMPARISONS; diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs index 079df2226d1..0e41f3c2107 100644 --- a/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs +++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp.rs @@ -4,8 +4,7 @@ use clippy_utils::{method_chain_args, path_def_id}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir as hir; -use rustc_lint::LateContext; -use rustc_lint::Lint; +use rustc_lint::{LateContext, Lint}; use rustc_middle::ty; /// Wrapper fn for `CHARS_NEXT_CMP` and `CHARS_LAST_CMP` lints. 
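The `chars_cmp` change above only merges two imports; the wrapper it documents serves both `CHARS_NEXT_CMP` and `CHARS_LAST_CMP`. For orientation, a short illustrative example of the comparisons those lints rewrite (the suggested calls in the comments reflect the lints' usual suggestions, not text from this diff):

```rust
// Illustrative only: char-by-char comparisons that the wrapped lints flag.
fn check(s: &str) -> bool {
    let starts = s.chars().next() == Some('a'); // CHARS_NEXT_CMP suggests: s.starts_with('a')
    let ends = s.chars().last() == Some('z');   // CHARS_LAST_CMP suggests: s.ends_with('z')
    starts && ends
}

fn main() {
    assert!(check("abcz"));
    assert!(!check("hello"));
}
```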
diff --git a/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs index 8984b2cf8fd..c9d50a5b03d 100644 --- a/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs +++ b/src/tools/clippy/clippy_lints/src/methods/chars_cmp_with_unwrap.rs @@ -5,8 +5,7 @@ use if_chain::if_chain; use rustc_ast::ast; use rustc_errors::Applicability; use rustc_hir as hir; -use rustc_lint::LateContext; -use rustc_lint::Lint; +use rustc_lint::{LateContext, Lint}; /// Wrapper fn for `CHARS_NEXT_CMP` and `CHARS_LAST_CMP` lints with `unwrap()`. pub(super) fn check( diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs index 65fd50dff58..7eb325ee7b5 100644 --- a/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs +++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_copy.rs @@ -5,7 +5,9 @@ use clippy_utils::ty::is_copy; use rustc_errors::Applicability; use rustc_hir::{BindingAnnotation, ByRef, Expr, ExprKind, MatchSource, Node, PatKind, QPath}; use rustc_lint::LateContext; -use rustc_middle::ty::{self, adjustment::Adjust, print::with_forced_trimmed_paths}; +use rustc_middle::ty::adjustment::Adjust; +use rustc_middle::ty::print::with_forced_trimmed_paths; +use rustc_middle::ty::{self}; use rustc_span::symbol::{sym, Symbol}; use super::CLONE_ON_COPY; diff --git a/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs b/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs index 5e8ad0861f3..ddf3c9f27df 100644 --- a/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/methods/clone_on_ref_ptr.rs @@ -42,7 +42,7 @@ pub(super) fn check( CLONE_ON_REF_PTR, expr.span, "using `.clone()` on a ref-counted pointer", - "try this", + "try", format!("{caller_type}::<{}>::clone(&{snippet})", subst.type_at(0)), app, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs b/src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs index 5e01ed90ff0..5409ede6008 100644 --- a/src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs +++ b/src/tools/clippy/clippy_lints/src/methods/collapsible_str_replace.rs @@ -8,8 +8,7 @@ use rustc_hir as hir; use rustc_lint::LateContext; use std::collections::VecDeque; -use super::method_call; -use super::COLLAPSIBLE_STR_REPLACE; +use super::{method_call, COLLAPSIBLE_STR_REPLACE}; pub(super) fn check<'tcx>( cx: &LateContext<'tcx>, diff --git a/src/tools/clippy/clippy_lints/src/methods/drain_collect.rs b/src/tools/clippy/clippy_lints/src/methods/drain_collect.rs index d0c79dc11b0..6a82d8f756a 100644 --- a/src/tools/clippy/clippy_lints/src/methods/drain_collect.rs +++ b/src/tools/clippy/clippy_lints/src/methods/drain_collect.rs @@ -4,17 +4,12 @@ use clippy_utils::is_range_full; use clippy_utils::source::snippet; use clippy_utils::ty::is_type_lang_item; use rustc_errors::Applicability; -use rustc_hir::Expr; -use rustc_hir::ExprKind; -use rustc_hir::LangItem; -use rustc_hir::Path; -use rustc_hir::QPath; +use rustc_hir::{Expr, ExprKind, LangItem, Path, QPath}; use rustc_lint::LateContext; use rustc_middle::query::Key; use rustc_middle::ty; use rustc_middle::ty::Ty; -use rustc_span::sym; -use rustc_span::Symbol; +use rustc_span::{sym, Symbol}; /// Checks if both types match the given diagnostic item, e.g.: /// diff --git a/src/tools/clippy/clippy_lints/src/methods/err_expect.rs 
b/src/tools/clippy/clippy_lints/src/methods/err_expect.rs index 4ab06bf2ae8..3d82441c0e6 100644 --- a/src/tools/clippy/clippy_lints/src/methods/err_expect.rs +++ b/src/tools/clippy/clippy_lints/src/methods/err_expect.rs @@ -1,8 +1,7 @@ use super::ERR_EXPECT; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, Msrv}; -use clippy_utils::ty::has_debug_impl; -use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::ty::{has_debug_impl, is_type_diagnostic_item}; use rustc_errors::Applicability; use rustc_lint::LateContext; use rustc_middle::ty; diff --git a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs index 9af11f08c26..d3e90e4bba3 100644 --- a/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs +++ b/src/tools/clippy/clippy_lints/src/methods/expect_fun_call.rs @@ -144,7 +144,7 @@ pub(super) fn check<'tcx>( EXPECT_FUN_CALL, span_replace_word, &format!("use of `{name}` followed by a function call"), - "try this", + "try", format!("unwrap_or_else({closure_args} panic!({sugg}))"), applicability, ); @@ -162,7 +162,7 @@ pub(super) fn check<'tcx>( EXPECT_FUN_CALL, span_replace_word, &format!("use of `{name}` followed by a function call"), - "try this", + "try", format!("unwrap_or_else({closure_args} {{ panic!(\"{{}}\", {arg_root_snippet}) }})"), applicability, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs index 37b28463527..495b266529b 100644 --- a/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs +++ b/src/tools/clippy/clippy_lints/src/methods/extend_with_drain.rs @@ -31,7 +31,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, arg: EXTEND_WITH_DRAIN, expr.span, "use of `extend` instead of `append` for adding the full range of a second vector", - "try this", + "try", format!( "{}.append({}{})", snippet_with_applicability(cx, recv.span, "..", &mut applicability), diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs index fc80f2eeae0..597a423b537 100644 --- a/src/tools/clippy/clippy_lints/src/methods/filter_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_map.rs @@ -13,9 +13,7 @@ use rustc_span::source_map::Span; use rustc_span::symbol::{sym, Symbol}; use std::borrow::Cow; -use super::MANUAL_FILTER_MAP; -use super::MANUAL_FIND_MAP; -use super::OPTION_FILTER_MAP; +use super::{MANUAL_FILTER_MAP, MANUAL_FIND_MAP, OPTION_FILTER_MAP}; fn is_method(cx: &LateContext<'_>, expr: &hir::Expr<'_>, method_name: Symbol) -> bool { match &expr.kind { diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs index d1b5e945dfd..3337b250c0e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_identity.rs @@ -3,7 +3,8 @@ use clippy_utils::{is_expr_identity_function, is_trait_method}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; -use rustc_span::{source_map::Span, sym}; +use rustc_span::source_map::Span; +use rustc_span::sym; use super::FILTER_MAP_IDENTITY; diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs b/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs index 175e04f8ac0..3f89e593148 100644 --- 
a/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_map_next.rs @@ -31,7 +31,7 @@ pub(super) fn check<'tcx>( FILTER_MAP_NEXT, expr.span, msg, - "try this", + "try", format!("{iter_snippet}.find_map({filter_snippet})"), Applicability::MachineApplicable, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/filter_next.rs b/src/tools/clippy/clippy_lints/src/methods/filter_next.rs index edcec0fc101..ce7f9997b5a 100644 --- a/src/tools/clippy/clippy_lints/src/methods/filter_next.rs +++ b/src/tools/clippy/clippy_lints/src/methods/filter_next.rs @@ -1,6 +1,7 @@ -use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg}; +use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; use clippy_utils::source::snippet; use clippy_utils::ty::implements_trait; +use rustc_ast::{BindingAnnotation, Mutability}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; @@ -8,6 +9,21 @@ use rustc_span::sym; use super::FILTER_NEXT; +fn path_to_local(expr: &hir::Expr<'_>) -> Option<hir::HirId> { + match expr.kind { + hir::ExprKind::Field(f, _) => path_to_local(f), + hir::ExprKind::Index(recv, _) => path_to_local(recv), + hir::ExprKind::Path(hir::QPath::Resolved( + _, + hir::Path { + res: rustc_hir::def::Res::Local(local), + .. + }, + )) => Some(*local), + _ => None, + } +} + /// lint use of `filter().next()` for `Iterators` pub(super) fn check<'tcx>( cx: &LateContext<'tcx>, @@ -26,15 +42,30 @@ pub(super) fn check<'tcx>( if filter_snippet.lines().count() <= 1 { let iter_snippet = snippet(cx, recv.span, ".."); // add note if not multi-line - span_lint_and_sugg( - cx, - FILTER_NEXT, - expr.span, - msg, - "try this", - format!("{iter_snippet}.find({filter_snippet})"), - Applicability::MachineApplicable, - ); + span_lint_and_then(cx, FILTER_NEXT, expr.span, msg, |diag| { + let (applicability, pat) = if let Some(id) = path_to_local(recv) + && let Some(hir::Node::Pat(pat)) = cx.tcx.hir().find(id) + && let hir::PatKind::Binding(BindingAnnotation(_, Mutability::Not), _, ident, _) = pat.kind + { + (Applicability::Unspecified, Some((pat.span, ident))) + } else { + (Applicability::MachineApplicable, None) + }; + + diag.span_suggestion( + expr.span, + "try", + format!("{iter_snippet}.find({filter_snippet})"), + applicability, + ); + + if let Some((pat_span, ident)) = pat { + diag.span_help( + pat_span, + format!("you will also need to make `{ident}` mutable, because `find` takes `&mut self`"), + ); + } + }); } else { span_lint(cx, FILTER_NEXT, expr.span, msg); } diff --git a/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs index 6f911d79d0b..84a21de0ac8 100644 --- a/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs +++ b/src/tools/clippy/clippy_lints/src/methods/flat_map_identity.rs @@ -3,7 +3,8 @@ use clippy_utils::{is_expr_identity_function, is_trait_method}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; -use rustc_span::{source_map::Span, sym}; +use rustc_span::source_map::Span; +use rustc_span::sym; use super::FLAT_MAP_IDENTITY; diff --git a/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs b/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs index 4737a101345..172c397fbc8 100644 --- a/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs +++ b/src/tools/clippy/clippy_lints/src/methods/flat_map_option.rs @@ -4,7 +4,8 @@ use rustc_errors::Applicability; use 
rustc_hir as hir; use rustc_lint::LateContext; use rustc_middle::ty; -use rustc_span::{source_map::Span, sym}; +use rustc_span::source_map::Span; +use rustc_span::sym; use super::FLAT_MAP_OPTION; use clippy_utils::ty::is_type_diagnostic_item; diff --git a/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs index e35fb12ed79..a8f090d1de1 100644 --- a/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs +++ b/src/tools/clippy/clippy_lints/src/methods/get_unwrap.rs @@ -71,7 +71,7 @@ pub(super) fn check<'tcx>( GET_UNWRAP, span, &format!("called `.get{mut_str}().unwrap()` on a {caller_type}. Using `[]` is more clear and more concise"), - "try this", + "try", format!( "{borrow_str}{}[{get_args_str}]", snippet_with_applicability(cx, recv.span, "..", &mut applicability) diff --git a/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs b/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs index 37da52dc733..631741d9290 100644 --- a/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs +++ b/src/tools/clippy/clippy_lints/src/methods/inefficient_to_string.rs @@ -7,7 +7,7 @@ use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; use rustc_middle::ty::{self, Ty}; -use rustc_span::symbol::{Symbol, sym}; +use rustc_span::symbol::{sym, Symbol}; use super::INEFFICIENT_TO_STRING; diff --git a/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs b/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs index 7fd3ef1a622..23cc192c38e 100644 --- a/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs +++ b/src/tools/clippy/clippy_lints/src/methods/inspect_for_each.rs @@ -2,7 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::is_trait_method; use rustc_hir as hir; use rustc_lint::LateContext; -use rustc_span::{source_map::Span, sym}; +use rustc_span::source_map::Span; +use rustc_span::sym; use super::INSPECT_FOR_EACH; diff --git a/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs b/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs index 301aff5ae6a..120f3d5f42c 100644 --- a/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs +++ b/src/tools/clippy/clippy_lints/src/methods/is_digit_ascii_radix.rs @@ -1,10 +1,10 @@ //! 
Lint for `c.is_digit(10)` use super::IS_DIGIT_ASCII_RADIX; +use clippy_utils::consts::{constant_full_int, FullInt}; +use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::msrvs::{self, Msrv}; -use clippy_utils::{ - consts::constant_full_int, consts::FullInt, diagnostics::span_lint_and_sugg, source::snippet_with_applicability, -}; +use clippy_utils::source::snippet_with_applicability; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs b/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs index c87f5daab6f..674d3451748 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_kv_map.rs @@ -9,8 +9,7 @@ use clippy_utils::visitors::is_local_used; use rustc_hir::{BindingAnnotation, Body, BorrowKind, ByRef, Expr, ExprKind, Mutability, Pat, PatKind}; use rustc_lint::{LateContext, LintContext}; use rustc_middle::ty; -use rustc_span::sym; -use rustc_span::Span; +use rustc_span::{sym, Span}; /// lint use of: /// - `hashmap.iter().map(|(_, v)| v)` diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs index b4210d87510..9f7ec19aa59 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_overeager_cloned.rs @@ -51,7 +51,7 @@ pub(super) fn check<'tcx>( if let Some(mut snip) = snippet_opt(cx, method_span) { snip.push_str(trailing_clone); let replace_span = expr.span.with_lo(cloned_recv.span.hi()); - diag.span_suggestion(replace_span, "try this", snip, Applicability::MachineApplicable); + diag.span_suggestion(replace_span, "try", snip, Applicability::MachineApplicable); } } ); diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs b/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs index 279175e20c3..39af52141bb 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_skip_next.rs @@ -1,7 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::is_trait_method; -use clippy_utils::path_to_local; use clippy_utils::source::snippet; +use clippy_utils::{is_trait_method, path_to_local}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_hir::{BindingAnnotation, Node, PatKind}; diff --git a/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs b/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs index f6772c5c6b3..2ab721ace84 100644 --- a/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs +++ b/src/tools/clippy/clippy_lints/src/methods/iter_with_drain.rs @@ -21,7 +21,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, span ITER_WITH_DRAIN, span.with_hi(expr.span.hi()), &format!("`drain(..)` used on a `{ty_name}`"), - "try this", + "try", "into_iter()".to_string(), Applicability::MaybeIncorrect, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs index a7284c64497..540425eef8c 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_saturating_arithmetic.rs @@ -21,13 +21,13 @@ pub fn check( return; } - let Some(mm) = is_min_or_max(cx, unwrap_arg) else { return }; + let Some(mm) = 
is_min_or_max(cx, unwrap_arg) else { + return; + }; if ty.is_signed() { - use self::{ - MinMax::{Max, Min}, - Sign::{Neg, Pos}, - }; + use self::MinMax::{Max, Min}; + use self::Sign::{Neg, Pos}; let Some(sign) = lit_sign(arith_rhs) else { return; diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs index a08f7254053..ab13d30d845 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_str_repeat.rs @@ -88,7 +88,7 @@ pub(super) fn check( MANUAL_STR_REPEAT, collect_expr.span, "manual implementation of `str::repeat` using iterators", - "try this", + "try", format!("{val_str}.repeat({count_snip})"), app ) diff --git a/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs b/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs index 576a58499b1..dabed0affcf 100644 --- a/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs +++ b/src/tools/clippy/clippy_lints/src/methods/manual_try_fold.rs @@ -1,15 +1,11 @@ -use clippy_utils::{ - diagnostics::span_lint_and_sugg, - is_from_proc_macro, - msrvs::{Msrv, ITERATOR_TRY_FOLD}, - source::snippet_opt, - ty::implements_trait, -}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::is_from_proc_macro; +use clippy_utils::msrvs::{Msrv, ITERATOR_TRY_FOLD}; +use clippy_utils::source::snippet_opt; +use clippy_utils::ty::implements_trait; use rustc_errors::Applicability; -use rustc_hir::{ - def::{DefKind, Res}, - Expr, ExprKind, -}; +use rustc_hir::def::{DefKind, Res}; +use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_span::Span; diff --git a/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs b/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs index 042bab805b8..01cdd02e602 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_collect_result_unit.rs @@ -25,7 +25,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, iter: &hir::Expr MAP_COLLECT_RESULT_UNIT, expr.span, "`.map().collect()` can be replaced with `.try_for_each()`", - "try this", + "try", format!( "{}.try_for_each({})", snippet(cx, iter.span, ".."), diff --git a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs index 950902d455b..e74a764551c 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_flatten.rs @@ -6,7 +6,8 @@ use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; use rustc_middle::ty; -use rustc_span::{symbol::sym, Span}; +use rustc_span::symbol::sym; +use rustc_span::Span; use super::MAP_FLATTEN; diff --git a/src/tools/clippy/clippy_lints/src/methods/map_identity.rs b/src/tools/clippy/clippy_lints/src/methods/map_identity.rs index 0f25ef82ed4..7be1ce483f6 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_identity.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_identity.rs @@ -4,7 +4,8 @@ use clippy_utils::{is_expr_identity_function, is_trait_method}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; -use rustc_span::{source_map::Span, sym}; +use rustc_span::source_map::Span; +use rustc_span::sym; use super::MAP_IDENTITY; diff --git 
a/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs index 3122f72ee91..5464e455dea 100644 --- a/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs +++ b/src/tools/clippy/clippy_lints/src/methods/map_unwrap_or.rs @@ -11,7 +11,8 @@ use rustc_span::symbol::sym; use super::MAP_UNWRAP_OR; /// lint use of `map().unwrap_or_else()` for `Option`s and `Result`s -/// Return true if lint triggered +/// +/// Returns true if the lint was emitted pub(super) fn check<'tcx>( cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, @@ -63,7 +64,7 @@ pub(super) fn check<'tcx>( MAP_UNWRAP_OR, expr.span, msg, - "try this", + "try", format!("{var_snippet}.map_or_else({unwrap_snippet}, {map_snippet})"), Applicability::MachineApplicable, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/mod.rs b/src/tools/clippy/clippy_lints/src/methods/mod.rs index 9ab78c6f8e0..123ab520e48 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mod.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mod.rs @@ -72,6 +72,7 @@ mod or_fun_call; mod or_then_unwrap; mod path_buf_push_overwrite; mod range_zip_with_len; +mod read_line_without_trim; mod repeat_once; mod search_is_some; mod seek_from_current; @@ -88,6 +89,7 @@ mod suspicious_command_arg_space; mod suspicious_map; mod suspicious_splitn; mod suspicious_to_owned; +mod type_id_on_box; mod uninit_assumed_init; mod unit_hash; mod unnecessary_filter_map; @@ -2927,6 +2929,37 @@ declare_clippy_lint! { declare_clippy_lint! { /// ### What it does + /// Looks for calls to `<Box<dyn Any> as Any>::type_id`. + /// + /// ### Why is this bad? + /// This most certainly does not do what the user expects and is very easy to miss. + /// Calling `type_id` on a `Box<dyn Any>` calls `type_id` on the `Box<..>` itself, + /// so this will return the `TypeId` of the `Box<dyn Any>` type (not the type id + /// of the value referenced by the box!). + /// + /// ### Example + /// ```rust,ignore + /// use std::any::{Any, TypeId}; + /// + /// let any_box: Box<dyn Any> = Box::new(42_i32); + /// assert_eq!(any_box.type_id(), TypeId::of::<i32>()); // ⚠️ this fails! + /// ``` + /// Use instead: + /// ```rust + /// use std::any::{Any, TypeId}; + /// + /// let any_box: Box<dyn Any> = Box::new(42_i32); + /// assert_eq!((*any_box).type_id(), TypeId::of::<i32>()); + /// // ^ dereference first, to call `type_id` on `dyn Any` + /// ``` + #[clippy::version = "1.72.0"] + pub TYPE_ID_ON_BOX, + suspicious, + "calling `.type_id()` on `Box<dyn Any>`" +} + +declare_clippy_lint! { + /// ### What it does /// Detects `().hash(_)`. /// /// ### Why is this bad? @@ -3316,6 +3349,35 @@ declare_clippy_lint! { "checks for usage of `Iterator::fold` with a type that implements `Try`" } +declare_clippy_lint! { + /// Looks for calls to [`Stdin::read_line`] to read a line from the standard input + /// into a string, then later attempting to parse this string into a type without first trimming it, which will + /// always fail because the string has a trailing newline in it. + /// + /// ### Why is this bad? + /// The `.parse()` call will always fail. + /// + /// ### Example + /// ```rust,ignore + /// let mut input = String::new(); + /// std::io::stdin().read_line(&mut input).expect("Failed to read a line"); + /// let num: i32 = input.parse().expect("Not a number!"); + /// assert_eq!(num, 42); // we never even get here! 
+ /// ``` + /// Use instead: + /// ```rust,ignore + /// let mut input = String::new(); + /// std::io::stdin().read_line(&mut input).expect("Failed to read a line"); + /// let num: i32 = input.trim_end().parse().expect("Not a number!"); + /// // ^^^^^^^^^^^ remove the trailing newline + /// assert_eq!(num, 42); + /// ``` + #[clippy::version = "1.72.0"] + pub READ_LINE_WITHOUT_TRIM, + correctness, + "calling `Stdin::read_line`, then trying to parse it without first trimming" +} + pub struct Methods { avoid_breaking_exported_api: bool, msrv: Msrv, @@ -3389,6 +3451,7 @@ impl_lint_pass!(Methods => [ STRING_EXTEND_CHARS, ITER_CLONED_COLLECT, ITER_WITH_DRAIN, + TYPE_ID_ON_BOX, USELESS_ASREF, UNNECESSARY_FOLD, UNNECESSARY_FILTER_MAP, @@ -3435,6 +3498,7 @@ impl_lint_pass!(Methods => [ REPEAT_ONCE, STABLE_SORT_PRIMITIVE, UNIT_HASH, + READ_LINE_WITHOUT_TRIM, UNNECESSARY_SORT_BY, VEC_RESIZE_TO_ZERO, VERBOSE_FILE_READS, @@ -3846,6 +3910,9 @@ impl Methods { ("read_to_string", [_]) => { verbose_file_reads::check(cx, expr, recv, verbose_file_reads::READ_TO_STRING_MSG); }, + ("read_line", [arg]) => { + read_line_without_trim::check(cx, expr, recv, arg); + } ("repeat", [arg]) => { repeat_once::check(cx, expr, recv, arg); }, @@ -3914,6 +3981,9 @@ impl Methods { ("to_os_string" | "to_path_buf" | "to_vec", []) => { implicit_clone::check(cx, name, expr, recv); }, + ("type_id", []) => { + type_id_on_box::check(cx, recv, expr.span); + } ("unwrap", []) => { match method_call(recv) { Some(("get", recv, [get_arg], _, _)) => { @@ -3949,7 +4019,7 @@ impl Methods { } unnecessary_literal_unwrap::check(cx, expr, recv, name, args); }, - ("unwrap_or_default", []) => { + ("unwrap_or_default" | "unwrap_unchecked" | "unwrap_err_unchecked", []) => { unnecessary_literal_unwrap::check(cx, expr, recv, name, args); } ("unwrap_or_else", [u_arg]) => { @@ -4134,7 +4204,7 @@ impl SelfKind { }; let Some(trait_def_id) = cx.tcx.get_diagnostic_item(trait_sym) else { - return false + return false; }; implements_trait(cx, ty, trait_def_id, &[parent_ty.into()]) } diff --git a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs index fe024ee8dff..2855e23bf5b 100644 --- a/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs +++ b/src/tools/clippy/clippy_lints/src/methods/mut_mutex_lock.rs @@ -1,5 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_sugg; -use clippy_utils::{expr_custom_deref_adjustment, ty::is_type_diagnostic_item}; +use clippy_utils::expr_custom_deref_adjustment; +use clippy_utils::ty::is_type_diagnostic_item; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::{Expr, Mutability}; diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs b/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs index f34f1cf2704..dbd965d6506 100644 --- a/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs +++ b/src/tools/clippy/clippy_lints/src/methods/needless_collect.rs @@ -4,10 +4,9 @@ use clippy_utils::source::{snippet, snippet_with_applicability}; use clippy_utils::sugg::Sugg; use clippy_utils::ty::{is_type_diagnostic_item, make_normalized_projection, make_projection}; use clippy_utils::{ - can_move_expr_to_closure, get_enclosing_block, get_parent_node, is_trait_method, path_to_local, path_to_local_id, - CaptureKind, + can_move_expr_to_closure, fn_def_id, get_enclosing_block, get_parent_node, higher, is_trait_method, path_to_local, + path_to_local_id, CaptureKind, }; -use clippy_utils::{fn_def_id, higher}; use 
rustc_data_structures::fx::FxHashMap; use rustc_errors::{Applicability, MultiSpan}; use rustc_hir::intravisit::{walk_block, walk_expr, Visitor}; diff --git a/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs b/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs index 7030baf19ff..eaae8613d44 100644 --- a/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs +++ b/src/tools/clippy/clippy_lints/src/methods/needless_option_as_deref.rs @@ -17,7 +17,9 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, name if is_type_diagnostic_item(cx, outer_ty, sym::Option) && outer_ty == typeck.expr_ty(recv) { if name == "as_deref_mut" && recv.is_syntactic_place_expr() { - let Res::Local(binding_id) = path_res(cx, recv) else { return }; + let Res::Local(binding_id) = path_res(cx, recv) else { + return; + }; if local_used_after_expr(cx, binding_id, recv) { return; @@ -29,7 +31,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, recv: &Expr<'_>, name NEEDLESS_OPTION_AS_DEREF, expr.span, "derefed type is same as origin", - "try this", + "try", snippet_opt(cx, recv.span).unwrap(), Applicability::MachineApplicable, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs b/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs index eada530d670..697eab32a33 100644 --- a/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs +++ b/src/tools/clippy/clippy_lints/src/methods/obfuscated_if_else.rs @@ -1,5 +1,6 @@ use super::OBFUSCATED_IF_ELSE; -use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet_with_applicability}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::snippet_with_applicability; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs index 41ceef19e3a..cb6a2306857 100644 --- a/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs +++ b/src/tools/clippy/clippy_lints/src/methods/option_map_or_none.rs @@ -8,8 +8,7 @@ use rustc_hir::LangItem::{OptionNone, OptionSome}; use rustc_lint::LateContext; use rustc_span::symbol::sym; -use super::OPTION_MAP_OR_NONE; -use super::RESULT_MAP_OR_INTO_OPTION; +use super::{OPTION_MAP_OR_NONE, RESULT_MAP_OR_INTO_OPTION}; // The expression inside a closure may or may not have surrounding braces // which causes problems when generating a suggestion. 
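The `option_map_or_none` module touched above (again only an import merge) backs both `OPTION_MAP_OR_NONE` and `RESULT_MAP_OR_INTO_OPTION`, which is why the comment about closure braces matters there. A hedged, user-level sketch of the two shapes involved, with the usual suggestions in comments (names invented):

```rust
// Illustrative only: `map_or(None, ...)` on an Option and on a Result.
fn demo(opt: Option<i32>, res: Result<i32, ()>) -> (Option<i32>, Option<i32>) {
    let a = opt.map_or(None, |x| Some(x + 1)); // OPTION_MAP_OR_NONE suggests: opt.map(|x| x + 1)
    let b = res.map_or(None, Some);            // RESULT_MAP_OR_INTO_OPTION suggests: res.ok()
    (a, b)
}

fn main() {
    assert_eq!(demo(Some(1), Err(())), (Some(2), None));
}
```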
diff --git a/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs b/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs index f4f158c0439..fcbe005fb28 100644 --- a/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs +++ b/src/tools/clippy/clippy_lints/src/methods/option_map_unwrap_or.rs @@ -1,17 +1,12 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::snippet_with_applicability; -use clippy_utils::ty::is_copy; -use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::ty::{is_copy, is_type_diagnostic_item}; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; use rustc_hir::def::Res; use rustc_hir::intravisit::{walk_path, Visitor}; -use rustc_hir::ExprKind; -use rustc_hir::Node; -use rustc_hir::PatKind; -use rustc_hir::QPath; -use rustc_hir::{self, HirId, Path}; +use rustc_hir::{self, ExprKind, HirId, Node, PatKind, Path, QPath}; use rustc_lint::LateContext; use rustc_middle::hir::nested_filter; use rustc_span::source_map::Span; diff --git a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs index 7ce28ea93e0..9165c1248f0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs +++ b/src/tools/clippy/clippy_lints/src/methods/or_fun_call.rs @@ -62,7 +62,7 @@ pub(super) fn check<'tcx>( OR_FUN_CALL, method_span.with_hi(span.hi()), &format!("use of `{name}` followed by a call to `{path}`"), - "try this", + "try", format!("{sugg}()"), Applicability::MachineApplicable, ); @@ -139,7 +139,7 @@ pub(super) fn check<'tcx>( OR_FUN_CALL, span_replace_word, &format!("use of `{name}` followed by a function call"), - "try this", + "try", format!("{name}_{suffix}({sugg})"), app, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs index 55ba6e262df..7b0bdcf99e7 100644 --- a/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs +++ b/src/tools/clippy/clippy_lints/src/methods/or_then_unwrap.rs @@ -1,8 +1,10 @@ +use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::{diagnostics::span_lint_and_sugg, is_res_lang_ctor, path_res}; +use clippy_utils::{is_res_lang_ctor, path_res}; use rustc_errors::Applicability; -use rustc_hir::{lang_items::LangItem, Expr, ExprKind}; +use rustc_hir::lang_items::LangItem; +use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; use rustc_span::{sym, Span}; @@ -50,7 +52,7 @@ pub(super) fn check<'tcx>( OR_THEN_UNWRAP, unwrap_expr.span.with_lo(or_span.lo()), title, - "try this", + "try", suggestion, applicability, ); diff --git a/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs b/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs index 867a3b40237..f253d8de91f 100644 --- a/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs +++ b/src/tools/clippy/clippy_lints/src/methods/range_zip_with_len.rs @@ -1,7 +1,6 @@ use clippy_utils::diagnostics::span_lint; use clippy_utils::source::snippet; -use clippy_utils::{higher, SpanlessEq}; -use clippy_utils::{is_integer_const, is_trait_method}; +use clippy_utils::{higher, is_integer_const, is_trait_method, SpanlessEq}; use if_chain::if_chain; use rustc_hir::{Expr, ExprKind, QPath}; use rustc_lint::LateContext; diff --git 
a/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs b/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs new file mode 100644 index 00000000000..8add0656101 --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/methods/read_line_without_trim.rs @@ -0,0 +1,74 @@ +use std::ops::ControlFlow; + +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::source::snippet; +use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::visitors::for_each_local_use_after_expr; +use clippy_utils::{get_parent_expr, match_def_path}; +use rustc_errors::Applicability; +use rustc_hir::def::Res; +use rustc_hir::{Expr, ExprKind, QPath}; +use rustc_lint::LateContext; +use rustc_middle::ty::{self, Ty}; +use rustc_span::sym; + +use super::READ_LINE_WITHOUT_TRIM; + +/// Will a `.parse::<ty>()` call fail if the input has a trailing newline? +fn parse_fails_on_trailing_newline(ty: Ty<'_>) -> bool { + // only allow a very limited set of types for now, for which we 100% know parsing will fail + matches!(ty.kind(), ty::Float(_) | ty::Bool | ty::Int(_) | ty::Uint(_)) +} + +pub fn check(cx: &LateContext<'_>, call: &Expr<'_>, recv: &Expr<'_>, arg: &Expr<'_>) { + if let Some(recv_adt) = cx.typeck_results().expr_ty(recv).ty_adt_def() + && match_def_path(cx, recv_adt.did(), &["std", "io", "stdio", "Stdin"]) + && let ExprKind::Path(QPath::Resolved(_, path)) = arg.peel_borrows().kind + && let Res::Local(local_id) = path.res + { + // We've checked that `call` is a call to `Stdin::read_line()` with the right receiver, + // now let's check if the first use of the string passed to `::read_line()` is + // parsed into a type that will always fail if it has a trailing newline. + for_each_local_use_after_expr(cx, local_id, call.hir_id, |expr| { + if let Some(parent) = get_parent_expr(cx, expr) + && let ExprKind::MethodCall(segment, .., span) = parent.kind + && segment.ident.name == sym!(parse) + && let parse_result_ty = cx.typeck_results().expr_ty(parent) + && is_type_diagnostic_item(cx, parse_result_ty, sym::Result) + && let ty::Adt(_, substs) = parse_result_ty.kind() + && let Some(ok_ty) = substs[0].as_type() + && parse_fails_on_trailing_newline(ok_ty) + { + let local_snippet = snippet(cx, expr.span, "<expr>"); + span_lint_and_then( + cx, + READ_LINE_WITHOUT_TRIM, + span, + "calling `.parse()` without trimming the trailing newline character", + |diag| { + diag.span_note(call.span, "call to `.read_line()` here, \ + which leaves a trailing newline character in the buffer, \ + which in turn will cause `.parse()` to fail"); + + diag.span_suggestion( + expr.span, + "try", + format!("{local_snippet}.trim_end()"), + Applicability::MachineApplicable, + ); + } + ); + } + + // only consider the first use to prevent this scenario: + // ``` + // let mut s = String::new(); + // std::io::stdin().read_line(&mut s); + // s.pop(); + // let _x: i32 = s.parse().unwrap(); + // ``` + // this is actually fine, because the pop call removes the trailing newline. 
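To make the failure mode behind the new `read_line_without_trim` pass concrete, here is a small self-contained demonstration, separate from the lint code itself, of an integer parse failing on the trailing newline that `read_line` leaves in the buffer and succeeding once it is trimmed (this mirrors the lint's doc example, using a literal string instead of stdin):

```rust
// Standalone demonstration of why parse_fails_on_trailing_newline treats integers as
// always failing: parsing with and without trimming the '\n' left by read_line.
fn main() {
    let buf = String::from("42\n");        // what read_line typically leaves behind
    assert!(buf.parse::<i32>().is_err());  // fails because of the trailing newline
    assert_eq!(buf.trim_end().parse::<i32>().unwrap(), 42); // suggested fix: trim first
}
```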
+ ControlFlow::<(), ()>::Break(()) + }); + } +} diff --git a/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs b/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs index c028e954381..f3d6a15ede0 100644 --- a/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs +++ b/src/tools/clippy/clippy_lints/src/methods/seek_from_current.rs @@ -3,10 +3,10 @@ use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; -use clippy_utils::{ - diagnostics::span_lint_and_sugg, get_trait_def_id, match_def_path, paths, source::snippet_with_applicability, - ty::implements_trait, -}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::snippet_with_applicability; +use clippy_utils::ty::implements_trait; +use clippy_utils::{get_trait_def_id, match_def_path, paths}; use super::SEEK_FROM_CURRENT; diff --git a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs index 88a3c2620a4..41986551da4 100644 --- a/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs +++ b/src/tools/clippy/clippy_lints/src/methods/str_splitn.rs @@ -55,7 +55,7 @@ fn lint_needless(cx: &LateContext<'_>, method_name: &str, expr: &Expr<'_>, self_ NEEDLESS_SPLITN, expr.span, &format!("unnecessary use of `{r}splitn`"), - "try this", + "try", format!( "{}.{r}split({})", snippet_with_context(cx, self_arg.span, expr.span.ctxt(), "..", &mut app).0, @@ -110,7 +110,7 @@ fn check_manual_split_once( IterUsageKind::Nth(_) => return, }; - span_lint_and_sugg(cx, MANUAL_SPLIT_ONCE, usage.span, msg, "try this", sugg, app); + span_lint_and_sugg(cx, MANUAL_SPLIT_ONCE, usage.span, msg, "try", sugg, app); } /// checks for diff --git a/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs b/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs index 2c20c6d752d..c7885f689d7 100644 --- a/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs +++ b/src/tools/clippy/clippy_lints/src/methods/string_extend_chars.rs @@ -34,7 +34,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, recv: &hir::Expr STRING_EXTEND_CHARS, expr.span, "calling `.extend(_.chars())`", - "try this", + "try", format!( "{}.push_str({ref_str}{})", snippet_with_applicability(cx, recv.span, "..", &mut applicability), diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs index 73632c5a357..bc8f0176764 100644 --- a/src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs +++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_command_arg_space.rs @@ -1,11 +1,10 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::paths; use clippy_utils::ty::match_type; -use rustc_ast as ast; use rustc_errors::{Applicability, Diagnostic}; -use rustc_hir as hir; use rustc_lint::LateContext; use rustc_span::Span; +use {rustc_ast as ast, rustc_hir as hir}; use super::SUSPICIOUS_COMMAND_ARG_SPACE; diff --git a/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs index e818f1892e5..9eb8d6e6e78 100644 --- a/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/suspicious_to_owned.rs @@ -5,7 +5,8 @@ use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; -use rustc_middle::ty::{self, 
print::with_forced_trimmed_paths}; +use rustc_middle::ty::print::with_forced_trimmed_paths; +use rustc_middle::ty::{self}; use rustc_span::sym; use super::SUSPICIOUS_TO_OWNED; diff --git a/src/tools/clippy/clippy_lints/src/methods/type_id_on_box.rs b/src/tools/clippy/clippy_lints/src/methods/type_id_on_box.rs new file mode 100644 index 00000000000..35137c97101 --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/methods/type_id_on_box.rs @@ -0,0 +1,62 @@ +use crate::methods::TYPE_ID_ON_BOX; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::source::snippet; +use rustc_errors::Applicability; +use rustc_hir::Expr; +use rustc_lint::LateContext; +use rustc_middle::ty::adjustment::{Adjust, Adjustment}; +use rustc_middle::ty::{self, ExistentialPredicate, Ty}; +use rustc_span::{sym, Span}; + +fn is_dyn_any(cx: &LateContext<'_>, ty: Ty<'_>) -> bool { + if let ty::Dynamic(preds, ..) = ty.kind() { + preds.iter().any(|p| match p.skip_binder() { + ExistentialPredicate::Trait(tr) => cx.tcx.is_diagnostic_item(sym::Any, tr.def_id), + _ => false, + }) + } else { + false + } +} + +pub(super) fn check(cx: &LateContext<'_>, receiver: &Expr<'_>, call_span: Span) { + let recv_adjusts = cx.typeck_results().expr_adjustments(receiver); + + if let Some(Adjustment { target: recv_ty, .. }) = recv_adjusts.last() + && let ty::Ref(_, ty, _) = recv_ty.kind() + && let ty::Adt(adt, substs) = ty.kind() + && adt.is_box() + && is_dyn_any(cx, substs.type_at(0)) + { + span_lint_and_then( + cx, + TYPE_ID_ON_BOX, + call_span, + "calling `.type_id()` on a `Box<dyn Any>`", + |diag| { + let derefs = recv_adjusts + .iter() + .filter(|adj| matches!(adj.kind, Adjust::Deref(None))) + .count(); + + let mut sugg = "*".repeat(derefs + 1); + sugg += &snippet(cx, receiver.span, "<expr>"); + + diag.note( + "this returns the type id of the literal type `Box<dyn Any>` instead of the \ + type id of the boxed value, which is most likely not what you want" + ) + .note( + "if this is intentional, use `TypeId::of::<Box<dyn Any>>()` instead, \ + which makes it more clear" + ) + .span_suggestion( + receiver.span, + "consider dereferencing first", + format!("({sugg})"), + Applicability::MaybeIncorrect, + ); + }, + ); + } +} diff --git a/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs b/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs index a1c6294737c..bc9c518dbcf 100644 --- a/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs +++ b/src/tools/clippy/clippy_lints/src/methods/uninit_assumed_init.rs @@ -1,5 +1,6 @@ use clippy_utils::diagnostics::span_lint; -use clippy_utils::{is_path_diagnostic_item, ty::is_uninit_value_valid_for_ty}; +use clippy_utils::is_path_diagnostic_item; +use clippy_utils::ty::is_uninit_value_valid_for_ty; use if_chain::if_chain; use rustc_hir as hir; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs index 1cef6226ad4..cc64a2e7948 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_filter_map.rs @@ -11,8 +11,7 @@ use rustc_lint::LateContext; use rustc_middle::ty; use rustc_span::sym; -use super::UNNECESSARY_FILTER_MAP; -use super::UNNECESSARY_FIND_MAP; +use super::{UNNECESSARY_FILTER_MAP, UNNECESSARY_FIND_MAP}; pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'tcx>, arg: &'tcx hir::Expr<'tcx>, name: &str) { if 
!is_trait_method(cx, expr, sym::Iterator) { diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs index 8ec15a1c1b7..6e23754bf46 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_fold.rs @@ -8,7 +8,8 @@ use rustc_hir as hir; use rustc_hir::PatKind; use rustc_lint::LateContext; use rustc_middle::ty; -use rustc_span::{source_map::Span, sym}; +use rustc_span::source_map::Span; +use rustc_span::sym; use super::UNNECESSARY_FOLD; diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs index 52a4ff7d1ae..0c72c13a3ca 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_iter_cloned.rs @@ -5,7 +5,8 @@ use clippy_utils::source::snippet_opt; use clippy_utils::ty::{get_iterator_item_ty, implements_trait}; use clippy_utils::{fn_def_id, get_parent_expr}; use rustc_errors::Applicability; -use rustc_hir::{def_id::DefId, Expr, ExprKind}; +use rustc_hir::def_id::DefId; +use rustc_hir::{Expr, ExprKind}; use rustc_lint::LateContext; use rustc_span::{sym, Symbol}; diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs index 087e1e4343b..d0c62fb56dc 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_join.rs @@ -1,4 +1,5 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, ty::is_type_lang_item}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::ty::is_type_lang_item; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, LangItem}; diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs index 7877f6a386c..111bcaaecec 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_literal_unwrap.rs @@ -1,4 +1,5 @@ -use clippy_utils::{diagnostics::span_lint_and_then, is_res_lang_ctor, last_path_segment, path_res, MaybePath}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::{is_res_lang_ctor, last_path_segment, path_res, MaybePath}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; @@ -29,6 +30,11 @@ pub(super) fn check( args: &[hir::Expr<'_>], ) { let init = clippy_utils::expr_or_init(cx, recv); + if init.span.from_expansion() { + // don't lint if the receiver or binding initializer comes from a macro + // (e.g. 
`let x = option_env!(..); x.unwrap()`) + return; + } let (constructor, call_args, ty) = if let hir::ExprKind::Call(call, call_args) = init.kind { let Some(qpath) = call.qpath_opt() else { return }; @@ -62,6 +68,22 @@ pub(super) fn check( (expr.span.with_hi(args[0].span.lo()), "panic!(".to_string()), (expr.span.with_lo(args[0].span.hi()), ")".to_string()), ]), + ("Some" | "Ok", "unwrap_unchecked", _) | ("Err", "unwrap_err_unchecked", _) => { + let mut suggs = vec![ + (recv.span.with_hi(call_args[0].span.lo()), String::new()), + (expr.span.with_lo(call_args[0].span.hi()), String::new()), + ]; + // try to also remove the unsafe block if present + if let hir::Node::Block(block) = cx.tcx.hir().get_parent(expr.hir_id) + && let hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::UserProvided) = block.rules + { + suggs.extend([ + (block.span.shrink_to_lo().to(expr.span.shrink_to_lo()), String::new()), + (expr.span.shrink_to_hi().to(block.span.shrink_to_hi()), String::new()) + ]); + } + Some(suggs) + }, (_, _, Some(_)) => None, ("Ok", "unwrap_err", None) | ("Err", "unwrap", None) => Some(vec![ ( diff --git a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs index 21e2638e5b9..5c5ee262052 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unnecessary_to_owned.rs @@ -7,16 +7,20 @@ use clippy_utils::ty::{get_iterator_item_ty, implements_trait, is_copy, peel_mid use clippy_utils::visitors::find_all_ret_expressions; use clippy_utils::{fn_def_id, get_parent_expr, is_diag_item_method, is_diag_trait_item, return_ty}; use rustc_errors::Applicability; -use rustc_hir::{def_id::DefId, BorrowKind, Expr, ExprKind, ItemKind, Node}; +use rustc_hir::def_id::DefId; +use rustc_hir::{BorrowKind, Expr, ExprKind, ItemKind, Node}; use rustc_hir_typeck::{FnCtxt, Inherited}; use rustc_infer::infer::TyCtxtInferExt; use rustc_lint::LateContext; use rustc_middle::mir::Mutability; use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref}; -use rustc_middle::ty::{GenericArg, GenericArgKind, GenericArgsRef}; -use rustc_middle::ty::{self, ClauseKind, EarlyBinder, ParamTy, ProjectionPredicate, TraitPredicate, Ty}; +use rustc_middle::ty::{ + self, ClauseKind, EarlyBinder, GenericArg, GenericArgKind, GenericArgsRef, ParamTy, ProjectionPredicate, + TraitPredicate, Ty, +}; use rustc_span::{sym, Symbol}; -use rustc_trait_selection::traits::{query::evaluate_obligation::InferCtxtExt as _, Obligation, ObligationCause}; +use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; +use rustc_trait_selection::traits::{Obligation, ObligationCause}; use super::UNNECESSARY_TO_OWNED; @@ -319,7 +323,12 @@ fn skip_addr_of_ancestors<'tcx>( fn get_callee_generic_args_and_args<'tcx>( cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>, -) -> Option<(DefId, GenericArgsRef<'tcx>, Option<&'tcx Expr<'tcx>>, &'tcx [Expr<'tcx>])> { +) -> Option<( + DefId, + GenericArgsRef<'tcx>, + Option<&'tcx Expr<'tcx>>, + &'tcx [Expr<'tcx>], +)> { if_chain! 
{ if let ExprKind::Call(callee, args) = expr.kind; let callee_ty = cx.typeck_results().expr_ty(callee); @@ -388,7 +397,8 @@ fn can_change_type<'a>(cx: &LateContext<'a>, mut expr: &'a Expr<'a>, mut ty: Ty< } } Node::Expr(parent_expr) => { - if let Some((callee_def_id, call_generic_args, recv, call_args)) = get_callee_generic_args_and_args(cx, parent_expr) + if let Some((callee_def_id, call_generic_args, recv, call_args)) + = get_callee_generic_args_and_args(cx, parent_expr) { // FIXME: the `instantiate_identity()` below seems incorrect, since we eventually // call `tcx.try_subst_and_normalize_erasing_regions` further down diff --git a/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs b/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs index 045f739e64d..474a33b67e1 100644 --- a/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs +++ b/src/tools/clippy/clippy_lints/src/methods/unwrap_or_else_default.rs @@ -1,10 +1,10 @@ //! Lint for `some_result_or_option.unwrap_or_else(Default::default)` use super::UNWRAP_OR_ELSE_DEFAULT; -use clippy_utils::{ - diagnostics::span_lint_and_sugg, is_default_equivalent_call, source::snippet_with_applicability, - ty::is_type_diagnostic_item, -}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::is_default_equivalent_call; +use clippy_utils::source::snippet_with_applicability; +use clippy_utils::ty::is_type_diagnostic_item; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; use rustc_hir as hir; diff --git a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs index c1139d84e2f..b5f810eddf4 100644 --- a/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs +++ b/src/tools/clippy/clippy_lints/src/methods/useless_asref.rs @@ -37,7 +37,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, call_name: &str, USELESS_ASREF, expr.span, &format!("this call to `{call_name}` does nothing"), - "try this", + "try", snippet_with_applicability(cx, recvr.span, "..", &mut applicability).to_string(), applicability, ); diff --git a/src/tools/clippy/clippy_lints/src/min_ident_chars.rs b/src/tools/clippy/clippy_lints/src/min_ident_chars.rs index d49bb0ca6e2..2a60f2faca0 100644 --- a/src/tools/clippy/clippy_lints/src/min_ident_chars.rs +++ b/src/tools/clippy/clippy_lints/src/min_ident_chars.rs @@ -1,10 +1,9 @@ -use clippy_utils::{diagnostics::span_lint, is_from_proc_macro}; +use clippy_utils::diagnostics::span_lint; +use clippy_utils::is_from_proc_macro; use rustc_data_structures::fx::FxHashSet; -use rustc_hir::{ - def::{DefKind, Res}, - intravisit::{walk_item, Visitor}, - GenericParamKind, HirId, Item, ItemKind, ItemLocalId, Node, Pat, PatKind, -}; +use rustc_hir::def::{DefKind, Res}; +use rustc_hir::intravisit::{walk_item, Visitor}; +use rustc_hir::{GenericParamKind, HirId, Item, ItemKind, ItemLocalId, Node, Pat, PatKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_tool_lint, impl_lint_pass}; @@ -25,7 +24,7 @@ declare_clippy_lint! 
{ /// ### Example /// ```rust,ignore /// for m in movies { - /// let title = m.t; + /// let title = m.t; /// } /// ``` /// Use instead: diff --git a/src/tools/clippy/clippy_lints/src/missing_assert_message.rs b/src/tools/clippy/clippy_lints/src/missing_assert_message.rs index 4dbb79334ca..c17f00c4275 100644 --- a/src/tools/clippy/clippy_lints/src/missing_assert_message.rs +++ b/src/tools/clippy/clippy_lints/src/missing_assert_message.rs @@ -46,7 +46,9 @@ declare_lint_pass!(MissingAssertMessage => [MISSING_ASSERT_MESSAGE]); impl<'tcx> LateLintPass<'tcx> for MissingAssertMessage { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, expr) else { + return; + }; let single_argument = match cx.tcx.get_diagnostic_name(macro_call.def_id) { Some(sym::assert_macro | sym::debug_assert_macro) => true, Some( @@ -61,10 +63,14 @@ impl<'tcx> LateLintPass<'tcx> for MissingAssertMessage { } let panic_expn = if single_argument { - let Some((_, panic_expn)) = find_assert_args(cx, expr, macro_call.expn) else { return }; + let Some((_, panic_expn)) = find_assert_args(cx, expr, macro_call.expn) else { + return; + }; panic_expn } else { - let Some((_, _, panic_expn)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return }; + let Some((_, _, panic_expn)) = find_assert_eq_args(cx, expr, macro_call.expn) else { + return; + }; panic_expn }; diff --git a/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs b/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs index 773174679db..96d83e114a4 100644 --- a/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs +++ b/src/tools/clippy/clippy_lints/src/missing_enforced_import_rename.rs @@ -1,8 +1,11 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet_opt}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::snippet_opt; use rustc_data_structures::fx::FxHashMap; use rustc_errors::Applicability; -use rustc_hir::{def::Res, def_id::DefId, Item, ItemKind, UseKind}; +use rustc_hir::def::Res; +use rustc_hir::def_id::DefId; +use rustc_hir::{Item, ItemKind, UseKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::Symbol; diff --git a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs index 1138d1163a4..2f63b9b9f0b 100644 --- a/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs +++ b/src/tools/clippy/clippy_lints/src/missing_fields_in_debug.rs @@ -1,23 +1,17 @@ use std::ops::ControlFlow; -use clippy_utils::{ - diagnostics::span_lint_and_then, - is_path_lang_item, paths, - ty::match_type, - visitors::{for_each_expr, Visitable}, -}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::ty::match_type; +use clippy_utils::visitors::{for_each_expr, Visitable}; +use clippy_utils::{is_path_lang_item, paths}; use rustc_ast::LitKind; use rustc_data_structures::fx::FxHashSet; -use rustc_hir::Block; +use rustc_hir::def::{DefKind, Res}; use rustc_hir::{ - def::{DefKind, Res}, - Expr, ImplItemKind, LangItem, Node, + Block, Expr, ExprKind, Impl, ImplItem, ImplItemKind, Item, ItemKind, LangItem, Node, QPath, TyKind, VariantData, }; -use rustc_hir::{ExprKind, Impl, ItemKind, QPath, TyKind}; -use rustc_hir::{ImplItem, Item, VariantData}; use rustc_lint::{LateContext, 
LateLintPass}; -use rustc_middle::ty::Ty; -use rustc_middle::ty::TypeckResults; +use rustc_middle::ty::{Ty, TypeckResults}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::{sym, Span, Symbol}; @@ -207,11 +201,10 @@ impl<'tcx> LateLintPass<'tcx> for MissingFieldsInDebug { if let ItemKind::Impl(Impl { of_trait: Some(trait_ref), self_ty, items, .. }) = item.kind && let Res::Def(DefKind::Trait, trait_def_id) = trait_ref.path.res && let TyKind::Path(QPath::Resolved(_, self_path)) = &self_ty.kind - // don't trigger if self is a generic parameter, e.g. `impl<T> Debug for T` - // this can only happen in core itself, where the trait is defined, - // but it caused ICEs in the past: - // https://github.com/rust-lang/rust-clippy/issues/10887 - && !matches!(self_path.res, Res::Def(DefKind::TyParam, _)) + // make sure that the self type is either a struct, an enum or a union + // this prevents ICEs such as when self is a type parameter or a primitive type + // (see #10887, #11063) + && let Res::Def(DefKind::Struct | DefKind::Enum | DefKind::Union, self_path_did) = self_path.res && cx.match_def_path(trait_def_id, &[sym::core, sym::fmt, sym::Debug]) // don't trigger if this impl was derived && !cx.tcx.has_attr(item.owner_id, sym::automatically_derived) @@ -222,7 +215,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingFieldsInDebug { && let body = cx.tcx.hir().body(*body_id) && let ExprKind::Block(block, _) = body.value.kind // inspect `self` - && let self_ty = cx.tcx.type_of(self_path.res.def_id()).skip_binder().peel_refs() + && let self_ty = cx.tcx.type_of(self_path_did).skip_binder().peel_refs() && let Some(self_adt) = self_ty.ty_adt_def() && let Some(self_def_id) = self_adt.did().as_local() && let Some(Node::Item(self_item)) = cx.tcx.hir().find_by_def_id(self_def_id) diff --git a/src/tools/clippy/clippy_lints/src/module_style.rs b/src/tools/clippy/clippy_lints/src/module_style.rs index 439cae812b7..efdc7560ee4 100644 --- a/src/tools/clippy/clippy_lints/src/module_style.rs +++ b/src/tools/clippy/clippy_lints/src/module_style.rs @@ -80,7 +80,9 @@ impl EarlyLintPass for ModStyle { let files = cx.sess().source_map().files(); - let Some(trim_to_src) = cx.sess().opts.working_dir.local_path() else { return }; + let Some(trim_to_src) = cx.sess().opts.working_dir.local_path() else { + return; + }; // `folder_segments` is all unique folder path segments `path/to/foo.rs` gives // `[path, to]` but not foo diff --git a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs index e6fd65f001a..fe35126aab2 100644 --- a/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs +++ b/src/tools/clippy/clippy_lints/src/multiple_unsafe_ops_per_block.rs @@ -1,12 +1,8 @@ -use clippy_utils::{ - diagnostics::span_lint_and_then, - visitors::{for_each_expr_with_closures, Descend, Visitable}, -}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::visitors::{for_each_expr_with_closures, Descend, Visitable}; use core::ops::ControlFlow::Continue; -use hir::{ - def::{DefKind, Res}, - BlockCheckMode, ExprKind, QPath, UnOp, Unsafety, -}; +use hir::def::{DefKind, Res}; +use hir::{BlockCheckMode, ExprKind, QPath, UnOp, Unsafety}; use rustc_ast::Mutability; use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs index d8647a99105..dea432fdbd5 100644 --- 
a/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs +++ b/src/tools/clippy/clippy_lints/src/mutable_debug_assertion.rs @@ -39,7 +39,9 @@ declare_lint_pass!(DebugAssertWithMutCall => [DEBUG_ASSERT_WITH_MUT_CALL]); impl<'tcx> LateLintPass<'tcx> for DebugAssertWithMutCall { fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, e) else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, e) else { + return; + }; let macro_name = cx.tcx.item_name(macro_call.def_id); if !matches!( macro_name.as_str(), @@ -47,7 +49,9 @@ impl<'tcx> LateLintPass<'tcx> for DebugAssertWithMutCall { ) { return; } - let Some((lhs, rhs, _)) = find_assert_eq_args(cx, e, macro_call.expn) else { return }; + let Some((lhs, rhs, _)) = find_assert_eq_args(cx, e, macro_call.expn) else { + return; + }; for arg in [lhs, rhs] { let mut visitor = MutArgVisitor::new(cx); visitor.visit_expr(arg); diff --git a/src/tools/clippy/clippy_lints/src/needless_bool.rs b/src/tools/clippy/clippy_lints/src/needless_bool.rs index 62af42a3961..46457400abc 100644 --- a/src/tools/clippy/clippy_lints/src/needless_bool.rs +++ b/src/tools/clippy/clippy_lints/src/needless_bool.rs @@ -6,9 +6,9 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg}; use clippy_utils::source::snippet_with_applicability; use clippy_utils::sugg::Sugg; use clippy_utils::{ - get_parent_node, is_else_clause, is_expn_of, peel_blocks, peel_blocks_with_stmt, span_extract_comment, + get_parent_node, higher, is_else_clause, is_expn_of, peel_blocks, peel_blocks_with_stmt, span_extract_comment, + SpanlessEq, }; -use clippy_utils::{higher, SpanlessEq}; use rustc_ast::ast::LitKind; use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Block, Expr, ExprKind, HirId, Node, UnOp}; @@ -106,7 +106,7 @@ declare_clippy_lint! { /// # let mut skip: bool; /// skip = !must_keep(x, y); /// ``` - #[clippy::version = "1.69.0"] + #[clippy::version = "1.71.0"] pub NEEDLESS_BOOL_ASSIGN, complexity, "setting the same boolean variable in both branches of an if-statement" diff --git a/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs b/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs index 498e1408e52..11bf9e9ca17 100644 --- a/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs +++ b/src/tools/clippy/clippy_lints/src/needless_borrowed_ref.rs @@ -52,7 +52,9 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessBorrowedRef { } // Only lint immutable refs, because `&mut ref T` may be useful. - let PatKind::Ref(pat, Mutability::Not) = ref_pat.kind else { return }; + let PatKind::Ref(pat, Mutability::Not) = ref_pat.kind else { + return; + }; match pat.kind { // Check sub_pat got a `ref` keyword (excluding `ref mut`). 
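[Editor's note] The needless_borrowed_ref hunk just above only reflows its let-else guard; as a reminder of what that lint is about, here is a minimal, hypothetical illustration (not part of this patch) of the `&ref` pattern it targets and the simpler binding it suggests:

```rust
fn main() {
    // `&ref v` dereferences the `&i32` and immediately re-borrows it,
    // which is the redundant dance needless_borrowed_ref points out...
    let &ref v = &5;
    // ...because it is equivalent to binding the reference directly:
    let w = &5;
    assert_eq!(v, w);
}
```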
diff --git a/src/tools/clippy/clippy_lints/src/needless_else.rs b/src/tools/clippy/clippy_lints/src/needless_else.rs index 4ff1bf7ffc0..03bab86c6d7 100644 --- a/src/tools/clippy/clippy_lints/src/needless_else.rs +++ b/src/tools/clippy/clippy_lints/src/needless_else.rs @@ -1,5 +1,5 @@ -use clippy_utils::source::snippet_opt; -use clippy_utils::{diagnostics::span_lint_and_sugg, source::trim_span}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::{snippet_opt, trim_span}; use rustc_ast::ast::{Expr, ExprKind}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; @@ -51,7 +51,7 @@ impl EarlyLintPass for NeedlessElse { cx, NEEDLESS_ELSE, span, - "this else branch is empty", + "this `else` branch is empty", "you can remove it", String::new(), Applicability::MachineApplicable, diff --git a/src/tools/clippy/clippy_lints/src/needless_for_each.rs b/src/tools/clippy/clippy_lints/src/needless_for_each.rs index c3b633fd6a0..98bf122fab7 100644 --- a/src/tools/clippy/clippy_lints/src/needless_for_each.rs +++ b/src/tools/clippy/clippy_lints/src/needless_for_each.rs @@ -1,11 +1,10 @@ use rustc_errors::Applicability; -use rustc_hir::{ - intravisit::{walk_expr, Visitor}, - Closure, Expr, ExprKind, Stmt, StmtKind, -}; +use rustc_hir::intravisit::{walk_expr, Visitor}; +use rustc_hir::{Closure, Expr, ExprKind, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use rustc_span::{source_map::Span, sym, Symbol}; +use rustc_span::source_map::Span; +use rustc_span::{sym, Symbol}; use if_chain::if_chain; @@ -50,7 +49,7 @@ declare_lint_pass!(NeedlessForEach => [NEEDLESS_FOR_EACH]); impl<'tcx> LateLintPass<'tcx> for NeedlessForEach { fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) { let (StmtKind::Expr(expr) | StmtKind::Semi(expr)) = stmt.kind else { - return + return; }; if_chain! 
{ diff --git a/src/tools/clippy/clippy_lints/src/needless_if.rs b/src/tools/clippy/clippy_lints/src/needless_if.rs index ad5c3e1dc82..1ed7ea6b325 100644 --- a/src/tools/clippy/clippy_lints/src/needless_if.rs +++ b/src/tools/clippy/clippy_lints/src/needless_if.rs @@ -1,4 +1,7 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, higher::If, is_from_proc_macro, source::snippet_opt}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::higher::If; +use clippy_utils::is_from_proc_macro; +use clippy_utils::source::snippet_opt; use rustc_errors::Applicability; use rustc_hir::{ExprKind, Stmt, StmtKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; diff --git a/src/tools/clippy/clippy_lints/src/needless_late_init.rs b/src/tools/clippy/clippy_lints/src/needless_late_init.rs index 5a9387b34cc..948454d13ae 100644 --- a/src/tools/clippy/clippy_lints/src/needless_late_init.rs +++ b/src/tools/clippy/clippy_lints/src/needless_late_init.rs @@ -86,7 +86,9 @@ fn contains_let(cond: &Expr<'_>) -> bool { } fn stmt_needs_ordered_drop(cx: &LateContext<'_>, stmt: &Stmt<'_>) -> bool { - let StmtKind::Local(local) = stmt.kind else { return false }; + let StmtKind::Local(local) = stmt.kind else { + return false; + }; !local.pat.walk_short(|pat| { if let PatKind::Binding(.., None) = pat.kind { !needs_ordered_drop(cx, cx.typeck_results().pat_ty(pat)) diff --git a/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs b/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs index da1b9d99931..d17a383e882 100644 --- a/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs +++ b/src/tools/clippy/clippy_lints/src/needless_parens_on_range_literals.rs @@ -1,8 +1,6 @@ -use clippy_utils::{ - diagnostics::span_lint_and_then, - higher, - source::{snippet, snippet_with_applicability}, -}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::higher; +use clippy_utils::source::{snippet, snippet_with_applicability}; use rustc_ast::ast; use rustc_errors::Applicability; diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs new file mode 100644 index 00000000000..d323a16c2a7 --- /dev/null +++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_ref_mut.rs @@ -0,0 +1,273 @@ +use super::needless_pass_by_value::requires_exact_signature; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::source::snippet; +use clippy_utils::{is_from_proc_macro, is_self}; +use if_chain::if_chain; +use rustc_errors::Applicability; +use rustc_hir::intravisit::FnKind; +use rustc_hir::{Body, FnDecl, HirId, HirIdMap, HirIdSet, Impl, ItemKind, Mutability, Node, PatKind}; +use rustc_hir_typeck::expr_use_visitor as euv; +use rustc_infer::infer::TyCtxtInferExt; +use rustc_lint::{LateContext, LateLintPass}; +use rustc_middle::mir::FakeReadCause; +use rustc_middle::ty::{self, Ty}; +use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::def_id::LocalDefId; +use rustc_span::symbol::kw; +use rustc_span::Span; +use rustc_target::spec::abi::Abi; + +declare_clippy_lint! { + /// ### What it does + /// Check if a `&mut` function argument is actually used mutably. + /// + /// Be careful if the function is publicly reexported as it would break compatibility with + /// users of this function. + /// + /// ### Why is this bad? + /// Less `mut` means less fights with the borrow checker. It can also lead to more + /// opportunities for parallelization. 
+ /// + /// ### Example + /// ```rust + /// fn foo(y: &mut i32) -> i32 { + /// 12 + *y + /// } + /// ``` + /// Use instead: + /// ```rust + /// fn foo(y: &i32) -> i32 { + /// 12 + *y + /// } + /// ``` + #[clippy::version = "1.72.0"] + pub NEEDLESS_PASS_BY_REF_MUT, + suspicious, + "using a `&mut` argument when it's not mutated" +} + +#[derive(Copy, Clone)] +pub struct NeedlessPassByRefMut { + avoid_breaking_exported_api: bool, +} + +impl NeedlessPassByRefMut { + pub fn new(avoid_breaking_exported_api: bool) -> Self { + Self { + avoid_breaking_exported_api, + } + } +} + +impl_lint_pass!(NeedlessPassByRefMut => [NEEDLESS_PASS_BY_REF_MUT]); + +fn should_skip<'tcx>( + cx: &LateContext<'tcx>, + input: rustc_hir::Ty<'tcx>, + ty: Ty<'_>, + arg: &rustc_hir::Param<'_>, +) -> bool { + // We check if this a `&mut`. `ref_mutability` returns `None` if it's not a reference. + if !matches!(ty.ref_mutability(), Some(Mutability::Mut)) { + return true; + } + + if is_self(arg) { + return true; + } + + if let PatKind::Binding(.., name, _) = arg.pat.kind { + // If it's a potentially unused variable, we don't check it. + if name.name == kw::Underscore || name.as_str().starts_with('_') { + return true; + } + } + + // All spans generated from a proc-macro invocation are the same... + is_from_proc_macro(cx, &input) +} + +impl<'tcx> LateLintPass<'tcx> for NeedlessPassByRefMut { + fn check_fn( + &mut self, + cx: &LateContext<'tcx>, + kind: FnKind<'tcx>, + decl: &'tcx FnDecl<'_>, + body: &'tcx Body<'_>, + span: Span, + fn_def_id: LocalDefId, + ) { + if span.from_expansion() { + return; + } + + let hir_id = cx.tcx.hir().local_def_id_to_hir_id(fn_def_id); + + match kind { + FnKind::ItemFn(.., header) => { + let attrs = cx.tcx.hir().attrs(hir_id); + if header.abi != Abi::Rust || requires_exact_signature(attrs) { + return; + } + }, + FnKind::Method(..) => (), + FnKind::Closure => return, + } + + // Exclude non-inherent impls + if let Some(Node::Item(item)) = cx.tcx.hir().find_parent(hir_id) { + if matches!( + item.kind, + ItemKind::Impl(Impl { of_trait: Some(_), .. }) | ItemKind::Trait(..) + ) { + return; + } + } + + let fn_sig = cx.tcx.fn_sig(fn_def_id).instantiate_identity(); + let fn_sig = cx.tcx.liberate_late_bound_regions(fn_def_id.to_def_id(), fn_sig); + + // If there are no `&mut` argument, no need to go any further. + if !decl + .inputs + .iter() + .zip(fn_sig.inputs()) + .zip(body.params) + .any(|((&input, &ty), arg)| !should_skip(cx, input, ty, arg)) + { + return; + } + + // Collect variables mutably used and spans which will need dereferencings from the + // function body. + let MutablyUsedVariablesCtxt { mutably_used_vars, .. } = { + let mut ctx = MutablyUsedVariablesCtxt::default(); + let infcx = cx.tcx.infer_ctxt().build(); + euv::ExprUseVisitor::new(&mut ctx, &infcx, fn_def_id, cx.param_env, cx.typeck_results()).consume_body(body); + ctx + }; + + let mut it = decl + .inputs + .iter() + .zip(fn_sig.inputs()) + .zip(body.params) + .filter(|((&input, &ty), arg)| !should_skip(cx, input, ty, arg)) + .peekable(); + if it.peek().is_none() { + return; + } + let show_semver_warning = self.avoid_breaking_exported_api && cx.effective_visibilities.is_exported(fn_def_id); + for ((&input, &_), arg) in it { + // Only take `&mut` arguments. + if_chain! { + if let PatKind::Binding(_, canonical_id, ..) = arg.pat.kind; + if !mutably_used_vars.contains(&canonical_id); + if let rustc_hir::TyKind::Ref(_, inner_ty) = input.kind; + then { + // If the argument is never used mutably, we emit the warning. 
+ let sp = input.span; + span_lint_and_then( + cx, + NEEDLESS_PASS_BY_REF_MUT, + sp, + "this argument is a mutable reference, but not used mutably", + |diag| { + diag.span_suggestion( + sp, + "consider changing to".to_string(), + format!( + "&{}", + snippet(cx, cx.tcx.hir().span(inner_ty.ty.hir_id), "_"), + ), + Applicability::Unspecified, + ); + if show_semver_warning { + diag.warn("changing this function will impact semver compatibility"); + } + }, + ); + } + } + } + } +} + +#[derive(Default)] +struct MutablyUsedVariablesCtxt { + mutably_used_vars: HirIdSet, + prev_bind: Option<HirId>, + aliases: HirIdMap<HirId>, +} + +impl MutablyUsedVariablesCtxt { + fn add_mutably_used_var(&mut self, mut used_id: HirId) { + while let Some(id) = self.aliases.get(&used_id) { + self.mutably_used_vars.insert(used_id); + used_id = *id; + } + self.mutably_used_vars.insert(used_id); + } +} + +impl<'tcx> euv::Delegate<'tcx> for MutablyUsedVariablesCtxt { + fn consume(&mut self, cmt: &euv::PlaceWithHirId<'tcx>, _id: HirId) { + if let euv::Place { + base: euv::PlaceBase::Local(vid), + base_ty, + .. + } = &cmt.place + { + if let Some(bind_id) = self.prev_bind.take() { + self.aliases.insert(bind_id, *vid); + } else if matches!(base_ty.ref_mutability(), Some(Mutability::Mut)) { + self.add_mutably_used_var(*vid); + } + } + } + + fn borrow(&mut self, cmt: &euv::PlaceWithHirId<'tcx>, _id: HirId, borrow: ty::BorrowKind) { + self.prev_bind = None; + if let euv::Place { + base: euv::PlaceBase::Local(vid), + base_ty, + .. + } = &cmt.place + { + // If this is a mutable borrow, it was obviously used mutably so we add it. However + // for `UniqueImmBorrow`, it's interesting because if you do: `array[0] = value` inside + // a closure, it'll return this variant whereas if you have just an index access, it'll + // return `ImmBorrow`. So if there is "Unique" and it's a mutable reference, we add it + // to the mutably used variables set. + if borrow == ty::BorrowKind::MutBorrow + || (borrow == ty::BorrowKind::UniqueImmBorrow && base_ty.ref_mutability() == Some(Mutability::Mut)) + { + self.add_mutably_used_var(*vid); + } + } + } + + fn mutate(&mut self, cmt: &euv::PlaceWithHirId<'tcx>, _id: HirId) { + self.prev_bind = None; + if let euv::Place { + projections, + base: euv::PlaceBase::Local(vid), + .. 
+ } = &cmt.place + { + if !projections.is_empty() { + self.add_mutably_used_var(*vid); + } + } + } + + fn copy(&mut self, _cmt: &euv::PlaceWithHirId<'tcx>, _id: HirId) { + self.prev_bind = None; + } + + fn fake_read(&mut self, _: &rustc_hir_typeck::expr_use_visitor::PlaceWithHirId<'tcx>, _: FakeReadCause, _: HirId) {} + + fn bind(&mut self, _cmt: &euv::PlaceWithHirId<'tcx>, id: HirId) { + self.prev_bind = Some(id); + } +} diff --git a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs index 55b6e1606ee..5e26601537f 100644 --- a/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs +++ b/src/tools/clippy/clippy_lints/src/needless_pass_by_value.rs @@ -10,14 +10,14 @@ use rustc_ast::ast::Attribute; use rustc_errors::{Applicability, Diagnostic}; use rustc_hir::intravisit::FnKind; use rustc_hir::{ - BindingAnnotation, Body, FnDecl, GenericArg, HirId, Impl, ItemKind, Mutability, Node, PatKind, QPath, TyKind, + BindingAnnotation, Body, FnDecl, GenericArg, HirId, HirIdSet, Impl, ItemKind, LangItem, Mutability, Node, PatKind, + QPath, TyKind, }; -use rustc_hir::{HirIdSet, LangItem}; use rustc_hir_typeck::expr_use_visitor as euv; use rustc_infer::infer::TyCtxtInferExt; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::mir::FakeReadCause; -use rustc_middle::ty::{self, TypeVisitableExt, Ty}; +use rustc_middle::ty::{self, Ty, TypeVisitableExt}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::def_id::LocalDefId; use rustc_span::symbol::kw; @@ -168,7 +168,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { ( preds.iter().any(|t| cx.tcx.is_diagnostic_item(sym::Borrow, t.def_id())), !preds.is_empty() && { - let ty_empty_region = Ty::new_imm_ref(cx.tcx,cx.tcx.lifetimes.re_erased, ty); + let ty_empty_region = Ty::new_imm_ref(cx.tcx, cx.tcx.lifetimes.re_erased, ty); preds.iter().all(|t| { let ty_params = t.trait_ref.args.iter().skip(1).collect::<Vec<_>>(); implements_trait(cx, ty_empty_region, t.def_id(), &ty_params) @@ -289,7 +289,7 @@ impl<'tcx> LateLintPass<'tcx> for NeedlessPassByValue { } /// Functions marked with these attributes must have the exact signature. 
-fn requires_exact_signature(attrs: &[Attribute]) -> bool { +pub(crate) fn requires_exact_signature(attrs: &[Attribute]) -> bool { attrs.iter().any(|attr| { [sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive] .iter() diff --git a/src/tools/clippy/clippy_lints/src/no_effect.rs b/src/tools/clippy/clippy_lints/src/no_effect.rs index a4c7da7e48d..e6d3e72d1e6 100644 --- a/src/tools/clippy/clippy_lints/src/no_effect.rs +++ b/src/tools/clippy/clippy_lints/src/no_effect.rs @@ -1,8 +1,7 @@ use clippy_utils::diagnostics::{span_lint_hir, span_lint_hir_and_then}; -use clippy_utils::peel_blocks; use clippy_utils::source::snippet_opt; use clippy_utils::ty::has_drop; -use clippy_utils::{get_parent_node, is_lint_allowed}; +use clippy_utils::{get_parent_node, is_lint_allowed, peel_blocks}; use rustc_errors::Applicability; use rustc_hir::def::{DefKind, Res}; use rustc_hir::{ diff --git a/src/tools/clippy/clippy_lints/src/non_copy_const.rs b/src/tools/clippy/clippy_lints/src/non_copy_const.rs index 12d4f33d428..87699fd0ca6 100644 --- a/src/tools/clippy/clippy_lints/src/non_copy_const.rs +++ b/src/tools/clippy/clippy_lints/src/non_copy_const.rs @@ -15,14 +15,12 @@ use rustc_hir::{ }; use rustc_hir_analysis::hir_ty_to_ty; use rustc_lint::{LateContext, LateLintPass, Lint}; -use rustc_middle::mir::interpret::ErrorHandled; +use rustc_middle::mir::interpret::{ErrorHandled, EvalToValTreeResult, GlobalId}; use rustc_middle::ty::adjustment::Adjust; use rustc_middle::ty::{self, Ty, TyCtxt}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::{sym, InnerSpan, Span}; use rustc_target::abi::VariantIdx; -use rustc_middle::mir::interpret::EvalToValTreeResult; -use rustc_middle::mir::interpret::GlobalId; // FIXME: this is a correctness problem but there's no suitable // warn-by-default category. @@ -154,24 +152,37 @@ fn is_value_unfrozen_raw<'tcx>( // As of 2022-09-08 miri doesn't track which union field is active so there's no safe way to check the // contained value. ty::Adt(def, ..) 
if def.is_union() => false, - ty::Array(ty, _) => { - val.unwrap_branch().iter().any(|field| inner(cx, *field, ty)) - }, + ty::Array(ty, _) => val.unwrap_branch().iter().any(|field| inner(cx, *field, ty)), ty::Adt(def, _) if def.is_union() => false, - ty::Adt(def, args) if def.is_enum() => { + ty::Adt(def, substs) if def.is_enum() => { let (&variant_index, fields) = val.unwrap_branch().split_first().unwrap(); - let variant_index = - VariantIdx::from_u32(variant_index.unwrap_leaf().try_to_u32().ok().unwrap()); - fields.iter().copied().zip( - def.variants()[variant_index] + let variant_index = VariantIdx::from_u32(variant_index.unwrap_leaf().try_to_u32().ok().unwrap()); + fields + .iter() + .copied() + .zip( + def.variants()[variant_index] + .fields + .iter() + .map(|field| field.ty(cx.tcx, substs)), + ) + .any(|(field, ty)| inner(cx, field, ty)) + }, + ty::Adt(def, substs) => val + .unwrap_branch() + .iter() + .zip( + def.non_enum_variant() .fields .iter() - .map(|field| field.ty(cx.tcx, args))).any(|(field, ty)| inner(cx, field, ty)) - } - ty::Adt(def, args) => { - val.unwrap_branch().iter().zip(def.non_enum_variant().fields.iter().map(|field| field.ty(cx.tcx, args))).any(|(field, ty)| inner(cx, *field, ty)) - } - ty::Tuple(tys) => val.unwrap_branch().iter().zip(tys).any(|(field, ty)| inner(cx, *field, ty)), + .map(|field| field.ty(cx.tcx, substs)), + ) + .any(|(field, ty)| inner(cx, *field, ty)), + ty::Tuple(tys) => val + .unwrap_branch() + .iter() + .zip(tys) + .any(|(field, ty)| inner(cx, *field, ty)), _ => false, } } @@ -208,7 +219,10 @@ fn is_value_unfrozen_poly<'tcx>(cx: &LateContext<'tcx>, body_id: BodyId, ty: Ty< let def_id = body_id.hir_id.owner.to_def_id(); let args = ty::GenericArgs::identity_for_item(cx.tcx, def_id); let instance = ty::Instance::new(def_id, args); - let cid = rustc_middle::mir::interpret::GlobalId { instance, promoted: None }; + let cid = rustc_middle::mir::interpret::GlobalId { + instance, + promoted: None, + }; let param_env = cx.tcx.param_env(def_id).with_reveal_all_normalized(cx.tcx); let result = cx.tcx.const_eval_global_id_for_typeck(param_env, cid, None); is_value_unfrozen_raw(cx, result, ty) @@ -221,7 +235,6 @@ fn is_value_unfrozen_expr<'tcx>(cx: &LateContext<'tcx>, hir_id: HirId, def_id: D is_value_unfrozen_raw(cx, result, ty) } - pub fn const_eval_resolve<'tcx>( tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, @@ -230,9 +243,12 @@ pub fn const_eval_resolve<'tcx>( ) -> EvalToValTreeResult<'tcx> { match ty::Instance::resolve(tcx, param_env, ct.def, ct.args) { Ok(Some(instance)) => { - let cid = GlobalId { instance, promoted: None }; + let cid = GlobalId { + instance, + promoted: None, + }; tcx.const_eval_global_id_for_typeck(param_env, cid, span) - } + }, Ok(None) => Err(ErrorHandled::TooGeneric), Err(err) => Err(ErrorHandled::Reported(err.into())), } @@ -392,7 +408,7 @@ impl<'tcx> LateLintPass<'tcx> for NonCopyConst { // Make sure it is a const item. let Res::Def(DefKind::Const | DefKind::AssocConst, item_def_id) = cx.qpath_res(qpath, expr.hir_id) else { - return + return; }; // Climb up to resolve any field access and explicit referencing. 
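[Editor's note] The non_copy_const.rs hunks above are mostly formatting reshuffles of `is_value_unfrozen_raw`, which (as I understand it) decides whether a const's evaluated value contains interior mutability. A small, hypothetical example (not part of this patch) of the footgun that check exists to report:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

// An interior-mutable ("unfrozen") const: every mention of COUNTER
// materializes a fresh temporary, so writes to it are silently lost.
const COUNTER: AtomicUsize = AtomicUsize::new(0);

fn main() {
    COUNTER.fetch_add(1, Ordering::Relaxed);
    // Still 0: the increment happened on a temporary copy of the const.
    assert_eq!(COUNTER.load(Ordering::Relaxed), 0);
}
```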
diff --git a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs index 9f6917c146f..d562047cbf1 100644 --- a/src/tools/clippy/clippy_lints/src/non_expressive_names.rs +++ b/src/tools/clippy/clippy_lints/src/non_expressive_names.rs @@ -91,7 +91,7 @@ struct ExistingName { struct SimilarNamesLocalVisitor<'a, 'tcx> { names: Vec<ExistingName>, cx: &'a EarlyContext<'tcx>, - lint: &'a NonExpressiveNames, + lint: NonExpressiveNames, /// A stack of scopes containing the single-character bindings in each scope. single_char_names: Vec<Vec<Ident>>, @@ -365,7 +365,7 @@ impl EarlyLintPass for NonExpressiveNames { .. }) = item.kind { - do_check(self, cx, &item.attrs, &sig.decl, blk); + do_check(*self, cx, &item.attrs, &sig.decl, blk); } } @@ -380,12 +380,12 @@ impl EarlyLintPass for NonExpressiveNames { .. }) = item.kind { - do_check(self, cx, &item.attrs, &sig.decl, blk); + do_check(*self, cx, &item.attrs, &sig.decl, blk); } } } -fn do_check(lint: &mut NonExpressiveNames, cx: &EarlyContext<'_>, attrs: &[Attribute], decl: &FnDecl, blk: &Block) { +fn do_check(lint: NonExpressiveNames, cx: &EarlyContext<'_>, attrs: &[Attribute], decl: &FnDecl, blk: &Block) { if !attrs.iter().any(|attr| attr.has_name(sym::test)) { let mut visitor = SimilarNamesLocalVisitor { names: Vec::new(), diff --git a/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs b/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs index 2d79a5c9008..bd194b93584 100644 --- a/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs +++ b/src/tools/clippy/clippy_lints/src/nonstandard_macro_braces.rs @@ -1,7 +1,5 @@ -use std::{ - fmt, - hash::{Hash, Hasher}, -}; +use std::fmt; +use std::hash::{Hash, Hasher}; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet_opt; diff --git a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs index 3615c7ec9bf..3dc652f9dc0 100644 --- a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs +++ b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs @@ -7,8 +7,7 @@ use rustc_hir::def_id::DefId; use rustc_hir::hir_id::HirIdMap; use rustc_hir::{Body, Expr, ExprKind, HirId, ImplItem, ImplItemKind, Node, PatKind, TraitItem, TraitItemKind}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::{EarlyBinder, GenericArgKind, GenericArgsRef}; -use rustc_middle::ty::{self, ConstKind}; +use rustc_middle::ty::{self, ConstKind, EarlyBinder, GenericArgKind, GenericArgsRef}; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::symbol::{kw, Ident}; use rustc_span::Span; diff --git a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs index 5c240276b76..35dd8fabe6e 100644 --- a/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs +++ b/src/tools/clippy/clippy_lints/src/operators/arithmetic_side_effects.rs @@ -1,26 +1,20 @@ use super::ARITHMETIC_SIDE_EFFECTS; -use clippy_utils::is_from_proc_macro; -use clippy_utils::{ - consts::{constant, constant_simple, Constant}, - diagnostics::span_lint, - is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary, -}; -use rustc_ast as ast; +use clippy_utils::consts::{constant, constant_simple, Constant}; +use clippy_utils::diagnostics::span_lint; +use clippy_utils::{is_from_proc_macro, is_lint_allowed, peel_hir_expr_refs, peel_hir_expr_unary}; use 
rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_hir as hir; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::Ty; use rustc_session::impl_lint_pass; -use rustc_span::{ - source_map::{Span, Spanned}, - Symbol, -}; +use rustc_span::source_map::{Span, Spanned}; +use rustc_span::Symbol; +use {rustc_ast as ast, rustc_hir as hir}; const HARD_CODED_ALLOWED_BINARY: &[[&str; 2]] = &[ ["f32", "f32"], ["f64", "f64"], - ["std::num::Saturating", "std::num::Saturating"], - ["std::num::Wrapping", "std::num::Wrapping"], + ["std::num::Saturating", "*"], + ["std::num::Wrapping", "*"], ["std::string::String", "str"], ]; const HARD_CODED_ALLOWED_UNARY: &[&str] = &["f32", "f64", "std::num::Saturating", "std::num::Wrapping"]; @@ -200,7 +194,9 @@ impl ArithmeticSideEffects { ps: &hir::PathSegment<'tcx>, receiver: &hir::Expr<'tcx>, ) { - let Some(arg) = args.first() else { return; }; + let Some(arg) = args.first() else { + return; + }; if constant_simple(cx, cx.typeck_results(), receiver).is_some() { return; } @@ -225,7 +221,9 @@ impl ArithmeticSideEffects { un_expr: &hir::Expr<'tcx>, un_op: hir::UnOp, ) { - let hir::UnOp::Neg = un_op else { return; }; + let hir::UnOp::Neg = un_op else { + return; + }; if constant(cx, cx.typeck_results(), un_expr).is_some() { return; } diff --git a/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs b/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs index 9bbf385fb59..c4572a09db6 100644 --- a/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs +++ b/src/tools/clippy/clippy_lints/src/operators/assign_op_pattern.rs @@ -1,9 +1,8 @@ -use clippy_utils::binop_traits; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet_opt; use clippy_utils::ty::implements_trait; use clippy_utils::visitors::for_each_expr; -use clippy_utils::{eq_expr_value, trait_ref_of_method}; +use clippy_utils::{binop_traits, eq_expr_value, trait_ref_of_method}; use core::ops::ControlFlow; use if_chain::if_chain; use rustc_errors::Applicability; diff --git a/src/tools/clippy/clippy_lints/src/operators/eq_op.rs b/src/tools/clippy/clippy_lints/src/operators/eq_op.rs index 78965b7d6f7..88d56631841 100644 --- a/src/tools/clippy/clippy_lints/src/operators/eq_op.rs +++ b/src/tools/clippy/clippy_lints/src/operators/eq_op.rs @@ -1,6 +1,7 @@ +use clippy_utils::ast_utils::is_useless_with_eq_exprs; use clippy_utils::diagnostics::{span_lint, span_lint_and_then}; use clippy_utils::macros::{find_assert_eq_args, first_node_macro_backtrace}; -use clippy_utils::{ast_utils::is_useless_with_eq_exprs, eq_expr_value, is_in_test_function}; +use clippy_utils::{eq_expr_value, is_in_test_function}; use rustc_hir::{BinOpKind, Expr}; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs b/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs index 015f6c14e76..5eabb349ec1 100644 --- a/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs +++ b/src/tools/clippy/clippy_lints/src/operators/misrefactored_assign_op.rs @@ -1,7 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_then; -use clippy_utils::eq_expr_value; use clippy_utils::source::snippet_opt; -use clippy_utils::sugg; +use clippy_utils::{eq_expr_value, sugg}; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_lint::LateContext; diff --git a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs index d7917e86a86..932dd470f9e 
100644 --- a/src/tools/clippy/clippy_lints/src/operators/op_ref.rs +++ b/src/tools/clippy/clippy_lints/src/operators/op_ref.rs @@ -4,7 +4,9 @@ use clippy_utils::source::snippet; use clippy_utils::ty::{implements_trait, is_copy}; use if_chain::if_chain; use rustc_errors::Applicability; -use rustc_hir::{def::Res, def_id::DefId, BinOpKind, BorrowKind, Expr, ExprKind, GenericArg, ItemKind, QPath, TyKind}; +use rustc_hir::def::Res; +use rustc_hir::def_id::DefId; +use rustc_hir::{BinOpKind, BorrowKind, Expr, ExprKind, GenericArg, ItemKind, QPath, TyKind}; use rustc_lint::LateContext; use rustc_middle::ty::{self, Ty}; diff --git a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs index abdccc47f54..40da002f4ff 100644 --- a/src/tools/clippy/clippy_lints/src/option_if_let_else.rs +++ b/src/tools/clippy/clippy_lints/src/option_if_let_else.rs @@ -6,10 +6,9 @@ use clippy_utils::{ }; use if_chain::if_chain; use rustc_errors::Applicability; +use rustc_hir::def::Res; use rustc_hir::LangItem::{OptionNone, OptionSome, ResultErr, ResultOk}; -use rustc_hir::{ - def::Res, Arm, BindingAnnotation, Expr, ExprKind, MatchSource, Mutability, Pat, PatKind, Path, QPath, UnOp, -}; +use rustc_hir::{Arm, BindingAnnotation, Expr, ExprKind, MatchSource, Mutability, Pat, PatKind, Path, QPath, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::SyntaxContext; diff --git a/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs b/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs index 849cd03dd7b..a049427d85d 100644 --- a/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs +++ b/src/tools/clippy/clippy_lints/src/panic_in_result_fn.rs @@ -13,7 +13,7 @@ use rustc_span::{sym, Span}; declare_clippy_lint! { /// ### What it does - /// Checks for usage of `panic!`, `unimplemented!`, `todo!`, `unreachable!` or assertions in a function of type result. + /// Checks for usage of `panic!` or assertions in a function of type result. /// /// ### Why is this bad? /// For some codebases, it is desirable for functions of type result to return an error instead of crashing. Hence panicking macros should be avoided. @@ -37,7 +37,7 @@ declare_clippy_lint! 
{ #[clippy::version = "1.48.0"] pub PANIC_IN_RESULT_FN, restriction, - "functions of type `Result<..>` that contain `panic!()`, `todo!()`, `unreachable()`, `unimplemented()` or assertion" + "functions of type `Result<..>` that contain `panic!()` or assertion" } declare_lint_pass!(PanicInResultFn => [PANIC_IN_RESULT_FN]); @@ -70,7 +70,7 @@ fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, body: &'tcx hir }; if matches!( cx.tcx.item_name(macro_call.def_id).as_str(), - "unimplemented" | "unreachable" | "panic" | "todo" | "assert" | "assert_eq" | "assert_ne" + "panic" | "assert" | "assert_eq" | "assert_ne" ) { panics.push(macro_call.span); ControlFlow::Continue(Descend::No) @@ -83,10 +83,10 @@ fn lint_impl_body<'tcx>(cx: &LateContext<'tcx>, impl_span: Span, body: &'tcx hir cx, PANIC_IN_RESULT_FN, impl_span, - "used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result`", + "used `panic!()` or assertion in a function that returns `Result`", move |diag| { diag.help( - "`unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing", + "`panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing", ); diag.span_note(panics, "return Err() instead of panicking"); }, diff --git a/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs b/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs index 2f3007658ea..a72aefe91c1 100644 --- a/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs +++ b/src/tools/clippy/clippy_lints/src/panic_unimplemented.rs @@ -76,7 +76,9 @@ declare_lint_pass!(PanicUnimplemented => [UNIMPLEMENTED, UNREACHABLE, TODO, PANI impl<'tcx> LateLintPass<'tcx> for PanicUnimplemented { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, expr) else { + return; + }; if is_panic(cx, macro_call.def_id) { if cx.tcx.hir().is_inside_const_context(expr.hir_id) { return; diff --git a/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs b/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs index 456ded3fc02..d9f5d1642d7 100644 --- a/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs +++ b/src/tools/clippy/clippy_lints/src/partialeq_to_none.rs @@ -1,7 +1,6 @@ -use clippy_utils::{ - diagnostics::span_lint_and_sugg, is_res_lang_ctor, path_res, peel_hir_expr_refs, peel_ref_operators, sugg, - ty::is_type_diagnostic_item, -}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::ty::is_type_diagnostic_item; +use clippy_utils::{is_res_lang_ctor, path_res, peel_hir_expr_refs, peel_ref_operators, sugg}; use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind, LangItem}; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs index 7b4812e98d8..41513647f05 100644 --- a/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs +++ b/src/tools/clippy/clippy_lints/src/pass_by_ref_or_value.rs @@ -1,5 +1,4 @@ -use std::cmp; -use std::iter; +use std::{cmp, iter}; use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet; diff --git a/src/tools/clippy/clippy_lints/src/ptr.rs 
b/src/tools/clippy/clippy_lints/src/ptr.rs index eb7c008c7a4..264c38df11f 100644 --- a/src/tools/clippy/clippy_lints/src/ptr.rs +++ b/src/tools/clippy/clippy_lints/src/ptr.rs @@ -28,8 +28,7 @@ use rustc_span::sym; use rustc_span::symbol::Symbol; use rustc_trait_selection::infer::InferCtxtExt as _; use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt as _; -use std::fmt; -use std::iter; +use std::{fmt, iter}; declare_clippy_lint! { /// ### What it does @@ -166,7 +165,11 @@ impl<'tcx> LateLintPass<'tcx> for Ptr { check_mut_from_ref(cx, sig, None); for arg in check_fn_args( cx, - cx.tcx.fn_sig(item.owner_id).instantiate_identity().skip_binder().inputs(), + cx.tcx + .fn_sig(item.owner_id) + .instantiate_identity() + .skip_binder() + .inputs(), sig.decl.inputs, &sig.decl.output, &[], @@ -389,11 +392,12 @@ impl<'tcx> DerefTy<'tcx> { fn ty(&self, cx: &LateContext<'tcx>) -> Ty<'tcx> { match *self { Self::Str => cx.tcx.types.str_, - Self::Path => Ty::new_adt(cx.tcx, + Self::Path => Ty::new_adt( + cx.tcx, cx.tcx.adt_def(cx.tcx.get_diagnostic_item(sym::Path).unwrap()), List::empty(), ), - Self::Slice(_, ty) => Ty::new_slice(cx.tcx,ty), + Self::Slice(_, ty) => Ty::new_slice(cx.tcx, ty), } } diff --git a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs index 47b8891e123..20e032d4b01 100644 --- a/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs +++ b/src/tools/clippy/clippy_lints/src/ptr_offset_with_cast.rs @@ -50,12 +50,12 @@ impl<'tcx> LateLintPass<'tcx> for PtrOffsetWithCast { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { // Check if the expressions is a ptr.offset or ptr.wrapping_offset method call let Some((receiver_expr, arg_expr, method)) = expr_as_ptr_offset_call(cx, expr) else { - return + return; }; // Check if the argument to the method call is a cast from usize let Some(cast_lhs_expr) = expr_as_cast_from_usize(cx, arg_expr) else { - return + return; }; let msg = format!("use of `{method}` with a `usize` casted to an `isize`"); diff --git a/src/tools/clippy/clippy_lints/src/question_mark.rs b/src/tools/clippy/clippy_lints/src/question_mark.rs index e3d940ad2a4..f5502cffb66 100644 --- a/src/tools/clippy/clippy_lints/src/question_mark.rs +++ b/src/tools/clippy/clippy_lints/src/question_mark.rs @@ -1,21 +1,24 @@ +use crate::manual_let_else::{MatchLintBehaviour, MANUAL_LET_ELSE}; use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::msrvs::Msrv; use clippy_utils::source::snippet_with_applicability; use clippy_utils::ty::is_type_diagnostic_item; use clippy_utils::{ - eq_expr_value, get_parent_node, in_constant, is_else_clause, is_res_lang_ctor, path_to_local, path_to_local_id, - peel_blocks, peel_blocks_with_stmt, + eq_expr_value, get_parent_node, higher, in_constant, is_else_clause, is_path_lang_item, is_res_lang_ctor, + pat_and_expr_can_be_question_mark, path_to_local, path_to_local_id, peel_blocks, peel_blocks_with_stmt, }; -use clippy_utils::{higher, is_path_lang_item}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::def::Res; use rustc_hir::LangItem::{self, OptionNone, OptionSome, ResultErr, ResultOk}; -use rustc_hir::{BindingAnnotation, ByRef, Expr, ExprKind, Node, PatKind, PathSegment, QPath}; +use rustc_hir::{ + BindingAnnotation, Block, ByRef, Expr, ExprKind, Local, Node, PatKind, PathSegment, QPath, Stmt, StmtKind, +}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::Ty; -use 
rustc_session::declare_tool_lint; -use rustc_session::impl_lint_pass; -use rustc_span::{sym, symbol::Symbol}; +use rustc_session::{declare_tool_lint, impl_lint_pass}; +use rustc_span::sym; +use rustc_span::symbol::Symbol; declare_clippy_lint! { /// ### What it does @@ -42,8 +45,9 @@ declare_clippy_lint! { "checks for expressions that could be replaced by the question mark operator" } -#[derive(Default)] pub struct QuestionMark { + pub(crate) msrv: Msrv, + pub(crate) matches_behaviour: MatchLintBehaviour, /// Keeps track of how many try blocks we are in at any point during linting. /// This allows us to answer the question "are we inside of a try block" /// very quickly, without having to walk up the parent chain, by simply checking @@ -51,7 +55,19 @@ pub struct QuestionMark { /// As for why we need this in the first place: <https://github.com/rust-lang/rust-clippy/issues/8628> try_block_depth_stack: Vec<u32>, } -impl_lint_pass!(QuestionMark => [QUESTION_MARK]); + +impl_lint_pass!(QuestionMark => [QUESTION_MARK, MANUAL_LET_ELSE]); + +impl QuestionMark { + #[must_use] + pub fn new(msrv: Msrv, matches_behaviour: MatchLintBehaviour) -> Self { + Self { + msrv, + matches_behaviour, + try_block_depth_stack: Vec::new(), + } + } +} enum IfBlockType<'hir> { /// An `if x.is_xxx() { a } else { b } ` expression. @@ -78,6 +94,29 @@ enum IfBlockType<'hir> { ), } +fn check_let_some_else_return_none(cx: &LateContext<'_>, stmt: &Stmt<'_>) { + if let StmtKind::Local(Local { pat, init: Some(init_expr), els: Some(els), .. }) = stmt.kind && + let Block { stmts: &[], expr: Some(els), .. } = els && + let Some(inner_pat) = pat_and_expr_can_be_question_mark(cx, pat, els) + { + let mut applicability = Applicability::MaybeIncorrect; + let init_expr_str = snippet_with_applicability(cx, init_expr.span, "..", &mut applicability); + let receiver_str = snippet_with_applicability(cx, inner_pat.span, "..", &mut applicability); + let sugg = format!( + "let {receiver_str} = {init_expr_str}?;", + ); + span_lint_and_sugg( + cx, + QUESTION_MARK, + stmt.span, + "this `let...else` may be rewritten with the `?` operator", + "replace it with", + sugg, + applicability, + ); + } +} + fn is_early_return(smbl: Symbol, cx: &LateContext<'_>, if_block: &IfBlockType<'_>) -> bool { match *if_block { IfBlockType::IfIs(caller, caller_ty, call_sym, if_then, _) => { @@ -259,6 +298,12 @@ fn is_try_block(cx: &LateContext<'_>, bl: &rustc_hir::Block<'_>) -> bool { } impl<'tcx> LateLintPass<'tcx> for QuestionMark { + fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) { + if !in_constant(cx, stmt.hir_id) { + check_let_some_else_return_none(cx, stmt); + } + self.check_manual_let_else(cx, stmt); + } fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { if !in_constant(cx, expr.hir_id) { self.check_is_none_or_err_and_early_return(cx, expr); @@ -291,4 +336,5 @@ impl<'tcx> LateLintPass<'tcx> for QuestionMark { .expect("blocks are always part of bodies and must have a depth") -= 1; } } + extract_msrv_attr!(LateContext); } diff --git a/src/tools/clippy/clippy_lints/src/ranges.rs b/src/tools/clippy/clippy_lints/src/ranges.rs index d2018aba9e3..3287675a82d 100644 --- a/src/tools/clippy/clippy_lints/src/ranges.rs +++ b/src/tools/clippy/clippy_lints/src/ranges.rs @@ -1,10 +1,9 @@ use clippy_utils::consts::{constant, Constant}; use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg, span_lint_and_then}; -use clippy_utils::higher; use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::source::{snippet, snippet_opt, 
snippet_with_applicability}; use clippy_utils::sugg::Sugg; -use clippy_utils::{get_parent_expr, in_constant, is_integer_const, path_to_local}; +use clippy_utils::{get_parent_expr, higher, in_constant, is_integer_const, path_to_local}; use if_chain::if_chain; use rustc_ast::ast::RangeLimits; use rustc_errors::Applicability; diff --git a/src/tools/clippy/clippy_lints/src/raw_strings.rs b/src/tools/clippy/clippy_lints/src/raw_strings.rs index f45bb1ef3e1..ccabb577cb7 100644 --- a/src/tools/clippy/clippy_lints/src/raw_strings.rs +++ b/src/tools/clippy/clippy_lints/src/raw_strings.rs @@ -1,10 +1,10 @@ -use std::{iter::once, ops::ControlFlow}; +use std::iter::once; +use std::ops::ControlFlow; -use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet}; -use rustc_ast::{ - ast::{Expr, ExprKind}, - token::LitKind, -}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::snippet; +use rustc_ast::ast::{Expr, ExprKind}; +use rustc_ast::token::LitKind; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; use rustc_middle::lint::in_external_macro; @@ -95,7 +95,7 @@ impl EarlyLintPass for RawStrings { // `once` so a raw string ending in hashes is still checked let num = str.as_bytes().iter().chain(once(&0)).try_fold(0u8, |acc, &b| { match b { - b'"' => (following_quote, req) = (true, 1), + b'"' if !following_quote => (following_quote, req) = (true, 1), // I'm a bit surprised the compiler didn't optimize this out, there's no // branch but it still ends up doing an unnecessary comparison, it's: // - cmp r9b,1h diff --git a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs index e82aa3a7b98..8e85c55e756 100644 --- a/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs +++ b/src/tools/clippy/clippy_lints/src/rc_clone_in_vec_init.rs @@ -1,10 +1,9 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::higher::VecArgs; -use clippy_utils::last_path_segment; use clippy_utils::macros::root_macro_call_first_node; -use clippy_utils::paths; use clippy_utils::source::{indent_of, snippet}; use clippy_utils::ty::match_type; +use clippy_utils::{last_path_segment, paths}; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, QPath, TyKind}; use rustc_lint::{LateContext, LateLintPass}; @@ -50,9 +49,15 @@ declare_lint_pass!(RcCloneInVecInit => [RC_CLONE_IN_VEC_INIT]); impl LateLintPass<'_> for RcCloneInVecInit { fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return; }; - let Some(VecArgs::Repeat(elem, len)) = VecArgs::hir(cx, expr) else { return; }; - let Some((symbol, func_span)) = ref_init(cx, elem) else { return; }; + let Some(macro_call) = root_macro_call_first_node(cx, expr) else { + return; + }; + let Some(VecArgs::Repeat(elem, len)) = VecArgs::hir(cx, expr) else { + return; + }; + let Some((symbol, func_span)) = ref_init(cx, elem) else { + return; + }; emit_lint(cx, symbol, macro_call.span, elem, len, func_span); } diff --git a/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs b/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs index fa107858863..2bf90815caa 100644 --- a/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs +++ b/src/tools/clippy/clippy_lints/src/read_zero_byte_vec.rs @@ -1,9 +1,7 @@ -use clippy_utils::{ - diagnostics::{span_lint, span_lint_and_sugg}, - higher::{get_vec_init_kind, VecInitKind}, - source::snippet, - 
visitors::for_each_expr, -}; +use clippy_utils::diagnostics::{span_lint, span_lint_and_sugg}; +use clippy_utils::higher::{get_vec_init_kind, VecInitKind}; +use clippy_utils::source::snippet; +use clippy_utils::visitors::for_each_expr; use core::ops::ControlFlow; use hir::{Expr, ExprKind, Local, PatKind, PathSegment, QPath, StmtKind}; use rustc_errors::Applicability; diff --git a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs index 05e52e6b38b..534b2762ba7 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_async_block.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_async_block.rs @@ -1,15 +1,14 @@ use std::ops::ControlFlow; -use clippy_utils::{ - diagnostics::span_lint_and_sugg, - peel_blocks, - source::{snippet, walk_span_to_context}, - visitors::for_each_expr, -}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::peel_blocks; +use clippy_utils::source::{snippet, walk_span_to_context}; +use clippy_utils::visitors::for_each_expr; use rustc_errors::Applicability; use rustc_hir::{AsyncGeneratorKind, Closure, Expr, ExprKind, GeneratorKind, MatchSource}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::{lint::in_external_macro, ty::UpvarCapture}; +use rustc_middle::lint::in_external_macro; +use rustc_middle::ty::UpvarCapture; use rustc_session::{declare_lint_pass, declare_tool_lint}; declare_clippy_lint! { diff --git a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs index b6ce4ebc28f..4e3efe97b8c 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_closure_call.rs @@ -6,8 +6,7 @@ use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir as hir; use rustc_hir::intravisit as hir_visit; -use rustc_hir::intravisit::Visitor as HirVisitor; -use rustc_hir::intravisit::Visitor; +use rustc_hir::intravisit::{Visitor as HirVisitor, Visitor}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::hir::nested_filter; use rustc_middle::lint::in_external_macro; diff --git a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs index 2b65c08d25d..8277ce724d4 100644 --- a/src/tools/clippy/clippy_lints/src/redundant_slicing.rs +++ b/src/tools/clippy/clippy_lints/src/redundant_slicing.rs @@ -7,9 +7,8 @@ use rustc_ast::util::parser::PREC_PREFIX; use rustc_errors::Applicability; use rustc_hir::{BorrowKind, Expr, ExprKind, LangItem, Mutability}; use rustc_lint::{LateContext, LateLintPass, Lint}; -use rustc_middle::ty::Ty; use rustc_middle::ty::adjustment::{Adjust, AutoBorrow, AutoBorrowMutability}; -use rustc_middle::ty::GenericArg; +use rustc_middle::ty::{GenericArg, Ty}; use rustc_session::{declare_lint_pass, declare_tool_lint}; declare_clippy_lint! 
{ diff --git a/src/tools/clippy/clippy_lints/src/reference.rs b/src/tools/clippy/clippy_lints/src/reference.rs index a642e2da3ba..db870ec4c5b 100644 --- a/src/tools/clippy/clippy_lints/src/reference.rs +++ b/src/tools/clippy/clippy_lints/src/reference.rs @@ -94,7 +94,7 @@ impl EarlyLintPass for DerefAddrOf { DEREF_ADDROF, e.span, "immediately dereferencing a reference", - "try this", + "try", sugg.to_string(), applicability, ); diff --git a/src/tools/clippy/clippy_lints/src/regex.rs b/src/tools/clippy/clippy_lints/src/regex.rs index 674f8bf4c0f..b795e4b15ba 100644 --- a/src/tools/clippy/clippy_lints/src/regex.rs +++ b/src/tools/clippy/clippy_lints/src/regex.rs @@ -3,12 +3,12 @@ use std::fmt::Display; use clippy_utils::consts::{constant, Constant}; use clippy_utils::diagnostics::{span_lint, span_lint_and_help}; use clippy_utils::source::snippet_opt; -use clippy_utils::{match_def_path, paths}; -use if_chain::if_chain; +use clippy_utils::{def_path_def_ids, path_def_id, paths}; use rustc_ast::ast::{LitKind, StrStyle}; +use rustc_hir::def_id::DefIdMap; use rustc_hir::{BorrowKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; -use rustc_session::{declare_lint_pass, declare_tool_lint}; +use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::source_map::{BytePos, Span}; declare_clippy_lint! { @@ -55,26 +55,52 @@ declare_clippy_lint! { "trivial regular expressions" } -declare_lint_pass!(Regex => [INVALID_REGEX, TRIVIAL_REGEX]); +#[derive(Copy, Clone)] +enum RegexKind { + Unicode, + UnicodeSet, + Bytes, + BytesSet, +} + +#[derive(Default)] +pub struct Regex { + definitions: DefIdMap<RegexKind>, +} + +impl_lint_pass!(Regex => [INVALID_REGEX, TRIVIAL_REGEX]); impl<'tcx> LateLintPass<'tcx> for Regex { + fn check_crate(&mut self, cx: &LateContext<'tcx>) { + // We don't use `match_def_path` here because that relies on matching the exact path, which changed + // between regex 1.8 and 1.9 + // + // `def_path_def_ids` will resolve through re-exports but is relatively heavy, so we only perform + // the operation once and store the results + let mut resolve = |path, kind| { + for id in def_path_def_ids(cx, path) { + self.definitions.insert(id, kind); + } + }; + + resolve(&paths::REGEX_NEW, RegexKind::Unicode); + resolve(&paths::REGEX_BUILDER_NEW, RegexKind::Unicode); + resolve(&paths::REGEX_SET_NEW, RegexKind::UnicodeSet); + resolve(&paths::REGEX_BYTES_NEW, RegexKind::Bytes); + resolve(&paths::REGEX_BYTES_BUILDER_NEW, RegexKind::Bytes); + resolve(&paths::REGEX_BYTES_SET_NEW, RegexKind::BytesSet); + } + fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - if_chain! 
{ - if let ExprKind::Call(fun, [arg]) = expr.kind; - if let ExprKind::Path(ref qpath) = fun.kind; - if let Some(def_id) = cx.qpath_res(qpath, fun.hir_id).opt_def_id(); - then { - if match_def_path(cx, def_id, &paths::REGEX_NEW) || - match_def_path(cx, def_id, &paths::REGEX_BUILDER_NEW) { - check_regex(cx, arg, true); - } else if match_def_path(cx, def_id, &paths::REGEX_BYTES_NEW) || - match_def_path(cx, def_id, &paths::REGEX_BYTES_BUILDER_NEW) { - check_regex(cx, arg, false); - } else if match_def_path(cx, def_id, &paths::REGEX_SET_NEW) { - check_set(cx, arg, true); - } else if match_def_path(cx, def_id, &paths::REGEX_BYTES_SET_NEW) { - check_set(cx, arg, false); - } + if let ExprKind::Call(fun, [arg]) = expr.kind + && let Some(def_id) = path_def_id(cx, fun) + && let Some(regex_kind) = self.definitions.get(&def_id) + { + match regex_kind { + RegexKind::Unicode => check_regex(cx, arg, true), + RegexKind::UnicodeSet => check_set(cx, arg, true), + RegexKind::Bytes => check_regex(cx, arg, false), + RegexKind::BytesSet => check_set(cx, arg, false), } } } diff --git a/src/tools/clippy/clippy_lints/src/returns.rs b/src/tools/clippy/clippy_lints/src/returns.rs index 4977df6c83f..2a494ff1fa6 100644 --- a/src/tools/clippy/clippy_lints/src/returns.rs +++ b/src/tools/clippy/clippy_lints/src/returns.rs @@ -1,6 +1,6 @@ use clippy_utils::diagnostics::{span_lint_and_then, span_lint_hir_and_then}; use clippy_utils::source::{snippet_opt, snippet_with_context}; -use clippy_utils::visitors::{for_each_expr, Descend}; +use clippy_utils::visitors::{for_each_expr_with_closures, Descend}; use clippy_utils::{fn_def_id, path_to_local_id, span_find_starting_semi}; use core::ops::ControlFlow; use if_chain::if_chain; @@ -328,7 +328,7 @@ fn emit_return_lint(cx: &LateContext<'_>, ret_span: Span, semi_spans: Vec<Span>, } fn last_statement_borrows<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool { - for_each_expr(expr, |e| { + for_each_expr_with_closures(cx, expr, |e| { if let Some(def_id) = fn_def_id(cx, e) && cx .tcx @@ -337,7 +337,7 @@ fn last_statement_borrows<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) .skip_binder() .output() .walk() - .any(|arg| matches!(arg.unpack(), GenericArgKind::Lifetime(_))) + .any(|arg| matches!(arg.unpack(), GenericArgKind::Lifetime(re) if !re.is_static())) { ControlFlow::Break(()) } else { diff --git a/src/tools/clippy/clippy_lints/src/semicolon_block.rs b/src/tools/clippy/clippy_lints/src/semicolon_block.rs index 419d7991f0e..88f295c72eb 100644 --- a/src/tools/clippy/clippy_lints/src/semicolon_block.rs +++ b/src/tools/clippy/clippy_lints/src/semicolon_block.rs @@ -148,12 +148,18 @@ impl LateLintPass<'_> for SemicolonBlock { expr: None, stmts: [.., stmt], .. - } = block else { return }; + } = block + else { + return; + }; let &Stmt { kind: StmtKind::Semi(expr), span, .. 
- } = stmt else { return }; + } = stmt + else { + return; + }; self.semicolon_outside_block(cx, block, expr, span); }, StmtKind::Semi(Expr { diff --git a/src/tools/clippy/clippy_lints/src/shadow.rs b/src/tools/clippy/clippy_lints/src/shadow.rs index 993f9373d85..78418b22392 100644 --- a/src/tools/clippy/clippy_lints/src/shadow.rs +++ b/src/tools/clippy/clippy_lints/src/shadow.rs @@ -106,7 +106,9 @@ impl_lint_pass!(Shadow => [SHADOW_SAME, SHADOW_REUSE, SHADOW_UNRELATED]); impl<'tcx> LateLintPass<'tcx> for Shadow { fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) { - let PatKind::Binding(_, id, ident, _) = pat.kind else { return }; + let PatKind::Binding(_, id, ident, _) = pat.kind else { + return; + }; if pat.span.desugaring_kind().is_some() || pat.span.from_expansion() { return; diff --git a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs index 1493ad44ee5..3b282dbfa04 100644 --- a/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs +++ b/src/tools/clippy/clippy_lints/src/significant_drop_tightening.rs @@ -1,18 +1,16 @@ -use clippy_utils::{ - diagnostics::span_lint_and_then, - expr_or_init, get_attr, path_to_local, - source::{indent_of, snippet}, -}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::source::{indent_of, snippet}; +use clippy_utils::{expr_or_init, get_attr, path_to_local}; use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap}; use rustc_errors::Applicability; -use rustc_hir::{ - self as hir, - intravisit::{walk_expr, Visitor}, -}; +use rustc_hir::def::{DefKind, Res}; +use rustc_hir::intravisit::{walk_expr, Visitor}; +use rustc_hir::{self as hir}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::ty::{GenericArgKind, Ty, TypeAndMut}; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{symbol::Ident, Span, DUMMY_SP}; +use rustc_span::symbol::Ident; +use rustc_span::{sym, Span, DUMMY_SP}; use std::borrow::Cow; declare_clippy_lint! { @@ -333,7 +331,7 @@ impl<'ap, 'lc, 'others, 'stmt, 'tcx> Visitor<'tcx> for StmtsChecker<'ap, 'lc, 'o } }, hir::StmtKind::Semi(expr) => { - if has_drop(expr, &apa.first_bind_ident) { + if has_drop(expr, &apa.first_bind_ident, self.cx) { apa.has_expensive_expr_after_last_attr = false; apa.last_stmt_span = DUMMY_SP; return; @@ -430,11 +428,11 @@ fn dummy_stmt_expr<'any>(expr: &'any hir::Expr<'any>) -> hir::Stmt<'any> { } } -fn has_drop(expr: &hir::Expr<'_>, first_bind_ident: &Ident) -> bool { +fn has_drop(expr: &hir::Expr<'_>, first_bind_ident: &Ident, lcx: &LateContext<'_>) -> bool { if let hir::ExprKind::Call(fun, args) = expr.kind && let hir::ExprKind::Path(hir::QPath::Resolved(_, fun_path)) = &fun.kind - && let [fun_ident, ..] = fun_path.segments - && fun_ident.ident.name == rustc_span::sym::drop + && let Res::Def(DefKind::Fn, did) = fun_path.res + && lcx.tcx.is_diagnostic_item(sym::mem_drop, did) && let [first_arg, ..] = args && let hir::ExprKind::Path(hir::QPath::Resolved(_, arg_path)) = &first_arg.kind && let [first_arg_ps, .. 
] = arg_path.segments diff --git a/src/tools/clippy/clippy_lints/src/single_call_fn.rs b/src/tools/clippy/clippy_lints/src/single_call_fn.rs index 42753d2e951..7bbe98e0a7f 100644 --- a/src/tools/clippy/clippy_lints/src/single_call_fn.rs +++ b/src/tools/clippy/clippy_lints/src/single_call_fn.rs @@ -2,8 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::{is_from_proc_macro, is_in_test_function}; use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::LocalDefId; -use rustc_hir::intravisit::{walk_expr, Visitor}; -use rustc_hir::{intravisit::FnKind, Body, Expr, ExprKind, FnDecl}; +use rustc_hir::intravisit::{walk_expr, FnKind, Visitor}; +use rustc_hir::{Body, Expr, ExprKind, FnDecl}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::hir::nested_filter::OnlyBodies; use rustc_middle::lint::in_external_macro; diff --git a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs index 5743dd21c28..9c21d70c82c 100644 --- a/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs +++ b/src/tools/clippy/clippy_lints/src/single_component_path_imports.rs @@ -1,11 +1,14 @@ use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg}; use rustc_ast::node_id::{NodeId, NodeMap}; +use rustc_ast::ptr::P; use rustc_ast::visit::{walk_expr, Visitor}; -use rustc_ast::{ptr::P, Crate, Expr, ExprKind, Item, ItemKind, MacroDef, ModKind, Ty, TyKind, UseTreeKind}; +use rustc_ast::{Crate, Expr, ExprKind, Item, ItemKind, MacroDef, ModKind, Ty, TyKind, UseTreeKind}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{edition::Edition, symbol::kw, Span, Symbol}; +use rustc_span::edition::Edition; +use rustc_span::symbol::kw; +use rustc_span::{Span, Symbol}; declare_clippy_lint! 
{ /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs b/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs index dfe8be7a6a6..321c8988988 100644 --- a/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs +++ b/src/tools/clippy/clippy_lints/src/single_range_in_vec_init.rs @@ -1,7 +1,9 @@ -use clippy_utils::{ - diagnostics::span_lint_and_then, get_trait_def_id, higher::VecArgs, macros::root_macro_call_first_node, - source::snippet_opt, ty::implements_trait, -}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::get_trait_def_id; +use clippy_utils::higher::VecArgs; +use clippy_utils::macros::root_macro_call_first_node; +use clippy_utils::source::snippet_opt; +use clippy_utils::ty::implements_trait; use rustc_ast::{LitIntType, LitKind, UintTy}; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind, LangItem, QPath}; diff --git a/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs b/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs index 80c834066bb..bd783b4e005 100644 --- a/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs +++ b/src/tools/clippy/clippy_lints/src/size_of_in_element_count.rs @@ -4,8 +4,7 @@ use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::{match_def_path, paths}; use if_chain::if_chain; -use rustc_hir::BinOpKind; -use rustc_hir::{Expr, ExprKind}; +use rustc_hir::{BinOpKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_middle::ty::{self, Ty, TypeAndMut}; use rustc_session::{declare_lint_pass, declare_tool_lint}; diff --git a/src/tools/clippy/clippy_lints/src/size_of_ref.rs b/src/tools/clippy/clippy_lints/src/size_of_ref.rs index 8abec06c641..89ac8cd8ca9 100644 --- a/src/tools/clippy/clippy_lints/src/size_of_ref.rs +++ b/src/tools/clippy/clippy_lints/src/size_of_ref.rs @@ -1,4 +1,6 @@ -use clippy_utils::{diagnostics::span_lint_and_help, path_def_id, ty::peel_mid_ty_refs}; +use clippy_utils::diagnostics::span_lint_and_help; +use clippy_utils::path_def_id; +use clippy_utils::ty::peel_mid_ty_refs; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; diff --git a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs index a13bc7a5188..f239165276f 100644 --- a/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs +++ b/src/tools/clippy/clippy_lints/src/std_instead_of_core.rs @@ -1,9 +1,11 @@ use clippy_utils::diagnostics::span_lint_and_help; +use rustc_hir::def::Res; use rustc_hir::def_id::DefId; -use rustc_hir::{def::Res, HirId, Path, PathSegment}; +use rustc_hir::{HirId, Path, PathSegment}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; -use rustc_span::{sym, symbol::kw, Span}; +use rustc_span::symbol::kw; +use rustc_span::{sym, Span}; declare_clippy_lint! 
{ /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/strings.rs b/src/tools/clippy/clippy_lints/src/strings.rs index 8658009eba4..58724852b3c 100644 --- a/src/tools/clippy/clippy_lints/src/strings.rs +++ b/src/tools/clippy/clippy_lints/src/strings.rs @@ -1,8 +1,10 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg}; use clippy_utils::source::{snippet, snippet_with_applicability}; use clippy_utils::ty::is_type_lang_item; -use clippy_utils::{get_expr_use_or_unification_node, peel_blocks, SpanlessEq}; -use clippy_utils::{get_parent_expr, is_lint_allowed, is_path_diagnostic_item, method_calls}; +use clippy_utils::{ + get_expr_use_or_unification_node, get_parent_expr, is_lint_allowed, is_path_diagnostic_item, method_calls, + peel_blocks, SpanlessEq, +}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::def_id::DefId; diff --git a/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs b/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs index 2f2e84fa35a..b3db5e9a422 100644 --- a/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs +++ b/src/tools/clippy/clippy_lints/src/strlen_on_c_strings.rs @@ -78,7 +78,7 @@ impl<'tcx> LateLintPass<'tcx> for StrlenOnCStrings { STRLEN_ON_C_STRINGS, span, "using `libc::strlen` on a `CString` or `CStr` value", - "try this", + "try", format!("{val_name}.{method_name}().len()"), app, ); diff --git a/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs b/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs index e5746ca99ca..8be4ec3dc64 100644 --- a/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs +++ b/src/tools/clippy/clippy_lints/src/suspicious_doc_comments.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::{multispan_sugg_with_applicability, span_lint_and_then}; use if_chain::if_chain; -use rustc_ast::{token::CommentKind, AttrKind, AttrStyle, Attribute, Item}; +use rustc_ast::token::CommentKind; +use rustc_ast::{AttrKind, AttrStyle, Attribute, Item}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; diff --git a/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs b/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs index 9c0dc8096d0..6fa49afe0ec 100644 --- a/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs +++ b/src/tools/clippy/clippy_lints/src/suspicious_xor_used_as_pow.rs @@ -1,4 +1,5 @@ -use clippy_utils::{numeric_literal::NumericLiteral, source::snippet_with_context}; +use clippy_utils::numeric_literal::NumericLiteral; +use clippy_utils::source::snippet_with_context; use rustc_errors::Applicability; use rustc_hir::{BinOpKind, Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; diff --git a/src/tools/clippy/clippy_lints/src/swap.rs b/src/tools/clippy/clippy_lints/src/swap.rs index f7eef03d1d4..98158ed0aee 100644 --- a/src/tools/clippy/clippy_lints/src/swap.rs +++ b/src/tools/clippy/clippy_lints/src/swap.rs @@ -11,8 +11,8 @@ use rustc_middle::lint::in_external_macro; use rustc_middle::ty; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::source_map::Spanned; -use rustc_span::SyntaxContext; -use rustc_span::{sym, symbol::Ident, Span}; +use rustc_span::symbol::Ident; +use rustc_span::{sym, Span, SyntaxContext}; declare_clippy_lint! 
{ /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs b/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs index 0a0a77082e0..b356666d852 100644 --- a/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs +++ b/src/tools/clippy/clippy_lints/src/tests_outside_test_module.rs @@ -1,8 +1,11 @@ -use clippy_utils::{diagnostics::span_lint_and_note, is_in_cfg_test, is_in_test_function}; -use rustc_hir::{intravisit::FnKind, Body, FnDecl}; +use clippy_utils::diagnostics::span_lint_and_note; +use clippy_utils::{is_in_cfg_test, is_in_test_function}; +use rustc_hir::intravisit::FnKind; +use rustc_hir::{Body, FnDecl}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use rustc_span::{def_id::LocalDefId, Span}; +use rustc_span::def_id::LocalDefId; +use rustc_span::Span; declare_clippy_lint! { /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs index 2512500a6be..f1b703fde0e 100644 --- a/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs +++ b/src/tools/clippy/clippy_lints/src/to_digit_is_some.rs @@ -82,7 +82,7 @@ impl<'tcx> LateLintPass<'tcx> for ToDigitIsSome { TO_DIGIT_IS_SOME, expr.span, "use of `.to_digit(..).is_some()`", - "try this", + "try", if is_method_call { format!("{char_arg_snip}.is_digit({radix_snip})") } else { diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs index 5503653253c..c0d0d2b93dc 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_int_to_non_zero.rs @@ -4,10 +4,8 @@ use clippy_utils::sugg; use rustc_errors::Applicability; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::{ - query::Key, - ty::{self, Ty}, -}; +use rustc_middle::query::Key; +use rustc_middle::ty::{self, Ty}; use rustc_span::symbol::sym; /// Checks for `transmute_int_to_non_zero` lint. 
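For context, a minimal sketch (not part of the patch) of the call pattern `to_digit_is_some` flags and the replacement it builds in the hunk above; the hunk itself only changes the suggestion label from "try this" to "try". The function name here is hypothetical:

    fn is_decimal_digit(c: char) -> bool {
        // Flagged by clippy::to_digit_is_some as "use of `.to_digit(..).is_some()`"
        c.to_digit(10).is_some()
        // Suggested replacement (now labelled "try"): c.is_digit(10)
    }
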
diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs index 857d2ad8258..4ae4359eea0 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_ptr_to_ptr.rs @@ -24,7 +24,7 @@ pub(super) fn check<'tcx>( "transmute from a pointer to a pointer", |diag| { if let Some(arg) = sugg::Sugg::hir_opt(cx, arg) { - let sugg = arg.as_ty(Ty::new_ptr(cx.tcx,*to_ty)); + let sugg = arg.as_ty(Ty::new_ptr(cx.tcx, *to_ty)); diag.span_suggestion(e.span, "try", sugg, Applicability::Unspecified); } }, diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs index 9e5d7662426..c61eb0a9311 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/transmute_undefined_repr.rs @@ -3,8 +3,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::ty::is_c_void; use rustc_hir::Expr; use rustc_lint::LateContext; -use rustc_middle::ty::GenericArgsRef; -use rustc_middle::ty::{self, IntTy, Ty, TypeAndMut, UintTy}; +use rustc_middle::ty::{self, GenericArgsRef, IntTy, Ty, TypeAndMut, UintTy}; #[expect(clippy::too_many_lines)] pub(super) fn check<'tcx>( diff --git a/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs b/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs index 85cd74f23ef..513a913f56a 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/transmutes_expressible_as_ptr_casts.rs @@ -6,7 +6,8 @@ use rustc_ast::ExprPrecedence; use rustc_errors::Applicability; use rustc_hir::{Expr, Node}; use rustc_lint::LateContext; -use rustc_middle::ty::{cast::CastKind, Ty}; +use rustc_middle::ty::cast::CastKind; +use rustc_middle::ty::Ty; /// Checks for `transmutes_expressible_as_ptr_casts` lint. /// Returns `true` if it's triggered, otherwise returns `false`. 
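Likewise, a minimal sketch (not part of the patch) of the code `transmute_ptr_to_ptr` reports as "transmute from a pointer to a pointer"; the suggestion assembled via `arg.as_ty(Ty::new_ptr(cx.tcx, *to_ty))` in the hunk above amounts to a plain `as` pointer cast. The types and function name below are made up for illustration:

    fn widen(p: *const u8) -> *const u32 {
        // Flagged: unsafe { std::mem::transmute::<*const u8, *const u32>(p) }
        // Suggested (labelled "try"): a raw-pointer cast instead of transmute
        p as *const u32
    }
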
diff --git a/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs b/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs index b6615410e25..088c8fda87a 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/useless_transmute.rs @@ -43,7 +43,7 @@ pub(super) fn check<'tcx>( let sugg = if *ptr_ty == rty_and_mut { arg.as_ty(to_ty) } else { - arg.as_ty(Ty::new_ptr(cx.tcx,rty_and_mut)).as_ty(to_ty) + arg.as_ty(Ty::new_ptr(cx.tcx, rty_and_mut)).as_ty(to_ty) }; diag.span_suggestion(e.span, "try", sugg, Applicability::Unspecified); diff --git a/src/tools/clippy/clippy_lints/src/transmute/utils.rs b/src/tools/clippy/clippy_lints/src/transmute/utils.rs index 62efd13b8d9..1cf6cf8548a 100644 --- a/src/tools/clippy/clippy_lints/src/transmute/utils.rs +++ b/src/tools/clippy/clippy_lints/src/transmute/utils.rs @@ -2,7 +2,8 @@ use rustc_hir as hir; use rustc_hir::Expr; use rustc_hir_typeck::{cast, FnCtxt, Inherited}; use rustc_lint::LateContext; -use rustc_middle::ty::{cast::CastKind, Ty}; +use rustc_middle::ty::cast::CastKind; +use rustc_middle::ty::Ty; use rustc_span::DUMMY_SP; // check if the component types of the transmuted collection and the result have different ABI, diff --git a/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs b/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs index bd983306508..2e00ed042a8 100644 --- a/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs +++ b/src/tools/clippy/clippy_lints/src/tuple_array_conversions.rs @@ -1,13 +1,11 @@ -use clippy_utils::{ - diagnostics::span_lint_and_help, - is_from_proc_macro, - msrvs::{self, Msrv}, - path_to_local, -}; +use clippy_utils::diagnostics::span_lint_and_help; +use clippy_utils::msrvs::{self, Msrv}; +use clippy_utils::{is_from_proc_macro, path_to_local}; use rustc_ast::LitKind; use rustc_hir::{Expr, ExprKind, HirId, Node, Pat}; use rustc_lint::{LateContext, LateLintPass, LintContext}; -use rustc_middle::{lint::in_external_macro, ty}; +use rustc_middle::lint::in_external_macro; +use rustc_middle::ty; use rustc_session::{declare_tool_lint, impl_lint_pass}; use std::iter::once; @@ -16,8 +14,8 @@ declare_clippy_lint! { /// Checks for tuple<=>array conversions that are not done with `.into()`. /// /// ### Why is this bad? - /// It's unnecessary complexity. `.into()` works for tuples<=>arrays at or below 12 elements and - /// conveys the intent a lot better, while also leaving less room for hard to spot bugs! + /// It may be unnecessary complexity. `.into()` works for converting tuples + /// <=> arrays of up to 12 elements and may convey intent more clearly. /// /// ### Example /// ```rust,ignore @@ -31,7 +29,7 @@ declare_clippy_lint! 
{ /// ``` #[clippy::version = "1.72.0"] pub TUPLE_ARRAY_CONVERSIONS, - complexity, + pedantic, "checks for tuple<=>array conversions that are not done with `.into()`" } impl_lint_pass!(TupleArrayConversions => [TUPLE_ARRAY_CONVERSIONS]); diff --git a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs index acdf5471069..306ca5724da 100644 --- a/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs +++ b/src/tools/clippy/clippy_lints/src/types/borrowed_box.rs @@ -2,8 +2,9 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet; use if_chain::if_chain; use rustc_errors::Applicability; -use rustc_hir::{self as hir, GenericArg, GenericBounds, GenericParamKind}; -use rustc_hir::{HirId, Lifetime, MutTy, Mutability, Node, QPath, TyKind}; +use rustc_hir::{ + self as hir, GenericArg, GenericBounds, GenericParamKind, HirId, Lifetime, MutTy, Mutability, Node, QPath, TyKind, +}; use rustc_lint::LateContext; use rustc_span::sym; diff --git a/src/tools/clippy/clippy_lints/src/types/box_collection.rs b/src/tools/clippy/clippy_lints/src/types/box_collection.rs index 43665a922d4..4a5a94f2630 100644 --- a/src/tools/clippy/clippy_lints/src/types/box_collection.rs +++ b/src/tools/clippy/clippy_lints/src/types/box_collection.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::{path_def_id, qpath_generic_tys}; -use rustc_hir::{self as hir, def_id::DefId, QPath}; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, QPath}; use rustc_lint::LateContext; use rustc_span::{sym, Symbol}; diff --git a/src/tools/clippy/clippy_lints/src/types/linked_list.rs b/src/tools/clippy/clippy_lints/src/types/linked_list.rs index 5fb708741e5..fba804bbe08 100644 --- a/src/tools/clippy/clippy_lints/src/types/linked_list.rs +++ b/src/tools/clippy/clippy_lints/src/types/linked_list.rs @@ -1,5 +1,6 @@ use clippy_utils::diagnostics::span_lint_and_help; -use rustc_hir::{self as hir, def_id::DefId}; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir}; use rustc_lint::LateContext; use rustc_span::symbol::sym; diff --git a/src/tools/clippy/clippy_lints/src/types/option_option.rs b/src/tools/clippy/clippy_lints/src/types/option_option.rs index 8767e3c30a6..60622903af1 100644 --- a/src/tools/clippy/clippy_lints/src/types/option_option.rs +++ b/src/tools/clippy/clippy_lints/src/types/option_option.rs @@ -1,7 +1,8 @@ use clippy_utils::diagnostics::span_lint; use clippy_utils::{path_def_id, qpath_generic_tys}; use if_chain::if_chain; -use rustc_hir::{self as hir, def_id::DefId, QPath}; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, QPath}; use rustc_lint::LateContext; use rustc_span::symbol::sym; diff --git a/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs b/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs index 855137b14d8..f6c2d8d5a5e 100644 --- a/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs +++ b/src/tools/clippy/clippy_lints/src/types/rc_buffer.rs @@ -2,7 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::snippet_with_applicability; use clippy_utils::{path_def_id, qpath_generic_tys}; use rustc_errors::Applicability; -use rustc_hir::{self as hir, def_id::DefId, QPath, TyKind}; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, QPath, TyKind}; use rustc_lint::LateContext; use rustc_span::symbol::sym; @@ -22,7 +23,9 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_ app, ); } else { - let 
Some(ty) = qpath_generic_tys(qpath).next() else { return false }; + let Some(ty) = qpath_generic_tys(qpath).next() else { + return false; + }; let Some(id) = path_def_id(cx, ty) else { return false }; if !cx.tcx.is_diagnostic_item(sym::Vec, id) { return false; diff --git a/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs b/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs index a75972cf3dd..a616c3e4ea8 100644 --- a/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs +++ b/src/tools/clippy/clippy_lints/src/types/rc_mutex.rs @@ -1,7 +1,8 @@ use clippy_utils::diagnostics::span_lint_and_help; use clippy_utils::{path_def_id, qpath_generic_tys}; use if_chain::if_chain; -use rustc_hir::{self as hir, def_id::DefId, QPath}; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, QPath}; use rustc_lint::LateContext; use rustc_span::symbol::sym; diff --git a/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs b/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs index f7adc9d3555..5a986254fad 100644 --- a/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs +++ b/src/tools/clippy/clippy_lints/src/types/redundant_allocation.rs @@ -2,7 +2,8 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::{snippet, snippet_with_applicability}; use clippy_utils::{path_def_id, qpath_generic_tys}; use rustc_errors::Applicability; -use rustc_hir::{self as hir, def_id::DefId, QPath, TyKind}; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, QPath, TyKind}; use rustc_hir_analysis::hir_ty_to_ty; use rustc_lint::LateContext; use rustc_middle::ty::TypeVisitableExt; @@ -39,7 +40,9 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_ return true; } - let Some(ty) = qpath_generic_tys(qpath).next() else { return false }; + let Some(ty) = qpath_generic_tys(qpath).next() else { + return false; + }; let Some(id) = path_def_id(cx, ty) else { return false }; let (inner_sym, ty) = match cx.tcx.get_diagnostic_name(id) { Some(sym::Arc) => ("Arc", ty), @@ -49,7 +52,7 @@ pub(super) fn check(cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>, qpath: &QPath<'_ }; let TyKind::Path(inner_qpath) = &ty.kind else { - return false + return false; }; let inner_span = match qpath_generic_tys(inner_qpath).next() { Some(hir_ty) => { diff --git a/src/tools/clippy/clippy_lints/src/types/vec_box.rs b/src/tools/clippy/clippy_lints/src/types/vec_box.rs index d3062f3d2e3..decc183ad96 100644 --- a/src/tools/clippy/clippy_lints/src/types/vec_box.rs +++ b/src/tools/clippy/clippy_lints/src/types/vec_box.rs @@ -3,7 +3,8 @@ use clippy_utils::last_path_segment; use clippy_utils::source::snippet; use if_chain::if_chain; use rustc_errors::Applicability; -use rustc_hir::{self as hir, def_id::DefId, GenericArg, QPath, TyKind}; +use rustc_hir::def_id::DefId; +use rustc_hir::{self as hir, GenericArg, QPath, TyKind}; use rustc_hir_analysis::hir_ty_to_ty; use rustc_lint::LateContext; use rustc_middle::ty::layout::LayoutOf; diff --git a/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs b/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs index a9deee96784..f2ef602012f 100644 --- a/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs +++ b/src/tools/clippy/clippy_lints/src/undocumented_unsafe_blocks.rs @@ -158,11 +158,12 @@ impl<'tcx> LateLintPass<'tcx> for UndocumentedUnsafeBlocks { } fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &hir::Stmt<'tcx>) { - let ( - hir::StmtKind::Local(&hir::Local { init: Some(expr), .. 
}) - | hir::StmtKind::Expr(expr) - | hir::StmtKind::Semi(expr) - ) = stmt.kind else { return }; + let (hir::StmtKind::Local(&hir::Local { init: Some(expr), .. }) + | hir::StmtKind::Expr(expr) + | hir::StmtKind::Semi(expr)) = stmt.kind + else { + return; + }; if !is_lint_allowed(cx, UNNECESSARY_SAFETY_COMMENT, stmt.hir_id) && !in_external_macro(cx.tcx.sess, stmt.span) && let HasSafetyComment::Yes(pos) = stmt_has_safety_comment(cx, stmt.span, stmt.hir_id) diff --git a/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs b/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs index 226495dcbda..d4342ec5169 100644 --- a/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs +++ b/src/tools/clippy/clippy_lints/src/unit_types/unit_cmp.rs @@ -14,7 +14,9 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) { "assert_ne" | "debug_assert_ne" => "fail", _ => return, }; - let Some ((left, _, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { return }; + let Some((left, _, _)) = find_assert_eq_args(cx, expr, macro_call.expn) else { + return; + }; if !cx.typeck_results().expr_ty(left).is_unit() { return; } diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs b/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs index e7449639f3a..ed2ef506381 100644 --- a/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs +++ b/src/tools/clippy/clippy_lints/src/unnecessary_box_returns.rs @@ -1,6 +1,8 @@ -use clippy_utils::{diagnostics::span_lint_and_then, ty::approx_ty_size}; +use clippy_utils::diagnostics::span_lint_and_then; +use clippy_utils::ty::approx_ty_size; use rustc_errors::Applicability; -use rustc_hir::{def_id::LocalDefId, FnDecl, FnRetTy, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind}; +use rustc_hir::def_id::LocalDefId; +use rustc_hir::{FnDecl, FnRetTy, ImplItemKind, Item, ItemKind, Node, TraitItem, TraitItemKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::Symbol; @@ -63,7 +65,9 @@ impl UnnecessaryBoxReturns { return; } - let FnRetTy::Return(return_ty_hir) = &decl.output else { return }; + let FnRetTy::Return(return_ty_hir) = &decl.output else { + return; + }; let return_ty = cx .tcx @@ -103,25 +107,33 @@ impl UnnecessaryBoxReturns { impl LateLintPass<'_> for UnnecessaryBoxReturns { fn check_trait_item(&mut self, cx: &LateContext<'_>, item: &TraitItem<'_>) { - let TraitItemKind::Fn(signature, _) = &item.kind else { return }; + let TraitItemKind::Fn(signature, _) = &item.kind else { + return; + }; self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name); } fn check_impl_item(&mut self, cx: &LateContext<'_>, item: &rustc_hir::ImplItem<'_>) { // Ignore implementations of traits, because the lint should be on the // trait, not on the implementation of it. - let Node::Item(parent) = cx.tcx.hir().get_parent(item.hir_id()) else { return }; + let Node::Item(parent) = cx.tcx.hir().get_parent(item.hir_id()) else { + return; + }; let ItemKind::Impl(parent) = parent.kind else { return }; if parent.of_trait.is_some() { return; } - let ImplItemKind::Fn(signature, ..) = &item.kind else { return }; + let ImplItemKind::Fn(signature, ..) = &item.kind else { + return; + }; self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name); } fn check_item(&mut self, cx: &LateContext<'_>, item: &Item<'_>) { - let ItemKind::Fn(signature, ..) = &item.kind else { return }; + let ItemKind::Fn(signature, ..) 
= &item.kind else { + return; + }; self.check_fn_item(cx, signature.decl, item.owner_id.def_id, item.ident.name); } } diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs b/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs index 6e802794f5a..57a4a429e12 100644 --- a/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs +++ b/src/tools/clippy/clippy_lints/src/unnecessary_owned_empty_strings.rs @@ -1,4 +1,5 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, ty::is_type_lang_item}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::ty::is_type_lang_item; use clippy_utils::{match_def_path, paths}; use if_chain::if_chain; use rustc_ast::ast::LitKind; diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs index 084b031982d..f4111186c64 100644 --- a/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs +++ b/src/tools/clippy/clippy_lints/src/unnecessary_struct_initialization.rs @@ -1,4 +1,7 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, get_parent_expr, path_to_local, source::snippet, ty::is_copy}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::snippet; +use clippy_utils::ty::is_copy; +use clippy_utils::{get_parent_expr, path_to_local}; use rustc_hir::{BindingAnnotation, Expr, ExprKind, Node, PatKind, UnOp}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; diff --git a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs index 5073eb02bd8..f34f8d0e353 100644 --- a/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs +++ b/src/tools/clippy/clippy_lints/src/unnecessary_wraps.rs @@ -1,6 +1,7 @@ use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::source::snippet; -use clippy_utils::{contains_return, is_res_lang_ctor, path_res, return_ty, visitors::find_all_ret_expressions}; +use clippy_utils::visitors::find_all_ret_expressions; +use clippy_utils::{contains_return, is_res_lang_ctor, path_res, return_ty}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::intravisit::FnKind; diff --git a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs index a57bf7ee822..9cf59577229 100644 --- a/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs +++ b/src/tools/clippy/clippy_lints/src/unnested_or_patterns.rs @@ -6,7 +6,8 @@ use clippy_utils::msrvs::{self, Msrv}; use clippy_utils::over; use rustc_ast::mut_visit::*; use rustc_ast::ptr::P; -use rustc_ast::{self as ast, Mutability, Pat, PatKind, PatKind::*, DUMMY_NODE_ID}; +use rustc_ast::PatKind::*; +use rustc_ast::{self as ast, Mutability, Pat, PatKind, DUMMY_NODE_ID}; use rustc_ast_pretty::pprust; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass}; @@ -162,9 +163,7 @@ fn unnest_or_patterns(pat: &mut P<Pat>) -> bool { noop_visit_pat(p, self); // Don't have an or-pattern? Just quit early on. - let Or(alternatives) = &mut p.kind else { - return - }; + let Or(alternatives) = &mut p.kind else { return }; // Collapse or-patterns directly nested in or-patterns. 
let mut idx = 0; diff --git a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs index 0e526c216be..0fcb62017c6 100644 --- a/src/tools/clippy/clippy_lints/src/unused_io_amount.rs +++ b/src/tools/clippy/clippy_lints/src/unused_io_amount.rs @@ -48,7 +48,7 @@ declare_lint_pass!(UnusedIoAmount => [UNUSED_IO_AMOUNT]); impl<'tcx> LateLintPass<'tcx> for UnusedIoAmount { fn check_stmt(&mut self, cx: &LateContext<'_>, s: &hir::Stmt<'_>) { let (hir::StmtKind::Semi(expr) | hir::StmtKind::Expr(expr)) = s.kind else { - return + return; }; match expr.kind { diff --git a/src/tools/clippy/clippy_lints/src/unused_unit.rs b/src/tools/clippy/clippy_lints/src/unused_unit.rs index cad8da18c2f..95e74718d80 100644 --- a/src/tools/clippy/clippy_lints/src/unused_unit.rs +++ b/src/tools/clippy/clippy_lints/src/unused_unit.rs @@ -1,7 +1,8 @@ use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::{position_before_rarrow, snippet_opt}; use if_chain::if_chain; -use rustc_ast::{ast, visit::FnKind, ClosureBinder}; +use rustc_ast::visit::FnKind; +use rustc_ast::{ast, ClosureBinder}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; diff --git a/src/tools/clippy/clippy_lints/src/unwrap.rs b/src/tools/clippy/clippy_lints/src/unwrap.rs index 377d3fb6f4e..c99b0290c0c 100644 --- a/src/tools/clippy/clippy_lints/src/unwrap.rs +++ b/src/tools/clippy/clippy_lints/src/unwrap.rs @@ -1,7 +1,7 @@ use clippy_utils::diagnostics::span_lint_hir_and_then; -use clippy_utils::higher; use clippy_utils::ty::is_type_diagnostic_item; -use clippy_utils::{path_to_local, usage::is_potentially_mutated}; +use clippy_utils::usage::is_potentially_mutated; +use clippy_utils::{higher, path_to_local}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::intravisit::{walk_expr, walk_fn, FnKind, Visitor}; diff --git a/src/tools/clippy/clippy_lints/src/use_self.rs b/src/tools/clippy/clippy_lints/src/use_self.rs index 81dc426b389..50231d930d0 100644 --- a/src/tools/clippy/clippy_lints/src/use_self.rs +++ b/src/tools/clippy/clippy_lints/src/use_self.rs @@ -5,13 +5,12 @@ use clippy_utils::ty::same_type_and_consts; use if_chain::if_chain; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; +use rustc_hir::def::{CtorOf, DefKind, Res}; +use rustc_hir::def_id::LocalDefId; +use rustc_hir::intravisit::{walk_inf, walk_ty, Visitor}; use rustc_hir::{ - self as hir, - def::{CtorOf, DefKind, Res}, - def_id::LocalDefId, - intravisit::{walk_inf, walk_ty, Visitor}, - Expr, ExprKind, FnRetTy, FnSig, GenericArg, GenericArgsParentheses, GenericParam, GenericParamKind, HirId, Impl, - ImplItemKind, Item, ItemKind, Pat, PatKind, Path, QPath, Ty, TyKind, + self as hir, Expr, ExprKind, FnRetTy, FnSig, GenericArg, GenericArgsParentheses, GenericParam, GenericParamKind, + HirId, Impl, ImplItemKind, Item, ItemKind, Pat, PatKind, Path, QPath, Ty, TyKind, }; use rustc_hir_analysis::hir_ty_to_ty; use rustc_lint::{LateContext, LateLintPass}; diff --git a/src/tools/clippy/clippy_lints/src/useless_conversion.rs b/src/tools/clippy/clippy_lints/src/useless_conversion.rs index 98f7bfb4ba1..92b694d3076 100644 --- a/src/tools/clippy/clippy_lints/src/useless_conversion.rs +++ b/src/tools/clippy/clippy_lints/src/useless_conversion.rs @@ -1,9 +1,8 @@ use clippy_utils::diagnostics::{span_lint_and_help, span_lint_and_sugg, span_lint_and_then}; -use clippy_utils::is_ty_alias; use 
clippy_utils::source::{snippet, snippet_with_applicability, snippet_with_context}; use clippy_utils::sugg::Sugg; use clippy_utils::ty::{is_copy, is_type_diagnostic_item, same_type_and_consts}; -use clippy_utils::{get_parent_expr, is_trait_method, match_def_path, path_to_local, paths}; +use clippy_utils::{get_parent_expr, is_trait_method, is_ty_alias, match_def_path, path_to_local, paths}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::def_id::DefId; @@ -116,7 +115,7 @@ impl<'tcx> LateLintPass<'tcx> for UselessConversion { match e.kind { ExprKind::Match(_, arms, MatchSource::TryDesugar) => { let (ExprKind::Ret(Some(e)) | ExprKind::Break(_, Some(e))) = arms[0].body.kind else { - return + return; }; if let ExprKind::Call(_, [arg, ..]) = e.kind { self.try_desugar_arm.push(arg.hir_id); diff --git a/src/tools/clippy/clippy_lints/src/utils/conf.rs b/src/tools/clippy/clippy_lints/src/utils/conf.rs index f1d05c7529b..76654bfe536 100644 --- a/src/tools/clippy/clippy_lints/src/utils/conf.rs +++ b/src/tools/clippy/clippy_lints/src/utils/conf.rs @@ -319,7 +319,7 @@ define_Conf! { /// Lint: DISALLOWED_NAMES. /// /// The list of disallowed names to lint about. NB: `bar` is not here since it has legitimate uses. The value - /// `".."` can be used as part of the list to indicate, that the configured values should be appended to the + /// `".."` can be used as part of the list to indicate that the configured values should be appended to the /// default configuration of Clippy. By default, any configuration will replace the default value. (disallowed_names: Vec<String> = super::DEFAULT_DISALLOWED_NAMES.iter().map(ToString::to_string).collect()), /// Lint: SEMICOLON_INSIDE_BLOCK. diff --git a/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs b/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs index 09fcb82c37c..6d3493523e6 100644 --- a/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs +++ b/src/tools/clippy/clippy_lints/src/utils/format_args_collector.rs @@ -71,7 +71,9 @@ fn has_span_from_proc_macro(cx: &EarlyContext<'_>, args: &FormatArgs) -> bool { for between_span in between_spans { let mut seen_comma = false; - let Some(snippet) = snippet_opt(cx, between_span) else { return true }; + let Some(snippet) = snippet_opt(cx, between_span) else { + return true; + }; for token in tokenize(&snippet) { match token.kind { TokenKind::LineComment { .. } | TokenKind::BlockComment { .. 
} | TokenKind::Whitespace => {}, diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs index 883a5c08e5c..fe2f12fe833 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs +++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/if_chain_style.rs @@ -46,7 +46,9 @@ impl<'tcx> LateLintPass<'tcx> for IfChainStyle { } else { return; }; - let ExprKind::Block(then_block, _) = then.kind else { return }; + let ExprKind::Block(then_block, _) = then.kind else { + return; + }; let if_chain_span = is_expn_of(expr.span, "if_chain"); if !els { check_nested_if_chains(cx, expr, then_block, if_chain_span); diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs index 9afe02c1e47..94b56304bca 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs +++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/invalid_paths.rs @@ -7,7 +7,8 @@ use rustc_hir::def::DefKind; use rustc_hir::Item; use rustc_hir_analysis::hir_ty_to_ty; use rustc_lint::{LateContext, LateLintPass}; -use rustc_middle::ty::{self, fast_reject::SimplifiedType, FloatTy}; +use rustc_middle::ty::fast_reject::SimplifiedType; +use rustc_middle::ty::{self, FloatTy}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::symbol::Symbol; diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs index f718207654f..87380f14f9a 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs +++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/lint_without_lint_pass.rs @@ -3,10 +3,8 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_help}; use clippy_utils::macros::root_macro_call_first_node; use clippy_utils::{is_lint_allowed, match_def_path, paths}; use if_chain::if_chain; -use rustc_ast as ast; use rustc_ast::ast::LitKind; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; -use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; use rustc_hir::hir_id::CRATE_HIR_ID; use rustc_hir::intravisit::Visitor; @@ -18,6 +16,7 @@ use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::source_map::Spanned; use rustc_span::symbol::Symbol; use rustc_span::{sym, Span}; +use {rustc_ast as ast, rustc_hir as hir}; declare_clippy_lint! { /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs index 107a62806a8..f49c3fadb07 100644 --- a/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs +++ b/src/tools/clippy/clippy_lints/src/utils/internal_lints/metadata_collector.rs @@ -8,11 +8,8 @@ //! 
a simple mistake) use crate::renamed_lints::RENAMED_LINTS; -use crate::utils::{ - collect_configs, - internal_lints::lint_without_lint_pass::{extract_clippy_version_value, is_lint_ref_type}, - ClippyConfiguration, -}; +use crate::utils::internal_lints::lint_without_lint_pass::{extract_clippy_version_value, is_lint_ref_type}; +use crate::utils::{collect_configs, ClippyConfiguration}; use clippy_utils::diagnostics::span_lint; use clippy_utils::ty::{match_type, walk_ptrs_ty_depth}; @@ -21,22 +18,22 @@ use if_chain::if_chain; use itertools::Itertools; use rustc_ast as ast; use rustc_data_structures::fx::FxHashMap; -use rustc_hir::{ - self as hir, def::DefKind, intravisit, intravisit::Visitor, Closure, ExprKind, Item, ItemKind, Mutability, QPath, -}; +use rustc_hir::def::DefKind; +use rustc_hir::intravisit::Visitor; +use rustc_hir::{self as hir, intravisit, Closure, ExprKind, Item, ItemKind, Mutability, QPath}; use rustc_lint::{CheckLintNameResult, LateContext, LateLintPass, LintContext, LintId}; use rustc_middle::hir::nested_filter; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::symbol::Ident; use rustc_span::{sym, Loc, Span, Symbol}; -use serde::{ser::SerializeStruct, Serialize, Serializer}; +use serde::ser::SerializeStruct; +use serde::{Serialize, Serializer}; use std::collections::{BTreeSet, BinaryHeap}; use std::fmt; use std::fmt::Write as _; use std::fs::{self, OpenOptions}; use std::io::prelude::*; -use std::path::Path; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use std::process::Command; /// This is the json output file of the lint collector. diff --git a/src/tools/clippy/clippy_lints/src/vec.rs b/src/tools/clippy/clippy_lints/src/vec.rs index d1bf292d711..fc17e7c6d5a 100644 --- a/src/tools/clippy/clippy_lints/src/vec.rs +++ b/src/tools/clippy/clippy_lints/src/vec.rs @@ -154,6 +154,10 @@ impl UselessVec { span: Span, suggest_slice: SuggestedType, ) { + if span.from_expansion() { + return; + } + let mut applicability = Applicability::MachineApplicable; let snippet = match *vec_args { @@ -181,7 +185,7 @@ impl UselessVec { if args.len() as u64 * size_of(cx, last) > self.too_large_for_stack { return; } - let span = args[0].span.to(last.span); + let span = args[0].span.source_callsite().to(last.span.source_callsite()); let args = snippet_with_applicability(cx, span, "..", &mut applicability); match suggest_slice { diff --git a/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs b/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs index bd5be0c9d7e..1d4fc24eb6c 100644 --- a/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs +++ b/src/tools/clippy/clippy_lints/src/vec_init_then_push.rs @@ -74,7 +74,7 @@ impl VecPushSearcher { let mut needs_mut = false; let res = for_each_local_use_after_expr(cx, self.local_id, self.last_push_expr, |e| { let Some(parent) = get_parent_expr(cx, e) else { - return ControlFlow::Continue(()) + return ControlFlow::Continue(()); }; let adjusted_ty = cx.typeck_results().expr_ty_adjusted(e); let adjusted_mut = adjusted_ty.ref_mutability().unwrap_or(Mutability::Not); diff --git a/src/tools/clippy/clippy_lints/src/visibility.rs b/src/tools/clippy/clippy_lints/src/visibility.rs index 43248bccc13..49637652001 100644 --- a/src/tools/clippy/clippy_lints/src/visibility.rs +++ b/src/tools/clippy/clippy_lints/src/visibility.rs @@ -1,10 +1,12 @@ -use clippy_utils::{diagnostics::span_lint_and_sugg, source::snippet_opt}; +use clippy_utils::diagnostics::span_lint_and_sugg; +use clippy_utils::source::snippet_opt; use 
rustc_ast::ast::{Item, VisibilityKind}; use rustc_errors::Applicability; use rustc_lint::{EarlyContext, EarlyLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_session::{declare_lint_pass, declare_tool_lint}; -use rustc_span::{symbol::kw, Span}; +use rustc_span::symbol::kw; +use rustc_span::Span; declare_clippy_lint! { /// ### What it does diff --git a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs index 2a3d86988bb..d09d02a7dfd 100644 --- a/src/tools/clippy/clippy_lints/src/wildcard_imports.rs +++ b/src/tools/clippy/clippy_lints/src/wildcard_imports.rs @@ -3,10 +3,8 @@ use clippy_utils::is_test_module_or_function; use clippy_utils::source::{snippet, snippet_with_applicability}; use if_chain::if_chain; use rustc_errors::Applicability; -use rustc_hir::{ - def::{DefKind, Res}, - Item, ItemKind, PathSegment, UseKind, -}; +use rustc_hir::def::{DefKind, Res}; +use rustc_hir::{Item, ItemKind, PathSegment, UseKind}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::ty; use rustc_session::{declare_tool_lint, impl_lint_pass}; diff --git a/src/tools/clippy/clippy_lints/src/write.rs b/src/tools/clippy/clippy_lints/src/write.rs index f194dc5d4b2..a9957b18a53 100644 --- a/src/tools/clippy/clippy_lints/src/write.rs +++ b/src/tools/clippy/clippy_lints/src/write.rs @@ -272,9 +272,15 @@ impl<'tcx> LateLintPass<'tcx> for Write { } fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { - let Some(macro_call) = root_macro_call_first_node(cx, expr) else { return }; - let Some(diag_name) = cx.tcx.get_diagnostic_name(macro_call.def_id) else { return }; - let Some(name) = diag_name.as_str().strip_suffix("_macro") else { return }; + let Some(macro_call) = root_macro_call_first_node(cx, expr) else { + return; + }; + let Some(diag_name) = cx.tcx.get_diagnostic_name(macro_call.def_id) else { + return; + }; + let Some(name) = diag_name.as_str().strip_suffix("_macro") else { + return; + }; let is_build_script = cx .sess() @@ -343,7 +349,9 @@ fn is_debug_impl(cx: &LateContext<'_>, item: &Item<'_>) -> bool { } fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &MacroCall, name: &str) { - let Some(FormatArgsPiece::Literal(last)) = format_args.template.last() else { return }; + let Some(FormatArgsPiece::Literal(last)) = format_args.template.last() else { + return; + }; let count_vertical_whitespace = || { format_args @@ -379,7 +387,9 @@ fn check_newline(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call: &Ma &format!("using `{name}!()` with a format string that ends in a single newline"), |diag| { let name_span = cx.sess().source_map().span_until_char(macro_call.span, '!'); - let Some(format_snippet) = snippet_opt(cx, format_string_span) else { return }; + let Some(format_snippet) = snippet_opt(cx, format_string_span) else { + return; + }; if format_args.template.len() == 1 && last.as_str() == "\n" { // print!("\n"), write!(f, "\n") @@ -522,7 +532,7 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgs, name: &str) { { let replacement = replacement.replace('{', "{{").replace('}', "}}"); diag.multipart_suggestion( - "try this", + "try", vec![(*placeholder_span, replacement), (removal_span, String::new())], Applicability::MachineApplicable, ); diff --git a/src/tools/clippy/clippy_test_deps/Cargo.toml b/src/tools/clippy/clippy_test_deps/Cargo.toml deleted file mode 100644 index 362c08e0d77..00000000000 --- 
a/src/tools/clippy/clippy_test_deps/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "clippy_test_deps" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -clippy_utils = { path = "../clippy_utils" } -derive-new = "0.5" -if_chain = "1.0" -itertools = "0.10.1" -quote = "1.0" -serde = { version = "1.0.125", features = ["derive"] } -syn = { version = "2.0", features = ["full"] } -futures = "0.3" -parking_lot = "0.12" -tokio = { version = "1", features = ["io-util"] } -regex = "1.5" -clippy_lints = { path = "../clippy_lints" } - -[features] -internal = ["clippy_lints/internal"] diff --git a/src/tools/clippy/clippy_test_deps/src/lib.rs b/src/tools/clippy/clippy_test_deps/src/lib.rs deleted file mode 100644 index 7d12d9af819..00000000000 --- a/src/tools/clippy/clippy_test_deps/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/src/tools/clippy/clippy_utils/Cargo.toml b/src/tools/clippy/clippy_utils/Cargo.toml index cfe686eb9b0..3926b954ec8 100644 --- a/src/tools/clippy/clippy_utils/Cargo.toml +++ b/src/tools/clippy/clippy_utils/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "clippy_utils" -version = "0.1.72" +version = "0.1.73" edition = "2021" publish = false diff --git a/src/tools/clippy/clippy_utils/src/attrs.rs b/src/tools/clippy/clippy_utils/src/attrs.rs index 49cb9718ef6..51771f78d4f 100644 --- a/src/tools/clippy/clippy_utils/src/attrs.rs +++ b/src/tools/clippy/clippy_utils/src/attrs.rs @@ -1,5 +1,4 @@ -use rustc_ast::ast; -use rustc_ast::attr; +use rustc_ast::{ast, attr}; use rustc_errors::Applicability; use rustc_session::Session; use rustc_span::sym; @@ -143,13 +142,13 @@ pub fn get_unique_attr<'a>( unique_attr } -/// Return true if the attributes contain any of `proc_macro`, +/// Returns true if the attributes contain any of `proc_macro`, /// `proc_macro_derive` or `proc_macro_attribute`, false otherwise pub fn is_proc_macro(attrs: &[ast::Attribute]) -> bool { attrs.iter().any(rustc_ast::Attribute::is_proc_macro_attr) } -/// Return true if the attributes contain `#[doc(hidden)]` +/// Returns true if the attributes contain `#[doc(hidden)]` pub fn is_doc_hidden(attrs: &[ast::Attribute]) -> bool { attrs .iter() diff --git a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs index c6d0b654f57..89f8a65c5ae 100644 --- a/src/tools/clippy/clippy_utils/src/check_proc_macro.rs +++ b/src/tools/clippy/clippy_utils/src/check_proc_macro.rs @@ -12,20 +12,20 @@ //! code was written, and check if the span contains that text. Note this will only work correctly //! if the span is not from a `macro_rules` based macro. 
-use rustc_ast::{ - ast::{AttrKind, Attribute, IntTy, LitIntType, LitKind, StrStyle, UintTy}, - token::CommentKind, - AttrStyle, -}; +use rustc_ast::ast::{AttrKind, Attribute, IntTy, LitIntType, LitKind, StrStyle, UintTy}; +use rustc_ast::token::CommentKind; +use rustc_ast::AttrStyle; +use rustc_hir::intravisit::FnKind; use rustc_hir::{ - intravisit::FnKind, Block, BlockCheckMode, Body, Closure, Destination, Expr, ExprKind, FieldDef, FnHeader, HirId, - Impl, ImplItem, ImplItemKind, IsAuto, Item, ItemKind, LoopSource, MatchSource, MutTy, Node, QPath, TraitItem, - TraitItemKind, Ty, TyKind, UnOp, UnsafeSource, Unsafety, Variant, VariantData, YieldSource, + Block, BlockCheckMode, Body, Closure, Destination, Expr, ExprKind, FieldDef, FnHeader, HirId, Impl, ImplItem, + ImplItemKind, IsAuto, Item, ItemKind, LoopSource, MatchSource, MutTy, Node, QPath, TraitItem, TraitItemKind, Ty, + TyKind, UnOp, UnsafeSource, Unsafety, Variant, VariantData, YieldSource, }; use rustc_lint::{LateContext, LintContext}; use rustc_middle::ty::TyCtxt; use rustc_session::Session; -use rustc_span::{symbol::Ident, Span, Symbol}; +use rustc_span::symbol::Ident; +use rustc_span::{Span, Symbol}; use rustc_target::spec::abi::Abi; /// The search pattern to look for. Used by `span_matches_pat` @@ -339,7 +339,7 @@ fn ty_search_pat(ty: &Ty<'_>) -> (Pat, Pat) { TyKind::Tup(..) => (Pat::Str("("), Pat::Str(")")), TyKind::OpaqueDef(..) => (Pat::Str("impl"), Pat::Str("")), TyKind::Path(qpath) => qpath_search_pat(&qpath), - // NOTE: This is missing `TraitObject`. It always return true then. + // NOTE: This is missing `TraitObject`. It will always return true then. _ => (Pat::Str(""), Pat::Str("")), } } diff --git a/src/tools/clippy/clippy_utils/src/comparisons.rs b/src/tools/clippy/clippy_utils/src/comparisons.rs index 7a18d5e818f..5e6bf227844 100644 --- a/src/tools/clippy/clippy_utils/src/comparisons.rs +++ b/src/tools/clippy/clippy_utils/src/comparisons.rs @@ -1,11 +1,11 @@ -//! Utility functions about comparison operators. +//! Utility functions for comparison operators. #![deny(clippy::missing_docs_in_private_items)] use rustc_hir::{BinOpKind, Expr}; #[derive(PartialEq, Eq, Debug, Copy, Clone)] -/// Represent a normalized comparison operator. +/// Represents a normalized comparison operator. 
pub enum Rel { /// `<` Lt, diff --git a/src/tools/clippy/clippy_utils/src/consts.rs b/src/tools/clippy/clippy_utils/src/consts.rs index 4832a38eceb..061086c4fc2 100644 --- a/src/tools/clippy/clippy_utils/src/consts.rs +++ b/src/tools/clippy/clippy_utils/src/consts.rs @@ -9,11 +9,9 @@ use rustc_hir::def::{DefKind, Res}; use rustc_hir::{BinOp, BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, QPath, UnOp}; use rustc_lexer::tokenize; use rustc_lint::LateContext; -use rustc_middle::mir; use rustc_middle::mir::interpret::Scalar; -use rustc_middle::ty::{self, EarlyBinder, FloatTy, ScalarInt, Ty, TyCtxt}; -use rustc_middle::ty::{List, GenericArgsRef}; -use rustc_middle::{bug, span_bug}; +use rustc_middle::ty::{self, EarlyBinder, FloatTy, GenericArgsRef, List, ScalarInt, Ty, TyCtxt}; +use rustc_middle::{bug, mir, span_bug}; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::SyntaxContext; use std::cmp::Ordering::{self, Equal}; @@ -155,7 +153,7 @@ impl<'tcx> Constant<'tcx> { }, (Self::Vec(l), Self::Vec(r)) => { let (ty::Array(cmp_type, _) | ty::Slice(cmp_type)) = *cmp_type.kind() else { - return None + return None; }; iter::zip(l, r) .map(|(li, ri)| Self::partial_cmp(tcx, cmp_type, li, ri)) @@ -267,7 +265,7 @@ pub fn constant_with_source<'tcx>( res.map(|x| (x, ctxt.source)) } -/// Attempts to evaluate an expression only if it's value is not dependent on other items. +/// Attempts to evaluate an expression only if its value is not dependent on other items. pub fn constant_simple<'tcx>( lcx: &LateContext<'tcx>, typeck_results: &ty::TypeckResults<'tcx>, diff --git a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs index 94b9006ed50..3c969572b8e 100644 --- a/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs +++ b/src/tools/clippy/clippy_utils/src/eager_or_lazy.rs @@ -1,7 +1,7 @@ //! Utilities for evaluating whether eagerly evaluated expressions can be made lazy and vice versa. //! //! Things to consider: -//! - has the expression side-effects? +//! - does the expression have side-effects? //! - is the expression computationally expensive? //! //! See lints: @@ -12,14 +12,14 @@ use crate::ty::{all_predicates_of, is_copy}; use crate::visitors::is_const_evaluatable; use rustc_hir::def::{DefKind, Res}; +use rustc_hir::def_id::DefId; use rustc_hir::intravisit::{walk_expr, Visitor}; -use rustc_hir::{def_id::DefId, Block, Expr, ExprKind, QPath, UnOp}; +use rustc_hir::{Block, Expr, ExprKind, QPath, UnOp}; use rustc_lint::LateContext; use rustc_middle::ty; use rustc_middle::ty::adjustment::Adjust; use rustc_span::{sym, Symbol}; -use std::cmp; -use std::ops; +use std::{cmp, ops}; #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] enum EagernessSuggestion { @@ -68,19 +68,24 @@ fn fn_eagerness(cx: &LateContext<'_>, fn_id: DefId, name: Symbol, have_one_arg: // Types where the only fields are generic types (or references to) with no trait bounds other // than marker traits. // Due to the limited operations on these types functions should be fairly cheap. 
- if def - .variants() - .iter() - .flat_map(|v| v.fields.iter()) - .any(|x| matches!(cx.tcx.type_of(x.did).instantiate_identity().peel_refs().kind(), ty::Param(_))) - && all_predicates_of(cx.tcx, fn_id).all(|(pred, _)| match pred.kind().skip_binder() { - ty::ClauseKind::Trait(pred) => cx.tcx.trait_def(pred.trait_ref.def_id).is_marker, - _ => true, - }) - && subs.types().all(|x| matches!(x.peel_refs().kind(), ty::Param(_))) + if def.variants().iter().flat_map(|v| v.fields.iter()).any(|x| { + matches!( + cx.tcx.type_of(x.did).instantiate_identity().peel_refs().kind(), + ty::Param(_) + ) + }) && all_predicates_of(cx.tcx, fn_id).all(|(pred, _)| match pred.kind().skip_binder() { + ty::ClauseKind::Trait(pred) => cx.tcx.trait_def(pred.trait_ref.def_id).is_marker, + _ => true, + }) && subs.types().all(|x| matches!(x.peel_refs().kind(), ty::Param(_))) { // Limit the function to either `(self) -> bool` or `(&self) -> bool` - match &**cx.tcx.fn_sig(fn_id).instantiate_identity().skip_binder().inputs_and_output { + match &**cx + .tcx + .fn_sig(fn_id) + .instantiate_identity() + .skip_binder() + .inputs_and_output + { [arg, res] if !arg.is_mutable_ptr() && arg.peel_refs() == ty && res.is_bool() => NoChange, _ => Lazy, } diff --git a/src/tools/clippy/clippy_utils/src/higher.rs b/src/tools/clippy/clippy_utils/src/higher.rs index a61e4c38088..357ac40b455 100644 --- a/src/tools/clippy/clippy_utils/src/higher.rs +++ b/src/tools/clippy/clippy_utils/src/higher.rs @@ -13,7 +13,7 @@ use rustc_lint::LateContext; use rustc_span::{sym, symbol, Span}; /// The essential nodes of a desugared for loop as well as the entire span: -/// `for pat in arg { body }` becomes `(pat, arg, body)`. Return `(pat, arg, body, span)`. +/// `for pat in arg { body }` becomes `(pat, arg, body)`. Returns `(pat, arg, body, span)`. pub struct ForLoop<'tcx> { /// `for` loop item pub pat: &'tcx hir::Pat<'tcx>, @@ -264,7 +264,7 @@ impl<'a> Range<'a> { } } -/// Represent the pre-expansion arguments of a `vec!` invocation. +/// Represents the pre-expansion arguments of a `vec!` invocation. pub enum VecArgs<'a> { /// `vec![elem; len]` Repeat(&'a hir::Expr<'a>, &'a hir::Expr<'a>), @@ -398,7 +398,7 @@ impl<'hir> WhileLet<'hir> { } } -/// Converts a hir binary operator to the corresponding `ast` type. +/// Converts a `hir` binary operator to the corresponding `ast` type. #[must_use] pub fn binop(op: hir::BinOpKind) -> ast::BinOpKind { match op { @@ -436,7 +436,7 @@ pub enum VecInitKind { WithExprCapacity(HirId), } -/// Checks if given expression is an initialization of `Vec` and returns its kind. +/// Checks if the given expression is an initialization of `Vec` and returns its kind. 
pub fn get_vec_init_kind<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> Option<VecInitKind> { if let ExprKind::Call(func, args) = expr.kind { match func.kind { diff --git a/src/tools/clippy/clippy_utils/src/hir_utils.rs b/src/tools/clippy/clippy_utils/src/hir_utils.rs index 3e1d7356414..fb359ee3bbe 100644 --- a/src/tools/clippy/clippy_utils/src/hir_utils.rs +++ b/src/tools/clippy/clippy_utils/src/hir_utils.rs @@ -5,10 +5,9 @@ use crate::tokenize_with_text; use rustc_ast::ast::InlineAsmTemplatePiece; use rustc_data_structures::fx::FxHasher; use rustc_hir::def::Res; -use rustc_hir::HirIdMap; use rustc_hir::{ ArrayLen, BinOpKind, BindingAnnotation, Block, BodyId, Closure, Expr, ExprField, ExprKind, FnRetTy, GenericArg, - GenericArgs, Guard, HirId, InlineAsmOperand, Let, Lifetime, LifetimeName, Pat, PatField, PatKind, Path, + GenericArgs, Guard, HirId, HirIdMap, InlineAsmOperand, Let, Lifetime, LifetimeName, Pat, PatField, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, Ty, TyKind, TypeBinding, }; use rustc_lexer::{tokenize, TokenKind}; diff --git a/src/tools/clippy/clippy_utils/src/lib.rs b/src/tools/clippy/clippy_utils/src/lib.rs index 5eed5ddf9e3..00e893fbdda 100644 --- a/src/tools/clippy/clippy_utils/src/lib.rs +++ b/src/tools/clippy/clippy_utils/src/lib.rs @@ -74,8 +74,7 @@ pub use self::hir_utils::{ use core::ops::ControlFlow; use std::collections::hash_map::Entry; use std::hash::BuildHasherDefault; -use std::sync::OnceLock; -use std::sync::{Mutex, MutexGuard}; +use std::sync::{Mutex, MutexGuard, OnceLock}; use if_chain::if_chain; use itertools::Itertools; @@ -87,7 +86,7 @@ use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE}; use rustc_hir::hir_id::{HirIdMap, HirIdSet}; use rustc_hir::intravisit::{walk_expr, FnKind, Visitor}; -use rustc_hir::LangItem::{OptionNone, ResultErr, ResultOk}; +use rustc_hir::LangItem::{OptionNone, OptionSome, ResultErr, ResultOk}; use rustc_hir::{ self as hir, def, Arm, ArrayLen, BindingAnnotation, Block, BlockCheckMode, Body, Closure, Destination, Expr, ExprKind, FnDecl, HirId, Impl, ImplItem, ImplItemKind, ImplItemRef, IsAsync, Item, ItemKind, LangItem, Local, @@ -105,15 +104,14 @@ use rustc_middle::ty::fast_reject::SimplifiedType::{ ArraySimplifiedType, BoolSimplifiedType, CharSimplifiedType, FloatSimplifiedType, IntSimplifiedType, PtrSimplifiedType, SliceSimplifiedType, StrSimplifiedType, UintSimplifiedType, }; +use rustc_middle::ty::layout::IntegerExt; use rustc_middle::ty::{ - layout::IntegerExt, BorrowKind, ClosureKind, Ty, TyCtxt, TypeAndMut, TypeVisitableExt, UpvarCapture, + BorrowKind, ClosureKind, FloatTy, IntTy, Ty, TyCtxt, TypeAndMut, TypeVisitableExt, UintTy, UpvarCapture, }; -use rustc_middle::ty::{FloatTy, IntTy, UintTy}; use rustc_span::hygiene::{ExpnKind, MacroKind}; use rustc_span::source_map::SourceMap; -use rustc_span::sym; use rustc_span::symbol::{kw, Ident, Symbol}; -use rustc_span::Span; +use rustc_span::{sym, Span}; use rustc_target::abi::Integer; use crate::consts::{constant, miri_to_const, Constant}; @@ -823,7 +821,7 @@ fn is_default_equivalent_ctor(cx: &LateContext<'_>, def_id: DefId, path: &QPath< false } -/// Return true if the expr is equal to `Default::default` when evaluated. +/// Returns true if the expr is equal to `Default::default` when evaluated. pub fn is_default_equivalent_call(cx: &LateContext<'_>, repl_func: &Expr<'_>) -> bool { if_chain! 
{ if let hir::ExprKind::Path(ref repl_func_qpath) = repl_func.kind; @@ -2518,7 +2516,9 @@ pub fn tokenize_with_text(s: &str) -> impl Iterator<Item = (TokenKind, &str)> { /// Checks whether a given span has any comment token /// This checks for all types of comment: line "//", block "/**", doc "///" "//!" pub fn span_contains_comment(sm: &SourceMap, span: Span) -> bool { - let Ok(snippet) = sm.span_to_snippet(span) else { return false }; + let Ok(snippet) = sm.span_to_snippet(span) else { + return false; + }; return tokenize(&snippet).any(|token| { matches!( token.kind, @@ -2527,7 +2527,8 @@ pub fn span_contains_comment(sm: &SourceMap, span: Span) -> bool { }); } -/// Return all the comments a given span contains +/// Returns all the comments a given span contains +/// /// Comments are returned wrapped with their relevant delimiters pub fn span_extract_comment(sm: &SourceMap, span: Span) -> String { let snippet = sm.span_to_snippet(span).unwrap_or_default(); @@ -2542,6 +2543,50 @@ pub fn span_find_starting_semi(sm: &SourceMap, span: Span) -> Span { sm.span_take_while(span, |&ch| ch == ' ' || ch == ';') } +/// Returns whether the given let pattern and else body can be turned into a question mark +/// +/// For this example: +/// ```ignore +/// let FooBar { a, b } = if let Some(a) = ex { a } else { return None }; +/// ``` +/// We get as parameters: +/// ```ignore +/// pat: Some(a) +/// else_body: return None +/// ``` + +/// And for this example: +/// ```ignore +/// let Some(FooBar { a, b }) = ex else { return None }; +/// ``` +/// We get as parameters: +/// ```ignore +/// pat: Some(FooBar { a, b }) +/// else_body: return None +/// ``` + +/// We output `Some(a)` in the first instance, and `Some(FooBar { a, b })` in the second, because +/// the question mark operator is applicable here. Callers have to check whether we are in a +/// constant or not. +pub fn pat_and_expr_can_be_question_mark<'a, 'hir>( + cx: &LateContext<'_>, + pat: &'a Pat<'hir>, + else_body: &Expr<'_>, +) -> Option<&'a Pat<'hir>> { + if let PatKind::TupleStruct(pat_path, [inner_pat], _) = pat.kind && + is_res_lang_ctor(cx, cx.qpath_res(&pat_path, pat.hir_id), OptionSome) && + !is_refutable(cx, inner_pat) && + let else_body = peel_blocks(else_body) && + let ExprKind::Ret(Some(ret_val)) = else_body.kind && + let ExprKind::Path(ret_path) = ret_val.kind && + is_res_lang_ctor(cx, cx.qpath_res(&ret_path, ret_val.hir_id), OptionNone) + { + Some(inner_pat) + } else { + None + } +} + macro_rules! 
op_utils { ($($name:ident $assign:ident)*) => { /// Binary operation traits like `LangItem::Add` diff --git a/src/tools/clippy/clippy_utils/src/macros.rs b/src/tools/clippy/clippy_utils/src/macros.rs index 00f3abaec8a..173f9841d44 100644 --- a/src/tools/clippy/clippy_utils/src/macros.rs +++ b/src/tools/clippy/clippy_utils/src/macros.rs @@ -192,7 +192,9 @@ pub fn first_node_in_macro(cx: &LateContext<'_>, node: &impl HirNode) -> Option< /// Is `def_id` of `std::panic`, `core::panic` or any inner implementation macros pub fn is_panic(cx: &LateContext<'_>, def_id: DefId) -> bool { - let Some(name) = cx.tcx.get_diagnostic_name(def_id) else { return false }; + let Some(name) = cx.tcx.get_diagnostic_name(def_id) else { + return false; + }; matches!( name, sym::core_panic_macro @@ -205,7 +207,9 @@ pub fn is_panic(cx: &LateContext<'_>, def_id: DefId) -> bool { /// Is `def_id` of `assert!` or `debug_assert!` pub fn is_assert_macro(cx: &LateContext<'_>, def_id: DefId) -> bool { - let Some(name) = cx.tcx.get_diagnostic_name(def_id) else { return false }; + let Some(name) = cx.tcx.get_diagnostic_name(def_id) else { + return false; + }; matches!(name, sym::assert_macro | sym::debug_assert_macro) } @@ -223,13 +227,19 @@ pub enum PanicExpn<'a> { impl<'a> PanicExpn<'a> { pub fn parse(expr: &'a Expr<'a>) -> Option<Self> { - let ExprKind::Call(callee, [arg, rest @ ..]) = &expr.kind else { return None }; - let ExprKind::Path(QPath::Resolved(_, path)) = &callee.kind else { return None }; + let ExprKind::Call(callee, [arg, rest @ ..]) = &expr.kind else { + return None; + }; + let ExprKind::Path(QPath::Resolved(_, path)) = &callee.kind else { + return None; + }; let result = match path.segments.last().unwrap().ident.as_str() { "panic" if arg.span.ctxt() == expr.span.ctxt() => Self::Empty, "panic" | "panic_str" => Self::Str(arg), "panic_display" => { - let ExprKind::AddrOf(_, _, e) = &arg.kind else { return None }; + let ExprKind::AddrOf(_, _, e) = &arg.kind else { + return None; + }; Self::Display(e) }, "panic_fmt" => Self::Format(arg), diff --git a/src/tools/clippy/clippy_utils/src/mir/mod.rs b/src/tools/clippy/clippy_utils/src/mir/mod.rs index 26c0015e87e..131f3c0aa39 100644 --- a/src/tools/clippy/clippy_utils/src/mir/mod.rs +++ b/src/tools/clippy/clippy_utils/src/mir/mod.rs @@ -101,21 +101,26 @@ pub fn used_exactly_once(mir: &rustc_middle::mir::Body<'_>, local: rustc_middle: /// Returns the `mir::Body` containing the node associated with `hir_id`. #[allow(clippy::module_name_repetitions)] -pub fn enclosing_mir(tcx: TyCtxt<'_>, hir_id: HirId) -> &Body<'_> { +pub fn enclosing_mir(tcx: TyCtxt<'_>, hir_id: HirId) -> Option<&Body<'_>> { let body_owner_local_def_id = tcx.hir().enclosing_body_owner(hir_id); - tcx.optimized_mir(body_owner_local_def_id.to_def_id()) + if tcx.hir().body_owner_kind(body_owner_local_def_id).is_fn_or_closure() { + Some(tcx.optimized_mir(body_owner_local_def_id.to_def_id())) + } else { + None + } } /// Tries to determine the `Local` corresponding to `expr`, if any. /// This function is expensive and should be used sparingly. 
pub fn expr_local(tcx: TyCtxt<'_>, expr: &Expr<'_>) -> Option<Local> { - let mir = enclosing_mir(tcx, expr.hir_id); - mir.local_decls.iter_enumerated().find_map(|(local, local_decl)| { - if local_decl.source_info.span == expr.span { - Some(local) - } else { - None - } + enclosing_mir(tcx, expr.hir_id).and_then(|mir| { + mir.local_decls.iter_enumerated().find_map(|(local, local_decl)| { + if local_decl.source_info.span == expr.span { + Some(local) + } else { + None + } + }) }) } diff --git a/src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs b/src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs index 920ce8e655b..703985b9d4b 100644 --- a/src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs +++ b/src/tools/clippy/clippy_utils/src/mir/possible_borrower.rs @@ -1,11 +1,15 @@ -use super::{possible_origin::PossibleOriginVisitor, transitive_relation::TransitiveRelation}; +use super::possible_origin::PossibleOriginVisitor; +use super::transitive_relation::TransitiveRelation; use crate::ty::is_copy; use rustc_data_structures::fx::FxHashMap; use rustc_index::bit_set::{BitSet, HybridBitSet}; use rustc_lint::LateContext; -use rustc_middle::mir::{self, visit::Visitor as _, Mutability}; -use rustc_middle::ty::{self, visit::TypeVisitor, TyCtxt}; -use rustc_mir_dataflow::{impls::MaybeStorageLive, Analysis, ResultsCursor}; +use rustc_middle::mir::visit::Visitor as _; +use rustc_middle::mir::{self, Mutability}; +use rustc_middle::ty::visit::TypeVisitor; +use rustc_middle::ty::{self, TyCtxt}; +use rustc_mir_dataflow::impls::MaybeStorageLive; +use rustc_mir_dataflow::{Analysis, ResultsCursor}; use std::borrow::Cow; use std::ops::ControlFlow; diff --git a/src/tools/clippy/clippy_utils/src/paths.rs b/src/tools/clippy/clippy_utils/src/paths.rs index 0e6f01287b5..f3677e6f614 100644 --- a/src/tools/clippy/clippy_utils/src/paths.rs +++ b/src/tools/clippy/clippy_utils/src/paths.rs @@ -94,12 +94,12 @@ pub const PUSH_STR: [&str; 4] = ["alloc", "string", "String", "push_str"]; pub const RANGE_ARGUMENT_TRAIT: [&str; 3] = ["core", "ops", "RangeBounds"]; pub const REFCELL_REF: [&str; 3] = ["core", "cell", "Ref"]; pub const REFCELL_REFMUT: [&str; 3] = ["core", "cell", "RefMut"]; -pub const REGEX_BUILDER_NEW: [&str; 5] = ["regex", "re_builder", "unicode", "RegexBuilder", "new"]; -pub const REGEX_BYTES_BUILDER_NEW: [&str; 5] = ["regex", "re_builder", "bytes", "RegexBuilder", "new"]; -pub const REGEX_BYTES_NEW: [&str; 4] = ["regex", "re_bytes", "Regex", "new"]; -pub const REGEX_BYTES_SET_NEW: [&str; 5] = ["regex", "re_set", "bytes", "RegexSet", "new"]; -pub const REGEX_NEW: [&str; 4] = ["regex", "re_unicode", "Regex", "new"]; -pub const REGEX_SET_NEW: [&str; 5] = ["regex", "re_set", "unicode", "RegexSet", "new"]; +pub const REGEX_BUILDER_NEW: [&str; 3] = ["regex", "RegexBuilder", "new"]; +pub const REGEX_BYTES_BUILDER_NEW: [&str; 4] = ["regex", "bytes", "RegexBuilder", "new"]; +pub const REGEX_BYTES_NEW: [&str; 4] = ["regex", "bytes", "Regex", "new"]; +pub const REGEX_BYTES_SET_NEW: [&str; 4] = ["regex", "bytes", "RegexSet", "new"]; +pub const REGEX_NEW: [&str; 3] = ["regex", "Regex", "new"]; +pub const REGEX_SET_NEW: [&str; 3] = ["regex", "RegexSet", "new"]; pub const SERDE_DESERIALIZE: [&str; 3] = ["serde", "de", "Deserialize"]; pub const SERDE_DE_VISITOR: [&str; 3] = ["serde", "de", "Visitor"]; pub const SLICE_FROM_RAW_PARTS: [&str; 4] = ["core", "slice", "raw", "from_raw_parts"]; diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs 
b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs index 515f80e0edf..1c29d614fa3 100644 --- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs +++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs @@ -15,9 +15,8 @@ use rustc_middle::mir::{ Terminator, TerminatorKind, }; use rustc_middle::traits::{ImplSource, ObligationCause}; -use rustc_middle::ty::GenericArgKind; -use rustc_middle::ty::{self, adjustment::PointerCoercion, Ty, TyCtxt}; -use rustc_middle::ty::{BoundConstness, TraitRef}; +use rustc_middle::ty::adjustment::PointerCoercion; +use rustc_middle::ty::{self, BoundConstness, GenericArgKind, TraitRef, Ty, TyCtxt}; use rustc_semver::RustcVersion; use rustc_span::symbol::sym; use rustc_span::Span; @@ -125,7 +124,9 @@ fn check_rvalue<'tcx>( ) => check_operand(tcx, operand, span, body), Rvalue::Cast( CastKind::PointerCoercion( - PointerCoercion::UnsafeFnPointer | PointerCoercion::ClosureFnPointer(_) | PointerCoercion::ReifyFnPointer, + PointerCoercion::UnsafeFnPointer + | PointerCoercion::ClosureFnPointer(_) + | PointerCoercion::ReifyFnPointer, ), _, _, diff --git a/src/tools/clippy/clippy_utils/src/source.rs b/src/tools/clippy/clippy_utils/src/source.rs index 582337b47e8..dc4ee725681 100644 --- a/src/tools/clippy/clippy_utils/src/source.rs +++ b/src/tools/clippy/clippy_utils/src/source.rs @@ -8,8 +8,7 @@ use rustc_hir::{BlockCheckMode, Expr, ExprKind, UnsafeSource}; use rustc_lint::{LateContext, LintContext}; use rustc_session::Session; use rustc_span::source_map::{original_sp, SourceMap}; -use rustc_span::{hygiene, SourceFile}; -use rustc_span::{BytePos, Pos, Span, SpanData, SyntaxContext, DUMMY_SP}; +use rustc_span::{hygiene, BytePos, Pos, SourceFile, Span, SpanData, SyntaxContext, DUMMY_SP}; use std::borrow::Cow; use std::ops::Range; diff --git a/src/tools/clippy/clippy_utils/src/sugg.rs b/src/tools/clippy/clippy_utils/src/sugg.rs index 3953a7d8fa7..5d7e1494fcf 100644 --- a/src/tools/clippy/clippy_utils/src/sugg.rs +++ b/src/tools/clippy/clippy_utils/src/sugg.rs @@ -395,7 +395,7 @@ fn binop_to_string(op: AssocOp, lhs: &str, rhs: &str) -> String { } } -/// Return `true` if `sugg` is enclosed in parenthesis. +/// Returns `true` if `sugg` is enclosed in parenthesis. 
pub fn has_enclosing_paren(sugg: impl AsRef<str>) -> bool { let mut chars = sugg.as_ref().chars(); if chars.next() == Some('(') { diff --git a/src/tools/clippy/clippy_utils/src/ty.rs b/src/tools/clippy/clippy_utils/src/ty.rs index 7687d361923..fd39a246f48 100644 --- a/src/tools/clippy/clippy_utils/src/ty.rs +++ b/src/tools/clippy/clippy_utils/src/ty.rs @@ -9,18 +9,16 @@ use rustc_hir as hir; use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res}; use rustc_hir::def_id::DefId; use rustc_hir::{Expr, FnDecl, LangItem, TyKind, Unsafety}; -use rustc_infer::infer::{ - type_variable::{TypeVariableOrigin, TypeVariableOriginKind}, - TyCtxtInferExt, -}; +use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; +use rustc_infer::infer::TyCtxtInferExt; use rustc_lint::LateContext; use rustc_middle::mir::interpret::{ConstValue, Scalar}; +use rustc_middle::ty::layout::ValidityRequirement; use rustc_middle::ty::{ - self, layout::ValidityRequirement, AdtDef, AliasTy, AssocKind, Binder, BoundRegion, FnSig, IntTy, List, ParamEnv, - Region, RegionKind, GenericArgsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, + self, AdtDef, AliasTy, AssocKind, Binder, BoundRegion, FnSig, GenericArg, GenericArgKind, GenericArgsRef, IntTy, + List, ParamEnv, Region, RegionKind, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, UintTy, VariantDef, VariantDiscr, }; -use rustc_middle::ty::{GenericArg, GenericArgKind}; use rustc_span::symbol::Ident; use rustc_span::{sym, Span, Symbol, DUMMY_SP}; use rustc_target::abi::{Size, VariantIdx}; @@ -665,7 +663,7 @@ pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'t ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => sig_from_bounds( cx, ty, - cx.tcx.item_bounds(def_id).arg_iter(cx.tcx, args), + cx.tcx.item_bounds(def_id).iter_instantiated(cx.tcx, args), cx.tcx.opt_parent(def_id), ), ty::FnPtr(sig) => Some(ExprFnSig::Sig(sig, None)), @@ -741,11 +739,7 @@ fn sig_for_projection<'tcx>(cx: &LateContext<'tcx>, ty: AliasTy<'tcx>) -> Option let mut output = None; let lang_items = cx.tcx.lang_items(); - for (pred, _) in cx - .tcx - .explicit_item_bounds(ty.def_id) - .arg_iter_copied(cx.tcx, ty.args) - { + for (pred, _) in cx.tcx.explicit_item_bounds(ty.def_id).iter_instantiated_copied(cx.tcx, ty.args) { match pred.kind().skip_binder() { ty::ClauseKind::Trait(p) if (lang_items.fn_trait() == Some(p.def_id()) @@ -940,8 +934,7 @@ pub fn ty_is_fn_once_param<'tcx>(tcx: TyCtxt<'_>, ty: Ty<'tcx>, predicates: &'tc return false; }; let lang = tcx.lang_items(); - let (Some(fn_once_id), Some(fn_mut_id), Some(fn_id)) - = (lang.fn_once_trait(), lang.fn_mut_trait(), lang.fn_trait()) + let (Some(fn_once_id), Some(fn_mut_id), Some(fn_id)) = (lang.fn_once_trait(), lang.fn_mut_trait(), lang.fn_trait()) else { return false; }; @@ -1033,10 +1026,12 @@ pub fn make_projection<'tcx>( assoc_ty: Symbol, args: GenericArgsRef<'tcx>, ) -> Option<AliasTy<'tcx>> { - let Some(assoc_item) = tcx - .associated_items(container_id) - .find_by_name_and_kind(tcx, Ident::with_dummy_span(assoc_ty), AssocKind::Type, container_id) - else { + let Some(assoc_item) = tcx.associated_items(container_id).find_by_name_and_kind( + tcx, + Ident::with_dummy_span(assoc_ty), + AssocKind::Type, + container_id, + ) else { debug_assert!(false, "type `{assoc_ty}` not found in `{container_id:?}`"); return None; }; @@ -1124,7 +1119,7 @@ pub fn make_normalized_projection<'tcx>( ); return None; } - match 
tcx.try_normalize_erasing_regions(param_env, Ty::new_projection(tcx,ty.def_id, ty.args)) { + match tcx.try_normalize_erasing_regions(param_env, Ty::new_projection(tcx, ty.def_id, ty.args)) { Ok(ty) => Some(ty), Err(e) => { debug_assert!(false, "failed to normalize type `{ty}`: {e:#?}"); @@ -1207,7 +1202,7 @@ pub fn make_normalized_projection_with_regions<'tcx>( .infer_ctxt() .build() .at(&cause, param_env) - .query_normalize(Ty::new_projection(tcx,ty.def_id, ty.args)) + .query_normalize(Ty::new_projection(tcx, ty.def_id, ty.args)) { Ok(ty) => Some(ty.value), Err(e) => { diff --git a/src/tools/clippy/clippy_utils/src/usage.rs b/src/tools/clippy/clippy_utils/src/usage.rs index 9855085216f..20993398d1b 100644 --- a/src/tools/clippy/clippy_utils/src/usage.rs +++ b/src/tools/clippy/clippy_utils/src/usage.rs @@ -1,16 +1,14 @@ -use crate as utils; use crate::visitors::{for_each_expr, for_each_expr_with_closures, Descend}; use core::ops::ControlFlow; -use rustc_hir as hir; use rustc_hir::intravisit::{self, Visitor}; -use rustc_hir::HirIdSet; -use rustc_hir::{Expr, ExprKind, HirId, Node}; +use rustc_hir::{Expr, ExprKind, HirId, HirIdSet, Node}; use rustc_hir_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId}; use rustc_infer::infer::TyCtxtInferExt; use rustc_lint::LateContext; use rustc_middle::hir::nested_filter; use rustc_middle::mir::FakeReadCause; use rustc_middle::ty; +use {crate as utils, rustc_hir as hir}; /// Returns a set of mutated local variable IDs, or `None` if mutations could not be determined. pub fn mutated_variables<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) -> Option<HirIdSet> { @@ -156,7 +154,9 @@ pub fn contains_return_break_continue_macro(expression: &Expr<'_>) -> bool { } pub fn local_used_after_expr(cx: &LateContext<'_>, local_id: HirId, after: &Expr<'_>) -> bool { - let Some(block) = utils::get_enclosing_block(cx, local_id) else { return false }; + let Some(block) = utils::get_enclosing_block(cx, local_id) else { + return false; + }; // for _ in 1..3 { // local diff --git a/src/tools/clippy/declare_clippy_lint/Cargo.toml b/src/tools/clippy/declare_clippy_lint/Cargo.toml index 4dc906d00db..3633ed31d73 100644 --- a/src/tools/clippy/declare_clippy_lint/Cargo.toml +++ b/src/tools/clippy/declare_clippy_lint/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "declare_clippy_lint" -version = "0.1.72" +version = "0.1.73" edition = "2021" publish = false diff --git a/src/tools/clippy/lintcheck/src/config.rs b/src/tools/clippy/lintcheck/src/config.rs index 3f01e9bb0a7..e678d40795e 100644 --- a/src/tools/clippy/lintcheck/src/config.rs +++ b/src/tools/clippy/lintcheck/src/config.rs @@ -1,5 +1,6 @@ use clap::Parser; -use std::{num::NonZeroUsize, path::PathBuf}; +use std::num::NonZeroUsize; +use std::path::PathBuf; #[derive(Clone, Debug, Parser)] pub(crate) struct LintcheckConfig { diff --git a/src/tools/clippy/lintcheck/src/main.rs b/src/tools/clippy/lintcheck/src/main.rs index de56a6f8275..3a022b343a4 100644 --- a/src/tools/clippy/lintcheck/src/main.rs +++ b/src/tools/clippy/lintcheck/src/main.rs @@ -15,16 +15,14 @@ use crate::config::LintcheckConfig; use crate::recursive::LintcheckServer; use std::collections::{HashMap, HashSet}; -use std::env; use std::env::consts::EXE_SUFFIX; use std::fmt::{self, Write as _}; -use std::fs; use std::io::{self, ErrorKind}; use std::path::{Path, PathBuf}; use std::process::Command; use std::sync::atomic::{AtomicUsize, Ordering}; -use std::thread; use std::time::Duration; +use std::{env, fs, thread}; use 
cargo_metadata::diagnostic::{Diagnostic, DiagnosticLevel}; use cargo_metadata::Message; diff --git a/src/tools/clippy/lintcheck/src/recursive.rs b/src/tools/clippy/lintcheck/src/recursive.rs index 49072e65192..994fa3c3b23 100644 --- a/src/tools/clippy/lintcheck/src/recursive.rs +++ b/src/tools/clippy/lintcheck/src/recursive.rs @@ -3,8 +3,7 @@ //! [`LintcheckServer`] to ask if it should be skipped, and if not sends the stderr of running //! clippy on the crate to the server -use crate::ClippyWarning; -use crate::RecursiveOptions; +use crate::{ClippyWarning, RecursiveOptions}; use std::collections::HashSet; use std::io::{BufRead, BufReader, Read, Write}; diff --git a/src/tools/clippy/rust-toolchain b/src/tools/clippy/rust-toolchain index 4475d914ced..b658101a10d 100644 --- a/src/tools/clippy/rust-toolchain +++ b/src/tools/clippy/rust-toolchain @@ -1,3 +1,3 @@ [toolchain] -channel = "nightly-2023-06-29" +channel = "nightly-2023-07-14" components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"] diff --git a/src/tools/clippy/rustfmt.toml b/src/tools/clippy/rustfmt.toml index 18b2a33469d..4248f42f654 100644 --- a/src/tools/clippy/rustfmt.toml +++ b/src/tools/clippy/rustfmt.toml @@ -4,5 +4,6 @@ match_block_trailing_comma = true wrap_comments = true edition = "2021" error_on_line_overflow = true +imports_granularity = "Module" version = "Two" ignore = ["tests/ui/crashes/ice-10912.rs"] diff --git a/src/tools/clippy/src/driver.rs b/src/tools/clippy/src/driver.rs index 1eb288b153a..ee17feed77a 100644 --- a/src/tools/clippy/src/driver.rs +++ b/src/tools/clippy/src/driver.rs @@ -130,6 +130,13 @@ impl rustc_driver::Callbacks for ClippyCallbacks { config.parse_sess_created = Some(Box::new(move |parse_sess| { track_clippy_args(parse_sess, &clippy_args_var); track_files(parse_sess); + + // Trigger a rebuild if CLIPPY_CONF_DIR changes. 
The value must be a valid string so + // changes between dirs that are invalid UTF-8 will not trigger rebuilds + parse_sess.env_depinfo.get_mut().insert(( + Symbol::intern("CLIPPY_CONF_DIR"), + env::var("CLIPPY_CONF_DIR").ok().map(|dir| Symbol::intern(&dir)), + )); })); config.register_lints = Some(Box::new(move |sess, lint_store| { // technically we're ~guaranteed that this is none but might as well call anything that diff --git a/src/tools/clippy/tests/compile-test.rs b/src/tools/clippy/tests/compile-test.rs index 0fd37c640ae..d70c4ea34cb 100644 --- a/src/tools/clippy/tests/compile-test.rs +++ b/src/tools/clippy/tests/compile-test.rs @@ -3,17 +3,108 @@ #![feature(is_sorted)] #![cfg_attr(feature = "deny-warnings", deny(warnings))] #![warn(rust_2018_idioms, unused_lifetimes)] +#![allow(unused_extern_crates)] use compiletest::{status_emitter, CommandBuilder}; use ui_test as compiletest; use ui_test::Mode as TestMode; +use std::collections::BTreeMap; use std::env::{self, remove_var, set_var, var_os}; use std::ffi::{OsStr, OsString}; use std::fs; use std::path::{Path, PathBuf}; +use std::sync::LazyLock; use test_utils::IS_RUSTC_TEST_SUITE; +// Test dependencies may need an `extern crate` here to ensure that they show up +// in the depinfo file (otherwise cargo thinks they are unused) +extern crate clippy_lints; +extern crate clippy_utils; +extern crate derive_new; +extern crate futures; +extern crate if_chain; +extern crate itertools; +extern crate parking_lot; +extern crate quote; +extern crate syn; +extern crate tokio; + +/// All crates used in UI tests are listed here +static TEST_DEPENDENCIES: &[&str] = &[ + "clippy_lints", + "clippy_utils", + "derive_new", + "futures", + "if_chain", + "itertools", + "parking_lot", + "quote", + "regex", + "serde_derive", + "serde", + "syn", + "tokio", +]; + +/// Produces a string with an `--extern` flag for all UI test crate +/// dependencies. +/// +/// The dependency files are located by parsing the depinfo file for this test +/// module. This assumes the `-Z binary-dep-depinfo` flag is enabled. All test +/// dependencies must be added to Cargo.toml at the project root. Test +/// dependencies that are not *directly* used by this test module require an +/// `extern crate` declaration. +static EXTERN_FLAGS: LazyLock<Vec<String>> = LazyLock::new(|| { + let current_exe_depinfo = { + let mut path = env::current_exe().unwrap(); + path.set_extension("d"); + fs::read_to_string(path).unwrap() + }; + let mut crates = BTreeMap::<&str, &str>::new(); + for line in current_exe_depinfo.lines() { + // each dependency is expected to have a Makefile rule like `/path/to/crate-hash.rlib:` + let parse_name_path = || { + if line.starts_with(char::is_whitespace) { + return None; + } + let path_str = line.strip_suffix(':')?; + let path = Path::new(path_str); + if !matches!(path.extension()?.to_str()?, "rlib" | "so" | "dylib" | "dll") { + return None; + } + let (name, _hash) = path.file_stem()?.to_str()?.rsplit_once('-')?; + // the "lib" prefix is not present for dll files + let name = name.strip_prefix("lib").unwrap_or(name); + Some((name, path_str)) + }; + if let Some((name, path)) = parse_name_path() { + if TEST_DEPENDENCIES.contains(&name) { + // A dependency may be listed twice if it is available in sysroot, + // and the sysroot dependencies are listed first. As of the writing, + // this only seems to apply to if_chain. 
+ crates.insert(name, path); + } + } + } + let not_found: Vec<&str> = TEST_DEPENDENCIES + .iter() + .copied() + .filter(|n| !crates.contains_key(n)) + .collect(); + assert!( + not_found.is_empty(), + "dependencies not found in depinfo: {not_found:?}\n\ + help: Make sure the `-Z binary-dep-depinfo` rust flag is enabled\n\ + help: Try adding to dev-dependencies in Cargo.toml\n\ + help: Be sure to also add `extern crate ...;` to tests/compile-test.rs", + ); + crates + .into_iter() + .map(|(name, path)| format!("--extern={name}={path}")) + .collect() +}); + mod test_utils; // whether to run internal tests or not @@ -29,7 +120,6 @@ fn base_config(test_dir: &str) -> compiletest::Config { } else { compiletest::OutputConflictHandling::Error("cargo test -- -- --bless".into()) }, - dependencies_crate_manifest_path: Some("clippy_test_deps/Cargo.toml".into()), target: None, out_dir: PathBuf::from(std::env::var_os("CARGO_TARGET_DIR").unwrap_or("target".into())).join("ui_test"), ..compiletest::Config::rustc(Path::new("tests").join(test_dir)) @@ -44,10 +134,23 @@ fn base_config(test_dir: &str) -> compiletest::Config { let deps_path = current_exe_path.parent().unwrap(); let profile_path = deps_path.parent().unwrap(); - config.program.args.push("--emit=metadata".into()); - config.program.args.push("-Aunused".into()); - config.program.args.push("-Zui-testing".into()); - config.program.args.push("-Dwarnings".into()); + config.program.args.extend( + [ + "--emit=metadata", + "-Aunused", + "-Zui-testing", + "-Dwarnings", + &format!("-Ldependency={}", deps_path.display()), + ] + .map(OsString::from), + ); + + config.program.args.extend(EXTERN_FLAGS.iter().map(OsString::from)); + + if let Some(host_libs) = option_env!("HOST_LIBS") { + let dep = format!("-Ldependency={}", Path::new(host_libs).join("deps").display()); + config.program.args.push(dep.into()); + } // Normalize away slashes in windows paths. config.stderr_filter(r"\\", "/"); @@ -105,9 +208,7 @@ fn run_internal_tests() { if !RUN_INTERNAL_TESTS { return; } - let mut config = base_config("ui-internal"); - config.dependency_builder.args.push("--features".into()); - config.dependency_builder.args.push("internal".into()); + let config = base_config("ui-internal"); compiletest::run_tests(config).unwrap(); } @@ -211,12 +312,45 @@ fn main() { } set_var("CLIPPY_DISABLE_DOCS_LINKS", "true"); - run_ui(); - run_ui_toml(); - run_ui_cargo(); - run_internal_tests(); - rustfix_coverage_known_exceptions_accuracy(); - ui_cargo_toml_metadata(); + // The SPEEDTEST_* env variables can be used to check Clippy's performance on your PR. It runs the + // affected test 1000 times and gets the average. 
+ if let Ok(speedtest) = std::env::var("SPEEDTEST") { + println!("----------- STARTING SPEEDTEST -----------"); + let f = match speedtest.as_str() { + "ui" => run_ui as fn(), + "cargo" => run_ui_cargo as fn(), + "toml" => run_ui_toml as fn(), + "internal" => run_internal_tests as fn(), + "rustfix-coverage-known-exceptions-accuracy" => rustfix_coverage_known_exceptions_accuracy as fn(), + "ui-cargo-toml-metadata" => ui_cargo_toml_metadata as fn(), + + _ => panic!("unknown speedtest: {speedtest} || accepted speedtests are: [ui, cargo, toml, internal]"), + }; + + let iterations; + if let Ok(iterations_str) = std::env::var("SPEEDTEST_ITERATIONS") { + iterations = iterations_str + .parse::<u64>() + .unwrap_or_else(|_| panic!("Couldn't parse `{iterations_str}`, please use a valid u64")); + } else { + iterations = 1000; + } + + let mut sum = 0; + for _ in 0..iterations { + let start = std::time::Instant::now(); + f(); + sum += start.elapsed().as_millis(); + } + println!("average {} time: {} millis.", speedtest.to_uppercase(), sum / 1000); + } else { + run_ui(); + run_ui_toml(); + run_ui_cargo(); + run_internal_tests(); + rustfix_coverage_known_exceptions_accuracy(); + ui_cargo_toml_metadata(); + } } const RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS: &[&str] = &[ diff --git a/src/tools/clippy/tests/lint_message_convention.rs b/src/tools/clippy/tests/lint_message_convention.rs index 15e5cdd6992..98019c75527 100644 --- a/src/tools/clippy/tests/lint_message_convention.rs +++ b/src/tools/clippy/tests/lint_message_convention.rs @@ -18,18 +18,20 @@ impl Message { fn new(path: PathBuf) -> Self { // we don't want the first letter after "error: ", "help: " ... to be capitalized // also no punctuation (except for "?" ?) at the end of a line + // Prefer "try" over "try this". static REGEX_SET: LazyLock<RegexSet> = LazyLock::new(|| { RegexSet::new([ "error: [A-Z]", "help: [A-Z]", "warning: [A-Z]", "note: [A-Z]", - "try this: [A-Z]", + "try: [A-Z]", "error: .*[.!]$", "help: .*[.!]$", "warning: .*[.!]$", "note: .*[.!]$", - "try this: .*[.!]$", + "try: .*[.!]$", + "try this", ]) .unwrap() }); diff --git a/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr b/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr index 6ec79a618de..eb1180e60b8 100644 --- a/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr +++ b/src/tools/clippy/tests/ui-toml/allow_mixed_uninlined_format_args/uninlined_format_args.stderr @@ -30,7 +30,7 @@ LL | println!("Hello {} is {:.*}", "x", local_i32, local_f64); | ^^^ | = note: `-D clippy::print-literal` implied by `-D warnings` -help: try this +help: try | LL - println!("Hello {} is {:.*}", "x", local_i32, local_f64); LL + println!("Hello x is {:.*}", local_i32, local_f64); diff --git a/src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs b/src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs index ebadd4e440a..60fbaaea3d3 100644 --- a/src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs +++ b/src/tools/clippy/tests/ui-toml/excessive_nesting/auxiliary/proc_macros.rs @@ -7,13 +7,10 @@ extern crate proc_macro; use core::mem; -use proc_macro::{ - token_stream::IntoIter, - Delimiter::{self, Brace, Parenthesis}, - Group, Ident, Literal, Punct, - Spacing::{self, Alone, Joint}, - Span, TokenStream, TokenTree as TT, -}; +use proc_macro::token_stream::IntoIter; +use proc_macro::Delimiter::{self, Brace, Parenthesis}; +use 
proc_macro::Spacing::{self, Alone, Joint}; +use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree as TT}; type Result<T> = core::result::Result<T, TokenStream>; diff --git a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs index f267a67f40e..78784bfff0f 100644 --- a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs +++ b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.rs @@ -2,6 +2,7 @@ //@normalize-stderr-test: "\(limit: \d+ byte\)" -> "(limit: N byte)" #![warn(clippy::trivially_copy_pass_by_ref)] +#![allow(clippy::needless_pass_by_ref_mut)] #[derive(Copy, Clone)] struct Foo(u8); diff --git a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr index d2b55eff16d..db5d6805362 100644 --- a/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr +++ b/src/tools/clippy/tests/ui-toml/toml_trivially_copy/test.stderr @@ -1,5 +1,5 @@ error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/test.rs:14:11 + --> $DIR/test.rs:15:11 | LL | fn bad(x: &u16, y: &Foo) {} | ^^^^ help: consider passing by value instead: `u16` @@ -7,7 +7,7 @@ LL | fn bad(x: &u16, y: &Foo) {} = note: `-D clippy::trivially-copy-pass-by-ref` implied by `-D warnings` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/test.rs:14:20 + --> $DIR/test.rs:15:20 | LL | fn bad(x: &u16, y: &Foo) {} | ^^^^ help: consider passing by value instead: `Foo` diff --git a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs index dde1c6d7c37..e300ba18c33 100644 --- a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs +++ b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.rs @@ -9,9 +9,7 @@ #![warn(clippy::unwrap_used)] #![warn(clippy::get_unwrap)] -use std::collections::BTreeMap; -use std::collections::HashMap; -use std::collections::VecDeque; +use std::collections::{BTreeMap, HashMap, VecDeque}; struct GetFalsePositive { arr: [u32; 3], diff --git a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr index eb66a5cf50b..4c9bdfa9dba 100644 --- a/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr +++ b/src/tools/clippy/tests/ui-toml/unwrap_used/unwrap_used.stderr @@ -1,13 +1,13 @@ error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:40:17 + --> $DIR/unwrap_used.rs:38:17 | LL | let _ = boxed_slice.get(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&boxed_slice[1]` | = note: `-D clippy::get-unwrap` implied by `-D warnings` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:40:17 + --> $DIR/unwrap_used.rs:38:17 | LL | let _ = boxed_slice.get(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -16,13 +16,13 @@ LL | let _ = boxed_slice.get(1).unwrap(); = note: `-D clippy::unwrap-used` implied by `-D warnings` error: called `.get().unwrap()` on a slice. 
Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:41:17 + --> $DIR/unwrap_used.rs:39:17 | LL | let _ = some_slice.get(0).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_slice[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_slice[0]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:41:17 + --> $DIR/unwrap_used.rs:39:17 | LL | let _ = some_slice.get(0).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -30,13 +30,13 @@ LL | let _ = some_slice.get(0).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:42:17 + --> $DIR/unwrap_used.rs:40:17 | LL | let _ = some_vec.get(0).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vec[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_vec[0]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:42:17 + --> $DIR/unwrap_used.rs:40:17 | LL | let _ = some_vec.get(0).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -44,13 +44,13 @@ LL | let _ = some_vec.get(0).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a VecDeque. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:43:17 + --> $DIR/unwrap_used.rs:41:17 | LL | let _ = some_vecdeque.get(0).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vecdeque[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_vecdeque[0]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:43:17 + --> $DIR/unwrap_used.rs:41:17 | LL | let _ = some_vecdeque.get(0).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -58,13 +58,13 @@ LL | let _ = some_vecdeque.get(0).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a HashMap. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:44:17 + --> $DIR/unwrap_used.rs:42:17 | LL | let _ = some_hashmap.get(&1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_hashmap[&1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_hashmap[&1]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:44:17 + --> $DIR/unwrap_used.rs:42:17 | LL | let _ = some_hashmap.get(&1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -72,13 +72,13 @@ LL | let _ = some_hashmap.get(&1).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a BTreeMap. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:45:17 + --> $DIR/unwrap_used.rs:43:17 | LL | let _ = some_btreemap.get(&1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_btreemap[&1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_btreemap[&1]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:45:17 + --> $DIR/unwrap_used.rs:43:17 | LL | let _ = some_btreemap.get(&1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -86,13 +86,13 @@ LL | let _ = some_btreemap.get(&1).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a slice. 
Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:49:21 + --> $DIR/unwrap_used.rs:47:21 | LL | let _: u8 = *boxed_slice.get(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice[1]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:49:22 + --> $DIR/unwrap_used.rs:47:22 | LL | let _: u8 = *boxed_slice.get(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -100,13 +100,13 @@ LL | let _: u8 = *boxed_slice.get(1).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:54:9 + --> $DIR/unwrap_used.rs:52:9 | LL | *boxed_slice.get_mut(0).unwrap() = 1; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice[0]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:54:10 + --> $DIR/unwrap_used.rs:52:10 | LL | *boxed_slice.get_mut(0).unwrap() = 1; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -114,13 +114,13 @@ LL | *boxed_slice.get_mut(0).unwrap() = 1; = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:55:9 + --> $DIR/unwrap_used.rs:53:9 | LL | *some_slice.get_mut(0).unwrap() = 1; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_slice[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_slice[0]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:55:10 + --> $DIR/unwrap_used.rs:53:10 | LL | *some_slice.get_mut(0).unwrap() = 1; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -128,13 +128,13 @@ LL | *some_slice.get_mut(0).unwrap() = 1; = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:56:9 + --> $DIR/unwrap_used.rs:54:9 | LL | *some_vec.get_mut(0).unwrap() = 1; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:56:10 + --> $DIR/unwrap_used.rs:54:10 | LL | *some_vec.get_mut(0).unwrap() = 1; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -142,13 +142,13 @@ LL | *some_vec.get_mut(0).unwrap() = 1; = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a VecDeque. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:57:9 + --> $DIR/unwrap_used.rs:55:9 | LL | *some_vecdeque.get_mut(0).unwrap() = 1; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vecdeque[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vecdeque[0]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:57:10 + --> $DIR/unwrap_used.rs:55:10 | LL | *some_vecdeque.get_mut(0).unwrap() = 1; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -156,13 +156,13 @@ LL | *some_vecdeque.get_mut(0).unwrap() = 1; = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a Vec. 
Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:69:17 + --> $DIR/unwrap_used.rs:67:17 | LL | let _ = some_vec.get(0..1).unwrap().to_vec(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0..1]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:69:17 + --> $DIR/unwrap_used.rs:67:17 | LL | let _ = some_vec.get(0..1).unwrap().to_vec(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -170,13 +170,13 @@ LL | let _ = some_vec.get(0..1).unwrap().to_vec(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:70:17 + --> $DIR/unwrap_used.rs:68:17 | LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0..1]` error: used `unwrap()` on an `Option` value - --> $DIR/unwrap_used.rs:70:17 + --> $DIR/unwrap_used.rs:68:17 | LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -184,16 +184,16 @@ LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:77:13 + --> $DIR/unwrap_used.rs:75:13 | LL | let _ = boxed_slice.get(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&boxed_slice[1]` error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/unwrap_used.rs:95:17 + --> $DIR/unwrap_used.rs:93:17 | LL | let _ = Box::new([0]).get(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&Box::new([0])[1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&Box::new([0])[1]` error: aborting due to 28 previous errors diff --git a/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs b/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs index ac786f68c12..b6fcca0a791 100644 --- a/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs +++ b/src/tools/clippy/tests/ui/arc_with_non_send_sync.rs @@ -7,11 +7,18 @@ fn foo<T>(x: T) { // Should not lint - purposefully ignoring generic args. let a = Arc::new(x); } +fn issue11076<T>() { + let a: Arc<Vec<T>> = Arc::new(Vec::new()); +} fn main() { - // This is safe, as `i32` implements `Send` and `Sync`. - let a = Arc::new(42); + let _ = Arc::new(42); - // This is not safe, as `RefCell` does not implement `Sync`. 
- let b = Arc::new(RefCell::new(42)); + // !Sync + let _ = Arc::new(RefCell::new(42)); + let mutex = Mutex::new(1); + // !Send + let _ = Arc::new(mutex.lock().unwrap()); + // !Send + !Sync + let _ = Arc::new(&42 as *const i32); } diff --git a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr index fc2fc5f93b1..7633b38dfb5 100644 --- a/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr +++ b/src/tools/clippy/tests/ui/arc_with_non_send_sync.stderr @@ -1,11 +1,34 @@ -error: usage of `Arc<T>` where `T` is not `Send` or `Sync` - --> $DIR/arc_with_non_send_sync.rs:16:13 +error: usage of an `Arc` that is not `Send` or `Sync` + --> $DIR/arc_with_non_send_sync.rs:18:13 | -LL | let b = Arc::new(RefCell::new(42)); +LL | let _ = Arc::new(RefCell::new(42)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = help: consider using `Rc<T>` instead or wrapping `T` in a std::sync type like `Mutex<T>` + = note: the trait `Sync` is not implemented for `RefCell<i32>` + = note: required for `Arc<RefCell<i32>>` to implement `Send` and `Sync` + = help: consider using an `Rc` instead or wrapping the inner type with a `Mutex` = note: `-D clippy::arc-with-non-send-sync` implied by `-D warnings` -error: aborting due to previous error +error: usage of an `Arc` that is not `Send` or `Sync` + --> $DIR/arc_with_non_send_sync.rs:21:13 + | +LL | let _ = Arc::new(mutex.lock().unwrap()); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the trait `Send` is not implemented for `MutexGuard<'_, i32>` + = note: required for `Arc<MutexGuard<'_, i32>>` to implement `Send` and `Sync` + = help: consider using an `Rc` instead or wrapping the inner type with a `Mutex` + +error: usage of an `Arc` that is not `Send` or `Sync` + --> $DIR/arc_with_non_send_sync.rs:23:13 + | +LL | let _ = Arc::new(&42 as *const i32); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: the trait `Send` is not implemented for `*const i32` + = note: the trait `Sync` is not implemented for `*const i32` + = note: required for `Arc<*const i32>` to implement `Send` and `Sync` + = help: consider using an `Rc` instead or wrapping the inner type with a `Mutex` + +error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs index 4f38e50c81d..ed75acee8a2 100644 --- a/src/tools/clippy/tests/ui/arithmetic_side_effects.rs +++ b/src/tools/clippy/tests/ui/arithmetic_side_effects.rs @@ -481,4 +481,9 @@ pub fn issue_10792() { let _ = 10 / TWO.c; } +pub fn issue_11145() { + let mut x: Wrapping<u32> = Wrapping(0_u32); + x += 1; +} + fn main() {} diff --git a/src/tools/clippy/tests/ui/as_conversions.rs b/src/tools/clippy/tests/ui/as_conversions.rs index 427842a51d9..69f1c541c4e 100644 --- a/src/tools/clippy/tests/ui/as_conversions.rs +++ b/src/tools/clippy/tests/ui/as_conversions.rs @@ -4,8 +4,7 @@ #![allow(clippy::borrow_as_ptr, unused)] extern crate proc_macros; -use proc_macros::external; -use proc_macros::with_span; +use proc_macros::{external, with_span}; fn main() { let i = 0u32 as u64; diff --git a/src/tools/clippy/tests/ui/as_conversions.stderr b/src/tools/clippy/tests/ui/as_conversions.stderr index ca41d1378aa..54037a64997 100644 --- a/src/tools/clippy/tests/ui/as_conversions.stderr +++ b/src/tools/clippy/tests/ui/as_conversions.stderr @@ -1,5 +1,5 @@ error: using a potentially dangerous silent `as` conversion - --> $DIR/as_conversions.rs:11:13 + --> $DIR/as_conversions.rs:10:13 | LL | let i = 0u32 as u64; | ^^^^^^^^^^^ @@ 
-8,7 +8,7 @@ LL | let i = 0u32 as u64; = note: `-D clippy::as-conversions` implied by `-D warnings` error: using a potentially dangerous silent `as` conversion - --> $DIR/as_conversions.rs:13:13 + --> $DIR/as_conversions.rs:12:13 | LL | let j = &i as *const u64 as *mut u64; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -16,7 +16,7 @@ LL | let j = &i as *const u64 as *mut u64; = help: consider using a safe wrapper for this conversion error: using a potentially dangerous silent `as` conversion - --> $DIR/as_conversions.rs:13:13 + --> $DIR/as_conversions.rs:12:13 | LL | let j = &i as *const u64 as *mut u64; | ^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs b/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs index cab216b51ac..f20df6f0f09 100644 --- a/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs +++ b/src/tools/clippy/tests/ui/auxiliary/macro_use_helper.rs @@ -15,8 +15,7 @@ pub mod inner { // RE-EXPORT // this will stick in `inner` module - pub use macro_rules::mut_mut; - pub use macro_rules::try_err; + pub use macro_rules::{mut_mut, try_err}; pub mod nested { pub use macro_rules::string_add; diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs index fdfe5fc4181..c5879557516 100644 --- a/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs +++ b/src/tools/clippy/tests/ui/auxiliary/proc_macro_attr.rs @@ -8,11 +8,11 @@ extern crate syn; use proc_macro::TokenStream; use quote::{quote, quote_spanned}; -use syn::parse_macro_input; use syn::spanned::Spanned; use syn::token::Star; use syn::{ - parse_quote, FnArg, ImplItem, ItemImpl, ItemTrait, Lifetime, Pat, PatIdent, PatType, Signature, TraitItem, Type, + parse_macro_input, parse_quote, FnArg, ImplItem, ItemImpl, ItemTrait, Lifetime, Pat, PatIdent, PatType, Signature, + TraitItem, Type, }; #[proc_macro_attribute] diff --git a/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs b/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs index 4d008c8cb59..43df654389b 100644 --- a/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs +++ b/src/tools/clippy/tests/ui/auxiliary/proc_macros.rs @@ -5,13 +5,10 @@ extern crate proc_macro; use core::mem; -use proc_macro::{ - token_stream::IntoIter, - Delimiter::{self, Brace, Parenthesis}, - Group, Ident, Literal, Punct, - Spacing::{self, Alone, Joint}, - Span, TokenStream, TokenTree as TT, -}; +use proc_macro::token_stream::IntoIter; +use proc_macro::Delimiter::{self, Brace, Parenthesis}; +use proc_macro::Spacing::{self, Alone, Joint}; +use proc_macro::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree as TT}; type Result<T> = core::result::Result<T, TokenStream>; diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map.stderr b/src/tools/clippy/tests/ui/bind_instead_of_map.stderr index b6a81d21bb2..f17fee7460d 100644 --- a/src/tools/clippy/tests/ui/bind_instead_of_map.stderr +++ b/src/tools/clippy/tests/ui/bind_instead_of_map.stderr @@ -14,7 +14,7 @@ error: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed --> $DIR/bind_instead_of_map.rs:10:13 | LL | let _ = x.and_then(|o| Some(o + 1)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.map(|o| o + 1)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.map(|o| o + 1)` error: using `Result.and_then(Ok)`, which is a no-op --> $DIR/bind_instead_of_map.rs:16:13 diff --git a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr index 0152a93feee..cedbca78561 
100644 --- a/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr +++ b/src/tools/clippy/tests/ui/bind_instead_of_map_multipart.stderr @@ -9,7 +9,7 @@ note: the lint level is defined here | LL | #![deny(clippy::bind_instead_of_map)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -help: try this +help: try | LL | let _ = Some("42").map(|s| if s.len() < 42 { 0 } else { s.len() }); | ~~~ ~ ~~~~~~~ @@ -20,7 +20,7 @@ error: using `Result.and_then(|x| Ok(y))`, which is more succinctly expressed as LL | let _ = Ok::<_, ()>("42").and_then(|s| if s.len() < 42 { Ok(0) } else { Ok(s.len()) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: try this +help: try | LL | let _ = Ok::<_, ()>("42").map(|s| if s.len() < 42 { 0 } else { s.len() }); | ~~~ ~ ~~~~~~~ @@ -31,7 +31,7 @@ error: using `Result.or_else(|x| Err(y))`, which is more succinctly expressed as LL | let _ = Err::<(), _>("42").or_else(|s| if s.len() < 42 { Err(s.len() + 20) } else { Err(s.len()) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: try this +help: try | LL | let _ = Err::<(), _>("42").map_err(|s| if s.len() < 42 { s.len() + 20 } else { s.len() }); | ~~~~~~~ ~~~~~~~~~~~~ ~~~~~~~ @@ -48,7 +48,7 @@ LL | | } LL | | }); | |______^ | -help: try this +help: try | LL ~ Some("42").map(|s| { LL | if { @@ -82,7 +82,7 @@ error: using `Option.and_then(|x| Some(y))`, which is more succinctly expressed LL | let _ = Some("").and_then(|s| if s.len() == 20 { Some(m!()) } else { Some(Some(20)) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | -help: try this +help: try | LL | let _ = Some("").map(|s| if s.len() == 20 { m!() } else { Some(20) }); | ~~~ ~~~~ ~~~~~~~~ diff --git a/src/tools/clippy/tests/ui/bool_comparison.fixed b/src/tools/clippy/tests/ui/bool_comparison.fixed index d6774c03598..8689f89d2c3 100644 --- a/src/tools/clippy/tests/ui/bool_comparison.fixed +++ b/src/tools/clippy/tests/ui/bool_comparison.fixed @@ -2,6 +2,7 @@ #![allow(clippy::needless_if)] #![warn(clippy::bool_comparison)] +#![allow(clippy::incorrect_partial_ord_impl_on_ord_type)] fn main() { let x = true; diff --git a/src/tools/clippy/tests/ui/bool_comparison.rs b/src/tools/clippy/tests/ui/bool_comparison.rs index c0483fd7374..a1c94aff94b 100644 --- a/src/tools/clippy/tests/ui/bool_comparison.rs +++ b/src/tools/clippy/tests/ui/bool_comparison.rs @@ -2,6 +2,7 @@ #![allow(clippy::needless_if)] #![warn(clippy::bool_comparison)] +#![allow(clippy::incorrect_partial_ord_impl_on_ord_type)] fn main() { let x = true; diff --git a/src/tools/clippy/tests/ui/bool_comparison.stderr b/src/tools/clippy/tests/ui/bool_comparison.stderr index f4dded365fb..19bdf301358 100644 --- a/src/tools/clippy/tests/ui/bool_comparison.stderr +++ b/src/tools/clippy/tests/ui/bool_comparison.stderr @@ -1,5 +1,5 @@ error: equality checks against true are unnecessary - --> $DIR/bool_comparison.rs:8:8 + --> $DIR/bool_comparison.rs:9:8 | LL | if x == true { | ^^^^^^^^^ help: try simplifying it as shown: `x` @@ -7,127 +7,127 @@ LL | if x == true { = note: `-D clippy::bool-comparison` implied by `-D warnings` error: equality checks against false can be replaced by a negation - --> $DIR/bool_comparison.rs:13:8 + --> $DIR/bool_comparison.rs:14:8 | LL | if x == false { | ^^^^^^^^^^ help: try simplifying it as shown: `!x` error: equality checks against true are unnecessary - --> $DIR/bool_comparison.rs:18:8 + --> $DIR/bool_comparison.rs:19:8 | LL | if true == x { | ^^^^^^^^^ help: try 
simplifying it as shown: `x` error: equality checks against false can be replaced by a negation - --> $DIR/bool_comparison.rs:23:8 + --> $DIR/bool_comparison.rs:24:8 | LL | if false == x { | ^^^^^^^^^^ help: try simplifying it as shown: `!x` error: inequality checks against true can be replaced by a negation - --> $DIR/bool_comparison.rs:28:8 + --> $DIR/bool_comparison.rs:29:8 | LL | if x != true { | ^^^^^^^^^ help: try simplifying it as shown: `!x` error: inequality checks against false are unnecessary - --> $DIR/bool_comparison.rs:33:8 + --> $DIR/bool_comparison.rs:34:8 | LL | if x != false { | ^^^^^^^^^^ help: try simplifying it as shown: `x` error: inequality checks against true can be replaced by a negation - --> $DIR/bool_comparison.rs:38:8 + --> $DIR/bool_comparison.rs:39:8 | LL | if true != x { | ^^^^^^^^^ help: try simplifying it as shown: `!x` error: inequality checks against false are unnecessary - --> $DIR/bool_comparison.rs:43:8 + --> $DIR/bool_comparison.rs:44:8 | LL | if false != x { | ^^^^^^^^^^ help: try simplifying it as shown: `x` error: less than comparison against true can be replaced by a negation - --> $DIR/bool_comparison.rs:48:8 + --> $DIR/bool_comparison.rs:49:8 | LL | if x < true { | ^^^^^^^^ help: try simplifying it as shown: `!x` error: greater than checks against false are unnecessary - --> $DIR/bool_comparison.rs:53:8 + --> $DIR/bool_comparison.rs:54:8 | LL | if false < x { | ^^^^^^^^^ help: try simplifying it as shown: `x` error: greater than checks against false are unnecessary - --> $DIR/bool_comparison.rs:58:8 + --> $DIR/bool_comparison.rs:59:8 | LL | if x > false { | ^^^^^^^^^ help: try simplifying it as shown: `x` error: less than comparison against true can be replaced by a negation - --> $DIR/bool_comparison.rs:63:8 + --> $DIR/bool_comparison.rs:64:8 | LL | if true > x { | ^^^^^^^^ help: try simplifying it as shown: `!x` error: order comparisons between booleans can be simplified - --> $DIR/bool_comparison.rs:69:8 + --> $DIR/bool_comparison.rs:70:8 | LL | if x < y { | ^^^^^ help: try simplifying it as shown: `!x & y` error: order comparisons between booleans can be simplified - --> $DIR/bool_comparison.rs:74:8 + --> $DIR/bool_comparison.rs:75:8 | LL | if x > y { | ^^^^^ help: try simplifying it as shown: `x & !y` error: this comparison might be written more concisely - --> $DIR/bool_comparison.rs:122:8 + --> $DIR/bool_comparison.rs:123:8 | LL | if a == !b {}; | ^^^^^^^ help: try simplifying it as shown: `a != b` error: this comparison might be written more concisely - --> $DIR/bool_comparison.rs:123:8 + --> $DIR/bool_comparison.rs:124:8 | LL | if !a == b {}; | ^^^^^^^ help: try simplifying it as shown: `a != b` error: this comparison might be written more concisely - --> $DIR/bool_comparison.rs:127:8 + --> $DIR/bool_comparison.rs:128:8 | LL | if b == !a {}; | ^^^^^^^ help: try simplifying it as shown: `b != a` error: this comparison might be written more concisely - --> $DIR/bool_comparison.rs:128:8 + --> $DIR/bool_comparison.rs:129:8 | LL | if !b == a {}; | ^^^^^^^ help: try simplifying it as shown: `b != a` error: equality checks against false can be replaced by a negation - --> $DIR/bool_comparison.rs:152:8 + --> $DIR/bool_comparison.rs:153:8 | LL | if false == m!(func) {} | ^^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `!m!(func)` error: equality checks against false can be replaced by a negation - --> $DIR/bool_comparison.rs:153:8 + --> $DIR/bool_comparison.rs:154:8 | LL | if m!(func) == false {} | ^^^^^^^^^^^^^^^^^ help: try simplifying 
it as shown: `!m!(func)` error: equality checks against true are unnecessary - --> $DIR/bool_comparison.rs:154:8 + --> $DIR/bool_comparison.rs:155:8 | LL | if true == m!(func) {} | ^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `m!(func)` error: equality checks against true are unnecessary - --> $DIR/bool_comparison.rs:155:8 + --> $DIR/bool_comparison.rs:156:8 | LL | if m!(func) == true {} | ^^^^^^^^^^^^^^^^ help: try simplifying it as shown: `m!(func)` diff --git a/src/tools/clippy/tests/ui/borrow_box.rs b/src/tools/clippy/tests/ui/borrow_box.rs index 3b5b6bf4c95..95b6b0f5038 100644 --- a/src/tools/clippy/tests/ui/borrow_box.rs +++ b/src/tools/clippy/tests/ui/borrow_box.rs @@ -1,6 +1,10 @@ #![deny(clippy::borrowed_box)] #![allow(dead_code, unused_variables)] -#![allow(clippy::uninlined_format_args, clippy::disallowed_names)] +#![allow( + clippy::uninlined_format_args, + clippy::disallowed_names, + clippy::needless_pass_by_ref_mut +)] use std::fmt::Display; diff --git a/src/tools/clippy/tests/ui/borrow_box.stderr b/src/tools/clippy/tests/ui/borrow_box.stderr index 99cb60a1ead..90e752211ff 100644 --- a/src/tools/clippy/tests/ui/borrow_box.stderr +++ b/src/tools/clippy/tests/ui/borrow_box.stderr @@ -1,5 +1,5 @@ error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:20:14 + --> $DIR/borrow_box.rs:24:14 | LL | let foo: &Box<bool>; | ^^^^^^^^^^ help: try: `&bool` @@ -11,55 +11,55 @@ LL | #![deny(clippy::borrowed_box)] | ^^^^^^^^^^^^^^^^^^^^ error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:24:10 + --> $DIR/borrow_box.rs:28:10 | LL | foo: &'a Box<bool>, | ^^^^^^^^^^^^^ help: try: `&'a bool` error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:28:17 + --> $DIR/borrow_box.rs:32:17 | LL | fn test4(a: &Box<bool>); | ^^^^^^^^^^ help: try: `&bool` error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:94:25 + --> $DIR/borrow_box.rs:98:25 | LL | pub fn test14(_display: &Box<dyn Display>) {} | ^^^^^^^^^^^^^^^^^ help: try: `&dyn Display` error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:95:25 + --> $DIR/borrow_box.rs:99:25 | LL | pub fn test15(_display: &Box<dyn Display + Send>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(dyn Display + Send)` error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:96:29 + --> $DIR/borrow_box.rs:100:29 | LL | pub fn test16<'a>(_display: &'a Box<dyn Display + 'a>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&'a (dyn Display + 'a)` error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:98:25 + --> $DIR/borrow_box.rs:102:25 | LL | pub fn test17(_display: &Box<impl Display>) {} | ^^^^^^^^^^^^^^^^^^ help: try: `&impl Display` error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:99:25 + --> $DIR/borrow_box.rs:103:25 | LL | pub fn test18(_display: &Box<impl Display + Send>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(impl Display + Send)` error: you seem to be trying to use `&Box<T>`. Consider using just `&T` - --> $DIR/borrow_box.rs:100:29 + --> $DIR/borrow_box.rs:104:29 | LL | pub fn test19<'a>(_display: &'a Box<impl Display + 'a>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&'a (impl Display + 'a)` error: you seem to be trying to use `&Box<T>`. 
Consider using just `&T` - --> $DIR/borrow_box.rs:105:25 + --> $DIR/borrow_box.rs:109:25 | LL | pub fn test20(_display: &Box<(dyn Display + Send)>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&(dyn Display + Send)` diff --git a/src/tools/clippy/tests/ui/crashes/ice-7169.stderr b/src/tools/clippy/tests/ui/crashes/ice-7169.stderr index 84e0af3f0d0..0cd02851640 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-7169.stderr +++ b/src/tools/clippy/tests/ui/crashes/ice-7169.stderr @@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_ok()` --> $DIR/ice-7169.rs:10:12 | LL | if let Ok(_) = Ok::<_, ()>(A::<String>::default()) {} - | -------^^^^^-------------------------------------- help: try this: `if Ok::<_, ()>(A::<String>::default()).is_ok()` + | -------^^^^^-------------------------------------- help: try: `if Ok::<_, ()>(A::<String>::default()).is_ok()` | = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings` diff --git a/src/tools/clippy/tests/ui/crashes/ice-8250.stderr b/src/tools/clippy/tests/ui/crashes/ice-8250.stderr index 8ed8f3b3a06..e6f3644ef34 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-8250.stderr +++ b/src/tools/clippy/tests/ui/crashes/ice-8250.stderr @@ -2,7 +2,7 @@ error: unnecessary use of `splitn` --> $DIR/ice-8250.rs:2:13 | LL | let _ = s[1..].splitn(2, '.').next()?; - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s[1..].split('.')` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `s[1..].split('.')` | = note: `-D clippy::needless-splitn` implied by `-D warnings` diff --git a/src/tools/clippy/tests/ui/default_trait_access.fixed b/src/tools/clippy/tests/ui/default_trait_access.fixed index 14eb6d572cf..6e541473cb3 100644 --- a/src/tools/clippy/tests/ui/default_trait_access.fixed +++ b/src/tools/clippy/tests/ui/default_trait_access.fixed @@ -7,9 +7,8 @@ extern crate proc_macros; use proc_macros::with_span; -use std::default; use std::default::Default as D2; -use std::string; +use std::{default, string}; fn main() { let s1: String = String::default(); diff --git a/src/tools/clippy/tests/ui/default_trait_access.rs b/src/tools/clippy/tests/ui/default_trait_access.rs index aa2ced0a7f0..2ffeb32fbdc 100644 --- a/src/tools/clippy/tests/ui/default_trait_access.rs +++ b/src/tools/clippy/tests/ui/default_trait_access.rs @@ -7,9 +7,8 @@ extern crate proc_macros; use proc_macros::with_span; -use std::default; use std::default::Default as D2; -use std::string; +use std::{default, string}; fn main() { let s1: String = Default::default(); diff --git a/src/tools/clippy/tests/ui/default_trait_access.stderr b/src/tools/clippy/tests/ui/default_trait_access.stderr index e4f73c08d19..103fccf6a1d 100644 --- a/src/tools/clippy/tests/ui/default_trait_access.stderr +++ b/src/tools/clippy/tests/ui/default_trait_access.stderr @@ -1,5 +1,5 @@ error: calling `String::default()` is more clear than this expression - --> $DIR/default_trait_access.rs:15:22 + --> $DIR/default_trait_access.rs:14:22 | LL | let s1: String = Default::default(); | ^^^^^^^^^^^^^^^^^^ help: try: `String::default()` @@ -11,43 +11,43 @@ LL | #![deny(clippy::default_trait_access)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: calling `String::default()` is more clear than this expression - --> $DIR/default_trait_access.rs:19:22 + --> $DIR/default_trait_access.rs:18:22 | LL | let s3: String = D2::default(); | ^^^^^^^^^^^^^ help: try: `String::default()` error: calling `String::default()` is more clear than this expression - --> $DIR/default_trait_access.rs:21:22 + --> $DIR/default_trait_access.rs:20:22 | LL | let s4: String 
= std::default::Default::default(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `String::default()` error: calling `String::default()` is more clear than this expression - --> $DIR/default_trait_access.rs:25:22 + --> $DIR/default_trait_access.rs:24:22 | LL | let s6: String = default::Default::default(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `String::default()` error: calling `GenericDerivedDefault::default()` is more clear than this expression - --> $DIR/default_trait_access.rs:35:46 + --> $DIR/default_trait_access.rs:34:46 | LL | let s11: GenericDerivedDefault<String> = Default::default(); | ^^^^^^^^^^^^^^^^^^ help: try: `GenericDerivedDefault::default()` error: calling `TupleDerivedDefault::default()` is more clear than this expression - --> $DIR/default_trait_access.rs:41:36 + --> $DIR/default_trait_access.rs:40:36 | LL | let s14: TupleDerivedDefault = Default::default(); | ^^^^^^^^^^^^^^^^^^ help: try: `TupleDerivedDefault::default()` error: calling `ArrayDerivedDefault::default()` is more clear than this expression - --> $DIR/default_trait_access.rs:43:36 + --> $DIR/default_trait_access.rs:42:36 | LL | let s15: ArrayDerivedDefault = Default::default(); | ^^^^^^^^^^^^^^^^^^ help: try: `ArrayDerivedDefault::default()` error: calling `TupleStructDerivedDefault::default()` is more clear than this expression - --> $DIR/default_trait_access.rs:47:42 + --> $DIR/default_trait_access.rs:46:42 | LL | let s17: TupleStructDerivedDefault = Default::default(); | ^^^^^^^^^^^^^^^^^^ help: try: `TupleStructDerivedDefault::default()` diff --git a/src/tools/clippy/tests/ui/deref_addrof.stderr b/src/tools/clippy/tests/ui/deref_addrof.stderr index e0287522fc5..9dd1e246b3e 100644 --- a/src/tools/clippy/tests/ui/deref_addrof.stderr +++ b/src/tools/clippy/tests/ui/deref_addrof.stderr @@ -2,7 +2,7 @@ error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:24:13 | LL | let b = *&a; - | ^^^ help: try this: `a` + | ^^^ help: try: `a` | = note: `-D clippy::deref-addrof` implied by `-D warnings` @@ -10,49 +10,49 @@ error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:26:13 | LL | let b = *&get_number(); - | ^^^^^^^^^^^^^^ help: try this: `get_number()` + | ^^^^^^^^^^^^^^ help: try: `get_number()` error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:31:13 | LL | let b = *&bytes[1..2][0]; - | ^^^^^^^^^^^^^^^^ help: try this: `bytes[1..2][0]` + | ^^^^^^^^^^^^^^^^ help: try: `bytes[1..2][0]` error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:35:13 | LL | let b = *&(a); - | ^^^^^ help: try this: `(a)` + | ^^^^^ help: try: `(a)` error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:37:13 | LL | let b = *(&a); - | ^^^^^ help: try this: `a` + | ^^^^^ help: try: `a` error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:40:13 | LL | let b = *((&a)); - | ^^^^^^^ help: try this: `a` + | ^^^^^^^ help: try: `a` error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:42:13 | LL | let b = *&&a; - | ^^^^ help: try this: `&a` + | ^^^^ help: try: `&a` error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:44:14 | LL | let b = **&aref; - | ^^^^^^ help: try this: `aref` + | ^^^^^^ help: try: `aref` error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:54:17 | LL | inline!(*& $(@expr self)) - | ^^^^^^^^^^^^^^^^ help: try this: `$(@expr self)` + | ^^^^^^^^^^^^^^^^ help: try: `$(@expr self)` | = note: this error originates in the macro `__inline_mac_impl` (in Nightly builds, run with -Z 
macro-backtrace for more info) @@ -60,7 +60,7 @@ error: immediately dereferencing a reference --> $DIR/deref_addrof.rs:58:17 | LL | inline!(*&mut $(@expr self)) - | ^^^^^^^^^^^^^^^^^^^ help: try this: `$(@expr self)` + | ^^^^^^^^^^^^^^^^^^^ help: try: `$(@expr self)` | = note: this error originates in the macro `__inline_mac_impl` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr b/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr index 2c55a4ed6ac..6fa5069b6b4 100644 --- a/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr +++ b/src/tools/clippy/tests/ui/deref_addrof_double_trigger.stderr @@ -2,7 +2,7 @@ error: immediately dereferencing a reference --> $DIR/deref_addrof_double_trigger.rs:10:14 | LL | let b = **&&a; - | ^^^^ help: try this: `&a` + | ^^^^ help: try: `&a` | = note: `-D clippy::deref-addrof` implied by `-D warnings` @@ -10,13 +10,13 @@ error: immediately dereferencing a reference --> $DIR/deref_addrof_double_trigger.rs:14:17 | LL | let y = *&mut x; - | ^^^^^^^ help: try this: `x` + | ^^^^^^^ help: try: `x` error: immediately dereferencing a reference --> $DIR/deref_addrof_double_trigger.rs:21:18 | LL | let y = **&mut &mut x; - | ^^^^^^^^^^^^ help: try this: `&mut x` + | ^^^^^^^^^^^^ help: try: `&mut x` error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/derive.rs b/src/tools/clippy/tests/ui/derive.rs index e01079bc977..c76711312e1 100644 --- a/src/tools/clippy/tests/ui/derive.rs +++ b/src/tools/clippy/tests/ui/derive.rs @@ -1,4 +1,8 @@ -#![allow(clippy::incorrect_clone_impl_on_copy_type, dead_code)] +#![allow( + clippy::incorrect_clone_impl_on_copy_type, + clippy::incorrect_partial_ord_impl_on_ord_type, + dead_code +)] #![warn(clippy::expl_impl_clone_on_copy)] diff --git a/src/tools/clippy/tests/ui/derive.stderr b/src/tools/clippy/tests/ui/derive.stderr index e1fbb8dcd1e..5d7ed09188f 100644 --- a/src/tools/clippy/tests/ui/derive.stderr +++ b/src/tools/clippy/tests/ui/derive.stderr @@ -1,5 +1,5 @@ error: you are implementing `Clone` explicitly on a `Copy` type - --> $DIR/derive.rs:8:1 + --> $DIR/derive.rs:12:1 | LL | / impl Clone for Qux { LL | | fn clone(&self) -> Self { @@ -9,7 +9,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> $DIR/derive.rs:8:1 + --> $DIR/derive.rs:12:1 | LL | / impl Clone for Qux { LL | | fn clone(&self) -> Self { @@ -20,7 +20,7 @@ LL | | } = note: `-D clippy::expl-impl-clone-on-copy` implied by `-D warnings` error: you are implementing `Clone` explicitly on a `Copy` type - --> $DIR/derive.rs:32:1 + --> $DIR/derive.rs:36:1 | LL | / impl<'a> Clone for Lt<'a> { LL | | fn clone(&self) -> Self { @@ -30,7 +30,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> $DIR/derive.rs:32:1 + --> $DIR/derive.rs:36:1 | LL | / impl<'a> Clone for Lt<'a> { LL | | fn clone(&self) -> Self { @@ -40,7 +40,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> $DIR/derive.rs:43:1 + --> $DIR/derive.rs:47:1 | LL | / impl Clone for BigArray { LL | | fn clone(&self) -> Self { @@ -50,7 +50,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> $DIR/derive.rs:43:1 + --> $DIR/derive.rs:47:1 | LL | / impl Clone for BigArray { LL | | fn clone(&self) -> Self { @@ -60,7 +60,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> $DIR/derive.rs:54:1 + --> $DIR/derive.rs:58:1 | LL | / impl Clone for FnPtr { LL 
| | fn clone(&self) -> Self { @@ -70,7 +70,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> $DIR/derive.rs:54:1 + --> $DIR/derive.rs:58:1 | LL | / impl Clone for FnPtr { LL | | fn clone(&self) -> Self { @@ -80,7 +80,7 @@ LL | | } | |_^ error: you are implementing `Clone` explicitly on a `Copy` type - --> $DIR/derive.rs:74:1 + --> $DIR/derive.rs:78:1 | LL | / impl<T: Clone> Clone for Generic2<T> { LL | | fn clone(&self) -> Self { @@ -90,7 +90,7 @@ LL | | } | |_^ | note: consider deriving `Clone` or removing `Copy` - --> $DIR/derive.rs:74:1 + --> $DIR/derive.rs:78:1 | LL | / impl<T: Clone> Clone for Generic2<T> { LL | | fn clone(&self) -> Self { diff --git a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs index 6f12d36d777..1fb3d51c46d 100644 --- a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs +++ b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.rs @@ -1,5 +1,6 @@ #![warn(clippy::derive_ord_xor_partial_ord)] #![allow(clippy::unnecessary_wraps)] +#![allow(clippy::incorrect_partial_ord_impl_on_ord_type)] use std::cmp::Ordering; diff --git a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr index 58efbb8541f..bd148834814 100644 --- a/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr +++ b/src/tools/clippy/tests/ui/derive_ord_xor_partial_ord.stderr @@ -1,11 +1,11 @@ error: you are deriving `Ord` but have implemented `PartialOrd` explicitly - --> $DIR/derive_ord_xor_partial_ord.rs:21:10 + --> $DIR/derive_ord_xor_partial_ord.rs:22:10 | LL | #[derive(Ord, PartialEq, Eq)] | ^^^ | note: `PartialOrd` implemented here - --> $DIR/derive_ord_xor_partial_ord.rs:24:1 + --> $DIR/derive_ord_xor_partial_ord.rs:25:1 | LL | impl PartialOrd for DeriveOrd { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -13,20 +13,20 @@ LL | impl PartialOrd for DeriveOrd { = note: this error originates in the derive macro `Ord` (in Nightly builds, run with -Z macro-backtrace for more info) error: you are deriving `Ord` but have implemented `PartialOrd` explicitly - --> $DIR/derive_ord_xor_partial_ord.rs:30:10 + --> $DIR/derive_ord_xor_partial_ord.rs:31:10 | LL | #[derive(Ord, PartialEq, Eq)] | ^^^ | note: `PartialOrd` implemented here - --> $DIR/derive_ord_xor_partial_ord.rs:33:1 + --> $DIR/derive_ord_xor_partial_ord.rs:34:1 | LL | impl PartialOrd<DeriveOrdWithExplicitTypeVariable> for DeriveOrdWithExplicitTypeVariable { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: this error originates in the derive macro `Ord` (in Nightly builds, run with -Z macro-backtrace for more info) error: you are implementing `Ord` explicitly but have derived `PartialOrd` - --> $DIR/derive_ord_xor_partial_ord.rs:42:1 + --> $DIR/derive_ord_xor_partial_ord.rs:43:1 | LL | / impl std::cmp::Ord for DerivePartialOrd { LL | | fn cmp(&self, other: &Self) -> Ordering { @@ -36,14 +36,14 @@ LL | | } | |_^ | note: `PartialOrd` implemented here - --> $DIR/derive_ord_xor_partial_ord.rs:39:10 + --> $DIR/derive_ord_xor_partial_ord.rs:40:10 | LL | #[derive(PartialOrd, PartialEq, Eq)] | ^^^^^^^^^^ = note: this error originates in the derive macro `PartialOrd` (in Nightly builds, run with -Z macro-backtrace for more info) error: you are implementing `Ord` explicitly but have derived `PartialOrd` - --> $DIR/derive_ord_xor_partial_ord.rs:62:5 + --> $DIR/derive_ord_xor_partial_ord.rs:63:5 | LL | / impl Ord for DerivePartialOrdInUseOrd { LL | | 
fn cmp(&self, other: &Self) -> Ordering { @@ -53,7 +53,7 @@ LL | | } | |_____^ | note: `PartialOrd` implemented here - --> $DIR/derive_ord_xor_partial_ord.rs:59:14 + --> $DIR/derive_ord_xor_partial_ord.rs:60:14 | LL | #[derive(PartialOrd, PartialEq, Eq)] | ^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/entry.stderr b/src/tools/clippy/tests/ui/entry.stderr index 2c4c49d2522..e8a003e9cf6 100644 --- a/src/tools/clippy/tests/ui/entry.stderr +++ b/src/tools/clippy/tests/ui/entry.stderr @@ -4,7 +4,7 @@ error: usage of `contains_key` followed by `insert` on a `HashMap` LL | / if !m.contains_key(&k) { LL | | m.insert(k, v); LL | | } - | |_____^ help: try this: `m.entry(k).or_insert(v);` + | |_____^ help: try: `m.entry(k).or_insert(v);` | = note: `-D clippy::map-entry` implied by `-D warnings` @@ -20,7 +20,7 @@ LL | | } LL | | } | |_____^ | -help: try this +help: try | LL ~ m.entry(k).or_insert_with(|| { LL + if true { @@ -43,7 +43,7 @@ LL | | }; LL | | } | |_____^ | -help: try this +help: try | LL ~ m.entry(k).or_insert_with(|| { LL + if true { @@ -66,7 +66,7 @@ LL | | } LL | | } | |_____^ | -help: try this +help: try | LL ~ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) { LL + if true { @@ -87,7 +87,7 @@ LL | | m.insert(k, v); LL | | } | |_____^ | -help: try this +help: try | LL ~ m.entry(k).or_insert_with(|| { LL + foo(); @@ -107,7 +107,7 @@ LL | | }; LL | | } | |_____^ | -help: try this +help: try | LL ~ m.entry(k).or_insert_with(|| { LL + match 0 { @@ -133,7 +133,7 @@ LL | | }; LL | | } | |_____^ | -help: try this +help: try | LL ~ if let std::collections::hash_map::Entry::Vacant(e) = m.entry(k) { LL + match 0 { @@ -157,7 +157,7 @@ LL | | } LL | | } | |_____^ | -help: try this +help: try | LL ~ m.entry(k).or_insert_with(|| { LL + foo(); @@ -192,7 +192,7 @@ error: usage of `contains_key` followed by `insert` on a `HashMap` LL | / if !m.contains_key(&m!(k)) { LL | | m.insert(m!(k), m!(v)); LL | | } - | |_____^ help: try this: `m.entry(m!(k)).or_insert_with(|| m!(v));` + | |_____^ help: try: `m.entry(m!(k)).or_insert_with(|| m!(v));` error: usage of `contains_key` followed by `insert` on a `HashMap` --> $DIR/entry.rs:152:5 @@ -204,7 +204,7 @@ LL | | m.insert(k, v); LL | | } | |_____^ | -help: try this +help: try | LL ~ m.entry(k).or_insert_with(|| { LL + let x = (String::new(), String::new()); diff --git a/src/tools/clippy/tests/ui/entry_btree.stderr b/src/tools/clippy/tests/ui/entry_btree.stderr index 5c6fcdf1a28..8f41581d6b6 100644 --- a/src/tools/clippy/tests/ui/entry_btree.stderr +++ b/src/tools/clippy/tests/ui/entry_btree.stderr @@ -8,7 +8,7 @@ LL | | } | |_____^ | = note: `-D clippy::map-entry` implied by `-D warnings` -help: try this +help: try | LL ~ if let std::collections::btree_map::Entry::Vacant(e) = m.entry(k) { LL + e.insert(v); diff --git a/src/tools/clippy/tests/ui/entry_with_else.stderr b/src/tools/clippy/tests/ui/entry_with_else.stderr index e0f6671b460..0d0eb964937 100644 --- a/src/tools/clippy/tests/ui/entry_with_else.stderr +++ b/src/tools/clippy/tests/ui/entry_with_else.stderr @@ -9,7 +9,7 @@ LL | | } | |_____^ | = note: `-D clippy::map-entry` implied by `-D warnings` -help: try this +help: try | LL ~ match m.entry(k) { LL + std::collections::hash_map::Entry::Vacant(e) => { @@ -31,7 +31,7 @@ LL | | m.insert(k, v2); LL | | } | |_____^ | -help: try this +help: try | LL ~ match m.entry(k) { LL + std::collections::hash_map::Entry::Occupied(mut e) => { @@ -53,7 +53,7 @@ LL | | foo(); LL | | } | |_____^ | -help: try this +help: try | LL ~ if let 
std::collections::hash_map::Entry::Vacant(e) = m.entry(k) { LL + e.insert(v); @@ -72,7 +72,7 @@ LL | | m.insert(k, v); LL | | } | |_____^ | -help: try this +help: try | LL ~ if let std::collections::hash_map::Entry::Occupied(mut e) = m.entry(k) { LL + e.insert(v); @@ -91,7 +91,7 @@ LL | | m.insert(k, v2); LL | | } | |_____^ | -help: try this +help: try | LL ~ match m.entry(k) { LL + std::collections::hash_map::Entry::Vacant(e) => { @@ -113,7 +113,7 @@ LL | | m.insert(k, v) LL | | }; | |_____^ | -help: try this +help: try | LL ~ match m.entry(k) { LL + std::collections::hash_map::Entry::Occupied(mut e) => { @@ -137,7 +137,7 @@ LL | | None LL | | }; | |_____^ | -help: try this +help: try | LL ~ if let std::collections::hash_map::Entry::Occupied(mut e) = m.entry(k) { LL + foo(); diff --git a/src/tools/clippy/tests/ui/expect_fun_call.stderr b/src/tools/clippy/tests/ui/expect_fun_call.stderr index 36fb0e5de15..a621f681d98 100644 --- a/src/tools/clippy/tests/ui/expect_fun_call.stderr +++ b/src/tools/clippy/tests/ui/expect_fun_call.stderr @@ -2,7 +2,7 @@ error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:38:26 | LL | with_none_and_format.expect(&format!("Error {}: fake error", error_code)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))` | = note: `-D clippy::expect-fun-call` implied by `-D warnings` @@ -10,85 +10,85 @@ error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:41:26 | LL | with_none_and_as_str.expect(format!("Error {}: fake error", error_code).as_str()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("Error {}: fake error", error_code))` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:44:37 | LL | with_none_and_format_with_macro.expect(format!("Error {}: fake error", one!()).as_str()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("Error {}: fake error", one!()))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("Error {}: fake error", one!()))` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:54:25 | LL | with_err_and_format.expect(&format!("Error {}: fake error", error_code)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:57:25 | LL | with_err_and_as_str.expect(format!("Error {}: fake error", error_code).as_str()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|_| panic!("Error {}: fake error", error_code))` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:69:17 | LL | Some("foo").expect(format!("{} {}", 1, 2).as_ref()); - | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{} {}", 1, 2))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{} {}", 1, 2))` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:90:21 | LL | Some("foo").expect(&get_string()); - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:91:21 | LL | Some("foo").expect(get_string().as_ref()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:92:21 | LL | Some("foo").expect(get_string().as_str()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_string()) })` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:94:21 | LL | Some("foo").expect(get_static_str()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_static_str()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_static_str()) })` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:95:21 | LL | Some("foo").expect(get_non_static_str(&0)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| { panic!("{}", get_non_static_str(&0).to_string()) })` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:99:16 | LL | Some(true).expect(&format!("key {}, {}", 1, 2)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("key {}, {}", 1, 2))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("key {}, {}", 1, 2))` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:105:17 | LL | opt_ref.expect(&format!("{:?}", opt_ref)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{:?}", opt_ref))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{:?}", opt_ref))` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:109:20 | LL | format_capture.expect(&format!("{error_code}")); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{error_code}"))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{error_code}"))` error: use of `expect` followed by a function call --> $DIR/expect_fun_call.rs:112:30 | LL | format_capture_and_value.expect(&format!("{error_code}, {}", 1)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| panic!("{error_code}, {}", 1))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| panic!("{error_code}, {}", 1))` error: aborting due to 15 previous errors diff --git a/src/tools/clippy/tests/ui/explicit_auto_deref.stderr b/src/tools/clippy/tests/ui/explicit_auto_deref.stderr index 91863abcc5d..afc311e3f7c 100644 --- a/src/tools/clippy/tests/ui/explicit_auto_deref.stderr +++ 
b/src/tools/clippy/tests/ui/explicit_auto_deref.stderr @@ -2,7 +2,7 @@ error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:70:19 | LL | let _: &str = &*s; - | ^^^ help: try this: `&s` + | ^^^ help: try: `&s` | = note: `-D clippy::explicit-auto-deref` implied by `-D warnings` @@ -10,229 +10,229 @@ error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:71:19 | LL | let _: &str = &*{ String::new() }; - | ^^^^^^^^^^^^^^^^^^^ help: try this: `&{ String::new() }` + | ^^^^^^^^^^^^^^^^^^^ help: try: `&{ String::new() }` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:72:19 | LL | let _: &str = &mut *{ String::new() }; - | ^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&mut { String::new() }` + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut { String::new() }` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:76:11 | LL | f_str(&*s); - | ^^^ help: try this: `&s` + | ^^^ help: try: `&s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:80:13 | LL | f_str_t(&*s, &*s); // Don't lint second param. - | ^^^ help: try this: `&s` + | ^^^ help: try: `&s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:83:24 | LL | let _: &Box<i32> = &**b; - | ^^^^ help: try this: `&b` + | ^^^^ help: try: `&b` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:89:7 | LL | c(&*s); - | ^^^ help: try this: `&s` + | ^^^ help: try: `&s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:95:9 | LL | &**x - | ^^^^ help: try this: `x` + | ^^^^ help: try: `x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:99:11 | LL | { &**x } - | ^^^^ help: try this: `x` + | ^^^^ help: try: `x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:103:9 | LL | &**{ x } - | ^^^^^^^^ help: try this: `{ x }` + | ^^^^^^^^ help: try: `{ x }` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:107:9 | LL | &***x - | ^^^^^ help: try this: `x` + | ^^^^^ help: try: `x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:124:12 | LL | f1(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:125:12 | LL | f2(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:126:12 | LL | f3(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:127:27 | LL | f4.callable_str()(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:128:12 | LL | f5(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:129:12 | LL | f6(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:130:27 | LL | f7.callable_str()(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:131:25 | LL | f8.callable_t()(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:132:12 | LL | f9(&*x); - | ^^^ help: try this: `&x` + | ^^^ 
help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:133:13 | LL | f10(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:134:26 | LL | f11.callable_t()(&*x); - | ^^^ help: try this: `&x` + | ^^^ help: try: `&x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:138:16 | LL | let _ = S1(&*s); - | ^^^ help: try this: `&s` + | ^^^ help: try: `&s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:143:21 | LL | let _ = S2 { s: &*s }; - | ^^^ help: try this: `&s` + | ^^^ help: try: `&s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:159:30 | LL | let _ = Self::S1(&**s); - | ^^^^ help: try this: `s` + | ^^^^ help: try: `s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:160:35 | LL | let _ = Self::S2 { s: &**s }; - | ^^^^ help: try this: `s` + | ^^^^ help: try: `s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:163:20 | LL | let _ = E1::S1(&*s); - | ^^^ help: try this: `&s` + | ^^^ help: try: `&s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:164:25 | LL | let _ = E1::S2 { s: &*s }; - | ^^^ help: try this: `&s` + | ^^^ help: try: `&s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:182:13 | LL | let _ = (*b).foo; - | ^^^^ help: try this: `b` + | ^^^^ help: try: `b` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:183:13 | LL | let _ = (**b).foo; - | ^^^^^ help: try this: `b` + | ^^^^^ help: try: `b` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:198:19 | LL | let _ = f_str(*ref_str); - | ^^^^^^^^ help: try this: `ref_str` + | ^^^^^^^^ help: try: `ref_str` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:200:19 | LL | let _ = f_str(**ref_ref_str); - | ^^^^^^^^^^^^^ help: try this: `ref_ref_str` + | ^^^^^^^^^^^^^ help: try: `ref_ref_str` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:210:13 | LL | f_str(&&*ref_str); // `needless_borrow` will suggest removing both references - | ^^^^^^^^ help: try this: `ref_str` + | ^^^^^^^^ help: try: `ref_str` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:211:12 | LL | f_str(&&**ref_str); // `needless_borrow` will suggest removing only one reference - | ^^^^^^^^^^ help: try this: `ref_str` + | ^^^^^^^^^^ help: try: `ref_str` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:220:41 | LL | let _ = || -> &'static str { return *s }; - | ^^ help: try this: `s` + | ^^ help: try: `s` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:239:9 | LL | &**x - | ^^^^ help: try this: `x` + | ^^^^ help: try: `x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:262:8 | LL | c1(*x); - | ^^ help: try this: `x` + | ^^ help: try: `x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:265:20 | LL | return *x; - | ^^ help: try this: `x` + | ^^ help: try: `x` error: deref which would be done by auto-deref --> $DIR/explicit_auto_deref.rs:267:9 | LL | *x - | ^^ help: try this: `x` + | ^^ help: try: `x` error: aborting due to 39 previous errors diff --git a/src/tools/clippy/tests/ui/explicit_deref_methods.stderr b/src/tools/clippy/tests/ui/explicit_deref_methods.stderr index 
d025035b789..362e559b21a 100644 --- a/src/tools/clippy/tests/ui/explicit_deref_methods.stderr +++ b/src/tools/clippy/tests/ui/explicit_deref_methods.stderr @@ -2,7 +2,7 @@ error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:54:19 | LL | let b: &str = a.deref(); - | ^^^^^^^^^ help: try this: `&*a` + | ^^^^^^^^^ help: try: `&*a` | = note: `-D clippy::explicit-deref-methods` implied by `-D warnings` @@ -10,67 +10,67 @@ error: explicit `deref_mut` method call --> $DIR/explicit_deref_methods.rs:56:23 | LL | let b: &mut str = a.deref_mut(); - | ^^^^^^^^^^^^^ help: try this: `&mut **a` + | ^^^^^^^^^^^^^ help: try: `&mut **a` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:59:39 | LL | let b: String = format!("{}, {}", a.deref(), a.deref()); - | ^^^^^^^^^ help: try this: `&*a` + | ^^^^^^^^^ help: try: `&*a` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:59:50 | LL | let b: String = format!("{}, {}", a.deref(), a.deref()); - | ^^^^^^^^^ help: try this: `&*a` + | ^^^^^^^^^ help: try: `&*a` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:61:20 | LL | println!("{}", a.deref()); - | ^^^^^^^^^ help: try this: `&*a` + | ^^^^^^^^^ help: try: `&*a` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:64:11 | LL | match a.deref() { - | ^^^^^^^^^ help: try this: `&*a` + | ^^^^^^^^^ help: try: `&*a` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:68:28 | LL | let b: String = concat(a.deref()); - | ^^^^^^^^^ help: try this: `&*a` + | ^^^^^^^^^ help: try: `&*a` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:70:13 | LL | let b = just_return(a).deref(); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `just_return(a)` + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `just_return(a)` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:72:28 | LL | let b: String = concat(just_return(a).deref()); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `just_return(a)` + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `just_return(a)` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:74:19 | LL | let b: &str = a.deref().deref(); - | ^^^^^^^^^^^^^^^^^ help: try this: `&**a` + | ^^^^^^^^^^^^^^^^^ help: try: `&**a` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:77:13 | LL | let b = opt_a.unwrap().deref(); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&*opt_a.unwrap()` + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `&*opt_a.unwrap()` error: explicit `deref` method call --> $DIR/explicit_deref_methods.rs:114:31 | LL | let b: &str = expr_deref!(a.deref()); - | ^^^^^^^^^ help: try this: `&*a` + | ^^^^^^^^^ help: try: `&*a` error: aborting due to 12 previous errors diff --git a/src/tools/clippy/tests/ui/explicit_write.stderr b/src/tools/clippy/tests/ui/explicit_write.stderr index 457e9c62718..b3aa7274c6d 100644 --- a/src/tools/clippy/tests/ui/explicit_write.stderr +++ b/src/tools/clippy/tests/ui/explicit_write.stderr @@ -2,7 +2,7 @@ error: use of `write!(stdout(), ...).unwrap()` --> $DIR/explicit_write.rs:24:9 | LL | write!(std::io::stdout(), "test").unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `print!("test")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `print!("test")` | = note: `-D clippy::explicit-write` implied by `-D warnings` @@ -10,73 +10,73 @@ error: use of `write!(stderr(), ...).unwrap()` --> $DIR/explicit_write.rs:25:9 | LL | write!(std::io::stderr(), "test").unwrap(); - | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprint!("test")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprint!("test")` error: use of `writeln!(stdout(), ...).unwrap()` --> $DIR/explicit_write.rs:26:9 | LL | writeln!(std::io::stdout(), "test").unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `println!("test")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `println!("test")` error: use of `writeln!(stderr(), ...).unwrap()` --> $DIR/explicit_write.rs:27:9 | LL | writeln!(std::io::stderr(), "test").unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("test")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("test")` error: use of `stdout().write_fmt(...).unwrap()` --> $DIR/explicit_write.rs:28:9 | LL | std::io::stdout().write_fmt(format_args!("test")).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `print!("test")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `print!("test")` error: use of `stderr().write_fmt(...).unwrap()` --> $DIR/explicit_write.rs:29:9 | LL | std::io::stderr().write_fmt(format_args!("test")).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprint!("test")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprint!("test")` error: use of `writeln!(stdout(), ...).unwrap()` --> $DIR/explicit_write.rs:32:9 | LL | writeln!(std::io::stdout(), "test/ntest").unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `println!("test/ntest")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `println!("test/ntest")` error: use of `writeln!(stderr(), ...).unwrap()` --> $DIR/explicit_write.rs:33:9 | LL | writeln!(std::io::stderr(), "test/ntest").unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("test/ntest")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("test/ntest")` error: use of `writeln!(stderr(), ...).unwrap()` --> $DIR/explicit_write.rs:36:9 | LL | writeln!(std::io::stderr(), "with {}", value).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {}", value)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("with {}", value)` error: use of `writeln!(stderr(), ...).unwrap()` --> $DIR/explicit_write.rs:37:9 | LL | writeln!(std::io::stderr(), "with {} {}", 2, value).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {} {}", 2, value)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("with {} {}", 2, value)` error: use of `writeln!(stderr(), ...).unwrap()` --> $DIR/explicit_write.rs:38:9 | LL | writeln!(std::io::stderr(), "with {value}").unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("with {value}")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("with {value}")` error: use of `writeln!(stderr(), ...).unwrap()` --> $DIR/explicit_write.rs:39:9 | LL | writeln!(std::io::stderr(), "macro arg {}", one!()).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("macro arg {}", one!())` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("macro arg {}", one!())` error: use of 
`writeln!(stderr(), ...).unwrap()` --> $DIR/explicit_write.rs:41:9 | LL | writeln!(std::io::stderr(), "{:w$}", value, w = width).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `eprintln!("{:w$}", value, w = width)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `eprintln!("{:w$}", value, w = width)` error: aborting due to 13 previous errors diff --git a/src/tools/clippy/tests/ui/extend_with_drain.stderr b/src/tools/clippy/tests/ui/extend_with_drain.stderr index da14ddb25b3..eb2dd304d37 100644 --- a/src/tools/clippy/tests/ui/extend_with_drain.stderr +++ b/src/tools/clippy/tests/ui/extend_with_drain.stderr @@ -2,7 +2,7 @@ error: use of `extend` instead of `append` for adding the full range of a second --> $DIR/extend_with_drain.rs:9:5 | LL | vec2.extend(vec1.drain(..)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec2.append(&mut vec1)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec2.append(&mut vec1)` | = note: `-D clippy::extend-with-drain` implied by `-D warnings` @@ -10,19 +10,19 @@ error: use of `extend` instead of `append` for adding the full range of a second --> $DIR/extend_with_drain.rs:14:5 | LL | vec4.extend(vec3.drain(..)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec4.append(&mut vec3)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec4.append(&mut vec3)` error: use of `extend` instead of `append` for adding the full range of a second vector --> $DIR/extend_with_drain.rs:18:5 | LL | vec11.extend(return_vector().drain(..)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `vec11.append(&mut return_vector())` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `vec11.append(&mut return_vector())` error: use of `extend` instead of `append` for adding the full range of a second vector --> $DIR/extend_with_drain.rs:49:5 | LL | y.extend(ref_x.drain(..)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `y.append(ref_x)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `y.append(ref_x)` error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr b/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr index a9fc6abe88f..26d9c5e19da 100644 --- a/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr +++ b/src/tools/clippy/tests/ui/filter_map_next_fixable.stderr @@ -2,7 +2,7 @@ error: called `filter_map(..).next()` on an `Iterator`. This is more succinctly --> $DIR/filter_map_next_fixable.rs:9:32 | LL | let element: Option<i32> = a.iter().filter_map(|s| s.parse().ok()).next(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `a.iter().find_map(|s| s.parse().ok())` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.iter().find_map(|s| s.parse().ok())` | = note: `-D clippy::filter-map-next` implied by `-D warnings` @@ -10,7 +10,7 @@ error: called `filter_map(..).next()` on an `Iterator`. 
This is more succinctly --> $DIR/filter_map_next_fixable.rs:22:26 | LL | let _: Option<i32> = a.iter().filter_map(|s| s.parse().ok()).next(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `a.iter().find_map(|s| s.parse().ok())` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `a.iter().find_map(|s| s.parse().ok())` error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/get_first.fixed b/src/tools/clippy/tests/ui/get_first.fixed index a29c0918a6d..bc2f86566bc 100644 --- a/src/tools/clippy/tests/ui/get_first.fixed +++ b/src/tools/clippy/tests/ui/get_first.fixed @@ -1,9 +1,7 @@ //@run-rustfix #![warn(clippy::get_first)] #![allow(clippy::useless_vec)] -use std::collections::BTreeMap; -use std::collections::HashMap; -use std::collections::VecDeque; +use std::collections::{BTreeMap, HashMap, VecDeque}; struct Bar { arr: [u32; 3], diff --git a/src/tools/clippy/tests/ui/get_first.rs b/src/tools/clippy/tests/ui/get_first.rs index 2062f3ec23a..bc0e233fdee 100644 --- a/src/tools/clippy/tests/ui/get_first.rs +++ b/src/tools/clippy/tests/ui/get_first.rs @@ -1,9 +1,7 @@ //@run-rustfix #![warn(clippy::get_first)] #![allow(clippy::useless_vec)] -use std::collections::BTreeMap; -use std::collections::HashMap; -use std::collections::VecDeque; +use std::collections::{BTreeMap, HashMap, VecDeque}; struct Bar { arr: [u32; 3], diff --git a/src/tools/clippy/tests/ui/get_first.stderr b/src/tools/clippy/tests/ui/get_first.stderr index 4e267ba9a3b..0899a5905f3 100644 --- a/src/tools/clippy/tests/ui/get_first.stderr +++ b/src/tools/clippy/tests/ui/get_first.stderr @@ -1,5 +1,5 @@ error: accessing first element with `x.get(0)` - --> $DIR/get_first.rs:20:13 + --> $DIR/get_first.rs:18:13 | LL | let _ = x.get(0); // Use x.first() | ^^^^^^^^ help: try: `x.first()` @@ -7,13 +7,13 @@ LL | let _ = x.get(0); // Use x.first() = note: `-D clippy::get-first` implied by `-D warnings` error: accessing first element with `y.get(0)` - --> $DIR/get_first.rs:25:13 + --> $DIR/get_first.rs:23:13 | LL | let _ = y.get(0); // Use y.first() | ^^^^^^^^ help: try: `y.first()` error: accessing first element with `z.get(0)` - --> $DIR/get_first.rs:30:13 + --> $DIR/get_first.rs:28:13 | LL | let _ = z.get(0); // Use z.first() | ^^^^^^^^ help: try: `z.first()` diff --git a/src/tools/clippy/tests/ui/get_unwrap.fixed b/src/tools/clippy/tests/ui/get_unwrap.fixed index 56ee37f02d2..fda334407a9 100644 --- a/src/tools/clippy/tests/ui/get_unwrap.fixed +++ b/src/tools/clippy/tests/ui/get_unwrap.fixed @@ -9,9 +9,7 @@ #![warn(clippy::unwrap_used)] #![deny(clippy::get_unwrap)] -use std::collections::BTreeMap; -use std::collections::HashMap; -use std::collections::VecDeque; +use std::collections::{BTreeMap, HashMap, VecDeque}; struct GetFalsePositive { arr: [u32; 3], diff --git a/src/tools/clippy/tests/ui/get_unwrap.rs b/src/tools/clippy/tests/ui/get_unwrap.rs index af3a619adc5..eaf6b005a36 100644 --- a/src/tools/clippy/tests/ui/get_unwrap.rs +++ b/src/tools/clippy/tests/ui/get_unwrap.rs @@ -9,9 +9,7 @@ #![warn(clippy::unwrap_used)] #![deny(clippy::get_unwrap)] -use std::collections::BTreeMap; -use std::collections::HashMap; -use std::collections::VecDeque; +use std::collections::{BTreeMap, HashMap, VecDeque}; struct GetFalsePositive { arr: [u32; 3], diff --git a/src/tools/clippy/tests/ui/get_unwrap.stderr b/src/tools/clippy/tests/ui/get_unwrap.stderr index fd961420dc4..c567ed319b5 100644 --- a/src/tools/clippy/tests/ui/get_unwrap.stderr +++ b/src/tools/clippy/tests/ui/get_unwrap.stderr @@ -1,8 +1,8 
@@ error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:40:17 + --> $DIR/get_unwrap.rs:38:17 | LL | let _ = boxed_slice.get(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&boxed_slice[1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&boxed_slice[1]` | note: the lint level is defined here --> $DIR/get_unwrap.rs:10:9 @@ -11,7 +11,7 @@ LL | #![deny(clippy::get_unwrap)] | ^^^^^^^^^^^^^^^^^^ error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:40:17 + --> $DIR/get_unwrap.rs:38:17 | LL | let _ = boxed_slice.get(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -20,13 +20,13 @@ LL | let _ = boxed_slice.get(1).unwrap(); = note: `-D clippy::unwrap-used` implied by `-D warnings` error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:41:17 + --> $DIR/get_unwrap.rs:39:17 | LL | let _ = some_slice.get(0).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_slice[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_slice[0]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:41:17 + --> $DIR/get_unwrap.rs:39:17 | LL | let _ = some_slice.get(0).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -34,13 +34,13 @@ LL | let _ = some_slice.get(0).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:42:17 + --> $DIR/get_unwrap.rs:40:17 | LL | let _ = some_vec.get(0).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vec[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_vec[0]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:42:17 + --> $DIR/get_unwrap.rs:40:17 | LL | let _ = some_vec.get(0).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -48,13 +48,13 @@ LL | let _ = some_vec.get(0).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a VecDeque. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:43:17 + --> $DIR/get_unwrap.rs:41:17 | LL | let _ = some_vecdeque.get(0).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_vecdeque[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_vecdeque[0]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:43:17 + --> $DIR/get_unwrap.rs:41:17 | LL | let _ = some_vecdeque.get(0).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -62,13 +62,13 @@ LL | let _ = some_vecdeque.get(0).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a HashMap. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:44:17 + --> $DIR/get_unwrap.rs:42:17 | LL | let _ = some_hashmap.get(&1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_hashmap[&1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_hashmap[&1]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:44:17 + --> $DIR/get_unwrap.rs:42:17 | LL | let _ = some_hashmap.get(&1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -76,13 +76,13 @@ LL | let _ = some_hashmap.get(&1).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a BTreeMap. 
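(Editorial aside, not part of the patch: the get_unwrap hunks in this stretch only reword the suggestion from "try this:" to "try:" and shift line numbers after the `use` statements were merged above. For orientation, a minimal sketch of the pattern the test exercises, reusing the test's `some_vec` name; the assertion is illustrative only.)

```rust
fn main() {
    let some_vec = vec![0, 1, 2, 3];

    // Pattern flagged by clippy::get_unwrap (and clippy::unwrap_used):
    let a = some_vec.get(0).unwrap();

    // Rewrite offered by the `help: try:` suggestion:
    let b = &some_vec[0];

    assert_eq!(a, b);
}
```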
Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:45:17 + --> $DIR/get_unwrap.rs:43:17 | LL | let _ = some_btreemap.get(&1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&some_btreemap[&1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&some_btreemap[&1]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:45:17 + --> $DIR/get_unwrap.rs:43:17 | LL | let _ = some_btreemap.get(&1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -90,13 +90,13 @@ LL | let _ = some_btreemap.get(&1).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:49:21 + --> $DIR/get_unwrap.rs:47:21 | LL | let _: u8 = *boxed_slice.get(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice[1]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:49:22 + --> $DIR/get_unwrap.rs:47:22 | LL | let _: u8 = *boxed_slice.get(1).unwrap(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -104,13 +104,13 @@ LL | let _: u8 = *boxed_slice.get(1).unwrap(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:54:9 + --> $DIR/get_unwrap.rs:52:9 | LL | *boxed_slice.get_mut(0).unwrap() = 1; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `boxed_slice[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `boxed_slice[0]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:54:10 + --> $DIR/get_unwrap.rs:52:10 | LL | *boxed_slice.get_mut(0).unwrap() = 1; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -118,13 +118,13 @@ LL | *boxed_slice.get_mut(0).unwrap() = 1; = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:55:9 + --> $DIR/get_unwrap.rs:53:9 | LL | *some_slice.get_mut(0).unwrap() = 1; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_slice[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_slice[0]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:55:10 + --> $DIR/get_unwrap.rs:53:10 | LL | *some_slice.get_mut(0).unwrap() = 1; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -132,13 +132,13 @@ LL | *some_slice.get_mut(0).unwrap() = 1; = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:56:9 + --> $DIR/get_unwrap.rs:54:9 | LL | *some_vec.get_mut(0).unwrap() = 1; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:56:10 + --> $DIR/get_unwrap.rs:54:10 | LL | *some_vec.get_mut(0).unwrap() = 1; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -146,13 +146,13 @@ LL | *some_vec.get_mut(0).unwrap() = 1; = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a VecDeque. 
Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:57:9 + --> $DIR/get_unwrap.rs:55:9 | LL | *some_vecdeque.get_mut(0).unwrap() = 1; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vecdeque[0]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vecdeque[0]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:57:10 + --> $DIR/get_unwrap.rs:55:10 | LL | *some_vecdeque.get_mut(0).unwrap() = 1; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -160,13 +160,13 @@ LL | *some_vecdeque.get_mut(0).unwrap() = 1; = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a Vec. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:69:17 + --> $DIR/get_unwrap.rs:67:17 | LL | let _ = some_vec.get(0..1).unwrap().to_vec(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0..1]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:69:17 + --> $DIR/get_unwrap.rs:67:17 | LL | let _ = some_vec.get(0..1).unwrap().to_vec(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -174,13 +174,13 @@ LL | let _ = some_vec.get(0..1).unwrap().to_vec(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get_mut().unwrap()` on a Vec. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:70:17 + --> $DIR/get_unwrap.rs:68:17 | LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `some_vec[0..1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `some_vec[0..1]` error: used `unwrap()` on an `Option` value - --> $DIR/get_unwrap.rs:70:17 + --> $DIR/get_unwrap.rs:68:17 | LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -188,28 +188,28 @@ LL | let _ = some_vec.get_mut(0..1).unwrap().to_vec(); = help: if you don't want to handle the `None` case gracefully, consider using `expect()` to provide a better panic message error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:80:24 + --> $DIR/get_unwrap.rs:78:24 | LL | let _x: &i32 = f.get(1 + 2).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `&f[1 + 2]` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `&f[1 + 2]` error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:83:18 + --> $DIR/get_unwrap.rs:81:18 | LL | let _x = f.get(1 + 2).unwrap().to_string(); - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `f[1 + 2]` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `f[1 + 2]` error: called `.get().unwrap()` on a slice. Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:86:18 + --> $DIR/get_unwrap.rs:84:18 | LL | let _x = f.get(1 + 2).unwrap().abs(); - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `f[1 + 2]` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `f[1 + 2]` error: called `.get_mut().unwrap()` on a slice. 
Using `[]` is more clear and more concise - --> $DIR/get_unwrap.rs:103:33 + --> $DIR/get_unwrap.rs:101:33 | LL | let b = rest.get_mut(linidx(j, k) - linidx(i, k) - 1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `&mut rest[linidx(j, k) - linidx(i, k) - 1]` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `&mut rest[linidx(j, k) - linidx(i, k) - 1]` error: aborting due to 30 previous errors diff --git a/src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr b/src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr index 0021841aa86..7bcba8ba45a 100644 --- a/src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr +++ b/src/tools/clippy/tests/ui/incorrect_clone_impl_on_copy_type.stderr @@ -16,7 +16,7 @@ LL | / fn clone_from(&mut self, source: &Self) { LL | | source.clone(); LL | | *self = source.clone(); LL | | } - | |_____^ help: remove this + | |_____^ help: remove it error: incorrect implementation of `clone` on a `Copy` type --> $DIR/incorrect_clone_impl_on_copy_type.rs:81:29 @@ -34,7 +34,7 @@ LL | / fn clone_from(&mut self, source: &Self) { LL | | source.clone(); LL | | *self = source.clone(); LL | | } - | |_____^ help: remove this + | |_____^ help: remove it error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.fixed b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.fixed new file mode 100644 index 00000000000..dd4fdd98822 --- /dev/null +++ b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.fixed @@ -0,0 +1,114 @@ +//@run-rustfix +#![allow(unused)] +#![no_main] + +use std::cmp::Ordering; + +// lint + +#[derive(Eq, PartialEq)] +struct A(u32); + +impl Ord for A { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for A { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } +} + +// do not lint + +#[derive(Eq, PartialEq)] +struct B(u32); + +impl Ord for B { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for B { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +// lint, and give `_` a name + +#[derive(Eq, PartialEq)] +struct C(u32); + +impl Ord for C { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for C { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } +} + +// do not lint derived + +#[derive(Eq, Ord, PartialEq, PartialOrd)] +struct D(u32); + +// do not lint if ord is not manually implemented + +#[derive(Eq, PartialEq)] +struct E(u32); + +impl PartialOrd for E { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + todo!(); + } +} + +// do not lint since ord has more restrictive bounds + +#[derive(Eq, PartialEq)] +struct Uwu<A>(A); + +impl<A: std::fmt::Debug + Ord + PartialOrd> Ord for Uwu<A> { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl<A: Ord + PartialOrd> PartialOrd for Uwu<A> { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + todo!(); + } +} + +// do not lint since `Rhs` is not `Self` + +#[derive(Eq, PartialEq)] +struct F(u32); + +impl Ord for F { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for F { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +impl PartialEq<u32> for F { + fn eq(&self, other: &u32) -> bool { + todo!(); + } +} + +impl PartialOrd<u32> for F { + fn 
partial_cmp(&self, other: &u32) -> Option<Ordering> { + todo!(); + } +} diff --git a/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.rs b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.rs new file mode 100644 index 00000000000..522e82299c0 --- /dev/null +++ b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.rs @@ -0,0 +1,118 @@ +//@run-rustfix +#![allow(unused)] +#![no_main] + +use std::cmp::Ordering; + +// lint + +#[derive(Eq, PartialEq)] +struct A(u32); + +impl Ord for A { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for A { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + todo!(); + } +} + +// do not lint + +#[derive(Eq, PartialEq)] +struct B(u32); + +impl Ord for B { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for B { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +// lint, and give `_` a name + +#[derive(Eq, PartialEq)] +struct C(u32); + +impl Ord for C { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for C { + fn partial_cmp(&self, _: &Self) -> Option<Ordering> { + todo!(); + } +} + +// do not lint derived + +#[derive(Eq, Ord, PartialEq, PartialOrd)] +struct D(u32); + +// do not lint if ord is not manually implemented + +#[derive(Eq, PartialEq)] +struct E(u32); + +impl PartialOrd for E { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + todo!(); + } +} + +// do not lint since ord has more restrictive bounds + +#[derive(Eq, PartialEq)] +struct Uwu<A>(A); + +impl<A: std::fmt::Debug + Ord + PartialOrd> Ord for Uwu<A> { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl<A: Ord + PartialOrd> PartialOrd for Uwu<A> { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + todo!(); + } +} + +// do not lint since `Rhs` is not `Self` + +#[derive(Eq, PartialEq)] +struct F(u32); + +impl Ord for F { + fn cmp(&self, other: &Self) -> Ordering { + todo!(); + } +} + +impl PartialOrd for F { + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +impl PartialEq<u32> for F { + fn eq(&self, other: &u32) -> bool { + todo!(); + } +} + +impl PartialOrd<u32> for F { + fn partial_cmp(&self, other: &u32) -> Option<Ordering> { + todo!(); + } +} diff --git a/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.stderr b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.stderr new file mode 100644 index 00000000000..0e477798c40 --- /dev/null +++ b/src/tools/clippy/tests/ui/incorrect_partial_ord_impl_on_ord_type.stderr @@ -0,0 +1,31 @@ +error: incorrect implementation of `partial_cmp` on an `Ord` type + --> $DIR/incorrect_partial_ord_impl_on_ord_type.rs:18:1 + | +LL | / impl PartialOrd for A { +LL | | fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + | | _____________________________________________________________- +LL | || todo!(); +LL | || } + | ||_____- help: change this to: `{ Some(self.cmp(other)) }` +LL | | } + | |__^ + | + = note: `#[deny(clippy::incorrect_partial_ord_impl_on_ord_type)]` on by default + +error: incorrect implementation of `partial_cmp` on an `Ord` type + --> $DIR/incorrect_partial_ord_impl_on_ord_type.rs:52:1 + | +LL | / impl PartialOrd for C { +LL | | fn partial_cmp(&self, _: &Self) -> Option<Ordering> { +LL | | todo!(); +LL | | } +LL | | } + | |_^ + | +help: change this to + | +LL | fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } + 
| ~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~ + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr b/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr index f8a50f0223d..004260a1d64 100644 --- a/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr +++ b/src/tools/clippy/tests/ui/infallible_destructuring_match.stderr @@ -4,7 +4,7 @@ error: you seem to be trying to use `match` to destructure a single infallible p LL | / let data = match wrapper { LL | | SingleVariantEnum::Variant(i) => i, LL | | }; - | |______^ help: try this: `let SingleVariantEnum::Variant(data) = wrapper;` + | |______^ help: try: `let SingleVariantEnum::Variant(data) = wrapper;` | = note: `-D clippy::infallible-destructuring-match` implied by `-D warnings` @@ -14,7 +14,7 @@ error: you seem to be trying to use `match` to destructure a single infallible p LL | / let data = match wrapper { LL | | TupleStruct(i) => i, LL | | }; - | |______^ help: try this: `let TupleStruct(data) = wrapper;` + | |______^ help: try: `let TupleStruct(data) = wrapper;` error: you seem to be trying to use `match` to destructure a single infallible pattern. Consider using `let` --> $DIR/infallible_destructuring_match.rs:85:5 @@ -22,7 +22,7 @@ error: you seem to be trying to use `match` to destructure a single infallible p LL | / let data = match wrapper { LL | | TupleStructWithNonCopy(ref n) => n, LL | | }; - | |______^ help: try this: `let TupleStructWithNonCopy(ref data) = wrapper;` + | |______^ help: try: `let TupleStructWithNonCopy(ref data) = wrapper;` error: you seem to be trying to use `match` to destructure a single infallible pattern. Consider using `let` --> $DIR/infallible_destructuring_match.rs:104:5 @@ -30,7 +30,7 @@ error: you seem to be trying to use `match` to destructure a single infallible p LL | / let data = match wrapper { LL | | Ok(i) => i, LL | | }; - | |______^ help: try this: `let Ok(data) = wrapper;` + | |______^ help: try: `let Ok(data) = wrapper;` error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/infinite_loop.stderr b/src/tools/clippy/tests/ui/infinite_loop.stderr index 85258b9d64f..701b3cd1904 100644 --- a/src/tools/clippy/tests/ui/infinite_loop.stderr +++ b/src/tools/clippy/tests/ui/infinite_loop.stderr @@ -1,3 +1,11 @@ +error: this argument is a mutable reference, but not used mutably + --> $DIR/infinite_loop.rs:7:17 + | +LL | fn fn_mutref(i: &mut i32) { + | ^^^^^^^^ help: consider changing to: `&i32` + | + = note: `-D clippy::needless-pass-by-ref-mut` implied by `-D warnings` + error: variables in the condition are not mutated in the loop body --> $DIR/infinite_loop.rs:20:11 | @@ -91,5 +99,5 @@ LL | while y < 10 { = note: this loop contains `return`s or `break`s = help: rewrite it as `if cond { loop { } }` -error: aborting due to 11 previous errors +error: aborting due to 12 previous errors diff --git a/src/tools/clippy/tests/ui/issue-7447.rs b/src/tools/clippy/tests/ui/issue-7447.rs index fdb77f32257..de4362c4df7 100644 --- a/src/tools/clippy/tests/ui/issue-7447.rs +++ b/src/tools/clippy/tests/ui/issue-7447.rs @@ -1,4 +1,7 @@ -use std::{borrow::Cow, collections::BTreeMap, marker::PhantomData, sync::Arc}; +use std::borrow::Cow; +use std::collections::BTreeMap; +use std::marker::PhantomData; +use std::sync::Arc; fn byte_view<'a>(s: &'a ByteView<'_>) -> BTreeMap<&'a str, ByteView<'a>> { panic!() diff --git a/src/tools/clippy/tests/ui/issue-7447.stderr b/src/tools/clippy/tests/ui/issue-7447.stderr index 
8d8c29f1385..7a113740c6a 100644 --- a/src/tools/clippy/tests/ui/issue-7447.stderr +++ b/src/tools/clippy/tests/ui/issue-7447.stderr @@ -1,5 +1,5 @@ error: sub-expression diverges - --> $DIR/issue-7447.rs:23:15 + --> $DIR/issue-7447.rs:26:15 | LL | byte_view(panic!()); | ^^^^^^^^ @@ -8,7 +8,7 @@ LL | byte_view(panic!()); = note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info) error: sub-expression diverges - --> $DIR/issue-7447.rs:24:19 + --> $DIR/issue-7447.rs:27:19 | LL | group_entries(panic!()); | ^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.fixed b/src/tools/clippy/tests/ui/iter_cloned_collect.fixed index 2baea06f84b..636f572a343 100644 --- a/src/tools/clippy/tests/ui/iter_cloned_collect.fixed +++ b/src/tools/clippy/tests/ui/iter_cloned_collect.fixed @@ -3,8 +3,7 @@ #![allow(unused)] #![allow(clippy::useless_vec)] -use std::collections::HashSet; -use std::collections::VecDeque; +use std::collections::{HashSet, VecDeque}; fn main() { let v = [1, 2, 3, 4, 5]; diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.rs b/src/tools/clippy/tests/ui/iter_cloned_collect.rs index 9eac94eb8d9..518cb75affe 100644 --- a/src/tools/clippy/tests/ui/iter_cloned_collect.rs +++ b/src/tools/clippy/tests/ui/iter_cloned_collect.rs @@ -3,8 +3,7 @@ #![allow(unused)] #![allow(clippy::useless_vec)] -use std::collections::HashSet; -use std::collections::VecDeque; +use std::collections::{HashSet, VecDeque}; fn main() { let v = [1, 2, 3, 4, 5]; diff --git a/src/tools/clippy/tests/ui/iter_cloned_collect.stderr b/src/tools/clippy/tests/ui/iter_cloned_collect.stderr index b38cf547dc5..b2cc497bf43 100644 --- a/src/tools/clippy/tests/ui/iter_cloned_collect.stderr +++ b/src/tools/clippy/tests/ui/iter_cloned_collect.stderr @@ -1,5 +1,5 @@ error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable - --> $DIR/iter_cloned_collect.rs:11:27 + --> $DIR/iter_cloned_collect.rs:10:27 | LL | let v2: Vec<isize> = v.iter().cloned().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()` @@ -7,13 +7,13 @@ LL | let v2: Vec<isize> = v.iter().cloned().collect(); = note: `-D clippy::iter-cloned-collect` implied by `-D warnings` error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable - --> $DIR/iter_cloned_collect.rs:16:38 + --> $DIR/iter_cloned_collect.rs:15:38 | LL | let _: Vec<isize> = vec![1, 2, 3].iter().cloned().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()` error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable - --> $DIR/iter_cloned_collect.rs:21:24 + --> $DIR/iter_cloned_collect.rs:20:24 | LL | .to_bytes() | ________________________^ @@ -23,13 +23,13 @@ LL | | .collect(); | |______________________^ help: try: `.to_vec()` error: called `iter().cloned().collect()` on a slice to create a `Vec`. Calling `to_vec()` is both faster and more readable - --> $DIR/iter_cloned_collect.rs:29:24 + --> $DIR/iter_cloned_collect.rs:28:24 | LL | let _: Vec<_> = arr.iter().cloned().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()` error: called `iter().copied().collect()` on a slice to create a `Vec`. 
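(Editorial aside, not from the patch: the iter_cloned_collect hunks only renumber line references after the `use` merge shown above. A minimal sketch of what the lint itself checks, mirroring the test's `v`/`v2` bindings:)

```rust
fn main() {
    let v = [1, 2, 3, 4, 5];

    // Flagged: cloning each element through an iterator just to build a Vec from a slice.
    let v2: Vec<i32> = v.iter().cloned().collect();

    // The `help: try: .to_vec()` rewrite:
    let v3: Vec<i32> = v.to_vec();

    assert_eq!(v2, v3);
}
```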
Calling `to_vec()` is both faster and more readable - --> $DIR/iter_cloned_collect.rs:32:26 + --> $DIR/iter_cloned_collect.rs:31:26 | LL | let _: Vec<isize> = v.iter().copied().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `.to_vec()` diff --git a/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr b/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr index dcae7cecd33..eaac48be880 100644 --- a/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr +++ b/src/tools/clippy/tests/ui/iter_overeager_cloned.stderr @@ -4,7 +4,7 @@ error: unnecessarily eager cloning of iterator items LL | let _: Option<String> = vec.iter().cloned().last(); | ^^^^^^^^^^---------------- | | - | help: try this: `.last().cloned()` + | help: try: `.last().cloned()` | = note: `-D clippy::iter-overeager-cloned` implied by `-D warnings` @@ -14,7 +14,7 @@ error: unnecessarily eager cloning of iterator items LL | let _: Option<String> = vec.iter().chain(vec.iter()).cloned().next(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^---------------- | | - | help: try this: `.next().cloned()` + | help: try: `.next().cloned()` error: unneeded cloning of iterator items --> $DIR/iter_overeager_cloned.rs:12:20 @@ -22,7 +22,7 @@ error: unneeded cloning of iterator items LL | let _: usize = vec.iter().filter(|x| x == &"2").cloned().count(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^----------------- | | - | help: try this: `.count()` + | help: try: `.count()` | = note: `-D clippy::redundant-clone` implied by `-D warnings` @@ -32,7 +32,7 @@ error: unnecessarily eager cloning of iterator items LL | let _: Vec<_> = vec.iter().cloned().take(2).collect(); | ^^^^^^^^^^----------------- | | - | help: try this: `.take(2).cloned()` + | help: try: `.take(2).cloned()` error: unnecessarily eager cloning of iterator items --> $DIR/iter_overeager_cloned.rs:16:21 @@ -40,7 +40,7 @@ error: unnecessarily eager cloning of iterator items LL | let _: Vec<_> = vec.iter().cloned().skip(2).collect(); | ^^^^^^^^^^----------------- | | - | help: try this: `.skip(2).cloned()` + | help: try: `.skip(2).cloned()` error: unnecessarily eager cloning of iterator items --> $DIR/iter_overeager_cloned.rs:18:13 @@ -48,7 +48,7 @@ error: unnecessarily eager cloning of iterator items LL | let _ = vec.iter().filter(|x| x == &"2").cloned().nth(2); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^---------------- | | - | help: try this: `.nth(2).cloned()` + | help: try: `.nth(2).cloned()` error: unnecessarily eager cloning of iterator items --> $DIR/iter_overeager_cloned.rs:20:13 @@ -60,7 +60,7 @@ LL | | .cloned() LL | | .flatten(); | |__________________^ | -help: try this +help: try | LL ~ .iter() LL ~ .flatten().cloned(); diff --git a/src/tools/clippy/tests/ui/iter_with_drain.stderr b/src/tools/clippy/tests/ui/iter_with_drain.stderr index aa394439fa6..bfaed29a099 100644 --- a/src/tools/clippy/tests/ui/iter_with_drain.stderr +++ b/src/tools/clippy/tests/ui/iter_with_drain.stderr @@ -2,7 +2,7 @@ error: `drain(..)` used on a `Vec` --> $DIR/iter_with_drain.rs:11:34 | LL | let mut a: BinaryHeap<_> = a.drain(..).collect(); - | ^^^^^^^^^ help: try this: `into_iter()` + | ^^^^^^^^^ help: try: `into_iter()` | = note: `-D clippy::iter-with-drain` implied by `-D warnings` @@ -10,31 +10,31 @@ error: `drain(..)` used on a `VecDeque` --> $DIR/iter_with_drain.rs:14:27 | LL | let mut a: Vec<_> = a.drain(..).collect(); - | ^^^^^^^^^ help: try this: `into_iter()` + | ^^^^^^^^^ help: try: `into_iter()` error: `drain(..)` used on a `Vec` --> $DIR/iter_with_drain.rs:15:34 | LL | let mut a: HashMap<_, _> = a.drain(..).map(|x| 
(x.clone(), x)).collect(); - | ^^^^^^^^^ help: try this: `into_iter()` + | ^^^^^^^^^ help: try: `into_iter()` error: `drain(..)` used on a `Vec` --> $DIR/iter_with_drain.rs:21:34 | LL | let mut a: BinaryHeap<_> = a.drain(0..).collect(); - | ^^^^^^^^^^ help: try this: `into_iter()` + | ^^^^^^^^^^ help: try: `into_iter()` error: `drain(..)` used on a `VecDeque` --> $DIR/iter_with_drain.rs:24:27 | LL | let mut a: Vec<_> = a.drain(..a.len()).collect(); - | ^^^^^^^^^^^^^^^^ help: try this: `into_iter()` + | ^^^^^^^^^^^^^^^^ help: try: `into_iter()` error: `drain(..)` used on a `Vec` --> $DIR/iter_with_drain.rs:25:34 | LL | let mut a: HashMap<_, _> = a.drain(0..a.len()).map(|x| (x.clone(), x)).collect(); - | ^^^^^^^^^^^^^^^^^ help: try this: `into_iter()` + | ^^^^^^^^^^^^^^^^^ help: try: `into_iter()` error: aborting due to 6 previous errors diff --git a/src/tools/clippy/tests/ui/let_and_return.rs b/src/tools/clippy/tests/ui/let_and_return.rs index bb162adc9ad..7e4d783a026 100644 --- a/src/tools/clippy/tests/ui/let_and_return.rs +++ b/src/tools/clippy/tests/ui/let_and_return.rs @@ -1,6 +1,8 @@ #![allow(unused)] #![warn(clippy::let_and_return)] +use std::cell::RefCell; + fn test() -> i32 { let _y = 0; // no warning let x = 5; @@ -65,45 +67,46 @@ macro_rules! tuple_encode { ); } +fn issue_3792() -> String { + use std::io::{self, BufRead, Stdin}; + + let stdin = io::stdin(); + // `Stdin::lock` returns `StdinLock<'static>` so `line` doesn't borrow from `stdin` + // https://github.com/rust-lang/rust/pull/93965 + let line = stdin.lock().lines().next().unwrap().unwrap(); + line +} + tuple_encode!(T0, T1, T2, T3, T4, T5, T6, T7); mod no_lint_if_stmt_borrows { - mod issue_3792 { - use std::io::{self, BufRead, Stdin}; + use std::cell::RefCell; + use std::rc::{Rc, Weak}; + struct Bar; - fn read_line() -> String { - let stdin = io::stdin(); - let line = stdin.lock().lines().next().unwrap().unwrap(); - line + impl Bar { + fn new() -> Self { + Bar {} } - } - - mod issue_3324 { - use std::cell::RefCell; - use std::rc::{Rc, Weak}; - - fn test(value: Weak<RefCell<Bar>>) -> u32 { - let value = value.upgrade().unwrap(); - let ret = value.borrow().baz(); - ret + fn baz(&self) -> u32 { + 0 } + } - struct Bar; + fn issue_3324(value: Weak<RefCell<Bar>>) -> u32 { + let value = value.upgrade().unwrap(); + let ret = value.borrow().baz(); + ret + } - impl Bar { - fn new() -> Self { - Bar {} - } - fn baz(&self) -> u32 { - 0 - } + fn borrows_in_closure(value: Weak<RefCell<Bar>>) -> u32 { + fn f(mut x: impl FnMut() -> u32) -> impl FnMut() -> u32 { + x } - fn main() { - let a = Rc::new(RefCell::new(Bar::new())); - let b = Rc::downgrade(&a); - test(b); - } + let value = value.upgrade().unwrap(); + let ret = f(|| value.borrow().baz())(); + ret } mod free_function { diff --git a/src/tools/clippy/tests/ui/let_and_return.stderr b/src/tools/clippy/tests/ui/let_and_return.stderr index 17fd694bf7a..4ca0a05c858 100644 --- a/src/tools/clippy/tests/ui/let_and_return.stderr +++ b/src/tools/clippy/tests/ui/let_and_return.stderr @@ -1,5 +1,5 @@ error: returning the result of a `let` binding from a block - --> $DIR/let_and_return.rs:7:5 + --> $DIR/let_and_return.rs:9:5 | LL | let x = 5; | ---------- unnecessary `let` binding @@ -14,7 +14,7 @@ LL ~ 5 | error: returning the result of a `let` binding from a block - --> $DIR/let_and_return.rs:13:9 + --> $DIR/let_and_return.rs:15:9 | LL | let x = 5; | ---------- unnecessary `let` binding @@ -28,7 +28,21 @@ LL ~ 5 | error: returning the result of a `let` binding from a block - --> 
$DIR/let_and_return.rs:164:13 + --> $DIR/let_and_return.rs:77:5 + | +LL | let line = stdin.lock().lines().next().unwrap().unwrap(); + | --------------------------------------------------------- unnecessary `let` binding +LL | line + | ^^^^ + | +help: return the expression directly + | +LL ~ +LL ~ stdin.lock().lines().next().unwrap().unwrap() + | + +error: returning the result of a `let` binding from a block + --> $DIR/let_and_return.rs:167:13 | LL | let clone = Arc::clone(&self.foo); | ---------------------------------- unnecessary `let` binding @@ -41,5 +55,5 @@ LL ~ LL ~ Arc::clone(&self.foo) as _ | -error: aborting due to 3 previous errors +error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/let_underscore_future.stderr b/src/tools/clippy/tests/ui/let_underscore_future.stderr index 33a748736a8..9e69fb04133 100644 --- a/src/tools/clippy/tests/ui/let_underscore_future.stderr +++ b/src/tools/clippy/tests/ui/let_underscore_future.stderr @@ -1,3 +1,11 @@ +error: this argument is a mutable reference, but not used mutably + --> $DIR/let_underscore_future.rs:11:35 + | +LL | fn do_something_to_future(future: &mut impl Future<Output = ()>) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&impl Future<Output = ()>` + | + = note: `-D clippy::needless-pass-by-ref-mut` implied by `-D warnings` + error: non-binding `let` on a future --> $DIR/let_underscore_future.rs:14:5 | @@ -23,5 +31,5 @@ LL | let _ = future; | = help: consider awaiting the future or dropping explicitly with `std::mem::drop` -error: aborting due to 3 previous errors +error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/let_underscore_lock.rs b/src/tools/clippy/tests/ui/let_underscore_lock.rs index 4dff4d766bc..87f12e2789f 100644 --- a/src/tools/clippy/tests/ui/let_underscore_lock.rs +++ b/src/tools/clippy/tests/ui/let_underscore_lock.rs @@ -3,7 +3,8 @@ extern crate parking_lot; fn main() { - use parking_lot::{lock_api::RawMutex, Mutex, RwLock}; + use parking_lot::lock_api::RawMutex; + use parking_lot::{Mutex, RwLock}; let p_m: Mutex<()> = Mutex::const_new(RawMutex::INIT, ()); let _ = p_m.lock(); diff --git a/src/tools/clippy/tests/ui/let_underscore_lock.stderr b/src/tools/clippy/tests/ui/let_underscore_lock.stderr index f137d411209..5027e6b3cbc 100644 --- a/src/tools/clippy/tests/ui/let_underscore_lock.stderr +++ b/src/tools/clippy/tests/ui/let_underscore_lock.stderr @@ -1,5 +1,5 @@ error: non-binding `let` on a synchronization lock - --> $DIR/let_underscore_lock.rs:9:5 + --> $DIR/let_underscore_lock.rs:10:5 | LL | let _ = p_m.lock(); | ^^^^^^^^^^^^^^^^^^^ @@ -8,7 +8,7 @@ LL | let _ = p_m.lock(); = note: `-D clippy::let-underscore-lock` implied by `-D warnings` error: non-binding `let` on a synchronization lock - --> $DIR/let_underscore_lock.rs:12:5 + --> $DIR/let_underscore_lock.rs:13:5 | LL | let _ = p_m1.lock(); | ^^^^^^^^^^^^^^^^^^^^ @@ -16,7 +16,7 @@ LL | let _ = p_m1.lock(); = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop` error: non-binding `let` on a synchronization lock - --> $DIR/let_underscore_lock.rs:15:5 + --> $DIR/let_underscore_lock.rs:16:5 | LL | let _ = p_rw.read(); | ^^^^^^^^^^^^^^^^^^^^ @@ -24,7 +24,7 @@ LL | let _ = p_rw.read(); = help: consider using an underscore-prefixed named binding or dropping explicitly with `std::mem::drop` error: non-binding `let` on a synchronization lock - --> $DIR/let_underscore_lock.rs:16:5 + --> $DIR/let_underscore_lock.rs:17:5 | LL | let _ = p_rw.write(); 
| ^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/let_underscore_untyped.rs b/src/tools/clippy/tests/ui/let_underscore_untyped.rs index 431d83778e3..18630c27f41 100644 --- a/src/tools/clippy/tests/ui/let_underscore_untyped.rs +++ b/src/tools/clippy/tests/ui/let_underscore_untyped.rs @@ -7,8 +7,9 @@ extern crate proc_macros; use proc_macros::with_span; use clippy_utils::is_from_proc_macro; +use std::boxed::Box; +use std::fmt::Display; use std::future::Future; -use std::{boxed::Box, fmt::Display}; fn a() -> u32 { 1 diff --git a/src/tools/clippy/tests/ui/let_underscore_untyped.stderr b/src/tools/clippy/tests/ui/let_underscore_untyped.stderr index bbf2508af10..e0c39b6eeaf 100644 --- a/src/tools/clippy/tests/ui/let_underscore_untyped.stderr +++ b/src/tools/clippy/tests/ui/let_underscore_untyped.stderr @@ -1,60 +1,60 @@ error: non-binding `let` without a type annotation - --> $DIR/let_underscore_untyped.rs:50:5 + --> $DIR/let_underscore_untyped.rs:51:5 | LL | let _ = a(); | ^^^^^^^^^^^^ | help: consider adding a type annotation - --> $DIR/let_underscore_untyped.rs:50:10 + --> $DIR/let_underscore_untyped.rs:51:10 | LL | let _ = a(); | ^ = note: `-D clippy::let-underscore-untyped` implied by `-D warnings` error: non-binding `let` without a type annotation - --> $DIR/let_underscore_untyped.rs:51:5 + --> $DIR/let_underscore_untyped.rs:52:5 | LL | let _ = b(1); | ^^^^^^^^^^^^^ | help: consider adding a type annotation - --> $DIR/let_underscore_untyped.rs:51:10 + --> $DIR/let_underscore_untyped.rs:52:10 | LL | let _ = b(1); | ^ error: non-binding `let` without a type annotation - --> $DIR/let_underscore_untyped.rs:53:5 + --> $DIR/let_underscore_untyped.rs:54:5 | LL | let _ = d(&1); | ^^^^^^^^^^^^^^ | help: consider adding a type annotation - --> $DIR/let_underscore_untyped.rs:53:10 + --> $DIR/let_underscore_untyped.rs:54:10 | LL | let _ = d(&1); | ^ error: non-binding `let` without a type annotation - --> $DIR/let_underscore_untyped.rs:54:5 + --> $DIR/let_underscore_untyped.rs:55:5 | LL | let _ = e(); | ^^^^^^^^^^^^ | help: consider adding a type annotation - --> $DIR/let_underscore_untyped.rs:54:10 + --> $DIR/let_underscore_untyped.rs:55:10 | LL | let _ = e(); | ^ error: non-binding `let` without a type annotation - --> $DIR/let_underscore_untyped.rs:55:5 + --> $DIR/let_underscore_untyped.rs:56:5 | LL | let _ = f(); | ^^^^^^^^^^^^ | help: consider adding a type annotation - --> $DIR/let_underscore_untyped.rs:55:10 + --> $DIR/let_underscore_untyped.rs:56:10 | LL | let _ = f(); | ^ diff --git a/src/tools/clippy/tests/ui/manual_filter.stderr b/src/tools/clippy/tests/ui/manual_filter.stderr index 53dea922930..f62d3e96059 100644 --- a/src/tools/clippy/tests/ui/manual_filter.stderr +++ b/src/tools/clippy/tests/ui/manual_filter.stderr @@ -8,7 +8,7 @@ LL | | if x > 0 { ... | LL | | }, LL | | }; - | |_____^ help: try this: `Some(0).filter(|&x| x <= 0)` + | |_____^ help: try: `Some(0).filter(|&x| x <= 0)` | = note: `-D clippy::manual-filter` implied by `-D warnings` @@ -22,7 +22,7 @@ LL | | None ... | LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(1).filter(|&x| x <= 0)` + | |_____^ help: try: `Some(1).filter(|&x| x <= 0)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:29:5 @@ -34,7 +34,7 @@ LL | | None ... 
| LL | | _ => None, LL | | }; - | |_____^ help: try this: `Some(2).filter(|&x| x <= 0)` + | |_____^ help: try: `Some(2).filter(|&x| x <= 0)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:40:5 @@ -46,7 +46,7 @@ LL | | Some(x) ... | LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(3).filter(|&x| x > 0)` + | |_____^ help: try: `Some(3).filter(|&x| x > 0)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:52:5 @@ -58,7 +58,7 @@ LL | | Some(x) => { ... | LL | | }, LL | | }; - | |_____^ help: try this: `y.filter(|&x| x <= 0)` + | |_____^ help: try: `y.filter(|&x| x <= 0)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:64:5 @@ -70,7 +70,7 @@ LL | | Some(x) ... | LL | | _ => None, LL | | }; - | |_____^ help: try this: `Some(5).filter(|&x| x > 0)` + | |_____^ help: try: `Some(5).filter(|&x| x > 0)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:75:5 @@ -82,7 +82,7 @@ LL | | Some(x) ... | LL | | _ => None, LL | | }; - | |_____^ help: try this: `Some(6).as_ref().filter(|&x| x > &0)` + | |_____^ help: try: `Some(6).as_ref().filter(|&x| x > &0)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:87:5 @@ -94,7 +94,7 @@ LL | | Some(x) ... | LL | | _ => None, LL | | }; - | |_____^ help: try this: `Some(String::new()).filter(|x| external_cond)` + | |_____^ help: try: `Some(String::new()).filter(|x| external_cond)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:98:5 @@ -104,7 +104,7 @@ LL | | if external_cond { Some(x) } else { None } LL | | } else { LL | | None LL | | }; - | |_____^ help: try this: `Some(7).filter(|&x| external_cond)` + | |_____^ help: try: `Some(7).filter(|&x| external_cond)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:104:5 @@ -116,7 +116,7 @@ LL | | Some(x) ... | LL | | _ => None, LL | | }; - | |_____^ help: try this: `Some(8).filter(|&x| x != 0)` + | |_____^ help: try: `Some(8).filter(|&x| x != 0)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:115:5 @@ -128,7 +128,7 @@ LL | | Some(x) ... | LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(9).filter(|&x| x > 10 && x < 100)` + | |_____^ help: try: `Some(9).filter(|&x| x > 10 && x < 100)` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:141:5 @@ -142,7 +142,7 @@ LL | | None => None, LL | | }; | |_____^ | -help: try this +help: try | LL ~ Some(11).filter(|&x| { LL + println!("foo"); @@ -161,7 +161,7 @@ LL | | Some(x) ... 
| LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(14).filter(|&x| unsafe { f(x) })` + | |_____^ help: try: `Some(14).filter(|&x| unsafe { f(x) })` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:195:13 @@ -173,7 +173,7 @@ LL | | if f(x) { Some(x) } else { None } LL | | }, LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(15).filter(|&x| unsafe { f(x) })` + | |_____^ help: try: `Some(15).filter(|&x| unsafe { f(x) })` error: manual implementation of `Option::filter` --> $DIR/manual_filter.rs:205:12 @@ -185,7 +185,7 @@ LL | | if x % 2 == 0 { Some(x) } else { None } LL | | } else { LL | | None LL | | }; - | |_____^ help: try this: `{ Some(16).filter(|&x| x % 2 == 0) }` + | |_____^ help: try: `{ Some(16).filter(|&x| x % 2 == 0) }` error: aborting due to 15 previous errors diff --git a/src/tools/clippy/tests/ui/manual_float_methods.rs b/src/tools/clippy/tests/ui/manual_float_methods.rs new file mode 100644 index 00000000000..af9076cfb71 --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_float_methods.rs @@ -0,0 +1,55 @@ +//@aux-build:proc_macros.rs:proc-macro +#![allow(clippy::needless_if, unused)] +#![warn(clippy::manual_is_infinite, clippy::manual_is_finite)] +#![feature(inline_const)] + +#[macro_use] +extern crate proc_macros; + +const INFINITE: f32 = f32::INFINITY; +const NEG_INFINITE: f32 = f32::NEG_INFINITY; + +fn fn_test() -> f64 { + f64::NEG_INFINITY +} + +fn fn_test_not_inf() -> f64 { + 112.0 +} + +fn main() { + let x = 1.0f32; + if x == f32::INFINITY || x == f32::NEG_INFINITY {} + if x != f32::INFINITY && x != f32::NEG_INFINITY {} + if x == INFINITE || x == NEG_INFINITE {} + if x != INFINITE && x != NEG_INFINITE {} + let x = 1.0f64; + if x == f64::INFINITY || x == f64::NEG_INFINITY {} + if x != f64::INFINITY && x != f64::NEG_INFINITY {} + // Don't lint + if x.is_infinite() {} + if x.is_finite() {} + if x.abs() < f64::INFINITY {} + if f64::INFINITY > x.abs() {} + if f64::abs(x) < f64::INFINITY {} + if f64::INFINITY > f64::abs(x) {} + // Is not evaluated by `clippy_utils::constant` + if x != f64::INFINITY && x != fn_test() {} + // Not -inf + if x != f64::INFINITY && x != fn_test_not_inf() {} + const X: f64 = 1.0f64; + // Will be linted if `const_float_classify` is enabled + if const { X == f64::INFINITY || X == f64::NEG_INFINITY } {} + if const { X != f64::INFINITY && X != f64::NEG_INFINITY } {} + external! { + let x = 1.0; + if x == f32::INFINITY || x == f32::NEG_INFINITY {} + if x != f32::INFINITY && x != f32::NEG_INFINITY {} + } + with_span! 
{ + span + let x = 1.0; + if x == f32::INFINITY || x == f32::NEG_INFINITY {} + if x != f32::INFINITY && x != f32::NEG_INFINITY {} + } +} diff --git a/src/tools/clippy/tests/ui/manual_float_methods.stderr b/src/tools/clippy/tests/ui/manual_float_methods.stderr new file mode 100644 index 00000000000..a56118b316a --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_float_methods.stderr @@ -0,0 +1,80 @@ +error: manually checking if a float is infinite + --> $DIR/manual_float_methods.rs:22:8 + | +LL | if x == f32::INFINITY || x == f32::NEG_INFINITY {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the dedicated method instead: `x.is_infinite()` + | + = note: `-D clippy::manual-is-infinite` implied by `-D warnings` + +error: manually checking if a float is finite + --> $DIR/manual_float_methods.rs:23:8 + | +LL | if x != f32::INFINITY && x != f32::NEG_INFINITY {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D clippy::manual-is-finite` implied by `-D warnings` +help: use the dedicated method instead + | +LL | if x.is_finite() {} + | ~~~~~~~~~~~~~ +help: this will alter how it handles NaN; if that is a problem, use instead + | +LL | if x.is_finite() || x.is_nan() {} + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ +help: or, for conciseness + | +LL | if !x.is_infinite() {} + | ~~~~~~~~~~~~~~~~ + +error: manually checking if a float is infinite + --> $DIR/manual_float_methods.rs:24:8 + | +LL | if x == INFINITE || x == NEG_INFINITE {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the dedicated method instead: `x.is_infinite()` + +error: manually checking if a float is finite + --> $DIR/manual_float_methods.rs:25:8 + | +LL | if x != INFINITE && x != NEG_INFINITE {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: use the dedicated method instead + | +LL | if x.is_finite() {} + | ~~~~~~~~~~~~~ +help: this will alter how it handles NaN; if that is a problem, use instead + | +LL | if x.is_finite() || x.is_nan() {} + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ +help: or, for conciseness + | +LL | if !x.is_infinite() {} + | ~~~~~~~~~~~~~~~~ + +error: manually checking if a float is infinite + --> $DIR/manual_float_methods.rs:27:8 + | +LL | if x == f64::INFINITY || x == f64::NEG_INFINITY {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use the dedicated method instead: `x.is_infinite()` + +error: manually checking if a float is finite + --> $DIR/manual_float_methods.rs:28:8 + | +LL | if x != f64::INFINITY && x != f64::NEG_INFINITY {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: use the dedicated method instead + | +LL | if x.is_finite() {} + | ~~~~~~~~~~~~~ +help: this will alter how it handles NaN; if that is a problem, use instead + | +LL | if x.is_finite() || x.is_nan() {} + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ +help: or, for conciseness + | +LL | if !x.is_infinite() {} + | ~~~~~~~~~~~~~~~~ + +error: aborting due to 6 previous errors + diff --git a/src/tools/clippy/tests/ui/manual_let_else.rs b/src/tools/clippy/tests/ui/manual_let_else.rs index 46241afec94..381b83409e9 100644 --- a/src/tools/clippy/tests/ui/manual_let_else.rs +++ b/src/tools/clippy/tests/ui/manual_let_else.rs @@ -279,7 +279,9 @@ fn not_fire() { create_binding_if_some_nf!(v, g()); // Already a let-else - let Some(a) = (if let Some(b) = Some(Some(())) { b } else { return }) else { panic!() }; + let Some(a) = (if let Some(b) = Some(Some(())) { b } else { return }) else { + panic!() + }; // If a type annotation is present, don't lint as // expressing the type might be too hard @@ -304,9 +306,7 @@ fn not_fire() { let _x 
= if let Some(x) = Some(1) { x } else { - let Some(_z) = Some(3) else { - return - }; + let Some(_z) = Some(3) else { return }; 1 }; diff --git a/src/tools/clippy/tests/ui/manual_let_else.stderr b/src/tools/clippy/tests/ui/manual_let_else.stderr index 1eada4f992e..912302b17a8 100644 --- a/src/tools/clippy/tests/ui/manual_let_else.stderr +++ b/src/tools/clippy/tests/ui/manual_let_else.stderr @@ -352,7 +352,7 @@ LL + }; | error: this could be rewritten as `let...else` - --> $DIR/manual_let_else.rs:297:5 + --> $DIR/manual_let_else.rs:299:5 | LL | / let _ = match ff { LL | | Some(value) => value, diff --git a/src/tools/clippy/tests/ui/manual_let_else_question_mark.fixed b/src/tools/clippy/tests/ui/manual_let_else_question_mark.fixed new file mode 100644 index 00000000000..02308bc7c4c --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_let_else_question_mark.fixed @@ -0,0 +1,63 @@ +//@run-rustfix +#![allow(unused_braces, unused_variables, dead_code)] +#![allow( + clippy::collapsible_else_if, + clippy::unused_unit, + clippy::let_unit_value, + clippy::match_single_binding, + clippy::never_loop +)] +#![warn(clippy::manual_let_else, clippy::question_mark)] + +enum Variant { + A(usize, usize), + B(usize), + C, +} + +fn g() -> Option<(u8, u8)> { + None +} + +fn e() -> Variant { + Variant::A(0, 0) +} + +fn main() {} + +fn foo() -> Option<()> { + // Fire here, normal case + let v = g()?; + + // Don't fire here, the pattern is refutable + let Variant::A(v, w) = e() else { return None }; + + // Fire here, the pattern is irrefutable + let (v, w) = g()?; + + // Don't fire manual_let_else in this instance: question mark can be used instead. + let v = g()?; + + // Do fire manual_let_else in this instance: question mark cannot be used here due to the return + // body. + let Some(v) = g() else { + return Some(()); + }; + + // Here we could also fire the question_mark lint, but we don't (as it's a match and not an if let). + // So we still emit manual_let_else here. For the *resulting* code, we *do* emit the question_mark + // lint, so for rustfix reasons, we allow the question_mark lint here. + #[allow(clippy::question_mark)] + { + let Some(v) = g() else { return None }; + } + + // This is a copy of the case above where we'd fire the question_mark lint, but here we have allowed + // it. Make sure that manual_let_else is fired as the fallback. 
+ #[allow(clippy::question_mark)] + { + let Some(v) = g() else { return None }; + } + + Some(()) +} diff --git a/src/tools/clippy/tests/ui/manual_let_else_question_mark.rs b/src/tools/clippy/tests/ui/manual_let_else_question_mark.rs new file mode 100644 index 00000000000..9c7ad386dc9 --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_let_else_question_mark.rs @@ -0,0 +1,68 @@ +//@run-rustfix +#![allow(unused_braces, unused_variables, dead_code)] +#![allow( + clippy::collapsible_else_if, + clippy::unused_unit, + clippy::let_unit_value, + clippy::match_single_binding, + clippy::never_loop +)] +#![warn(clippy::manual_let_else, clippy::question_mark)] + +enum Variant { + A(usize, usize), + B(usize), + C, +} + +fn g() -> Option<(u8, u8)> { + None +} + +fn e() -> Variant { + Variant::A(0, 0) +} + +fn main() {} + +fn foo() -> Option<()> { + // Fire here, normal case + let Some(v) = g() else { return None }; + + // Don't fire here, the pattern is refutable + let Variant::A(v, w) = e() else { return None }; + + // Fire here, the pattern is irrefutable + let Some((v, w)) = g() else { return None }; + + // Don't fire manual_let_else in this instance: question mark can be used instead. + let v = if let Some(v_some) = g() { v_some } else { return None }; + + // Do fire manual_let_else in this instance: question mark cannot be used here due to the return + // body. + let v = if let Some(v_some) = g() { + v_some + } else { + return Some(()); + }; + + // Here we could also fire the question_mark lint, but we don't (as it's a match and not an if let). + // So we still emit manual_let_else here. For the *resulting* code, we *do* emit the question_mark + // lint, so for rustfix reasons, we allow the question_mark lint here. + #[allow(clippy::question_mark)] + { + let v = match g() { + Some(v_some) => v_some, + _ => return None, + }; + } + + // This is a copy of the case above where we'd fire the question_mark lint, but here we have allowed + // it. Make sure that manual_let_else is fired as the fallback. 
+ #[allow(clippy::question_mark)] + { + let v = if let Some(v_some) = g() { v_some } else { return None }; + } + + Some(()) +} diff --git a/src/tools/clippy/tests/ui/manual_let_else_question_mark.stderr b/src/tools/clippy/tests/ui/manual_let_else_question_mark.stderr new file mode 100644 index 00000000000..d7d2e127ea3 --- /dev/null +++ b/src/tools/clippy/tests/ui/manual_let_else_question_mark.stderr @@ -0,0 +1,55 @@ +error: this `let...else` may be rewritten with the `?` operator + --> $DIR/manual_let_else_question_mark.rs:30:5 + | +LL | let Some(v) = g() else { return None }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `let v = g()?;` + | + = note: `-D clippy::question-mark` implied by `-D warnings` + +error: this `let...else` may be rewritten with the `?` operator + --> $DIR/manual_let_else_question_mark.rs:36:5 + | +LL | let Some((v, w)) = g() else { return None }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `let (v, w) = g()?;` + +error: this block may be rewritten with the `?` operator + --> $DIR/manual_let_else_question_mark.rs:39:13 + | +LL | let v = if let Some(v_some) = g() { v_some } else { return None }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: replace it with: `g()?` + +error: this could be rewritten as `let...else` + --> $DIR/manual_let_else_question_mark.rs:43:5 + | +LL | / let v = if let Some(v_some) = g() { +LL | | v_some +LL | | } else { +LL | | return Some(()); +LL | | }; + | |______^ + | + = note: `-D clippy::manual-let-else` implied by `-D warnings` +help: consider writing + | +LL ~ let Some(v) = g() else { +LL + return Some(()); +LL + }; + | + +error: this could be rewritten as `let...else` + --> $DIR/manual_let_else_question_mark.rs:54:9 + | +LL | / let v = match g() { +LL | | Some(v_some) => v_some, +LL | | _ => return None, +LL | | }; + | |__________^ help: consider writing: `let Some(v) = g() else { return None };` + +error: this could be rewritten as `let...else` + --> $DIR/manual_let_else_question_mark.rs:64:9 + | +LL | let v = if let Some(v_some) = g() { v_some } else { return None }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider writing: `let Some(v) = g() else { return None };` + +error: aborting due to 6 previous errors + diff --git a/src/tools/clippy/tests/ui/manual_map_option.stderr b/src/tools/clippy/tests/ui/manual_map_option.stderr index cdc2c0e62a9..3f9caad4e89 100644 --- a/src/tools/clippy/tests/ui/manual_map_option.stderr +++ b/src/tools/clippy/tests/ui/manual_map_option.stderr @@ -5,7 +5,7 @@ LL | / match Some(0) { LL | | Some(_) => Some(2), LL | | None::<u32> => None, LL | | }; - | |_____^ help: try this: `Some(0).map(|_| 2)` + | |_____^ help: try: `Some(0).map(|_| 2)` | = note: `-D clippy::manual-map` implied by `-D warnings` @@ -16,7 +16,7 @@ LL | / match Some(0) { LL | | Some(x) => Some(x + 1), LL | | _ => None, LL | | }; - | |_____^ help: try this: `Some(0).map(|x| x + 1)` + | |_____^ help: try: `Some(0).map(|x| x + 1)` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:25:5 @@ -25,7 +25,7 @@ LL | / match Some("") { LL | | Some(x) => Some(x.is_empty()), LL | | None => None, LL | | }; - | |_____^ help: try this: `Some("").map(|x| x.is_empty())` + | |_____^ help: try: `Some("").map(|x| x.is_empty())` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:30:5 @@ -35,7 +35,7 @@ LL | | Some(!x) LL | | } else { LL | | None LL | | }; - | |_____^ help: try this: `Some(0).map(|x| !x)` + | 
|_____^ help: try: `Some(0).map(|x| !x)` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:37:5 @@ -44,7 +44,7 @@ LL | / match Some(0) { LL | | Some(x) => { Some(std::convert::identity(x)) } LL | | None => { None } LL | | }; - | |_____^ help: try this: `Some(0).map(std::convert::identity)` + | |_____^ help: try: `Some(0).map(std::convert::identity)` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:42:5 @@ -53,7 +53,7 @@ LL | / match Some(&String::new()) { LL | | Some(x) => Some(str::len(x)), LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(&String::new()).map(|x| str::len(x))` + | |_____^ help: try: `Some(&String::new()).map(|x| str::len(x))` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:52:5 @@ -62,7 +62,7 @@ LL | / match &Some([0, 1]) { LL | | Some(x) => Some(x[0]), LL | | &None => None, LL | | }; - | |_____^ help: try this: `Some([0, 1]).as_ref().map(|x| x[0])` + | |_____^ help: try: `Some([0, 1]).as_ref().map(|x| x[0])` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:57:5 @@ -71,7 +71,7 @@ LL | / match &Some(0) { LL | | &Some(x) => Some(x * 2), LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(0).map(|x| x * 2)` + | |_____^ help: try: `Some(0).map(|x| x * 2)` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:62:5 @@ -80,7 +80,7 @@ LL | / match Some(String::new()) { LL | | Some(ref x) => Some(x.is_empty()), LL | | _ => None, LL | | }; - | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.is_empty())` + | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.is_empty())` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:67:5 @@ -89,7 +89,7 @@ LL | / match &&Some(String::new()) { LL | | Some(x) => Some(x.len()), LL | | _ => None, LL | | }; - | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.len())` + | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.len())` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:72:5 @@ -98,7 +98,7 @@ LL | / match &&Some(0) { LL | | &&Some(x) => Some(x + x), LL | | &&_ => None, LL | | }; - | |_____^ help: try this: `Some(0).map(|x| x + x)` + | |_____^ help: try: `Some(0).map(|x| x + x)` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:85:9 @@ -107,7 +107,7 @@ LL | / match &mut Some(String::new()) { LL | | Some(x) => Some(x.push_str("")), LL | | None => None, LL | | }; - | |_________^ help: try this: `Some(String::new()).as_mut().map(|x| x.push_str(""))` + | |_________^ help: try: `Some(String::new()).as_mut().map(|x| x.push_str(""))` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:91:5 @@ -116,7 +116,7 @@ LL | / match &mut Some(String::new()) { LL | | Some(ref x) => Some(x.len()), LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.len())` + | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.len())` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:96:5 @@ -125,7 +125,7 @@ LL | / match &mut &Some(String::new()) { LL | | Some(x) => Some(x.is_empty()), LL | | &mut _ => None, LL | | }; - | |_____^ help: try this: `Some(String::new()).as_ref().map(|x| x.is_empty())` + | |_____^ help: try: `Some(String::new()).as_ref().map(|x| x.is_empty())` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:101:5 @@ -134,7 +134,7 @@ LL | / match Some((0, 1, 2)) 
{ LL | | Some((x, y, z)) => Some(x + y + z), LL | | None => None, LL | | }; - | |_____^ help: try this: `Some((0, 1, 2)).map(|(x, y, z)| x + y + z)` + | |_____^ help: try: `Some((0, 1, 2)).map(|(x, y, z)| x + y + z)` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:106:5 @@ -143,7 +143,7 @@ LL | / match Some([1, 2, 3]) { LL | | Some([first, ..]) => Some(first), LL | | None => None, LL | | }; - | |_____^ help: try this: `Some([1, 2, 3]).map(|[first, ..]| first)` + | |_____^ help: try: `Some([1, 2, 3]).map(|[first, ..]| first)` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:111:5 @@ -152,7 +152,7 @@ LL | / match &Some((String::new(), "test")) { LL | | Some((x, y)) => Some((y, x)), LL | | None => None, LL | | }; - | |_____^ help: try this: `Some((String::new(), "test")).as_ref().map(|(x, y)| (y, x))` + | |_____^ help: try: `Some((String::new(), "test")).as_ref().map(|(x, y)| (y, x))` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:169:5 @@ -161,7 +161,7 @@ LL | / match Some(0) { LL | | Some(x) => Some(vec![x]), LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(0).map(|x| vec![x])` + | |_____^ help: try: `Some(0).map(|x| vec![x])` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:174:5 @@ -170,7 +170,7 @@ LL | / match option_env!("") { LL | | Some(x) => Some(String::from(x)), LL | | None => None, LL | | }; - | |_____^ help: try this: `option_env!("").map(String::from)` + | |_____^ help: try: `option_env!("").map(String::from)` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:194:12 @@ -181,7 +181,7 @@ LL | | Some(x + 1) LL | | } else { LL | | None LL | | }; - | |_____^ help: try this: `{ Some(0).map(|x| x + 1) }` + | |_____^ help: try: `{ Some(0).map(|x| x + 1) }` error: manual implementation of `Option::map` --> $DIR/manual_map_option.rs:202:12 @@ -192,7 +192,7 @@ LL | | Some(x + 1) LL | | } else { LL | | None LL | | }; - | |_____^ help: try this: `{ Some(0).map(|x| x + 1) }` + | |_____^ help: try: `{ Some(0).map(|x| x + 1) }` error: aborting due to 21 previous errors diff --git a/src/tools/clippy/tests/ui/manual_map_option_2.stderr b/src/tools/clippy/tests/ui/manual_map_option_2.stderr index d35b6252fb8..8c78fcffca8 100644 --- a/src/tools/clippy/tests/ui/manual_map_option_2.stderr +++ b/src/tools/clippy/tests/ui/manual_map_option_2.stderr @@ -12,7 +12,7 @@ LL | | }; | |_____^ | = note: `-D clippy::manual-map` implied by `-D warnings` -help: try this +help: try | LL ~ let _ = Some(0).map(|x| { LL + let y = (String::new(), String::new()); @@ -32,7 +32,7 @@ LL | | None => None, LL | | }; | |_____^ | -help: try this +help: try | LL ~ let _ = s.as_ref().map(|x| { LL + if let Some(ref s) = s { (x.clone(), s) } else { panic!() } @@ -47,7 +47,7 @@ LL | let _ = match Some(0) { LL | | Some(x) => Some(f(x)), LL | | None => None, LL | | }; - | |_________^ help: try this: `Some(0).map(|x| f(x))` + | |_________^ help: try: `Some(0).map(|x| f(x))` error: manual implementation of `Option::map` --> $DIR/manual_map_option_2.rs:67:13 @@ -57,7 +57,7 @@ LL | let _ = match Some(0) { LL | | Some(x) => unsafe { Some(f(x)) }, LL | | None => None, LL | | }; - | |_____^ help: try this: `Some(0).map(|x| unsafe { f(x) })` + | |_____^ help: try: `Some(0).map(|x| unsafe { f(x) })` error: manual implementation of `Option::map` --> $DIR/manual_map_option_2.rs:71:13 @@ -67,7 +67,7 @@ LL | let _ = match Some(0) { LL | | Some(x) => Some(unsafe { f(x) }), LL | | None => None, LL 
| | }; - | |_____^ help: try this: `Some(0).map(|x| unsafe { f(x) })` + | |_____^ help: try: `Some(0).map(|x| unsafe { f(x) })` error: aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/manual_range_patterns.fixed b/src/tools/clippy/tests/ui/manual_range_patterns.fixed index 9eee8f37187..6bfcf263aa5 100644 --- a/src/tools/clippy/tests/ui/manual_range_patterns.fixed +++ b/src/tools/clippy/tests/ui/manual_range_patterns.fixed @@ -25,6 +25,10 @@ fn main() { 1..=10 => true, _ => false, }; + let _ = matches!(f, -5..=3); + let _ = matches!(f, -1 | -5 | 3 | -2 | -4 | -3 | 0 | 1); // 2 is missing + let _ = matches!(f, -1000001..=1000001); + let _ = matches!(f, -1_000_000..=1_000_000 | -1_000_001 | 1_000_002); macro_rules! mac { ($e:expr) => { diff --git a/src/tools/clippy/tests/ui/manual_range_patterns.rs b/src/tools/clippy/tests/ui/manual_range_patterns.rs index 10743a7d04c..4a429bb2aed 100644 --- a/src/tools/clippy/tests/ui/manual_range_patterns.rs +++ b/src/tools/clippy/tests/ui/manual_range_patterns.rs @@ -25,6 +25,10 @@ fn main() { 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 => true, _ => false, }; + let _ = matches!(f, -1 | -5 | 3 | -2 | -4 | -3 | 0 | 1 | 2); + let _ = matches!(f, -1 | -5 | 3 | -2 | -4 | -3 | 0 | 1); // 2 is missing + let _ = matches!(f, -1_000_000..=1_000_000 | -1_000_001 | 1_000_001); + let _ = matches!(f, -1_000_000..=1_000_000 | -1_000_001 | 1_000_002); macro_rules! mac { ($e:expr) => { diff --git a/src/tools/clippy/tests/ui/manual_range_patterns.stderr b/src/tools/clippy/tests/ui/manual_range_patterns.stderr index bc9e3350164..b1b55d483e7 100644 --- a/src/tools/clippy/tests/ui/manual_range_patterns.stderr +++ b/src/tools/clippy/tests/ui/manual_range_patterns.stderr @@ -37,7 +37,19 @@ LL | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 => true, | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `1..=10` error: this OR pattern can be rewritten using a range - --> $DIR/manual_range_patterns.rs:31:26 + --> $DIR/manual_range_patterns.rs:28:25 + | +LL | let _ = matches!(f, -1 | -5 | 3 | -2 | -4 | -3 | 0 | 1 | 2); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `-5..=3` + +error: this OR pattern can be rewritten using a range + --> $DIR/manual_range_patterns.rs:30:25 + | +LL | let _ = matches!(f, -1_000_000..=1_000_000 | -1_000_001 | 1_000_001); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `-1000001..=1000001` + +error: this OR pattern can be rewritten using a range + --> $DIR/manual_range_patterns.rs:35:26 | LL | matches!($e, 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `1..=10` @@ -47,5 +59,5 @@ LL | mac!(f); | = note: this error originates in the macro `mac` (in Nightly builds, run with -Z macro-backtrace for more info) -error: aborting due to 7 previous errors +error: aborting due to 9 previous errors diff --git a/src/tools/clippy/tests/ui/manual_retain.fixed b/src/tools/clippy/tests/ui/manual_retain.fixed index 09fb0d75852..c95d40fecba 100644 --- a/src/tools/clippy/tests/ui/manual_retain.fixed +++ b/src/tools/clippy/tests/ui/manual_retain.fixed @@ -1,12 +1,7 @@ //@run-rustfix #![warn(clippy::manual_retain)] #![allow(unused, clippy::redundant_clone)] -use std::collections::BTreeMap; -use std::collections::BTreeSet; -use std::collections::BinaryHeap; -use std::collections::HashMap; -use std::collections::HashSet; -use std::collections::VecDeque; +use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}; fn main() { binary_heap_retain(); diff --git 
a/src/tools/clippy/tests/ui/manual_retain.rs b/src/tools/clippy/tests/ui/manual_retain.rs index 7fee4c95cea..9a3434f489d 100644 --- a/src/tools/clippy/tests/ui/manual_retain.rs +++ b/src/tools/clippy/tests/ui/manual_retain.rs @@ -1,12 +1,7 @@ //@run-rustfix #![warn(clippy::manual_retain)] #![allow(unused, clippy::redundant_clone)] -use std::collections::BTreeMap; -use std::collections::BTreeSet; -use std::collections::BinaryHeap; -use std::collections::HashMap; -use std::collections::HashSet; -use std::collections::VecDeque; +use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, VecDeque}; fn main() { binary_heap_retain(); diff --git a/src/tools/clippy/tests/ui/manual_retain.stderr b/src/tools/clippy/tests/ui/manual_retain.stderr index 89316ce1d99..0936a23841c 100644 --- a/src/tools/clippy/tests/ui/manual_retain.stderr +++ b/src/tools/clippy/tests/ui/manual_retain.stderr @@ -1,5 +1,5 @@ error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:51:5 + --> $DIR/manual_retain.rs:46:5 | LL | btree_map = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_map.retain(|k, _| k % 2 == 0)` @@ -7,13 +7,13 @@ LL | btree_map = btree_map.into_iter().filter(|(k, _)| k % 2 == 0).collect() = note: `-D clippy::manual-retain` implied by `-D warnings` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:52:5 + --> $DIR/manual_retain.rs:47:5 | LL | btree_map = btree_map.into_iter().filter(|(_, v)| v % 2 == 0).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_map.retain(|_, &mut v| v % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:53:5 + --> $DIR/manual_retain.rs:48:5 | LL | / btree_map = btree_map LL | | .into_iter() @@ -22,37 +22,37 @@ LL | | .collect(); | |__________________^ help: consider calling `.retain()` instead: `btree_map.retain(|k, &mut v| (k % 2 == 0) && (v % 2 == 0))` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:75:5 + --> $DIR/manual_retain.rs:70:5 | LL | btree_set = btree_set.iter().filter(|&x| x % 2 == 0).copied().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:76:5 + --> $DIR/manual_retain.rs:71:5 | LL | btree_set = btree_set.iter().filter(|&x| x % 2 == 0).cloned().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:77:5 + --> $DIR/manual_retain.rs:72:5 | LL | btree_set = btree_set.into_iter().filter(|x| x % 2 == 0).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `btree_set.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:107:5 + --> $DIR/manual_retain.rs:102:5 | LL | hash_map = hash_map.into_iter().filter(|(k, _)| k % 2 == 0).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: 
consider calling `.retain()` instead: `hash_map.retain(|k, _| k % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:108:5 + --> $DIR/manual_retain.rs:103:5 | LL | hash_map = hash_map.into_iter().filter(|(_, v)| v % 2 == 0).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_map.retain(|_, &mut v| v % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:109:5 + --> $DIR/manual_retain.rs:104:5 | LL | / hash_map = hash_map LL | | .into_iter() @@ -61,61 +61,61 @@ LL | | .collect(); | |__________________^ help: consider calling `.retain()` instead: `hash_map.retain(|k, &mut v| (k % 2 == 0) && (v % 2 == 0))` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:130:5 + --> $DIR/manual_retain.rs:125:5 | LL | hash_set = hash_set.into_iter().filter(|x| x % 2 == 0).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:131:5 + --> $DIR/manual_retain.rs:126:5 | LL | hash_set = hash_set.iter().filter(|&x| x % 2 == 0).copied().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:132:5 + --> $DIR/manual_retain.rs:127:5 | LL | hash_set = hash_set.iter().filter(|&x| x % 2 == 0).cloned().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `hash_set.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:161:5 + --> $DIR/manual_retain.rs:156:5 | LL | s = s.chars().filter(|&c| c != 'o').to_owned().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `s.retain(|c| c != 'o')` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:173:5 + --> $DIR/manual_retain.rs:168:5 | LL | vec = vec.iter().filter(|&x| x % 2 == 0).copied().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:174:5 + --> $DIR/manual_retain.rs:169:5 | LL | vec = vec.iter().filter(|&x| x % 2 == 0).cloned().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:175:5 + --> $DIR/manual_retain.rs:170:5 | LL | vec = vec.into_iter().filter(|x| x % 2 == 0).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:197:5 + --> $DIR/manual_retain.rs:192:5 | LL | vec_deque = vec_deque.iter().filter(|&x| x % 2 == 0).copied().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: 
`vec_deque.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:198:5 + --> $DIR/manual_retain.rs:193:5 | LL | vec_deque = vec_deque.iter().filter(|&x| x % 2 == 0).cloned().collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec_deque.retain(|x| x % 2 == 0)` error: this expression can be written more simply using `.retain()` - --> $DIR/manual_retain.rs:199:5 + --> $DIR/manual_retain.rs:194:5 | LL | vec_deque = vec_deque.into_iter().filter(|x| x % 2 == 0).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: consider calling `.retain()` instead: `vec_deque.retain(|x| x % 2 == 0)` diff --git a/src/tools/clippy/tests/ui/manual_split_once.stderr b/src/tools/clippy/tests/ui/manual_split_once.stderr index 78da5a16cc5..f454f95b41d 100644 --- a/src/tools/clippy/tests/ui/manual_split_once.stderr +++ b/src/tools/clippy/tests/ui/manual_split_once.stderr @@ -2,7 +2,7 @@ error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:13:13 | LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1` | = note: `-D clippy::manual-split-once` implied by `-D warnings` @@ -10,73 +10,73 @@ error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:14:13 | LL | let _ = "key=value".splitn(2, '=').skip(1).next().unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1` error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:15:18 | LL | let (_, _) = "key=value".splitn(2, '=').next_tuple().unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=')` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=')` error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:18:13 | LL | let _ = s.splitn(2, '=').nth(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1` error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:21:13 | LL | let _ = s.splitn(2, '=').nth(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1` error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:24:13 | LL | let _ = s.splitn(2, '=').skip(1).next().unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=').unwrap().1` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=').unwrap().1` error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:27:17 | LL | let _ = s.splitn(2, '=').nth(1)?; - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=')?.1` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=')?.1` error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:28:17 | LL | let _ = s.splitn(2, '=').skip(1).next()?; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.split_once('=')?.1` + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.split_once('=')?.1` error: manual implementation of `rsplit_once` --> $DIR/manual_split_once.rs:29:17 | LL | let _ = s.rsplitn(2, '=').nth(1)?; - | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=')?.0` + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=')?.0` error: manual implementation of `rsplit_once` --> $DIR/manual_split_once.rs:30:17 | LL | let _ = s.rsplitn(2, '=').skip(1).next()?; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=')?.0` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=')?.0` error: manual implementation of `rsplit_once` --> $DIR/manual_split_once.rs:38:13 | LL | let _ = "key=value".rsplitn(2, '=').nth(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".rsplit_once('=').unwrap().0` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".rsplit_once('=').unwrap().0` error: manual implementation of `rsplit_once` --> $DIR/manual_split_once.rs:39:18 | LL | let (_, _) = "key=value".rsplitn(2, '=').next_tuple().unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".rsplit_once('=').map(|(x, y)| (y, x))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".rsplit_once('=').map(|(x, y)| (y, x))` error: manual implementation of `rsplit_once` --> $DIR/manual_split_once.rs:40:13 | LL | let _ = s.rsplitn(2, '=').nth(1); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit_once('=').map(|x| x.0)` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.rsplit_once('=').map(|x| x.0)` error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:44:5 @@ -182,7 +182,7 @@ error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:141:13 | LL | let _ = "key=value".splitn(2, '=').nth(1).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split_once('=').unwrap().1` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split_once('=').unwrap().1` error: manual implementation of `split_once` --> $DIR/manual_split_once.rs:143:5 diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.stderr b/src/tools/clippy/tests/ui/manual_str_repeat.stderr index bdfee7cab26..331bb6ea575 100644 --- a/src/tools/clippy/tests/ui/manual_str_repeat.stderr +++ b/src/tools/clippy/tests/ui/manual_str_repeat.stderr @@ -2,7 +2,7 @@ error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:9:21 | LL | let _: String = std::iter::repeat("test").take(10).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"test".repeat(10)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"test".repeat(10)` | = note: `-D clippy::manual-str-repeat` implied by `-D warnings` @@ -10,55 +10,55 @@ error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:10:21 | LL | let _: String = std::iter::repeat('x').take(10).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"x".repeat(10)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"x".repeat(10)` error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:11:21 | LL | let _: String = std::iter::repeat('\'').take(10).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"'".repeat(10)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"'".repeat(10)` error: manual implementation of `str::repeat` using iterators -->
$DIR/manual_str_repeat.rs:12:21 | LL | let _: String = std::iter::repeat('"').take(10).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"\"".repeat(10)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"\"".repeat(10)` error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:16:13 | LL | let _ = repeat(x).take(count + 2).collect::<String>(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count + 2)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count + 2)` error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:25:21 | LL | let _: String = repeat(*x).take(count).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(*x).repeat(count)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(*x).repeat(count)` error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:34:21 | LL | let _: String = repeat(x).take(count).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count)` error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:46:21 | LL | let _: String = repeat(Cow::Borrowed("test")).take(count).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Cow::Borrowed("test").repeat(count)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Cow::Borrowed("test").repeat(count)` error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:49:21 | LL | let _: String = repeat(x).take(count).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `x.repeat(count)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count)` error: manual implementation of `str::repeat` using iterators --> $DIR/manual_str_repeat.rs:64:21 | LL | let _: String = std::iter::repeat("test").take(10).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"test".repeat(10)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"test".repeat(10)` error: aborting due to 10 previous errors diff --git a/src/tools/clippy/tests/ui/manual_try_fold.rs b/src/tools/clippy/tests/ui/manual_try_fold.rs index 4521e9fa1a5..05c658579a5 100644 --- a/src/tools/clippy/tests/ui/manual_try_fold.rs +++ b/src/tools/clippy/tests/ui/manual_try_fold.rs @@ -3,9 +3,7 @@ #![warn(clippy::manual_try_fold)] #![feature(try_trait_v2)] -use std::ops::ControlFlow; -use std::ops::FromResidual; -use std::ops::Try; +use std::ops::{ControlFlow, FromResidual, Try}; #[macro_use] extern crate proc_macros; diff --git a/src/tools/clippy/tests/ui/manual_try_fold.stderr b/src/tools/clippy/tests/ui/manual_try_fold.stderr index a0cf5b3b5fc..f1bb97c6d0f 100644 --- a/src/tools/clippy/tests/ui/manual_try_fold.stderr +++ b/src/tools/clippy/tests/ui/manual_try_fold.stderr @@ -1,5 +1,5 @@ error: usage of `Iterator::fold` on a type that implements `Try` - --> $DIR/manual_try_fold.rs:61:10 + --> $DIR/manual_try_fold.rs:59:10 | LL | .fold(Some(0i32), |sum, i| sum?.checked_add(*i)) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `try_fold` instead: `try_fold(0i32, |sum, i| ...)` @@ -7,19 +7,19 @@ LL | .fold(Some(0i32), |sum, i| sum?.checked_add(*i)) = note: `-D clippy::manual-try-fold` implied by `-D warnings` error: usage of `Iterator::fold` on a type that implements `Try` - -->
$DIR/manual_try_fold.rs:63:10 | LL | .fold(NotOption(0i32, 0i32), |sum, i| NotOption(0i32, 0i32)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `try_fold` instead: `try_fold(..., |sum, i| ...)` error: usage of `Iterator::fold` on a type that implements `Try` - --> $DIR/manual_try_fold.rs:68:10 + --> $DIR/manual_try_fold.rs:66:10 | LL | .fold(NotOptionButWorse(0i32), |sum, i| NotOptionButWorse(0i32)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `try_fold` instead: `try_fold(0i32, |sum, i| ...)` error: usage of `Iterator::fold` on a type that implements `Try` - --> $DIR/manual_try_fold.rs:98:10 + --> $DIR/manual_try_fold.rs:96:10 | LL | .fold(Some(0i32), |sum, i| sum?.checked_add(*i)) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: use `try_fold` instead: `try_fold(0i32, |sum, i| ...)` diff --git a/src/tools/clippy/tests/ui/map_collect_result_unit.stderr b/src/tools/clippy/tests/ui/map_collect_result_unit.stderr index 8b06e13baa6..596e51e5741 100644 --- a/src/tools/clippy/tests/ui/map_collect_result_unit.stderr +++ b/src/tools/clippy/tests/ui/map_collect_result_unit.stderr @@ -2,7 +2,7 @@ error: `.map().collect()` can be replaced with `.try_for_each()` --> $DIR/map_collect_result_unit.rs:6:17 | LL | let _ = (0..3).map(|t| Err(t + 1)).collect::<Result<(), _>>(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(0..3).try_for_each(|t| Err(t + 1))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(0..3).try_for_each(|t| Err(t + 1))` | = note: `-D clippy::map-collect-result-unit` implied by `-D warnings` @@ -10,7 +10,7 @@ error: `.map().collect()` can be replaced with `.try_for_each()` --> $DIR/map_collect_result_unit.rs:7:32 | LL | let _: Result<(), _> = (0..3).map(|t| Err(t + 1)).collect(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(0..3).try_for_each(|t| Err(t + 1))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(0..3).try_for_each(|t| Err(t + 1))` error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/map_unwrap_or.stderr b/src/tools/clippy/tests/ui/map_unwrap_or.stderr index 9f4a4a9ae6b..5b3c61acf74 100644 --- a/src/tools/clippy/tests/ui/map_unwrap_or.stderr +++ b/src/tools/clippy/tests/ui/map_unwrap_or.stderr @@ -162,7 +162,7 @@ error: called `map(<f>).unwrap_or_else(<g>)` on a `Result` value. This can be do --> $DIR/map_unwrap_or.rs:99:13 | LL | let _ = res.map(|x| x + 1).unwrap_or_else(|_e| 0); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `res.map_or_else(|_e| 0, |x| x + 1)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `res.map_or_else(|_e| 0, |x| x + 1)` error: called `map(<f>).unwrap_or(<a>)` on an `Option` value. This can be done more directly by calling `map_or(<a>, <f>)` instead --> $DIR/map_unwrap_or.rs:106:13 diff --git a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr index 1837bc2ca3b..71dc009f2ce 100644 --- a/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr +++ b/src/tools/clippy/tests/ui/map_unwrap_or_fixable.stderr @@ -5,7 +5,7 @@ LL | let _ = opt.map(|x| x + 1) | _____________^ LL | | // Should lint even though this call is on a separate line. 
LL | | .unwrap_or_else(|| 0); - | |_____________________________^ help: try this: `opt.map_or_else(|| 0, |x| x + 1)` + | |_____________________________^ help: try: `opt.map_or_else(|| 0, |x| x + 1)` | = note: `-D clippy::map-unwrap-or` implied by `-D warnings` @@ -16,7 +16,7 @@ LL | let _ = res.map(|x| x + 1) | _____________^ LL | | // should lint even though this call is on a separate line LL | | .unwrap_or_else(|_e| 0); - | |_______________________________^ help: try this: `res.map_or_else(|_e| 0, |x| x + 1)` + | |_______________________________^ help: try: `res.map_or_else(|_e| 0, |x| x + 1)` error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/match_as_ref.fixed b/src/tools/clippy/tests/ui/match_as_ref.fixed index 8fa3f532587..61d414bdf4b 100644 --- a/src/tools/clippy/tests/ui/match_as_ref.fixed +++ b/src/tools/clippy/tests/ui/match_as_ref.fixed @@ -12,7 +12,9 @@ fn match_as_ref() { } mod issue4437 { - use std::{error::Error, fmt, num::ParseIntError}; + use std::error::Error; + use std::fmt; + use std::num::ParseIntError; #[derive(Debug)] struct E { diff --git a/src/tools/clippy/tests/ui/match_as_ref.rs b/src/tools/clippy/tests/ui/match_as_ref.rs index 02a17791426..cd39514c59a 100644 --- a/src/tools/clippy/tests/ui/match_as_ref.rs +++ b/src/tools/clippy/tests/ui/match_as_ref.rs @@ -18,7 +18,9 @@ fn match_as_ref() { } mod issue4437 { - use std::{error::Error, fmt, num::ParseIntError}; + use std::error::Error; + use std::fmt; + use std::num::ParseIntError; #[derive(Debug)] struct E { diff --git a/src/tools/clippy/tests/ui/match_as_ref.stderr b/src/tools/clippy/tests/ui/match_as_ref.stderr index c3b62849cb3..2e6955eb805 100644 --- a/src/tools/clippy/tests/ui/match_as_ref.stderr +++ b/src/tools/clippy/tests/ui/match_as_ref.stderr @@ -6,7 +6,7 @@ LL | let borrowed: Option<&()> = match owned { LL | | None => None, LL | | Some(ref v) => Some(v), LL | | }; - | |_____^ help: try this: `owned.as_ref()` + | |_____^ help: try: `owned.as_ref()` | = note: `-D clippy::match-as-ref` implied by `-D warnings` @@ -18,16 +18,16 @@ LL | let borrow_mut: Option<&mut ()> = match mut_owned { LL | | None => None, LL | | Some(ref mut v) => Some(v), LL | | }; - | |_____^ help: try this: `mut_owned.as_mut()` + | |_____^ help: try: `mut_owned.as_mut()` error: use `as_ref()` instead - --> $DIR/match_as_ref.rs:30:13 + --> $DIR/match_as_ref.rs:32:13 | LL | / match self.source { LL | | Some(ref s) => Some(s), LL | | None => None, LL | | } - | |_____________^ help: try this: `self.source.as_ref().map(|x| x as _)` + | |_____________^ help: try: `self.source.as_ref().map(|x| x as _)` error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr index b72fe10b748..c8c1e5da05f 100644 --- a/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr +++ b/src/tools/clippy/tests/ui/match_expr_like_matches_macro.stderr @@ -6,7 +6,7 @@ LL | let _y = match x { LL | | Some(0) => true, LL | | _ => false, LL | | }; - | |_____^ help: try this: `matches!(x, Some(0))` + | |_____^ help: try: `matches!(x, Some(0))` | = note: `-D clippy::match-like-matches-macro` implied by `-D warnings` @@ -18,7 +18,7 @@ LL | let _w = match x { LL | | Some(_) => true, LL | | _ => false, LL | | }; - | |_____^ help: try this: `x.is_some()` + | |_____^ help: try: `x.is_some()` | = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings` @@ -30,7 +30,7 @@ LL | let _z = match x { LL | | Some(_) => 
false, LL | | None => true, LL | | }; - | |_____^ help: try this: `x.is_none()` + | |_____^ help: try: `x.is_none()` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:33:15 @@ -40,13 +40,13 @@ LL | let _zz = match x { LL | | Some(r) if r == 0 => false, LL | | _ => true, LL | | }; - | |_____^ help: try this: `!matches!(x, Some(r) if r == 0)` + | |_____^ help: try: `!matches!(x, Some(r) if r == 0)` error: if let .. else expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:39:16 | LL | let _zzz = if let Some(5) = x { true } else { false }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `matches!(x, Some(5))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `matches!(x, Some(5))` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:63:20 @@ -57,7 +57,7 @@ LL | | E::A(_) => true, LL | | E::B(_) => true, LL | | _ => false, LL | | }; - | |_________^ help: try this: `matches!(x, E::A(_) | E::B(_))` + | |_________^ help: try: `matches!(x, E::A(_) | E::B(_))` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:73:20 @@ -70,7 +70,7 @@ LL | | } LL | | E::B(_) => true, LL | | _ => false, LL | | }; - | |_________^ help: try this: `matches!(x, E::A(_) | E::B(_))` + | |_________^ help: try: `matches!(x, E::A(_) | E::B(_))` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:83:20 @@ -81,7 +81,7 @@ LL | | E::B(_) => false, LL | | E::C => false, LL | | _ => true, LL | | }; - | |_________^ help: try this: `!matches!(x, E::B(_) | E::C)` + | |_________^ help: try: `!matches!(x, E::B(_) | E::C)` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:143:18 @@ -91,7 +91,7 @@ LL | let _z = match &z { LL | | Some(3) => true, LL | | _ => false, LL | | }; - | |_________^ help: try this: `matches!(z, Some(3))` + | |_________^ help: try: `matches!(z, Some(3))` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:152:18 @@ -101,7 +101,7 @@ LL | let _z = match &z { LL | | Some(3) => true, LL | | _ => false, LL | | }; - | |_________^ help: try this: `matches!(&z, Some(3))` + | |_________^ help: try: `matches!(&z, Some(3))` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:169:21 @@ -111,7 +111,7 @@ LL | let _ = match &z { LL | | AnEnum::X => true, LL | | _ => false, LL | | }; - | |_____________^ help: try this: `matches!(&z, AnEnum::X)` + | |_____________^ help: try: `matches!(&z, AnEnum::X)` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:183:20 @@ -121,7 +121,7 @@ LL | let _res = match &val { LL | | &Some(ref _a) => true, LL | | _ => false, LL | | }; - | |_________^ help: try this: `matches!(&val, &Some(ref _a))` + | |_________^ help: try: `matches!(&val, &Some(ref _a))` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:195:20 @@ -131,7 +131,7 @@ LL | let _res = match &val { LL | | &Some(ref _a) => true, LL | | _ => false, LL | | }; - | |_________^ help: try this: `matches!(&val, &Some(ref _a))` + | |_________^ help: try: `matches!(&val, &Some(ref _a))` error: match expression looks like `matches!` macro --> $DIR/match_expr_like_matches_macro.rs:253:14 @@ -141,7 +141,7 @@ LL | let _y = match Some(5) { LL | | Some(0) => true, LL | | _ => false, LL | | }; - | |_____^ help: try this: 
`matches!(Some(5), Some(0))` + | |_____^ help: try: `matches!(Some(5), Some(0))` error: aborting due to 14 previous errors diff --git a/src/tools/clippy/tests/ui/match_on_vec_items.stderr b/src/tools/clippy/tests/ui/match_on_vec_items.stderr index 9b1f052867e..fc4a3ce1946 100644 --- a/src/tools/clippy/tests/ui/match_on_vec_items.stderr +++ b/src/tools/clippy/tests/ui/match_on_vec_items.stderr @@ -2,7 +2,7 @@ error: indexing into a vector may panic --> $DIR/match_on_vec_items.rs:10:11 | LL | match arr[idx] { - | ^^^^^^^^ help: try this: `arr.get(idx)` + | ^^^^^^^^ help: try: `arr.get(idx)` | = note: `-D clippy::match-on-vec-items` implied by `-D warnings` @@ -10,43 +10,43 @@ error: indexing into a vector may panic --> $DIR/match_on_vec_items.rs:17:11 | LL | match arr[range] { - | ^^^^^^^^^^ help: try this: `arr.get(range)` + | ^^^^^^^^^^ help: try: `arr.get(range)` error: indexing into a vector may panic --> $DIR/match_on_vec_items.rs:30:11 | LL | match arr[idx] { - | ^^^^^^^^ help: try this: `arr.get(idx)` + | ^^^^^^^^ help: try: `arr.get(idx)` error: indexing into a vector may panic --> $DIR/match_on_vec_items.rs:37:11 | LL | match arr[range] { - | ^^^^^^^^^^ help: try this: `arr.get(range)` + | ^^^^^^^^^^ help: try: `arr.get(range)` error: indexing into a vector may panic --> $DIR/match_on_vec_items.rs:50:11 | LL | match arr[idx] { - | ^^^^^^^^ help: try this: `arr.get(idx)` + | ^^^^^^^^ help: try: `arr.get(idx)` error: indexing into a vector may panic --> $DIR/match_on_vec_items.rs:57:11 | LL | match arr[range] { - | ^^^^^^^^^^ help: try this: `arr.get(range)` + | ^^^^^^^^^^ help: try: `arr.get(range)` error: indexing into a vector may panic --> $DIR/match_on_vec_items.rs:70:11 | LL | match arr[idx] { - | ^^^^^^^^ help: try this: `arr.get(idx)` + | ^^^^^^^^ help: try: `arr.get(idx)` error: indexing into a vector may panic --> $DIR/match_on_vec_items.rs:77:11 | LL | match arr[range] { - | ^^^^^^^^^^ help: try this: `arr.get(range)` + | ^^^^^^^^^^ help: try: `arr.get(range)` error: aborting due to 8 previous errors diff --git a/src/tools/clippy/tests/ui/match_ref_pats.stderr b/src/tools/clippy/tests/ui/match_ref_pats.stderr index 7d9646c842e..1294e0fe56f 100644 --- a/src/tools/clippy/tests/ui/match_ref_pats.stderr +++ b/src/tools/clippy/tests/ui/match_ref_pats.stderr @@ -35,7 +35,7 @@ error: redundant pattern matching, consider using `is_none()` --> $DIR/match_ref_pats.rs:38:12 | LL | if let &None = a { - | -------^^^^^---- help: try this: `if a.is_none()` + | -------^^^^^---- help: try: `if a.is_none()` | = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings` @@ -43,7 +43,7 @@ error: redundant pattern matching, consider using `is_none()` --> $DIR/match_ref_pats.rs:43:12 | LL | if let &None = &b { - | -------^^^^^----- help: try this: `if b.is_none()` + | -------^^^^^----- help: try: `if b.is_none()` error: you don't need to add `&` to all patterns --> $DIR/match_ref_pats.rs:103:9 diff --git a/src/tools/clippy/tests/ui/match_same_arms2.stderr b/src/tools/clippy/tests/ui/match_same_arms2.stderr index 7f0c70745ac..a7348187573 100644 --- a/src/tools/clippy/tests/ui/match_same_arms2.stderr +++ b/src/tools/clippy/tests/ui/match_same_arms2.stderr @@ -144,7 +144,7 @@ LL | | E::A => false, LL | | E::B => false, LL | | _ => true, LL | | }; - | |_____^ help: try this: `!matches!(x, E::A | E::B)` + | |_____^ help: try: `!matches!(x, E::A | E::B)` | = note: `-D clippy::match-like-matches-macro` implied by `-D warnings` diff --git 
a/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr index 105b4c4b41d..40ff4fbd316 100644 --- a/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr +++ b/src/tools/clippy/tests/ui/match_wildcard_for_single_variants.stderr @@ -2,7 +2,7 @@ error: wildcard matches only a single variant and will also match any future add --> $DIR/match_wildcard_for_single_variants.rs:24:13 | LL | _ => (), - | ^ help: try this: `Self::Rgb(..)` + | ^ help: try: `Self::Rgb(..)` | = note: `-D clippy::match-wildcard-for-single-variants` implied by `-D warnings` @@ -10,55 +10,55 @@ error: wildcard matches only a single variant and will also match any future add --> $DIR/match_wildcard_for_single_variants.rs:34:9 | LL | _ => {}, - | ^ help: try this: `Foo::C` + | ^ help: try: `Foo::C` error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:44:9 | LL | _ => {}, - | ^ help: try this: `Color::Blue` + | ^ help: try: `Color::Blue` error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:52:9 | LL | _ => {}, - | ^ help: try this: `Color::Blue` + | ^ help: try: `Color::Blue` error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:58:9 | LL | _ => {}, - | ^ help: try this: `Color::Blue` + | ^ help: try: `Color::Blue` error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:75:9 | LL | &_ => (), - | ^^ help: try this: `Color::Blue` + | ^^ help: try: `Color::Blue` error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:84:9 | LL | _ => (), - | ^ help: try this: `C::Blue` + | ^ help: try: `C::Blue` error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:91:9 | LL | _ => (), - | ^ help: try this: `Color::Blue` + | ^ help: try: `Color::Blue` error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:126:13 | LL | _ => (), - | ^ help: try this: `Enum::__Private` + | ^ help: try: `Enum::__Private` error: wildcard matches only a single variant and will also match any future added variants --> $DIR/match_wildcard_for_single_variants.rs:153:13 | LL | _ => 2, - | ^ help: try this: `Foo::B` + | ^ help: try: `Foo::B` error: aborting due to 10 previous errors diff --git a/src/tools/clippy/tests/ui/methods.rs b/src/tools/clippy/tests/ui/methods.rs index 589eab5cdfc..cb1f695c651 100644 --- a/src/tools/clippy/tests/ui/methods.rs +++ b/src/tools/clippy/tests/ui/methods.rs @@ -25,10 +25,7 @@ #[macro_use] extern crate option_helpers; -use std::collections::BTreeMap; -use std::collections::HashMap; -use std::collections::HashSet; -use std::collections::VecDeque; +use std::collections::{BTreeMap, HashMap, HashSet, VecDeque}; use std::ops::Mul; use std::rc::{self, Rc}; use std::sync::{self, Arc}; diff --git a/src/tools/clippy/tests/ui/methods.stderr b/src/tools/clippy/tests/ui/methods.stderr index 73ec48643e0..6be38b24fbd 100644 --- a/src/tools/clippy/tests/ui/methods.stderr +++ b/src/tools/clippy/tests/ui/methods.stderr @@ -1,5 +1,5 @@ error: methods called `new` usually return 
`Self` - --> $DIR/methods.rs:106:5 + --> $DIR/methods.rs:103:5 | LL | / fn new() -> i32 { LL | | 0 @@ -9,7 +9,7 @@ LL | | } = note: `-D clippy::new-ret-no-self` implied by `-D warnings` error: called `filter(..).next()` on an `Iterator`. This is more succinctly expressed by calling `.find(..)` instead - --> $DIR/methods.rs:127:13 + --> $DIR/methods.rs:124:13 | LL | let _ = v.iter().filter(|&x| { | _____________^ diff --git a/src/tools/clippy/tests/ui/methods_fixable.stderr b/src/tools/clippy/tests/ui/methods_fixable.stderr index 187714c75fb..6f45d100d28 100644 --- a/src/tools/clippy/tests/ui/methods_fixable.stderr +++ b/src/tools/clippy/tests/ui/methods_fixable.stderr @@ -2,7 +2,7 @@ error: called `filter(..).next()` on an `Iterator`. This is more succinctly expr --> $DIR/methods_fixable.rs:11:13 | LL | let _ = v.iter().filter(|&x| *x < 0).next(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `v.iter().find(|&x| *x < 0)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `v.iter().find(|&x| *x < 0)` | = note: `-D clippy::filter-next` implied by `-D warnings` diff --git a/src/tools/clippy/tests/ui/methods_unfixable.rs b/src/tools/clippy/tests/ui/methods_unfixable.rs new file mode 100644 index 00000000000..3d88ce4b6bb --- /dev/null +++ b/src/tools/clippy/tests/ui/methods_unfixable.rs @@ -0,0 +1,10 @@ +#![warn(clippy::filter_next)] + +fn main() { + issue10029(); +} + +pub fn issue10029() { + let iter = (0..10); + let _ = iter.filter(|_| true).next(); +} diff --git a/src/tools/clippy/tests/ui/methods_unfixable.stderr b/src/tools/clippy/tests/ui/methods_unfixable.stderr new file mode 100644 index 00000000000..6e101fe16b0 --- /dev/null +++ b/src/tools/clippy/tests/ui/methods_unfixable.stderr @@ -0,0 +1,15 @@ +error: called `filter(..).next()` on an `Iterator`. 
This is more succinctly expressed by calling `.find(..)` instead + --> $DIR/methods_unfixable.rs:9:13 + | +LL | let _ = iter.filter(|_| true).next(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `iter.find(|_| true)` + | +help: you will also need to make `iter` mutable, because `find` takes `&mut self` + --> $DIR/methods_unfixable.rs:8:9 + | +LL | let iter = (0..10); + | ^^^^ + = note: `-D clippy::filter-next` implied by `-D warnings` + +error: aborting due to previous error + diff --git a/src/tools/clippy/tests/ui/min_ident_chars.rs b/src/tools/clippy/tests/ui/min_ident_chars.rs index b5b9e66aa7a..0fab224a29d 100644 --- a/src/tools/clippy/tests/ui/min_ident_chars.rs +++ b/src/tools/clippy/tests/ui/min_ident_chars.rs @@ -3,8 +3,7 @@ #![warn(clippy::min_ident_chars)] extern crate proc_macros; -use proc_macros::external; -use proc_macros::with_span; +use proc_macros::{external, with_span}; struct A { a: u32, diff --git a/src/tools/clippy/tests/ui/min_ident_chars.stderr b/src/tools/clippy/tests/ui/min_ident_chars.stderr index 66a63f65756..4dff6588bb1 100644 --- a/src/tools/clippy/tests/ui/min_ident_chars.stderr +++ b/src/tools/clippy/tests/ui/min_ident_chars.stderr @@ -1,5 +1,5 @@ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:9:8 + --> $DIR/min_ident_chars.rs:8:8 | LL | struct A { | ^ @@ -7,169 +7,169 @@ LL | struct A { = note: `-D clippy::min-ident-chars` implied by `-D warnings` error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:10:5 + --> $DIR/min_ident_chars.rs:9:5 | LL | a: u32, | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:12:5 + --> $DIR/min_ident_chars.rs:11:5 | LL | A: u32, | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:13:5 + --> $DIR/min_ident_chars.rs:12:5 | LL | I: u32, | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:16:8 + --> $DIR/min_ident_chars.rs:15:8 | LL | struct B(u32); | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:18:8 + --> $DIR/min_ident_chars.rs:17:8 | LL | struct O { | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:19:5 + --> $DIR/min_ident_chars.rs:18:5 | LL | o: u32, | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:24:6 + --> $DIR/min_ident_chars.rs:23:6 | LL | enum C { | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:25:5 + --> $DIR/min_ident_chars.rs:24:5 | LL | D, | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:26:5 + --> $DIR/min_ident_chars.rs:25:5 | LL | E, | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:27:5 + --> $DIR/min_ident_chars.rs:26:5 | LL | F, | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:51:9 + --> $DIR/min_ident_chars.rs:50:9 | LL | let h = 1; | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:52:9 + --> $DIR/min_ident_chars.rs:51:9 | LL | let e = 2; | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:53:9 + --> $DIR/min_ident_chars.rs:52:9 | LL | let l = 3; | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:54:9 + --> $DIR/min_ident_chars.rs:53:9 | LL | let l = 4; | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:55:9 + --> $DIR/min_ident_chars.rs:54:9 | LL | let o = 6; | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:59:10 + --> $DIR/min_ident_chars.rs:58:10 | LL 
| let (h, o, w) = (1, 2, 3); | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:59:13 + --> $DIR/min_ident_chars.rs:58:13 | LL | let (h, o, w) = (1, 2, 3); | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:60:10 + --> $DIR/min_ident_chars.rs:59:10 | LL | for (a, (r, e)) in (0..1000).enumerate().enumerate() {} | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:60:14 + --> $DIR/min_ident_chars.rs:59:14 | LL | for (a, (r, e)) in (0..1000).enumerate().enumerate() {} | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:60:17 + --> $DIR/min_ident_chars.rs:59:17 | LL | for (a, (r, e)) in (0..1000).enumerate().enumerate() {} | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:62:16 + --> $DIR/min_ident_chars.rs:61:16 | LL | while let (d, o, _i, n, g) = (true, true, false, false, true) {} | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:62:19 + --> $DIR/min_ident_chars.rs:61:19 | LL | while let (d, o, _i, n, g) = (true, true, false, false, true) {} | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:62:29 + --> $DIR/min_ident_chars.rs:61:29 | LL | while let (d, o, _i, n, g) = (true, true, false, false, true) {} | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:66:9 + --> $DIR/min_ident_chars.rs:65:9 | LL | let o = 1; | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:67:9 + --> $DIR/min_ident_chars.rs:66:9 | LL | let o = O { o }; | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:81:4 + --> $DIR/min_ident_chars.rs:80:4 | LL | fn b() {} | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:82:21 + --> $DIR/min_ident_chars.rs:81:21 | LL | fn wrong_pythagoras(a: f32, b: f32) -> f32 { | ^ error: this ident consists of a single char - --> $DIR/min_ident_chars.rs:82:29 + --> $DIR/min_ident_chars.rs:81:29 | LL | fn wrong_pythagoras(a: f32, b: f32) -> f32 { | ^ diff --git a/src/tools/clippy/tests/ui/min_max.rs b/src/tools/clippy/tests/ui/min_max.rs index 24e52afd691..1215a02286c 100644 --- a/src/tools/clippy/tests/ui/min_max.rs +++ b/src/tools/clippy/tests/ui/min_max.rs @@ -1,9 +1,7 @@ #![warn(clippy::all)] #![allow(clippy::manual_clamp)] -use std::cmp::max as my_max; -use std::cmp::min as my_min; -use std::cmp::{max, min}; +use std::cmp::{max as my_max, max, min as my_min, min}; const LARGE: usize = 3; diff --git a/src/tools/clippy/tests/ui/min_max.stderr b/src/tools/clippy/tests/ui/min_max.stderr index 069d9068657..402b094f4f7 100644 --- a/src/tools/clippy/tests/ui/min_max.stderr +++ b/src/tools/clippy/tests/ui/min_max.stderr @@ -1,5 +1,5 @@ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:24:5 + --> $DIR/min_max.rs:22:5 | LL | min(1, max(3, x)); | ^^^^^^^^^^^^^^^^^ @@ -7,73 +7,73 @@ LL | min(1, max(3, x)); = note: `-D clippy::min-max` implied by `-D warnings` error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:25:5 + --> $DIR/min_max.rs:23:5 | LL | min(max(3, x), 1); | ^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:26:5 + --> $DIR/min_max.rs:24:5 | LL | max(min(x, 1), 3); | ^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:27:5 + --> $DIR/min_max.rs:25:5 | LL | max(3, min(x, 1)); | ^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to 
constant result - --> $DIR/min_max.rs:29:5 + --> $DIR/min_max.rs:27:5 | LL | my_max(3, my_min(x, 1)); | ^^^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:39:5 + --> $DIR/min_max.rs:37:5 | LL | min("Apple", max("Zoo", s)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:40:5 + --> $DIR/min_max.rs:38:5 | LL | max(min(s, "Apple"), "Zoo"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:45:5 + --> $DIR/min_max.rs:43:5 | LL | x.min(1).max(3); | ^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:46:5 + --> $DIR/min_max.rs:44:5 | LL | x.max(3).min(1); | ^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:47:5 + --> $DIR/min_max.rs:45:5 | LL | f.max(3f32).min(1f32); | ^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:53:5 + --> $DIR/min_max.rs:51:5 | LL | max(x.min(1), 3); | ^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:56:5 + --> $DIR/min_max.rs:54:5 | LL | s.max("Zoo").min("Apple"); | ^^^^^^^^^^^^^^^^^^^^^^^^^ error: this `min`/`max` combination leads to constant result - --> $DIR/min_max.rs:57:5 + --> $DIR/min_max.rs:55:5 | LL | s.min("Apple").max("Zoo"); | ^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/missing_doc.rs b/src/tools/clippy/tests/ui/missing_doc.rs index cff1706a842..83ebf09c8a3 100644 --- a/src/tools/clippy/tests/ui/missing_doc.rs +++ b/src/tools/clippy/tests/ui/missing_doc.rs @@ -96,10 +96,8 @@ mod internal_impl { } /// dox pub mod public_interface { - pub use crate::internal_impl::documented as foo; pub use crate::internal_impl::globbed::*; - pub use crate::internal_impl::undocumented1 as bar; - pub use crate::internal_impl::{documented, undocumented2}; + pub use crate::internal_impl::{documented as foo, documented, undocumented1 as bar, undocumented2}; } fn main() {} diff --git a/src/tools/clippy/tests/ui/missing_spin_loop.stderr b/src/tools/clippy/tests/ui/missing_spin_loop.stderr index 485da00dc64..5795c2c2190 100644 --- a/src/tools/clippy/tests/ui/missing_spin_loop.stderr +++ b/src/tools/clippy/tests/ui/missing_spin_loop.stderr @@ -2,7 +2,7 @@ error: busy-waiting loop should at least have a spin loop hint --> $DIR/missing_spin_loop.rs:11:37 | LL | while b.load(Ordering::Acquire) {} - | ^^ help: try this: `{ std::hint::spin_loop() }` + | ^^ help: try: `{ std::hint::spin_loop() }` | = note: `-D clippy::missing-spin-loop` implied by `-D warnings` @@ -10,31 +10,31 @@ error: busy-waiting loop should at least have a spin loop hint --> $DIR/missing_spin_loop.rs:13:37 | LL | while !b.load(Ordering::SeqCst) {} - | ^^ help: try this: `{ std::hint::spin_loop() }` + | ^^ help: try: `{ std::hint::spin_loop() }` error: busy-waiting loop should at least have a spin loop hint --> $DIR/missing_spin_loop.rs:15:46 | LL | while b.load(Ordering::Acquire) == false {} - | ^^ help: try this: `{ std::hint::spin_loop() }` + | ^^ help: try: `{ std::hint::spin_loop() }` error: busy-waiting loop should at least have a spin loop hint --> $DIR/missing_spin_loop.rs:17:49 | LL | while { true == b.load(Ordering::Acquire) } {} - | ^^ help: try this: `{ std::hint::spin_loop() }` + | ^^ help: try: `{ std::hint::spin_loop() }` error: busy-waiting loop should at least have a spin loop hint --> 
$DIR/missing_spin_loop.rs:19:93 | LL | while b.compare_exchange(true, false, Ordering::Acquire, Ordering::Relaxed) != Ok(true) {} - | ^^ help: try this: `{ std::hint::spin_loop() }` + | ^^ help: try: `{ std::hint::spin_loop() }` error: busy-waiting loop should at least have a spin loop hint --> $DIR/missing_spin_loop.rs:21:94 | LL | while Ok(false) != b.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed) {} - | ^^ help: try this: `{ std::hint::spin_loop() }` + | ^^ help: try: `{ std::hint::spin_loop() }` error: aborting due to 6 previous errors diff --git a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr index 2b3b6873c3c..3322a7aae5f 100644 --- a/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr +++ b/src/tools/clippy/tests/ui/missing_spin_loop_no_std.stderr @@ -2,7 +2,7 @@ error: busy-waiting loop should at least have a spin loop hint --> $DIR/missing_spin_loop_no_std.rs:13:37 | LL | while b.load(Ordering::Acquire) {} - | ^^ help: try this: `{ core::hint::spin_loop() }` + | ^^ help: try: `{ core::hint::spin_loop() }` | = note: `-D clippy::missing-spin-loop` implied by `-D warnings` diff --git a/src/tools/clippy/tests/ui/must_use_candidates.fixed b/src/tools/clippy/tests/ui/must_use_candidates.fixed index 0c275504d36..3ca20c07d9b 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.fixed +++ b/src/tools/clippy/tests/ui/must_use_candidates.fixed @@ -1,6 +1,11 @@ //@run-rustfix #![feature(never_type)] -#![allow(unused_mut, unused_tuple_struct_fields, clippy::redundant_allocation)] +#![allow( + unused_mut, + unused_tuple_struct_fields, + clippy::redundant_allocation, + clippy::needless_pass_by_ref_mut +)] #![warn(clippy::must_use_candidate)] use std::rc::Rc; use std::sync::atomic::{AtomicBool, Ordering}; diff --git a/src/tools/clippy/tests/ui/must_use_candidates.rs b/src/tools/clippy/tests/ui/must_use_candidates.rs index d1c9267732f..dc4e0118ec7 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.rs +++ b/src/tools/clippy/tests/ui/must_use_candidates.rs @@ -1,6 +1,11 @@ //@run-rustfix #![feature(never_type)] -#![allow(unused_mut, unused_tuple_struct_fields, clippy::redundant_allocation)] +#![allow( + unused_mut, + unused_tuple_struct_fields, + clippy::redundant_allocation, + clippy::needless_pass_by_ref_mut +)] #![warn(clippy::must_use_candidate)] use std::rc::Rc; use std::sync::atomic::{AtomicBool, Ordering}; diff --git a/src/tools/clippy/tests/ui/must_use_candidates.stderr b/src/tools/clippy/tests/ui/must_use_candidates.stderr index 0fa3849d03b..5fb302ccbf1 100644 --- a/src/tools/clippy/tests/ui/must_use_candidates.stderr +++ b/src/tools/clippy/tests/ui/must_use_candidates.stderr @@ -1,5 +1,5 @@ error: this function could have a `#[must_use]` attribute - --> $DIR/must_use_candidates.rs:12:1 + --> $DIR/must_use_candidates.rs:17:1 | LL | pub fn pure(i: u8) -> u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn pure(i: u8) -> u8` @@ -7,25 +7,25 @@ LL | pub fn pure(i: u8) -> u8 { = note: `-D clippy::must-use-candidate` implied by `-D warnings` error: this method could have a `#[must_use]` attribute - --> $DIR/must_use_candidates.rs:17:5 + --> $DIR/must_use_candidates.rs:22:5 | LL | pub fn inherent_pure(&self) -> u8 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn inherent_pure(&self) -> u8` error: this function could have a `#[must_use]` attribute - --> $DIR/must_use_candidates.rs:48:1 + --> $DIR/must_use_candidates.rs:53:1 | LL | 
pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn with_marker(_d: std::marker::PhantomData<&mut u32>) -> bool` error: this function could have a `#[must_use]` attribute - --> $DIR/must_use_candidates.rs:60:1 + --> $DIR/must_use_candidates.rs:65:1 | LL | pub fn rcd(_x: Rc<u32>) -> bool { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn rcd(_x: Rc<u32>) -> bool` error: this function could have a `#[must_use]` attribute - --> $DIR/must_use_candidates.rs:68:1 + --> $DIR/must_use_candidates.rs:73:1 | LL | pub fn arcd(_x: Arc<u32>) -> bool { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add the attribute: `#[must_use] pub fn arcd(_x: Arc<u32>) -> bool` diff --git a/src/tools/clippy/tests/ui/mut_from_ref.rs b/src/tools/clippy/tests/ui/mut_from_ref.rs index 7de15330594..8c0c23b6570 100644 --- a/src/tools/clippy/tests/ui/mut_from_ref.rs +++ b/src/tools/clippy/tests/ui/mut_from_ref.rs @@ -1,4 +1,4 @@ -#![allow(unused, clippy::needless_lifetimes)] +#![allow(unused, clippy::needless_lifetimes, clippy::needless_pass_by_ref_mut)] #![warn(clippy::mut_from_ref)] struct Foo; diff --git a/src/tools/clippy/tests/ui/mut_key.rs b/src/tools/clippy/tests/ui/mut_key.rs index 1c0ba664580..15d68c08984 100644 --- a/src/tools/clippy/tests/ui/mut_key.rs +++ b/src/tools/clippy/tests/ui/mut_key.rs @@ -2,7 +2,8 @@ use std::cell::Cell; use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; use std::hash::{Hash, Hasher}; use std::rc::Rc; -use std::sync::atomic::{AtomicUsize, Ordering::Relaxed}; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering::Relaxed; use std::sync::Arc; struct Key(AtomicUsize); diff --git a/src/tools/clippy/tests/ui/mut_key.stderr b/src/tools/clippy/tests/ui/mut_key.stderr index 25dd029b16e..02a0da86a4b 100644 --- a/src/tools/clippy/tests/ui/mut_key.stderr +++ b/src/tools/clippy/tests/ui/mut_key.stderr @@ -1,5 +1,5 @@ error: mutable key type - --> $DIR/mut_key.rs:30:32 + --> $DIR/mut_key.rs:31:32 | LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> HashSet<Key> { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -7,100 +7,108 @@ LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> Hash = note: `-D clippy::mutable-key-type` implied by `-D warnings` error: mutable key type - --> $DIR/mut_key.rs:30:72 + --> $DIR/mut_key.rs:31:72 | LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> HashSet<Key> { | ^^^^^^^^^^^^ +error: this argument is a mutable reference, but not used mutably + --> $DIR/mut_key.rs:31:32 + | +LL | fn should_not_take_this_arg(m: &mut HashMap<Key, usize>, _n: usize) -> HashSet<Key> { + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider changing to: `&HashMap<Key, usize>` + | + = note: `-D clippy::needless-pass-by-ref-mut` implied by `-D warnings` + error: mutable key type - --> $DIR/mut_key.rs:31:5 + --> $DIR/mut_key.rs:32:5 | LL | let _other: HashMap<Key, bool> = HashMap::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:58:22 + --> $DIR/mut_key.rs:59:22 | LL | fn tuples_bad<U>(_m: &mut HashMap<(Key, U), bool>) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:70:5 + --> $DIR/mut_key.rs:71:5 | LL | let _map = HashMap::<Cell<usize>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:71:5 + --> $DIR/mut_key.rs:72:5 | LL | let 
_map = HashMap::<&mut Cell<usize>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:72:5 + --> $DIR/mut_key.rs:73:5 | LL | let _map = HashMap::<&mut usize, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:74:5 + --> $DIR/mut_key.rs:75:5 | LL | let _map = HashMap::<Vec<Cell<usize>>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:75:5 + --> $DIR/mut_key.rs:76:5 | LL | let _map = HashMap::<BTreeMap<Cell<usize>, ()>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:76:5 + --> $DIR/mut_key.rs:77:5 | LL | let _map = HashMap::<BTreeMap<(), Cell<usize>>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:77:5 + --> $DIR/mut_key.rs:78:5 | LL | let _map = HashMap::<BTreeSet<Cell<usize>>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:78:5 + --> $DIR/mut_key.rs:79:5 | LL | let _map = HashMap::<Option<Cell<usize>>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:79:5 + --> $DIR/mut_key.rs:80:5 | LL | let _map = HashMap::<Option<Vec<Cell<usize>>>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:80:5 + --> $DIR/mut_key.rs:81:5 | LL | let _map = HashMap::<Result<&mut usize, ()>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:82:5 + --> $DIR/mut_key.rs:83:5 | LL | let _map = HashMap::<Box<Cell<usize>>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:83:5 + --> $DIR/mut_key.rs:84:5 | LL | let _map = HashMap::<Rc<Cell<usize>>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: mutable key type - --> $DIR/mut_key.rs:84:5 + --> $DIR/mut_key.rs:85:5 | LL | let _map = HashMap::<Arc<Cell<usize>>, usize>::new(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 17 previous errors +error: aborting due to 18 previous errors diff --git a/src/tools/clippy/tests/ui/mut_mut.rs b/src/tools/clippy/tests/ui/mut_mut.rs index b7213428367..fe7d53e8e99 100644 --- a/src/tools/clippy/tests/ui/mut_mut.rs +++ b/src/tools/clippy/tests/ui/mut_mut.rs @@ -1,7 +1,12 @@ //@aux-build:proc_macros.rs:proc-macro #![warn(clippy::mut_mut)] #![allow(unused)] -#![allow(clippy::no_effect, clippy::uninlined_format_args, clippy::unnecessary_operation)] +#![allow( + clippy::no_effect, + clippy::uninlined_format_args, + clippy::unnecessary_operation, + clippy::needless_pass_by_ref_mut +)] extern crate proc_macros; use proc_macros::{external, inline_macros}; diff --git a/src/tools/clippy/tests/ui/mut_mut.stderr b/src/tools/clippy/tests/ui/mut_mut.stderr index 93b857eb207..58a1c4e683c 100644 --- a/src/tools/clippy/tests/ui/mut_mut.stderr +++ b/src/tools/clippy/tests/ui/mut_mut.stderr @@ -1,5 +1,5 @@ error: generally you want to avoid `&mut &mut _` if possible - --> $DIR/mut_mut.rs:9:11 + --> $DIR/mut_mut.rs:14:11 | LL | fn fun(x: &mut &mut u32) -> bool { | ^^^^^^^^^^^^^ @@ -7,13 +7,13 @@ LL | fn fun(x: &mut &mut u32) -> bool { = note: `-D clippy::mut-mut` implied by `-D warnings` error: 
generally you want to avoid `&mut &mut _` if possible - --> $DIR/mut_mut.rs:26:17 + --> $DIR/mut_mut.rs:31:17 | LL | let mut x = &mut &mut 1u32; | ^^^^^^^^^^^^^^ error: generally you want to avoid `&mut &mut _` if possible - --> $DIR/mut_mut.rs:41:25 + --> $DIR/mut_mut.rs:46:25 | LL | let mut z = inline!(&mut $(&mut 3u32)); | ^ @@ -21,37 +21,37 @@ LL | let mut z = inline!(&mut $(&mut 3u32)); = note: this error originates in the macro `__inline_mac_fn_main` (in Nightly builds, run with -Z macro-backtrace for more info) error: this expression mutably borrows a mutable reference. Consider reborrowing - --> $DIR/mut_mut.rs:28:21 + --> $DIR/mut_mut.rs:33:21 | LL | let mut y = &mut x; | ^^^^^^ error: generally you want to avoid `&mut &mut _` if possible - --> $DIR/mut_mut.rs:32:32 + --> $DIR/mut_mut.rs:37:32 | LL | let y: &mut &mut u32 = &mut &mut 2; | ^^^^^^^^^^^ error: generally you want to avoid `&mut &mut _` if possible - --> $DIR/mut_mut.rs:32:16 + --> $DIR/mut_mut.rs:37:16 | LL | let y: &mut &mut u32 = &mut &mut 2; | ^^^^^^^^^^^^^ error: generally you want to avoid `&mut &mut _` if possible - --> $DIR/mut_mut.rs:37:37 + --> $DIR/mut_mut.rs:42:37 | LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2; | ^^^^^^^^^^^^^^^^ error: generally you want to avoid `&mut &mut _` if possible - --> $DIR/mut_mut.rs:37:16 + --> $DIR/mut_mut.rs:42:16 | LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2; | ^^^^^^^^^^^^^^^^^^ error: generally you want to avoid `&mut &mut _` if possible - --> $DIR/mut_mut.rs:37:21 + --> $DIR/mut_mut.rs:42:21 | LL | let y: &mut &mut &mut u32 = &mut &mut &mut 2; | ^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/mut_reference.stderr b/src/tools/clippy/tests/ui/mut_reference.stderr index 062d30b262c..23c812475c2 100644 --- a/src/tools/clippy/tests/ui/mut_reference.stderr +++ b/src/tools/clippy/tests/ui/mut_reference.stderr @@ -1,3 +1,17 @@ +error: this argument is a mutable reference, but not used mutably + --> $DIR/mut_reference.rs:4:33 + | +LL | fn takes_a_mutable_reference(a: &mut i32) {} + | ^^^^^^^^ help: consider changing to: `&i32` + | + = note: `-D clippy::needless-pass-by-ref-mut` implied by `-D warnings` + +error: this argument is a mutable reference, but not used mutably + --> $DIR/mut_reference.rs:11:44 + | +LL | fn takes_a_mutable_reference(&self, a: &mut i32) {} + | ^^^^^^^^ help: consider changing to: `&i32` + error: the function `takes_an_immutable_reference` doesn't need a mutable reference --> $DIR/mut_reference.rs:17:34 | @@ -18,5 +32,5 @@ error: the method `takes_an_immutable_reference` doesn't need a mutable referenc LL | my_struct.takes_an_immutable_reference(&mut 42); | ^^^^^^^ -error: aborting due to 3 previous errors +error: aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/needless_borrow.fixed b/src/tools/clippy/tests/ui/needless_borrow.fixed index 80cdb4e472d..1dfbee150d7 100644 --- a/src/tools/clippy/tests/ui/needless_borrow.fixed +++ b/src/tools/clippy/tests/ui/needless_borrow.fixed @@ -492,3 +492,15 @@ mod issue_9782_method_variant { S.foo::<&[u8; 100]>(&a); } } + +mod issue_10535 { + static SOME_STATIC: String = String::new(); + + static UNIT: () = compute(&SOME_STATIC); + + pub const fn compute<T>(_: T) + where + T: Copy, + { + } +} diff --git a/src/tools/clippy/tests/ui/needless_borrow.rs b/src/tools/clippy/tests/ui/needless_borrow.rs index 99f735127eb..3c0d73f5f02 100644 --- a/src/tools/clippy/tests/ui/needless_borrow.rs +++ b/src/tools/clippy/tests/ui/needless_borrow.rs @@ -492,3 +492,15 @@ mod issue_9782_method_variant 
{ S.foo::<&[u8; 100]>(&a); } } + +mod issue_10535 { + static SOME_STATIC: String = String::new(); + + static UNIT: () = compute(&SOME_STATIC); + + pub const fn compute<T>(_: T) + where + T: Copy, + { + } +} diff --git a/src/tools/clippy/tests/ui/needless_borrow_pat.stderr b/src/tools/clippy/tests/ui/needless_borrow_pat.stderr index db3b52b8850..2d9b8f15902 100644 --- a/src/tools/clippy/tests/ui/needless_borrow_pat.stderr +++ b/src/tools/clippy/tests/ui/needless_borrow_pat.stderr @@ -2,7 +2,7 @@ error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:59:14 | LL | Some(ref x) => x, - | ^^^^^ help: try this: `x` + | ^^^^^ help: try: `x` | = note: `-D clippy::needless-borrow` implied by `-D warnings` @@ -12,7 +12,7 @@ error: this pattern creates a reference to a reference LL | Some(ref x) => *x, | ^^^^^ | -help: try this +help: try | LL | Some(x) => x, | ~ ~ @@ -23,7 +23,7 @@ error: this pattern creates a reference to a reference LL | Some(ref x) => { | ^^^^^ | -help: try this +help: try | LL ~ Some(x) => { LL | f1(x); @@ -34,13 +34,13 @@ error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:81:14 | LL | Some(ref x) => m1!(x), - | ^^^^^ help: try this: `x` + | ^^^^^ help: try: `x` error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:86:15 | LL | let _ = |&ref x: &&String| { - | ^^^^^ help: try this: `x` + | ^^^^^ help: try: `x` error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:91:10 @@ -48,7 +48,7 @@ error: this pattern creates a reference to a reference LL | let (ref y,) = (&x,); | ^^^^^ | -help: try this +help: try | LL ~ let (y,) = (&x,); LL ~ let _: &String = y; @@ -58,7 +58,7 @@ error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:101:14 | LL | Some(ref x) => x.0, - | ^^^^^ help: try this: `x` + | ^^^^^ help: try: `x` error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:111:14 @@ -66,7 +66,7 @@ error: this pattern creates a reference to a reference LL | E::A(ref x) | E::B(ref x) => *x, | ^^^^^ ^^^^^ | -help: try this +help: try | LL | E::A(x) | E::B(x) => x, | ~ ~ ~ @@ -75,7 +75,7 @@ error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:117:21 | LL | if let Some(ref x) = Some(&String::new()); - | ^^^^^ help: try this: `x` + | ^^^^^ help: try: `x` error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:125:12 @@ -83,7 +83,7 @@ error: this pattern creates a reference to a reference LL | fn f2<'a>(&ref x: &&'a String) -> &'a String { | ^^^^^ | -help: try this +help: try | LL ~ fn f2<'a>(&x: &&'a String) -> &'a String { LL | let _: &String = x; @@ -94,7 +94,7 @@ error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:132:11 | LL | fn f(&ref x: &&String) { - | ^^^^^ help: try this: `x` + | ^^^^^ help: try: `x` error: this pattern creates a reference to a reference --> $DIR/needless_borrow_pat.rs:140:11 @@ -102,7 +102,7 @@ error: this pattern creates a reference to a reference LL | fn f(&ref x: &&String) { | ^^^^^ | -help: try this +help: try | LL ~ fn f(&x: &&String) { LL ~ let _: &String = x; diff --git a/src/tools/clippy/tests/ui/needless_else.stderr b/src/tools/clippy/tests/ui/needless_else.stderr index ea693085164..49cd78501ea 100644 --- a/src/tools/clippy/tests/ui/needless_else.stderr +++ b/src/tools/clippy/tests/ui/needless_else.stderr @@ -1,4 +1,4 @@ -error: this else branch is empty +error: 
this `else` branch is empty --> $DIR/needless_else.rs:24:7 | LL | } else { diff --git a/src/tools/clippy/tests/ui/needless_if.fixed b/src/tools/clippy/tests/ui/needless_if.fixed index 5e6e140c2db..6001c9e9301 100644 --- a/src/tools/clippy/tests/ui/needless_if.fixed +++ b/src/tools/clippy/tests/ui/needless_if.fixed @@ -16,8 +16,7 @@ #![warn(clippy::needless_if)] extern crate proc_macros; -use proc_macros::external; -use proc_macros::with_span; +use proc_macros::{external, with_span}; fn maybe_side_effect() -> bool { true diff --git a/src/tools/clippy/tests/ui/needless_if.rs b/src/tools/clippy/tests/ui/needless_if.rs index eb28ce73be8..c6be4766dd8 100644 --- a/src/tools/clippy/tests/ui/needless_if.rs +++ b/src/tools/clippy/tests/ui/needless_if.rs @@ -16,8 +16,7 @@ #![warn(clippy::needless_if)] extern crate proc_macros; -use proc_macros::external; -use proc_macros::with_span; +use proc_macros::{external, with_span}; fn maybe_side_effect() -> bool { true diff --git a/src/tools/clippy/tests/ui/needless_if.stderr b/src/tools/clippy/tests/ui/needless_if.stderr index 5cb42c36921..14de400953b 100644 --- a/src/tools/clippy/tests/ui/needless_if.stderr +++ b/src/tools/clippy/tests/ui/needless_if.stderr @@ -1,5 +1,5 @@ error: this `if` branch is empty - --> $DIR/needless_if.rs:28:5 + --> $DIR/needless_if.rs:27:5 | LL | if (true) {} | ^^^^^^^^^^^^ help: you can remove it @@ -7,13 +7,13 @@ LL | if (true) {} = note: `-D clippy::needless-if` implied by `-D warnings` error: this `if` branch is empty - --> $DIR/needless_if.rs:30:5 + --> $DIR/needless_if.rs:29:5 | LL | if maybe_side_effect() {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `maybe_side_effect();` error: this `if` branch is empty - --> $DIR/needless_if.rs:35:5 + --> $DIR/needless_if.rs:34:5 | LL | / if { LL | | return; @@ -28,7 +28,7 @@ LL + }); | error: this `if` branch is empty - --> $DIR/needless_if.rs:47:5 + --> $DIR/needless_if.rs:46:5 | LL | / if { LL | | if let true = true && true { true } else { false } @@ -44,19 +44,19 @@ LL + } && true); | error: this `if` branch is empty - --> $DIR/needless_if.rs:85:5 + --> $DIR/needless_if.rs:84:5 | LL | if { maybe_side_effect() } {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `({ maybe_side_effect() });` error: this `if` branch is empty - --> $DIR/needless_if.rs:87:5 + --> $DIR/needless_if.rs:86:5 | LL | if { maybe_side_effect() } && true {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can remove it: `({ maybe_side_effect() } && true);` error: this `if` branch is empty - --> $DIR/needless_if.rs:91:5 + --> $DIR/needless_if.rs:90:5 | LL | if true {} | ^^^^^^^^^^ help: you can remove it: `true;` diff --git a/src/tools/clippy/tests/ui/needless_option_as_deref.stderr b/src/tools/clippy/tests/ui/needless_option_as_deref.stderr index 20d28a968c9..4c0d502a203 100644 --- a/src/tools/clippy/tests/ui/needless_option_as_deref.stderr +++ b/src/tools/clippy/tests/ui/needless_option_as_deref.stderr @@ -2,7 +2,7 @@ error: derefed type is same as origin --> $DIR/needless_option_as_deref.rs:9:29 | LL | let _: Option<&usize> = Some(&1).as_deref(); - | ^^^^^^^^^^^^^^^^^^^ help: try this: `Some(&1)` + | ^^^^^^^^^^^^^^^^^^^ help: try: `Some(&1)` | = note: `-D clippy::needless-option-as-deref` implied by `-D warnings` @@ -10,13 +10,13 @@ error: derefed type is same as origin --> $DIR/needless_option_as_deref.rs:10:33 | LL | let _: Option<&mut usize> = Some(&mut 1).as_deref_mut(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Some(&mut 1)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Some(&mut 
1)` error: derefed type is same as origin --> $DIR/needless_option_as_deref.rs:14:13 | LL | let _ = x.as_deref_mut(); - | ^^^^^^^^^^^^^^^^ help: try this: `x` + | ^^^^^^^^^^^^^^^^ help: try: `x` error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs new file mode 100644 index 00000000000..5e7280995c6 --- /dev/null +++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.rs @@ -0,0 +1,105 @@ +#![allow(unused)] + +use std::ptr::NonNull; + +// Should only warn for `s`. +fn foo(s: &mut Vec<u32>, b: &u32, x: &mut u32) { + *x += *b + s.len() as u32; +} + +// Should not warn. +fn foo2(s: &mut Vec<u32>) { + s.push(8); +} + +// Should not warn because we return it. +fn foo3(s: &mut Vec<u32>) -> &mut Vec<u32> { + s +} + +// Should not warn because `s` is used as mutable. +fn foo4(s: &mut Vec<u32>) { + Vec::push(s, 4); +} + +// Should not warn. +fn foo5(s: &mut Vec<u32>) { + foo2(s); +} + +// Should warn. +fn foo6(s: &mut Vec<u32>) { + non_mut_ref(s); +} + +fn non_mut_ref(_: &Vec<u32>) {} + +struct Bar; + +impl Bar { + // Should not warn on `&mut self`. + fn bar(&mut self) {} + + // Should warn about `vec` + fn mushroom(&self, vec: &mut Vec<i32>) -> usize { + vec.len() + } + + // Should warn about `vec` (and not `self`). + fn badger(&mut self, vec: &mut Vec<i32>) -> usize { + vec.len() + } +} + +trait Babar { + // Should not warn here since it's a trait method. + fn method(arg: &mut u32); +} + +impl Babar for Bar { + // Should not warn here since it's a trait method. + fn method(a: &mut u32) {} +} + +// Should not warn (checking variable aliasing). +fn alias_check(s: &mut Vec<u32>) { + let mut alias = s; + let mut alias2 = alias; + let mut alias3 = alias2; + alias3.push(0); +} + +// Should not warn (checking variable aliasing). +fn alias_check2(mut s: &mut Vec<u32>) { + let mut alias = &mut s; + alias.push(0); +} + +struct Mut<T> { + ptr: NonNull<T>, +} + +impl<T> Mut<T> { + // Should not warn because `NonNull::from` also accepts `&mut`. + fn new(ptr: &mut T) -> Self { + Mut { + ptr: NonNull::from(ptr), + } + } +} + +// Should not warn. 
+fn unused(_: &mut u32, _b: &mut u8) {} + +fn main() { + let mut u = 0; + let mut v = vec![0]; + foo(&mut v, &0, &mut u); + foo2(&mut v); + foo3(&mut v); + foo4(&mut v); + foo5(&mut v); + alias_check(&mut v); + alias_check2(&mut v); + println!("{u}"); +} diff --git a/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr new file mode 100644 index 00000000000..5e9d80bb6c4 --- /dev/null +++ b/src/tools/clippy/tests/ui/needless_pass_by_ref_mut.stderr @@ -0,0 +1,28 @@ +error: this argument is a mutable reference, but not used mutably + --> $DIR/needless_pass_by_ref_mut.rs:6:11 + | +LL | fn foo(s: &mut Vec<u32>, b: &u32, x: &mut u32) { + | ^^^^^^^^^^^^^ help: consider changing to: `&Vec<u32>` + | + = note: `-D clippy::needless-pass-by-ref-mut` implied by `-D warnings` + +error: this argument is a mutable reference, but not used mutably + --> $DIR/needless_pass_by_ref_mut.rs:31:12 + | +LL | fn foo6(s: &mut Vec<u32>) { + | ^^^^^^^^^^^^^ help: consider changing to: `&Vec<u32>` + +error: this argument is a mutable reference, but not used mutably + --> $DIR/needless_pass_by_ref_mut.rs:44:29 + | +LL | fn mushroom(&self, vec: &mut Vec<i32>) -> usize { + | ^^^^^^^^^^^^^ help: consider changing to: `&Vec<i32>` + +error: this argument is a mutable reference, but not used mutably + --> $DIR/needless_pass_by_ref_mut.rs:49:31 + | +LL | fn badger(&mut self, vec: &mut Vec<i32>) -> usize { + | ^^^^^^^^^^^^^ help: consider changing to: `&Vec<i32>` + +error: aborting due to 4 previous errors + diff --git a/src/tools/clippy/tests/ui/needless_splitn.stderr b/src/tools/clippy/tests/ui/needless_splitn.stderr index f607d8e1ab5..0005f758104 100644 --- a/src/tools/clippy/tests/ui/needless_splitn.stderr +++ b/src/tools/clippy/tests/ui/needless_splitn.stderr @@ -2,7 +2,7 @@ error: unnecessary use of `splitn` --> $DIR/needless_splitn.rs:14:13 | LL | let _ = str.splitn(2, '=').next(); - | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')` + | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')` | = note: `-D clippy::needless-splitn` implied by `-D warnings` @@ -10,73 +10,73 @@ error: unnecessary use of `splitn` --> $DIR/needless_splitn.rs:15:13 | LL | let _ = str.splitn(2, '=').nth(0); - | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')` + | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')` error: unnecessary use of `splitn` --> $DIR/needless_splitn.rs:18:18 | LL | let (_, _) = str.splitn(3, '=').next_tuple().unwrap(); - | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')` + | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')` error: unnecessary use of `rsplitn` --> $DIR/needless_splitn.rs:21:13 | LL | let _ = str.rsplitn(2, '=').next(); - | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')` + | ^^^^^^^^^^^^^^^^^^^ help: try: `str.rsplit('=')` error: unnecessary use of `rsplitn` --> $DIR/needless_splitn.rs:22:13 | LL | let _ = str.rsplitn(2, '=').nth(0); - | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')` + | ^^^^^^^^^^^^^^^^^^^ help: try: `str.rsplit('=')` error: unnecessary use of `rsplitn` --> $DIR/needless_splitn.rs:25:18 | LL | let (_, _) = str.rsplitn(3, '=').next_tuple().unwrap(); - | ^^^^^^^^^^^^^^^^^^^ help: try this: `str.rsplit('=')` + | ^^^^^^^^^^^^^^^^^^^ help: try: `str.rsplit('=')` error: unnecessary use of `splitn` --> $DIR/needless_splitn.rs:27:13 | LL | let _ = str.splitn(5, '=').next(); - | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')` + | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')` error: unnecessary use of `splitn` --> 
$DIR/needless_splitn.rs:28:13 | LL | let _ = str.splitn(5, '=').nth(3); - | ^^^^^^^^^^^^^^^^^^ help: try this: `str.split('=')` + | ^^^^^^^^^^^^^^^^^^ help: try: `str.split('=')` error: unnecessary use of `splitn` --> $DIR/needless_splitn.rs:34:13 | LL | let _ = s.splitn(2, '=').next()?; - | ^^^^^^^^^^^^^^^^ help: try this: `s.split('=')` + | ^^^^^^^^^^^^^^^^ help: try: `s.split('=')` error: unnecessary use of `splitn` --> $DIR/needless_splitn.rs:35:13 | LL | let _ = s.splitn(2, '=').nth(0)?; - | ^^^^^^^^^^^^^^^^ help: try this: `s.split('=')` + | ^^^^^^^^^^^^^^^^ help: try: `s.split('=')` error: unnecessary use of `rsplitn` --> $DIR/needless_splitn.rs:36:13 | LL | let _ = s.rsplitn(2, '=').next()?; - | ^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit('=')` + | ^^^^^^^^^^^^^^^^^ help: try: `s.rsplit('=')` error: unnecessary use of `rsplitn` --> $DIR/needless_splitn.rs:37:13 | LL | let _ = s.rsplitn(2, '=').nth(0)?; - | ^^^^^^^^^^^^^^^^^ help: try this: `s.rsplit('=')` + | ^^^^^^^^^^^^^^^^^ help: try: `s.rsplit('=')` error: unnecessary use of `splitn` --> $DIR/needless_splitn.rs:45:13 | LL | let _ = "key=value".splitn(2, '=').nth(0).unwrap(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `"key=value".split('=')` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"key=value".split('=')` error: aborting due to 13 previous errors diff --git a/src/tools/clippy/tests/ui/numbered_fields.stderr b/src/tools/clippy/tests/ui/numbered_fields.stderr index 60c0d789806..26f7ad9048b 100644 --- a/src/tools/clippy/tests/ui/numbered_fields.stderr +++ b/src/tools/clippy/tests/ui/numbered_fields.stderr @@ -7,7 +7,7 @@ LL | | 0: 1u32, LL | | 1: 42, LL | | 2: 23u8, LL | | }; - | |_____^ help: try this instead: `TupleStruct(1u32, 42, 23u8)` + | |_____^ help: try: `TupleStruct(1u32, 42, 23u8)` | = note: `-D clippy::init-numbered-fields` implied by `-D warnings` @@ -20,7 +20,7 @@ LL | | 0: 1u32, LL | | 2: 2u8, LL | | 1: 3u32, LL | | }; - | |_____^ help: try this instead: `TupleStruct(1u32, 3u32, 2u8)` + | |_____^ help: try: `TupleStruct(1u32, 3u32, 2u8)` error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr index 0305387b9f8..5be5f10b017 100644 --- a/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr +++ b/src/tools/clippy/tests/ui/option_map_unit_fn_fixable.stderr @@ -4,7 +4,7 @@ error: called `map(f)` on an `Option` value where `f` is a function that returns LL | x.field.map(do_nothing); | ^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(x_field) = x.field { do_nothing(x_field) }` + | help: try: `if let Some(x_field) = x.field { do_nothing(x_field) }` | = note: `-D clippy::option-map-unit-fn` implied by `-D warnings` @@ -14,7 +14,7 @@ error: called `map(f)` on an `Option` value where `f` is a function that returns LL | x.field.map(do_nothing); | ^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(x_field) = x.field { do_nothing(x_field) }` + | help: try: `if let Some(x_field) = x.field { do_nothing(x_field) }` error: called `map(f)` on an `Option` value where `f` is a function that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:42:5 @@ -22,7 +22,7 @@ error: called `map(f)` on an `Option` value where `f` is a function that returns LL | x.field.map(diverge); | ^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(x_field) = x.field { diverge(x_field) }` + | help: try: `if let Some(x_field) = x.field { diverge(x_field) }` error: called `map(f)` on an 
`Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:48:5 @@ -30,7 +30,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| x.do_option_nothing(value + captured)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { x.do_option_nothing(value + captured) }` + | help: try: `if let Some(value) = x.field { x.do_option_nothing(value + captured) }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:50:5 @@ -38,7 +38,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { x.do_option_plus_one(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { x.do_option_plus_one(value + captured); }` + | help: try: `if let Some(value) = x.field { x.do_option_plus_one(value + captured); }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:53:5 @@ -46,7 +46,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| do_nothing(value + captured)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { do_nothing(value + captured) }` + | help: try: `if let Some(value) = x.field { do_nothing(value + captured) }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:55:5 @@ -54,7 +54,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { do_nothing(value + captured) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { do_nothing(value + captured) }` + | help: try: `if let Some(value) = x.field { do_nothing(value + captured) }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:57:5 @@ -62,7 +62,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { do_nothing(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { do_nothing(value + captured); }` + | help: try: `if let Some(value) = x.field { do_nothing(value + captured); }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:59:5 @@ -70,7 +70,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { { do_nothing(value + captured); } }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { do_nothing(value + captured); }` + | help: try: `if let Some(value) = x.field { do_nothing(value + captured); }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:62:5 @@ -78,7 +78,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| diverge(value + captured)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | 
help: try this: `if let Some(value) = x.field { diverge(value + captured) }` + | help: try: `if let Some(value) = x.field { diverge(value + captured) }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:64:5 @@ -86,7 +86,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { diverge(value + captured) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { diverge(value + captured) }` + | help: try: `if let Some(value) = x.field { diverge(value + captured) }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:66:5 @@ -94,7 +94,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { diverge(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { diverge(value + captured); }` + | help: try: `if let Some(value) = x.field { diverge(value + captured); }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:68:5 @@ -102,7 +102,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { { diverge(value + captured); } }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { diverge(value + captured); }` + | help: try: `if let Some(value) = x.field { diverge(value + captured); }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:73:5 @@ -110,7 +110,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { let y = plus_one(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { let y = plus_one(value + captured); }` + | help: try: `if let Some(value) = x.field { let y = plus_one(value + captured); }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:75:5 @@ -118,7 +118,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { plus_one(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { plus_one(value + captured); }` + | help: try: `if let Some(value) = x.field { plus_one(value + captured); }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:77:5 @@ -126,7 +126,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | x.field.map(|value| { { plus_one(value + captured); } }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = x.field { plus_one(value + captured); }` + | help: try: `if let Some(value) = x.field { plus_one(value + captured); }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:80:5 @@ -134,7 +134,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that 
returns LL | x.field.map(|ref value| { do_nothing(value + captured) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(ref value) = x.field { do_nothing(value + captured) }` + | help: try: `if let Some(ref value) = x.field { do_nothing(value + captured) }` error: called `map(f)` on an `Option` value where `f` is a function that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:82:5 @@ -142,7 +142,7 @@ error: called `map(f)` on an `Option` value where `f` is a function that returns LL | option().map(do_nothing); | ^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(a) = option() { do_nothing(a) }` + | help: try: `if let Some(a) = option() { do_nothing(a) }` error: called `map(f)` on an `Option` value where `f` is a closure that returns the unit type `()` --> $DIR/option_map_unit_fn_fixable.rs:84:5 @@ -150,7 +150,7 @@ error: called `map(f)` on an `Option` value where `f` is a closure that returns LL | option().map(|value| println!("{:?}", value)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Some(value) = option() { println!("{:?}", value) }` + | help: try: `if let Some(value) = option() { println!("{:?}", value) }` error: aborting due to 19 previous errors diff --git a/src/tools/clippy/tests/ui/or_fun_call.fixed b/src/tools/clippy/tests/ui/or_fun_call.fixed index 703debb7a26..6deff0f3240 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.fixed +++ b/src/tools/clippy/tests/ui/or_fun_call.fixed @@ -9,8 +9,7 @@ clippy::useless_vec )] -use std::collections::BTreeMap; -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use std::time::Duration; /// Checks implementation of the `OR_FUN_CALL` lint. diff --git a/src/tools/clippy/tests/ui/or_fun_call.rs b/src/tools/clippy/tests/ui/or_fun_call.rs index bb86fe0d45f..b05b33e6ee2 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.rs +++ b/src/tools/clippy/tests/ui/or_fun_call.rs @@ -9,8 +9,7 @@ clippy::useless_vec )] -use std::collections::BTreeMap; -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use std::time::Duration; /// Checks implementation of the `OR_FUN_CALL` lint. 
diff --git a/src/tools/clippy/tests/ui/or_fun_call.stderr b/src/tools/clippy/tests/ui/or_fun_call.stderr index 0b5c686bec0..7342b0c2914 100644 --- a/src/tools/clippy/tests/ui/or_fun_call.stderr +++ b/src/tools/clippy/tests/ui/or_fun_call.stderr @@ -1,172 +1,172 @@ error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:54:22 + --> $DIR/or_fun_call.rs:53:22 | LL | with_constructor.unwrap_or(make()); - | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(make)` + | ^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(make)` | = note: `-D clippy::or-fun-call` implied by `-D warnings` error: use of `unwrap_or` followed by a call to `new` - --> $DIR/or_fun_call.rs:57:14 + --> $DIR/or_fun_call.rs:56:14 | LL | with_new.unwrap_or(Vec::new()); - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:60:21 + --> $DIR/or_fun_call.rs:59:21 | LL | with_const_args.unwrap_or(Vec::with_capacity(12)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| Vec::with_capacity(12))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| Vec::with_capacity(12))` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:63:14 + --> $DIR/or_fun_call.rs:62:14 | LL | with_err.unwrap_or(make()); - | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| make())` + | ^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|_| make())` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:66:19 + --> $DIR/or_fun_call.rs:65:19 | LL | with_err_args.unwrap_or(Vec::with_capacity(12)); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|_| Vec::with_capacity(12))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|_| Vec::with_capacity(12))` error: use of `unwrap_or` followed by a call to `default` - --> $DIR/or_fun_call.rs:69:24 + --> $DIR/or_fun_call.rs:68:24 | LL | with_default_trait.unwrap_or(Default::default()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by a call to `default` - --> $DIR/or_fun_call.rs:72:23 + --> $DIR/or_fun_call.rs:71:23 | LL | with_default_type.unwrap_or(u64::default()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:75:18 + --> $DIR/or_fun_call.rs:74:18 | LL | self_default.unwrap_or(<FakeDefault>::default()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(<FakeDefault>::default)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(<FakeDefault>::default)` error: use of `unwrap_or` followed by a call to `default` - --> $DIR/or_fun_call.rs:78:18 + --> $DIR/or_fun_call.rs:77:18 | LL | real_default.unwrap_or(<FakeDefault as Default>::default()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by a call to `new` - --> $DIR/or_fun_call.rs:81:14 + --> $DIR/or_fun_call.rs:80:14 | LL | with_vec.unwrap_or(vec![]); - | ^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by 
a function call - --> $DIR/or_fun_call.rs:84:21 + --> $DIR/or_fun_call.rs:83:21 | LL | without_default.unwrap_or(Foo::new()); - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(Foo::new)` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(Foo::new)` error: use of `or_insert` followed by a call to `new` - --> $DIR/or_fun_call.rs:87:19 + --> $DIR/or_fun_call.rs:86:19 | LL | map.entry(42).or_insert(String::new()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `or_insert` followed by a call to `new` - --> $DIR/or_fun_call.rs:90:23 + --> $DIR/or_fun_call.rs:89:23 | LL | map_vec.entry(42).or_insert(vec![]); - | ^^^^^^^^^^^^^^^^^ help: try this: `or_default()` + | ^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `or_insert` followed by a call to `new` - --> $DIR/or_fun_call.rs:93:21 + --> $DIR/or_fun_call.rs:92:21 | LL | btree.entry(42).or_insert(String::new()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `or_insert` followed by a call to `new` - --> $DIR/or_fun_call.rs:96:25 + --> $DIR/or_fun_call.rs:95:25 | LL | btree_vec.entry(42).or_insert(vec![]); - | ^^^^^^^^^^^^^^^^^ help: try this: `or_default()` + | ^^^^^^^^^^^^^^^^^ help: try: `or_default()` error: use of `unwrap_or` followed by a call to `new` - --> $DIR/or_fun_call.rs:99:21 + --> $DIR/or_fun_call.rs:98:21 | LL | let _ = stringy.unwrap_or(String::new()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:107:21 + --> $DIR/or_fun_call.rs:106:21 | LL | let _ = Some(1).unwrap_or(map[&1]); - | ^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| map[&1])` + | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| map[&1])` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:109:21 + --> $DIR/or_fun_call.rs:108:21 | LL | let _ = Some(1).unwrap_or(map[&1]); - | ^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| map[&1])` + | ^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| map[&1])` error: use of `or` followed by a function call - --> $DIR/or_fun_call.rs:133:35 + --> $DIR/or_fun_call.rs:132:35 | LL | let _ = Some("a".to_string()).or(Some("b".to_string())); - | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `or_else(|| Some("b".to_string()))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `or_else(|| Some("b".to_string()))` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:172:14 + --> $DIR/or_fun_call.rs:171:14 | LL | None.unwrap_or(ptr_to_ref(s)); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| ptr_to_ref(s))` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| ptr_to_ref(s))` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:178:14 + --> $DIR/or_fun_call.rs:177:14 | LL | None.unwrap_or(unsafe { ptr_to_ref(s) }); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })` error: use of `unwrap_or` followed by a function call - --> $DIR/or_fun_call.rs:180:14 + --> $DIR/or_fun_call.rs:179:14 | LL | None.unwrap_or( unsafe { ptr_to_ref(s) } ); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })` + | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| unsafe { ptr_to_ref(s) })` error: use of `unwrap_or` followed by a call to `new` - --> $DIR/or_fun_call.rs:194:14 + --> $DIR/or_fun_call.rs:193:14 | LL | .unwrap_or(String::new()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by a call to `new` - --> $DIR/or_fun_call.rs:207:14 + --> $DIR/or_fun_call.rs:206:14 | LL | .unwrap_or(String::new()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by a call to `new` - --> $DIR/or_fun_call.rs:219:14 + --> $DIR/or_fun_call.rs:218:14 | LL | .unwrap_or(String::new()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `unwrap_or` followed by a call to `new` - --> $DIR/or_fun_call.rs:230:10 + --> $DIR/or_fun_call.rs:229:10 | LL | .unwrap_or(String::new()); - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_default()` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_default()` error: use of `map_or` followed by a function call - --> $DIR/or_fun_call.rs:255:25 + --> $DIR/or_fun_call.rs:254:25 | LL | let _ = Some(4).map_or(g(), |v| v); - | ^^^^^^^^^^^^^^^^^^ help: try this: `map_or_else(g, |v| v)` + | ^^^^^^^^^^^^^^^^^^ help: try: `map_or_else(g, |v| v)` error: use of `map_or` followed by a function call - --> $DIR/or_fun_call.rs:256:25 + --> $DIR/or_fun_call.rs:255:25 | LL | let _ = Some(4).map_or(g(), f); - | ^^^^^^^^^^^^^^ help: try this: `map_or_else(g, f)` + | ^^^^^^^^^^^^^^ help: try: `map_or_else(g, f)` error: aborting due to 28 previous errors diff --git a/src/tools/clippy/tests/ui/or_then_unwrap.stderr b/src/tools/clippy/tests/ui/or_then_unwrap.stderr index da88154c59f..2a1a52407dc 100644 --- a/src/tools/clippy/tests/ui/or_then_unwrap.stderr +++ b/src/tools/clippy/tests/ui/or_then_unwrap.stderr @@ -2,7 +2,7 @@ error: found `.or(Some(…)).unwrap()` --> $DIR/or_then_unwrap.rs:24:20 | LL | let _ = option.or(Some("fallback")).unwrap(); // should trigger lint - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or("fallback")` | = note: `-D clippy::or-then-unwrap` implied by `-D warnings` @@ -10,13 +10,13 @@ error: found `.or(Ok(…)).unwrap()` --> $DIR/or_then_unwrap.rs:27:20 | LL | let _ = result.or::<&str>(Ok("fallback")).unwrap(); // should trigger lint - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or("fallback")` error: found `.or(Some(…)).unwrap()` --> $DIR/or_then_unwrap.rs:31:31 | LL | let _ = option.map(|v| v).or(Some("fallback")).unwrap().to_string().chars(); // should trigger lint - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or("fallback")` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or("fallback")` error: aborting due to 3 previous errors diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn.stderr b/src/tools/clippy/tests/ui/panic_in_result_fn.stderr index 97787bc84e2..b758fc23812 100644 --- a/src/tools/clippy/tests/ui/panic_in_result_fn.stderr +++ b/src/tools/clippy/tests/ui/panic_in_result_fn.stderr @@ -1,4 +1,4 @@ -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` +error: used 
`panic!()` or assertion in a function that returns `Result` --> $DIR/panic_in_result_fn.rs:6:5 | LL | / fn result_with_panic() -> Result<bool, String> // should emit lint @@ -7,7 +7,7 @@ LL | | panic!("error"); LL | | } | |_____^ | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing + = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing note: return Err() instead of panicking --> $DIR/panic_in_result_fn.rs:8:9 | @@ -15,55 +15,7 @@ LL | panic!("error"); | ^^^^^^^^^^^^^^^ = note: `-D clippy::panic-in-result-fn` implied by `-D warnings` -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` - --> $DIR/panic_in_result_fn.rs:11:5 - | -LL | / fn result_with_unimplemented() -> Result<bool, String> // should emit lint -LL | | { -LL | | unimplemented!(); -LL | | } - | |_____^ - | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing -note: return Err() instead of panicking - --> $DIR/panic_in_result_fn.rs:13:9 - | -LL | unimplemented!(); - | ^^^^^^^^^^^^^^^^ - -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` - --> $DIR/panic_in_result_fn.rs:16:5 - | -LL | / fn result_with_unreachable() -> Result<bool, String> // should emit lint -LL | | { -LL | | unreachable!(); -LL | | } - | |_____^ - | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing -note: return Err() instead of panicking - --> $DIR/panic_in_result_fn.rs:18:9 - | -LL | unreachable!(); - | ^^^^^^^^^^^^^^ - -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` - --> $DIR/panic_in_result_fn.rs:21:5 - | -LL | / fn result_with_todo() -> Result<bool, String> // should emit lint -LL | | { -LL | | todo!("Finish this"); -LL | | } - | |_____^ - | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing -note: return Err() instead of panicking - --> $DIR/panic_in_result_fn.rs:23:9 - | -LL | todo!("Finish this"); - | ^^^^^^^^^^^^^^^^^^^^ - -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` +error: used `panic!()` or assertion in a function that returns `Result` --> $DIR/panic_in_result_fn.rs:52:1 | LL | / fn function_result_with_panic() -> Result<bool, String> // should emit lint @@ -72,28 +24,12 @@ LL | | panic!("error"); LL | | } | |_^ | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing + = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing note: return Err() instead of panicking --> $DIR/panic_in_result_fn.rs:54:5 | LL | panic!("error"); | 
^^^^^^^^^^^^^^^ -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` - --> $DIR/panic_in_result_fn.rs:67:1 - | -LL | / fn main() -> Result<(), String> { -LL | | todo!("finish main method"); -LL | | Ok(()) -LL | | } - | |_^ - | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing -note: return Err() instead of panicking - --> $DIR/panic_in_result_fn.rs:68:5 - | -LL | todo!("finish main method"); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -error: aborting due to 6 previous errors +error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr b/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr index eb0aacbb6a4..0dd213a7eed 100644 --- a/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr +++ b/src/tools/clippy/tests/ui/panic_in_result_fn_assertions.stderr @@ -1,4 +1,4 @@ -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` +error: used `panic!()` or assertion in a function that returns `Result` --> $DIR/panic_in_result_fn_assertions.rs:7:5 | LL | / fn result_with_assert_with_message(x: i32) -> Result<bool, String> // should emit lint @@ -8,7 +8,7 @@ LL | | Ok(true) LL | | } | |_____^ | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing + = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing note: return Err() instead of panicking --> $DIR/panic_in_result_fn_assertions.rs:9:9 | @@ -16,7 +16,7 @@ LL | assert!(x == 5, "wrong argument"); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: `-D clippy::panic-in-result-fn` implied by `-D warnings` -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` +error: used `panic!()` or assertion in a function that returns `Result` --> $DIR/panic_in_result_fn_assertions.rs:13:5 | LL | / fn result_with_assert_eq(x: i32) -> Result<bool, String> // should emit lint @@ -26,14 +26,14 @@ LL | | Ok(true) LL | | } | |_____^ | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing + = help: `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing note: return Err() instead of panicking --> $DIR/panic_in_result_fn_assertions.rs:15:9 | LL | assert_eq!(x, 5); | ^^^^^^^^^^^^^^^^ -error: used `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertion in a function that returns `Result` +error: used `panic!()` or assertion in a function that returns `Result` --> $DIR/panic_in_result_fn_assertions.rs:19:5 | LL | / fn result_with_assert_ne(x: i32) -> Result<bool, String> // should emit lint @@ -43,7 +43,7 @@ LL | | Ok(true) LL | | } | |_____^ | - = help: `unimplemented!()`, `unreachable!()`, `todo!()`, `panic!()` or assertions should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing + = help: `panic!()` or assertions 
should not be used in a function that returns `Result` as `Result` is expected to return an error instead of crashing note: return Err() instead of panicking --> $DIR/panic_in_result_fn_assertions.rs:21:9 | diff --git a/src/tools/clippy/tests/ui/print_literal.stderr b/src/tools/clippy/tests/ui/print_literal.stderr index 6404dacdafa..71c8d188f16 100644 --- a/src/tools/clippy/tests/ui/print_literal.stderr +++ b/src/tools/clippy/tests/ui/print_literal.stderr @@ -5,7 +5,7 @@ LL | print!("Hello {}", "world"); | ^^^^^^^ | = note: `-D clippy::print-literal` implied by `-D warnings` -help: try this +help: try | LL - print!("Hello {}", "world"); LL + print!("Hello world"); @@ -17,7 +17,7 @@ error: literal with an empty format string LL | println!("Hello {} {}", world, "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("Hello {} {}", world, "world"); LL + println!("Hello {} world", world); @@ -29,7 +29,7 @@ error: literal with an empty format string LL | println!("Hello {}", "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("Hello {}", "world"); LL + println!("Hello world"); @@ -41,7 +41,7 @@ error: literal with an empty format string LL | println!("{} {:.4}", "a literal", 5); | ^^^^^^^^^^^ | -help: try this +help: try | LL - println!("{} {:.4}", "a literal", 5); LL + println!("a literal {:.4}", 5); @@ -53,7 +53,7 @@ error: literal with an empty format string LL | println!("{0} {1}", "hello", "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("{0} {1}", "hello", "world"); LL + println!("hello {1}", "world"); @@ -65,7 +65,7 @@ error: literal with an empty format string LL | println!("{0} {1}", "hello", "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("{0} {1}", "hello", "world"); LL + println!("{0} world", "hello"); @@ -77,7 +77,7 @@ error: literal with an empty format string LL | println!("{1} {0}", "hello", "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("{1} {0}", "hello", "world"); LL + println!("world {0}", "hello"); @@ -89,7 +89,7 @@ error: literal with an empty format string LL | println!("{1} {0}", "hello", "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("{1} {0}", "hello", "world"); LL + println!("{1} hello", "world"); @@ -101,7 +101,7 @@ error: literal with an empty format string LL | println!("{foo} {bar}", foo = "hello", bar = "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("{foo} {bar}", foo = "hello", bar = "world"); LL + println!("hello {bar}", bar = "world"); @@ -113,7 +113,7 @@ error: literal with an empty format string LL | println!("{foo} {bar}", foo = "hello", bar = "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("{foo} {bar}", foo = "hello", bar = "world"); LL + println!("{foo} world", foo = "hello"); @@ -125,7 +125,7 @@ error: literal with an empty format string LL | println!("{bar} {foo}", foo = "hello", bar = "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("{bar} {foo}", foo = "hello", bar = "world"); LL + println!("world {foo}", foo = "hello"); @@ -137,7 +137,7 @@ error: literal with an empty format string LL | println!("{bar} {foo}", foo = "hello", bar = "world"); | ^^^^^^^ | -help: try this +help: try | LL - println!("{bar} {foo}", foo = "hello", bar = "world"); LL + println!("{bar} hello", bar = "world"); diff --git a/src/tools/clippy/tests/ui/ptr_arg.rs b/src/tools/clippy/tests/ui/ptr_arg.rs index 709f74ee6aa..13e993d247b 100644 --- a/src/tools/clippy/tests/ui/ptr_arg.rs +++ b/src/tools/clippy/tests/ui/ptr_arg.rs @@ -3,7 +3,8 @@ 
unused, clippy::many_single_char_names, clippy::needless_lifetimes, - clippy::redundant_clone + clippy::redundant_clone, + clippy::needless_pass_by_ref_mut )] #![warn(clippy::ptr_arg)] diff --git a/src/tools/clippy/tests/ui/ptr_arg.stderr b/src/tools/clippy/tests/ui/ptr_arg.stderr index d663b070b9c..0e9dd760f45 100644 --- a/src/tools/clippy/tests/ui/ptr_arg.stderr +++ b/src/tools/clippy/tests/ui/ptr_arg.stderr @@ -1,5 +1,5 @@ error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:13:14 + --> $DIR/ptr_arg.rs:14:14 | LL | fn do_vec(x: &Vec<i64>) { | ^^^^^^^^^ help: change this to: `&[i64]` @@ -7,43 +7,43 @@ LL | fn do_vec(x: &Vec<i64>) { = note: `-D clippy::ptr-arg` implied by `-D warnings` error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:17:18 + --> $DIR/ptr_arg.rs:18:18 | LL | fn do_vec_mut(x: &mut Vec<i64>) { | ^^^^^^^^^^^^^ help: change this to: `&mut [i64]` error: writing `&String` instead of `&str` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:21:14 + --> $DIR/ptr_arg.rs:22:14 | LL | fn do_str(x: &String) { | ^^^^^^^ help: change this to: `&str` error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:25:18 + --> $DIR/ptr_arg.rs:26:18 | LL | fn do_str_mut(x: &mut String) { | ^^^^^^^^^^^ help: change this to: `&mut str` error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:29:15 + --> $DIR/ptr_arg.rs:30:15 | LL | fn do_path(x: &PathBuf) { | ^^^^^^^^ help: change this to: `&Path` error: writing `&mut PathBuf` instead of `&mut Path` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:33:19 + --> $DIR/ptr_arg.rs:34:19 | LL | fn do_path_mut(x: &mut PathBuf) { | ^^^^^^^^^^^^ help: change this to: `&mut Path` error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:41:18 + --> $DIR/ptr_arg.rs:42:18 | LL | fn do_vec(x: &Vec<i64>); | ^^^^^^^^^ help: change this to: `&[i64]` error: writing `&Vec` instead of `&[_]` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:54:14 + --> $DIR/ptr_arg.rs:55:14 | LL | fn cloned(x: &Vec<u8>) -> Vec<u8> { | ^^^^^^^^ @@ -60,7 +60,7 @@ LL ~ x.to_owned() | error: writing `&String` instead of `&str` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:63:18 + --> $DIR/ptr_arg.rs:64:18 | LL | fn str_cloned(x: &String) -> String { | ^^^^^^^ @@ -76,7 +76,7 @@ LL ~ x.to_owned() | error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:71:19 + --> $DIR/ptr_arg.rs:72:19 | LL | fn path_cloned(x: &PathBuf) -> PathBuf { | ^^^^^^^^ @@ -92,7 +92,7 @@ LL ~ x.to_path_buf() | error: writing `&String` instead of `&str` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:79:44 + --> $DIR/ptr_arg.rs:80:44 | LL | fn false_positive_capacity(x: &Vec<u8>, y: &String) { | ^^^^^^^ @@ -106,19 +106,19 @@ LL ~ let c = y; | error: using a reference to `Cow` is not recommended - --> $DIR/ptr_arg.rs:93:25 + --> $DIR/ptr_arg.rs:94:25 | LL | fn test_cow_with_ref(c: &Cow<[i32]>) {} | ^^^^^^^^^^^ help: change this to: `&[i32]` error: writing `&String` instead of `&str` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:122:66 + --> $DIR/ptr_arg.rs:123:66 | LL | fn some_allowed(#[allow(clippy::ptr_arg)] _v: &Vec<u32>, _s: &String) {} | ^^^^^^^ help: change this to: `&str` error: 
writing `&Vec` instead of `&[_]` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:151:21 + --> $DIR/ptr_arg.rs:152:21 | LL | fn foo_vec(vec: &Vec<u8>) { | ^^^^^^^^ @@ -131,7 +131,7 @@ LL ~ let _ = vec.to_owned().clone(); | error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:156:23 + --> $DIR/ptr_arg.rs:157:23 | LL | fn foo_path(path: &PathBuf) { | ^^^^^^^^ @@ -144,7 +144,7 @@ LL ~ let _ = path.to_path_buf().clone(); | error: writing `&PathBuf` instead of `&Path` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:161:21 + --> $DIR/ptr_arg.rs:162:21 | LL | fn foo_str(str: &PathBuf) { | ^^^^^^^^ @@ -157,43 +157,43 @@ LL ~ let _ = str.to_path_buf().clone(); | error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:167:29 + --> $DIR/ptr_arg.rs:168:29 | LL | fn mut_vec_slice_methods(v: &mut Vec<u32>) { | ^^^^^^^^^^^^^ help: change this to: `&mut [u32]` error: writing `&mut Vec` instead of `&mut [_]` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:229:17 + --> $DIR/ptr_arg.rs:230:17 | LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) { | ^^^^^^^^^^^^^ help: change this to: `&mut [u32]` error: writing `&mut String` instead of `&mut str` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:229:35 + --> $DIR/ptr_arg.rs:230:35 | LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) { | ^^^^^^^^^^^ help: change this to: `&mut str` error: writing `&mut PathBuf` instead of `&mut Path` involves a new object where a slice will do - --> $DIR/ptr_arg.rs:229:51 + --> $DIR/ptr_arg.rs:230:51 | LL | fn dyn_trait(a: &mut Vec<u32>, b: &mut String, c: &mut PathBuf) { | ^^^^^^^^^^^^ help: change this to: `&mut Path` error: using a reference to `Cow` is not recommended - --> $DIR/ptr_arg.rs:252:39 + --> $DIR/ptr_arg.rs:253:39 | LL | fn cow_elided_lifetime<'a>(input: &'a Cow<str>) -> &'a str { | ^^^^^^^^^^^^ help: change this to: `&str` error: using a reference to `Cow` is not recommended - --> $DIR/ptr_arg.rs:257:36 + --> $DIR/ptr_arg.rs:258:36 | LL | fn cow_bad_ret_ty_1<'a>(input: &'a Cow<'a, str>) -> &'static str { | ^^^^^^^^^^^^^^^^ help: change this to: `&str` error: using a reference to `Cow` is not recommended - --> $DIR/ptr_arg.rs:260:40 + --> $DIR/ptr_arg.rs:261:40 | LL | fn cow_bad_ret_ty_2<'a, 'b>(input: &'a Cow<'a, str>) -> &'b str { | ^^^^^^^^^^^^^^^^ help: change this to: `&str` diff --git a/src/tools/clippy/tests/ui/read_line_without_trim.fixed b/src/tools/clippy/tests/ui/read_line_without_trim.fixed new file mode 100644 index 00000000000..cb6aab84e49 --- /dev/null +++ b/src/tools/clippy/tests/ui/read_line_without_trim.fixed @@ -0,0 +1,36 @@ +//@run-rustfix + +#![allow(unused)] +#![warn(clippy::read_line_without_trim)] + +fn main() { + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + input.pop(); + let _x: i32 = input.parse().unwrap(); // don't trigger here, newline character is popped + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x: i32 = input.trim_end().parse().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x = input.trim_end().parse::<i32>().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x = input.trim_end().parse::<u32>().unwrap(); + + let mut input = String::new(); + 
std::io::stdin().read_line(&mut input).unwrap(); + let _x = input.trim_end().parse::<f32>().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x = input.trim_end().parse::<bool>().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + // this is actually ok, so don't lint here + let _x = input.parse::<String>().unwrap(); +} diff --git a/src/tools/clippy/tests/ui/read_line_without_trim.rs b/src/tools/clippy/tests/ui/read_line_without_trim.rs new file mode 100644 index 00000000000..bdc409a7010 --- /dev/null +++ b/src/tools/clippy/tests/ui/read_line_without_trim.rs @@ -0,0 +1,36 @@ +//@run-rustfix + +#![allow(unused)] +#![warn(clippy::read_line_without_trim)] + +fn main() { + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + input.pop(); + let _x: i32 = input.parse().unwrap(); // don't trigger here, newline character is popped + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x: i32 = input.parse().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x = input.parse::<i32>().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x = input.parse::<u32>().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x = input.parse::<f32>().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + let _x = input.parse::<bool>().unwrap(); + + let mut input = String::new(); + std::io::stdin().read_line(&mut input).unwrap(); + // this is actually ok, so don't lint here + let _x = input.parse::<String>().unwrap(); +} diff --git a/src/tools/clippy/tests/ui/read_line_without_trim.stderr b/src/tools/clippy/tests/ui/read_line_without_trim.stderr new file mode 100644 index 00000000000..f3d7b60425f --- /dev/null +++ b/src/tools/clippy/tests/ui/read_line_without_trim.stderr @@ -0,0 +1,73 @@ +error: calling `.parse()` without trimming the trailing newline character + --> $DIR/read_line_without_trim.rs:14:25 + | +LL | let _x: i32 = input.parse().unwrap(); + | ----- ^^^^^^^ + | | + | help: try: `input.trim_end()` + | +note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail + --> $DIR/read_line_without_trim.rs:13:5 + | +LL | std::io::stdin().read_line(&mut input).unwrap(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + = note: `-D clippy::read-line-without-trim` implied by `-D warnings` + +error: calling `.parse()` without trimming the trailing newline character + --> $DIR/read_line_without_trim.rs:18:20 + | +LL | let _x = input.parse::<i32>().unwrap(); + | ----- ^^^^^^^^^^^^^^ + | | + | help: try: `input.trim_end()` + | +note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail + --> $DIR/read_line_without_trim.rs:17:5 + | +LL | std::io::stdin().read_line(&mut input).unwrap(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: calling `.parse()` without trimming the trailing newline character + --> $DIR/read_line_without_trim.rs:22:20 + | +LL | let _x = input.parse::<u32>().unwrap(); + | ----- ^^^^^^^^^^^^^^ + | | + | help: try: `input.trim_end()` + | +note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail + --> 
$DIR/read_line_without_trim.rs:21:5 + | +LL | std::io::stdin().read_line(&mut input).unwrap(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: calling `.parse()` without trimming the trailing newline character + --> $DIR/read_line_without_trim.rs:26:20 + | +LL | let _x = input.parse::<f32>().unwrap(); + | ----- ^^^^^^^^^^^^^^ + | | + | help: try: `input.trim_end()` + | +note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail + --> $DIR/read_line_without_trim.rs:25:5 + | +LL | std::io::stdin().read_line(&mut input).unwrap(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: calling `.parse()` without trimming the trailing newline character + --> $DIR/read_line_without_trim.rs:30:20 + | +LL | let _x = input.parse::<bool>().unwrap(); + | ----- ^^^^^^^^^^^^^^^ + | | + | help: try: `input.trim_end()` + | +note: call to `.read_line()` here, which leaves a trailing newline character in the buffer, which in turn will cause `.parse()` to fail + --> $DIR/read_line_without_trim.rs:29:5 + | +LL | std::io::stdin().read_line(&mut input).unwrap(); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: aborting due to 5 previous errors + diff --git a/src/tools/clippy/tests/ui/read_zero_byte_vec.rs b/src/tools/clippy/tests/ui/read_zero_byte_vec.rs index 30807e0f8b9..c6025ef1f4d 100644 --- a/src/tools/clippy/tests/ui/read_zero_byte_vec.rs +++ b/src/tools/clippy/tests/ui/read_zero_byte_vec.rs @@ -1,5 +1,5 @@ #![warn(clippy::read_zero_byte_vec)] -#![allow(clippy::unused_io_amount)] +#![allow(clippy::unused_io_amount, clippy::needless_pass_by_ref_mut)] use std::fs::File; use std::io; use std::io::prelude::*; diff --git a/src/tools/clippy/tests/ui/redundant_allocation.rs b/src/tools/clippy/tests/ui/redundant_allocation.rs index 574d34aed2d..9eb58a3e53f 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation.rs +++ b/src/tools/clippy/tests/ui/redundant_allocation.rs @@ -8,8 +8,7 @@ pub struct SubT<T> { } mod outer_box { - use crate::MyStruct; - use crate::SubT; + use crate::{MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; @@ -28,8 +27,7 @@ mod outer_box { } mod outer_rc { - use crate::MyStruct; - use crate::SubT; + use crate::{MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; @@ -48,8 +46,7 @@ mod outer_rc { } mod outer_arc { - use crate::MyStruct; - use crate::SubT; + use crate::{MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; diff --git a/src/tools/clippy/tests/ui/redundant_allocation.stderr b/src/tools/clippy/tests/ui/redundant_allocation.stderr index e0826fefa6c..a9a1eed702b 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation.stderr +++ b/src/tools/clippy/tests/ui/redundant_allocation.stderr @@ -1,5 +1,5 @@ error: usage of `Box<Rc<T>>` - --> $DIR/redundant_allocation.rs:17:30 + --> $DIR/redundant_allocation.rs:16:30 | LL | pub fn box_test6<T>(foo: Box<Rc<T>>) {} | ^^^^^^^^^^ @@ -9,7 +9,7 @@ LL | pub fn box_test6<T>(foo: Box<Rc<T>>) {} = note: `-D clippy::redundant-allocation` implied by `-D warnings` error: usage of `Box<Arc<T>>` - --> $DIR/redundant_allocation.rs:19:30 + --> $DIR/redundant_allocation.rs:18:30 | LL | pub fn box_test7<T>(foo: Box<Arc<T>>) {} | ^^^^^^^^^^^ @@ -18,7 +18,7 @@ LL | pub fn box_test7<T>(foo: Box<Arc<T>>) {} = help: consider using just `Box<T>` or `Arc<T>` error: usage of `Box<Rc<SubT<usize>>>` - --> $DIR/redundant_allocation.rs:21:27 + --> $DIR/redundant_allocation.rs:20:27 | LL | pub fn box_test8() -> 
Box<Rc<SubT<usize>>> { | ^^^^^^^^^^^^^^^^^^^^ @@ -27,7 +27,7 @@ LL | pub fn box_test8() -> Box<Rc<SubT<usize>>> { = help: consider using just `Box<SubT<usize>>` or `Rc<SubT<usize>>` error: usage of `Box<Arc<T>>` - --> $DIR/redundant_allocation.rs:25:30 + --> $DIR/redundant_allocation.rs:24:30 | LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> { | ^^^^^^^^^^^ @@ -36,7 +36,7 @@ LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> { = help: consider using just `Box<T>` or `Arc<T>` error: usage of `Box<Arc<SubT<T>>>` - --> $DIR/redundant_allocation.rs:25:46 + --> $DIR/redundant_allocation.rs:24:46 | LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> { | ^^^^^^^^^^^^^^^^^ @@ -45,7 +45,7 @@ LL | pub fn box_test9<T>(foo: Box<Arc<T>>) -> Box<Arc<SubT<T>>> { = help: consider using just `Box<SubT<T>>` or `Arc<SubT<T>>` error: usage of `Rc<Box<bool>>` - --> $DIR/redundant_allocation.rs:37:24 + --> $DIR/redundant_allocation.rs:35:24 | LL | pub fn rc_test5(a: Rc<Box<bool>>) {} | ^^^^^^^^^^^^^ @@ -54,7 +54,7 @@ LL | pub fn rc_test5(a: Rc<Box<bool>>) {} = help: consider using just `Rc<bool>` or `Box<bool>` error: usage of `Rc<Arc<bool>>` - --> $DIR/redundant_allocation.rs:39:24 + --> $DIR/redundant_allocation.rs:37:24 | LL | pub fn rc_test7(a: Rc<Arc<bool>>) {} | ^^^^^^^^^^^^^ @@ -63,7 +63,7 @@ LL | pub fn rc_test7(a: Rc<Arc<bool>>) {} = help: consider using just `Rc<bool>` or `Arc<bool>` error: usage of `Rc<Box<SubT<usize>>>` - --> $DIR/redundant_allocation.rs:41:26 + --> $DIR/redundant_allocation.rs:39:26 | LL | pub fn rc_test8() -> Rc<Box<SubT<usize>>> { | ^^^^^^^^^^^^^^^^^^^^ @@ -72,7 +72,7 @@ LL | pub fn rc_test8() -> Rc<Box<SubT<usize>>> { = help: consider using just `Rc<SubT<usize>>` or `Box<SubT<usize>>` error: usage of `Rc<Arc<T>>` - --> $DIR/redundant_allocation.rs:45:29 + --> $DIR/redundant_allocation.rs:43:29 | LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> { | ^^^^^^^^^^ @@ -81,7 +81,7 @@ LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> { = help: consider using just `Rc<T>` or `Arc<T>` error: usage of `Rc<Arc<SubT<T>>>` - --> $DIR/redundant_allocation.rs:45:44 + --> $DIR/redundant_allocation.rs:43:44 | LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> { | ^^^^^^^^^^^^^^^^ @@ -90,7 +90,7 @@ LL | pub fn rc_test9<T>(foo: Rc<Arc<T>>) -> Rc<Arc<SubT<T>>> { = help: consider using just `Rc<SubT<T>>` or `Arc<SubT<T>>` error: usage of `Arc<Box<bool>>` - --> $DIR/redundant_allocation.rs:57:25 + --> $DIR/redundant_allocation.rs:54:25 | LL | pub fn arc_test5(a: Arc<Box<bool>>) {} | ^^^^^^^^^^^^^^ @@ -99,7 +99,7 @@ LL | pub fn arc_test5(a: Arc<Box<bool>>) {} = help: consider using just `Arc<bool>` or `Box<bool>` error: usage of `Arc<Rc<bool>>` - --> $DIR/redundant_allocation.rs:59:25 + --> $DIR/redundant_allocation.rs:56:25 | LL | pub fn arc_test6(a: Arc<Rc<bool>>) {} | ^^^^^^^^^^^^^ @@ -108,7 +108,7 @@ LL | pub fn arc_test6(a: Arc<Rc<bool>>) {} = help: consider using just `Arc<bool>` or `Rc<bool>` error: usage of `Arc<Box<SubT<usize>>>` - --> $DIR/redundant_allocation.rs:61:27 + --> $DIR/redundant_allocation.rs:58:27 | LL | pub fn arc_test8() -> Arc<Box<SubT<usize>>> { | ^^^^^^^^^^^^^^^^^^^^^ @@ -117,7 +117,7 @@ LL | pub fn arc_test8() -> Arc<Box<SubT<usize>>> { = help: consider using just `Arc<SubT<usize>>` or `Box<SubT<usize>>` error: usage of `Arc<Rc<T>>` - --> $DIR/redundant_allocation.rs:65:30 + --> $DIR/redundant_allocation.rs:62:30 | LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> { | ^^^^^^^^^^ @@ -126,7 
+126,7 @@ LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> { = help: consider using just `Arc<T>` or `Rc<T>` error: usage of `Arc<Rc<SubT<T>>>` - --> $DIR/redundant_allocation.rs:65:45 + --> $DIR/redundant_allocation.rs:62:45 | LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> { | ^^^^^^^^^^^^^^^^ @@ -135,7 +135,7 @@ LL | pub fn arc_test9<T>(foo: Arc<Rc<T>>) -> Arc<Rc<SubT<T>>> { = help: consider using just `Arc<SubT<T>>` or `Rc<SubT<T>>` error: usage of `Rc<Box<Box<dyn T>>>` - --> $DIR/redundant_allocation.rs:87:27 + --> $DIR/redundant_allocation.rs:84:27 | LL | pub fn test_rc_box(_: Rc<Box<Box<dyn T>>>) {} | ^^^^^^^^^^^^^^^^^^^ @@ -144,7 +144,7 @@ LL | pub fn test_rc_box(_: Rc<Box<Box<dyn T>>>) {} = help: consider using just `Rc<Box<dyn T>>` or `Box<Box<dyn T>>` error: usage of `Rc<Box<Box<str>>>` - --> $DIR/redundant_allocation.rs:119:31 + --> $DIR/redundant_allocation.rs:116:31 | LL | pub fn test_rc_box_str(_: Rc<Box<Box<str>>>) {} | ^^^^^^^^^^^^^^^^^ @@ -153,7 +153,7 @@ LL | pub fn test_rc_box_str(_: Rc<Box<Box<str>>>) {} = help: consider using just `Rc<Box<str>>` or `Box<Box<str>>` error: usage of `Rc<Box<Box<[usize]>>>` - --> $DIR/redundant_allocation.rs:120:33 + --> $DIR/redundant_allocation.rs:117:33 | LL | pub fn test_rc_box_slice(_: Rc<Box<Box<[usize]>>>) {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -162,7 +162,7 @@ LL | pub fn test_rc_box_slice(_: Rc<Box<Box<[usize]>>>) {} = help: consider using just `Rc<Box<[usize]>>` or `Box<Box<[usize]>>` error: usage of `Rc<Box<Box<Path>>>` - --> $DIR/redundant_allocation.rs:121:32 + --> $DIR/redundant_allocation.rs:118:32 | LL | pub fn test_rc_box_path(_: Rc<Box<Box<Path>>>) {} | ^^^^^^^^^^^^^^^^^^ @@ -171,7 +171,7 @@ LL | pub fn test_rc_box_path(_: Rc<Box<Box<Path>>>) {} = help: consider using just `Rc<Box<Path>>` or `Box<Box<Path>>` error: usage of `Rc<Box<Box<DynSized>>>` - --> $DIR/redundant_allocation.rs:122:34 + --> $DIR/redundant_allocation.rs:119:34 | LL | pub fn test_rc_box_custom(_: Rc<Box<Box<DynSized>>>) {} | ^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed b/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed index edb7715f42c..b97863daf22 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed +++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.fixed @@ -16,9 +16,7 @@ pub enum MyEnum { } mod outer_box { - use crate::MyEnum; - use crate::MyStruct; - use crate::SubT; + use crate::{MyEnum, MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; @@ -35,9 +33,7 @@ mod outer_box { } mod outer_rc { - use crate::MyEnum; - use crate::MyStruct; - use crate::SubT; + use crate::{MyEnum, MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; @@ -54,9 +50,7 @@ mod outer_rc { } mod outer_arc { - use crate::MyEnum; - use crate::MyStruct; - use crate::SubT; + use crate::{MyEnum, MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs b/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs index c59422dd966..bffb6f8c000 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs +++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.rs @@ -16,9 +16,7 @@ pub enum MyEnum { } mod outer_box { - use crate::MyEnum; - use crate::MyStruct; - use crate::SubT; + use crate::{MyEnum, MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; @@ -35,9 +33,7 @@ mod outer_box { } mod outer_rc { - use 
crate::MyEnum; - use crate::MyStruct; - use crate::SubT; + use crate::{MyEnum, MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; @@ -54,9 +50,7 @@ mod outer_rc { } mod outer_arc { - use crate::MyEnum; - use crate::MyStruct; - use crate::SubT; + use crate::{MyEnum, MyStruct, SubT}; use std::boxed::Box; use std::rc::Rc; use std::sync::Arc; diff --git a/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr b/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr index 8dd4a6a2687..524ca5bf467 100644 --- a/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr +++ b/src/tools/clippy/tests/ui/redundant_allocation_fixable.stderr @@ -1,5 +1,5 @@ error: usage of `Box<&T>` - --> $DIR/redundant_allocation_fixable.rs:26:30 + --> $DIR/redundant_allocation_fixable.rs:24:30 | LL | pub fn box_test1<T>(foo: Box<&T>) {} | ^^^^^^^ help: try: `&T` @@ -8,7 +8,7 @@ LL | pub fn box_test1<T>(foo: Box<&T>) {} = note: `-D clippy::redundant-allocation` implied by `-D warnings` error: usage of `Box<&MyStruct>` - --> $DIR/redundant_allocation_fixable.rs:28:27 + --> $DIR/redundant_allocation_fixable.rs:26:27 | LL | pub fn box_test2(foo: Box<&MyStruct>) {} | ^^^^^^^^^^^^^^ help: try: `&MyStruct` @@ -16,7 +16,7 @@ LL | pub fn box_test2(foo: Box<&MyStruct>) {} = note: `&MyStruct` is already a pointer, `Box<&MyStruct>` allocates a pointer on the heap error: usage of `Box<&MyEnum>` - --> $DIR/redundant_allocation_fixable.rs:30:27 + --> $DIR/redundant_allocation_fixable.rs:28:27 | LL | pub fn box_test3(foo: Box<&MyEnum>) {} | ^^^^^^^^^^^^ help: try: `&MyEnum` @@ -24,7 +24,7 @@ LL | pub fn box_test3(foo: Box<&MyEnum>) {} = note: `&MyEnum` is already a pointer, `Box<&MyEnum>` allocates a pointer on the heap error: usage of `Box<Box<T>>` - --> $DIR/redundant_allocation_fixable.rs:34:30 + --> $DIR/redundant_allocation_fixable.rs:32:30 | LL | pub fn box_test5<T>(foo: Box<Box<T>>) {} | ^^^^^^^^^^^ help: try: `Box<T>` @@ -32,7 +32,7 @@ LL | pub fn box_test5<T>(foo: Box<Box<T>>) {} = note: `Box<T>` is already on the heap, `Box<Box<T>>` makes an extra allocation error: usage of `Rc<&T>` - --> $DIR/redundant_allocation_fixable.rs:45:29 + --> $DIR/redundant_allocation_fixable.rs:41:29 | LL | pub fn rc_test1<T>(foo: Rc<&T>) {} | ^^^^^^ help: try: `&T` @@ -40,7 +40,7 @@ LL | pub fn rc_test1<T>(foo: Rc<&T>) {} = note: `&T` is already a pointer, `Rc<&T>` allocates a pointer on the heap error: usage of `Rc<&MyStruct>` - --> $DIR/redundant_allocation_fixable.rs:47:26 + --> $DIR/redundant_allocation_fixable.rs:43:26 | LL | pub fn rc_test2(foo: Rc<&MyStruct>) {} | ^^^^^^^^^^^^^ help: try: `&MyStruct` @@ -48,7 +48,7 @@ LL | pub fn rc_test2(foo: Rc<&MyStruct>) {} = note: `&MyStruct` is already a pointer, `Rc<&MyStruct>` allocates a pointer on the heap error: usage of `Rc<&MyEnum>` - --> $DIR/redundant_allocation_fixable.rs:49:26 + --> $DIR/redundant_allocation_fixable.rs:45:26 | LL | pub fn rc_test3(foo: Rc<&MyEnum>) {} | ^^^^^^^^^^^ help: try: `&MyEnum` @@ -56,7 +56,7 @@ LL | pub fn rc_test3(foo: Rc<&MyEnum>) {} = note: `&MyEnum` is already a pointer, `Rc<&MyEnum>` allocates a pointer on the heap error: usage of `Rc<Rc<bool>>` - --> $DIR/redundant_allocation_fixable.rs:53:24 + --> $DIR/redundant_allocation_fixable.rs:49:24 | LL | pub fn rc_test6(a: Rc<Rc<bool>>) {} | ^^^^^^^^^^^^ help: try: `Rc<bool>` @@ -64,7 +64,7 @@ LL | pub fn rc_test6(a: Rc<Rc<bool>>) {} = note: `Rc<bool>` is already on the heap, `Rc<Rc<bool>>` makes an extra allocation error: usage of `Arc<&T>` - --> 
$DIR/redundant_allocation_fixable.rs:64:30 + --> $DIR/redundant_allocation_fixable.rs:58:30 | LL | pub fn arc_test1<T>(foo: Arc<&T>) {} | ^^^^^^^ help: try: `&T` @@ -72,7 +72,7 @@ LL | pub fn arc_test1<T>(foo: Arc<&T>) {} = note: `&T` is already a pointer, `Arc<&T>` allocates a pointer on the heap error: usage of `Arc<&MyStruct>` - --> $DIR/redundant_allocation_fixable.rs:66:27 + --> $DIR/redundant_allocation_fixable.rs:60:27 | LL | pub fn arc_test2(foo: Arc<&MyStruct>) {} | ^^^^^^^^^^^^^^ help: try: `&MyStruct` @@ -80,7 +80,7 @@ LL | pub fn arc_test2(foo: Arc<&MyStruct>) {} = note: `&MyStruct` is already a pointer, `Arc<&MyStruct>` allocates a pointer on the heap error: usage of `Arc<&MyEnum>` - --> $DIR/redundant_allocation_fixable.rs:68:27 + --> $DIR/redundant_allocation_fixable.rs:62:27 | LL | pub fn arc_test3(foo: Arc<&MyEnum>) {} | ^^^^^^^^^^^^ help: try: `&MyEnum` @@ -88,7 +88,7 @@ LL | pub fn arc_test3(foo: Arc<&MyEnum>) {} = note: `&MyEnum` is already a pointer, `Arc<&MyEnum>` allocates a pointer on the heap error: usage of `Arc<Arc<bool>>` - --> $DIR/redundant_allocation_fixable.rs:72:25 + --> $DIR/redundant_allocation_fixable.rs:66:25 | LL | pub fn arc_test7(a: Arc<Arc<bool>>) {} | ^^^^^^^^^^^^^^ help: try: `Arc<bool>` diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr index e9ea3f2e688..28f33f0c95d 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_drop_order.stderr @@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_drop_order.rs:17:12 | LL | if let Ok(_) = m.lock() {} - | -------^^^^^----------- help: try this: `if m.lock().is_ok()` + | -------^^^^^----------- help: try: `if m.lock().is_ok()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -12,7 +12,7 @@ error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_drop_order.rs:18:12 | LL | if let Err(_) = Err::<(), _>(m.lock().unwrap().0) {} - | -------^^^^^^------------------------------------ help: try this: `if Err::<(), _>(m.lock().unwrap().0).is_err()` + | -------^^^^^^------------------------------------ help: try: `if Err::<(), _>(m.lock().unwrap().0).is_err()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -21,7 +21,7 @@ error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_drop_order.rs:21:16 | LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {} - | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()` + | -------^^^^^----------------------------------------- help: try: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -30,7 +30,7 @@ error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_drop_order.rs:23:12 | LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) { - | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, 
std::sync::MutexGuard<()>>(()).is_ok()` + | -------^^^^^----------------------------------------- help: try: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -39,31 +39,31 @@ error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_drop_order.rs:26:12 | LL | if let Ok(_) = Ok::<_, std::sync::MutexGuard<()>>(()) {} - | -------^^^^^----------------------------------------- help: try this: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()` + | -------^^^^^----------------------------------------- help: try: `if Ok::<_, std::sync::MutexGuard<()>>(()).is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_drop_order.rs:27:12 | LL | if let Err(_) = Err::<std::sync::MutexGuard<()>, _>(()) {} - | -------^^^^^^------------------------------------------ help: try this: `if Err::<std::sync::MutexGuard<()>, _>(()).is_err()` + | -------^^^^^^------------------------------------------ help: try: `if Err::<std::sync::MutexGuard<()>, _>(()).is_err()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_drop_order.rs:29:12 | LL | if let Ok(_) = Ok::<_, ()>(String::new()) {} - | -------^^^^^----------------------------- help: try this: `if Ok::<_, ()>(String::new()).is_ok()` + | -------^^^^^----------------------------- help: try: `if Ok::<_, ()>(String::new()).is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_drop_order.rs:30:12 | LL | if let Err(_) = Err::<(), _>((String::new(), ())) {} - | -------^^^^^^------------------------------------ help: try this: `if Err::<(), _>((String::new(), ())).is_err()` + | -------^^^^^^------------------------------------ help: try: `if Err::<(), _>((String::new(), ())).is_err()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_drop_order.rs:33:12 | LL | if let Some(_) = Some(m.lock()) {} - | -------^^^^^^^----------------- help: try this: `if Some(m.lock()).is_some()` + | -------^^^^^^^----------------- help: try: `if Some(m.lock()).is_some()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -72,7 +72,7 @@ error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_drop_order.rs:34:12 | LL | if let Some(_) = Some(m.lock().unwrap().0) {} - | -------^^^^^^^---------------------------- help: try this: `if Some(m.lock().unwrap().0).is_some()` + | -------^^^^^^^---------------------------- help: try: `if Some(m.lock().unwrap().0).is_some()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -81,7 +81,7 @@ error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_drop_order.rs:37:16 | LL | if let None = None::<std::sync::MutexGuard<()>> {} - | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()` + | -------^^^^------------------------------------ help: try: `if None::<std::sync::MutexGuard<()>>.is_none()` | = note: this will change drop order of the result, as well as all temporaries = note: add 
`#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -90,7 +90,7 @@ error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_drop_order.rs:39:12 | LL | if let None = None::<std::sync::MutexGuard<()>> { - | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()` + | -------^^^^------------------------------------ help: try: `if None::<std::sync::MutexGuard<()>>.is_none()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -99,25 +99,25 @@ error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_drop_order.rs:43:12 | LL | if let None = None::<std::sync::MutexGuard<()>> {} - | -------^^^^------------------------------------ help: try this: `if None::<std::sync::MutexGuard<()>>.is_none()` + | -------^^^^------------------------------------ help: try: `if None::<std::sync::MutexGuard<()>>.is_none()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_drop_order.rs:45:12 | LL | if let Some(_) = Some(String::new()) {} - | -------^^^^^^^---------------------- help: try this: `if Some(String::new()).is_some()` + | -------^^^^^^^---------------------- help: try: `if Some(String::new()).is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_drop_order.rs:46:12 | LL | if let Some(_) = Some((String::new(), ())) {} - | -------^^^^^^^---------------------------- help: try this: `if Some((String::new(), ())).is_some()` + | -------^^^^^^^---------------------------- help: try: `if Some((String::new(), ())).is_some()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_drop_order.rs:49:12 | LL | if let Ready(_) = Ready(m.lock()) {} - | -------^^^^^^^^------------------ help: try this: `if Ready(m.lock()).is_ready()` + | -------^^^^^^^^------------------ help: try: `if Ready(m.lock()).is_ready()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -126,7 +126,7 @@ error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_drop_order.rs:50:12 | LL | if let Ready(_) = Ready(m.lock().unwrap().0) {} - | -------^^^^^^^^----------------------------- help: try this: `if Ready(m.lock().unwrap().0).is_ready()` + | -------^^^^^^^^----------------------------- help: try: `if Ready(m.lock().unwrap().0).is_ready()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -135,7 +135,7 @@ error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_drop_order.rs:53:16 | LL | if let Pending = Pending::<std::sync::MutexGuard<()>> {} - | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()` + | -------^^^^^^^--------------------------------------- help: try: `if Pending::<std::sync::MutexGuard<()>>.is_pending()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -144,7 +144,7 @@ error: redundant pattern matching, consider using `is_pending()` --> 
$DIR/redundant_pattern_matching_drop_order.rs:55:12 | LL | if let Pending = Pending::<std::sync::MutexGuard<()>> { - | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()` + | -------^^^^^^^--------------------------------------- help: try: `if Pending::<std::sync::MutexGuard<()>>.is_pending()` | = note: this will change drop order of the result, as well as all temporaries = note: add `#[allow(clippy::redundant_pattern_matching)]` if this is important @@ -153,19 +153,19 @@ error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_drop_order.rs:59:12 | LL | if let Pending = Pending::<std::sync::MutexGuard<()>> {} - | -------^^^^^^^--------------------------------------- help: try this: `if Pending::<std::sync::MutexGuard<()>>.is_pending()` + | -------^^^^^^^--------------------------------------- help: try: `if Pending::<std::sync::MutexGuard<()>>.is_pending()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_drop_order.rs:61:12 | LL | if let Ready(_) = Ready(String::new()) {} - | -------^^^^^^^^----------------------- help: try this: `if Ready(String::new()).is_ready()` + | -------^^^^^^^^----------------------- help: try: `if Ready(String::new()).is_ready()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_drop_order.rs:62:12 | LL | if let Ready(_) = Ready((String::new(), ())) {} - | -------^^^^^^^^----------------------------- help: try this: `if Ready((String::new(), ())).is_ready()` + | -------^^^^^^^^----------------------------- help: try: `if Ready((String::new(), ())).is_ready()` error: aborting due to 22 previous errors diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed index 75ed143446c..02f197aa26a 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.fixed @@ -8,10 +8,8 @@ clippy::uninlined_format_args )] -use std::net::{ - IpAddr::{self, V4, V6}, - Ipv4Addr, Ipv6Addr, -}; +use std::net::IpAddr::{self, V4, V6}; +use std::net::{Ipv4Addr, Ipv6Addr}; fn main() { let ipaddr: IpAddr = V4(Ipv4Addr::LOCALHOST); diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs index 9ac77409f79..5c1e1810f55 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.rs @@ -8,10 +8,8 @@ clippy::uninlined_format_args )] -use std::net::{ - IpAddr::{self, V4, V6}, - Ipv4Addr, Ipv6Addr, -}; +use std::net::IpAddr::{self, V4, V6}; +use std::net::{Ipv4Addr, Ipv6Addr}; fn main() { let ipaddr: IpAddr = V4(Ipv4Addr::LOCALHOST); diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr index 6d1fb296463..bec8d30884d 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_ipaddr.stderr @@ -1,130 +1,130 @@ error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:18:12 + --> $DIR/redundant_pattern_matching_ipaddr.rs:16:12 | LL | if let V4(_) = &ipaddr {} - | -------^^^^^---------- help: try this: `if ipaddr.is_ipv4()` + | 
-------^^^^^---------- help: try: `if ipaddr.is_ipv4()` | = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:20:12 + --> $DIR/redundant_pattern_matching_ipaddr.rs:18:12 | LL | if let V4(_) = V4(Ipv4Addr::LOCALHOST) {} - | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` + | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:22:12 + --> $DIR/redundant_pattern_matching_ipaddr.rs:20:12 | LL | if let V6(_) = V6(Ipv6Addr::LOCALHOST) {} - | -------^^^^^-------------------------- help: try this: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()` + | -------^^^^^-------------------------- help: try: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:24:15 + --> $DIR/redundant_pattern_matching_ipaddr.rs:22:15 | LL | while let V4(_) = V4(Ipv4Addr::LOCALHOST) {} - | ----------^^^^^-------------------------- help: try this: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()` + | ----------^^^^^-------------------------- help: try: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:26:15 + --> $DIR/redundant_pattern_matching_ipaddr.rs:24:15 | LL | while let V6(_) = V6(Ipv6Addr::LOCALHOST) {} - | ----------^^^^^-------------------------- help: try this: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()` + | ----------^^^^^-------------------------- help: try: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:36:5 + --> $DIR/redundant_pattern_matching_ipaddr.rs:34:5 | LL | / match V4(Ipv4Addr::LOCALHOST) { LL | | V4(_) => true, LL | | V6(_) => false, LL | | }; - | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv4()` + | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:41:5 + --> $DIR/redundant_pattern_matching_ipaddr.rs:39:5 | LL | / match V4(Ipv4Addr::LOCALHOST) { LL | | V4(_) => false, LL | | V6(_) => true, LL | | }; - | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv6()` + | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv6()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:46:5 + --> $DIR/redundant_pattern_matching_ipaddr.rs:44:5 | LL | / match V6(Ipv6Addr::LOCALHOST) { LL | | V4(_) => false, LL | | V6(_) => true, LL | | }; - | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv6()` + | |_____^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:51:5 + --> $DIR/redundant_pattern_matching_ipaddr.rs:49:5 | LL | / match V6(Ipv6Addr::LOCALHOST) { LL | | V4(_) => true, LL | | V6(_) => false, LL | | }; - | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv4()` + | |_____^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:56:20 + --> $DIR/redundant_pattern_matching_ipaddr.rs:54:20 | LL | let _ = if let V4(_) = 
V4(Ipv4Addr::LOCALHOST) { - | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` + | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:64:20 + --> $DIR/redundant_pattern_matching_ipaddr.rs:62:20 | LL | let _ = if let V4(_) = gen_ipaddr() { - | -------^^^^^--------------- help: try this: `if gen_ipaddr().is_ipv4()` + | -------^^^^^--------------- help: try: `if gen_ipaddr().is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:66:19 + --> $DIR/redundant_pattern_matching_ipaddr.rs:64:19 | LL | } else if let V6(_) = gen_ipaddr() { - | -------^^^^^--------------- help: try this: `if gen_ipaddr().is_ipv6()` + | -------^^^^^--------------- help: try: `if gen_ipaddr().is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:78:12 + --> $DIR/redundant_pattern_matching_ipaddr.rs:76:12 | LL | if let V4(_) = V4(Ipv4Addr::LOCALHOST) {} - | -------^^^^^-------------------------- help: try this: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` + | -------^^^^^-------------------------- help: try: `if V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:80:12 + --> $DIR/redundant_pattern_matching_ipaddr.rs:78:12 | LL | if let V6(_) = V6(Ipv6Addr::LOCALHOST) {} - | -------^^^^^-------------------------- help: try this: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()` + | -------^^^^^-------------------------- help: try: `if V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:82:15 + --> $DIR/redundant_pattern_matching_ipaddr.rs:80:15 | LL | while let V4(_) = V4(Ipv4Addr::LOCALHOST) {} - | ----------^^^^^-------------------------- help: try this: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()` + | ----------^^^^^-------------------------- help: try: `while V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:84:15 + --> $DIR/redundant_pattern_matching_ipaddr.rs:82:15 | LL | while let V6(_) = V6(Ipv6Addr::LOCALHOST) {} - | ----------^^^^^-------------------------- help: try this: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()` + | ----------^^^^^-------------------------- help: try: `while V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: redundant pattern matching, consider using `is_ipv4()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:86:5 + --> $DIR/redundant_pattern_matching_ipaddr.rs:84:5 | LL | / match V4(Ipv4Addr::LOCALHOST) { LL | | V4(_) => true, LL | | V6(_) => false, LL | | }; - | |_____^ help: try this: `V4(Ipv4Addr::LOCALHOST).is_ipv4()` + | |_____^ help: try: `V4(Ipv4Addr::LOCALHOST).is_ipv4()` error: redundant pattern matching, consider using `is_ipv6()` - --> $DIR/redundant_pattern_matching_ipaddr.rs:91:5 + --> $DIR/redundant_pattern_matching_ipaddr.rs:89:5 | LL | / match V6(Ipv6Addr::LOCALHOST) { LL | | V4(_) => false, LL | | V6(_) => true, LL | | }; - | |_____^ help: try this: `V6(Ipv6Addr::LOCALHOST).is_ipv6()` + | |_____^ help: try: `V6(Ipv6Addr::LOCALHOST).is_ipv6()` error: aborting due to 18 previous errors diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr 
b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr index 717b603c496..097ca827a42 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_option.stderr @@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:15:12 | LL | if let None = None::<()> {} - | -------^^^^------------- help: try this: `if None::<()>.is_none()` + | -------^^^^------------- help: try: `if None::<()>.is_none()` | = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings` @@ -10,37 +10,37 @@ error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:17:12 | LL | if let Some(_) = Some(42) {} - | -------^^^^^^^----------- help: try this: `if Some(42).is_some()` + | -------^^^^^^^----------- help: try: `if Some(42).is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:19:12 | LL | if let Some(_) = Some(42) { - | -------^^^^^^^----------- help: try this: `if Some(42).is_some()` + | -------^^^^^^^----------- help: try: `if Some(42).is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:25:15 | LL | while let Some(_) = Some(42) {} - | ----------^^^^^^^----------- help: try this: `while Some(42).is_some()` + | ----------^^^^^^^----------- help: try: `while Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:27:15 | LL | while let None = Some(42) {} - | ----------^^^^----------- help: try this: `while Some(42).is_none()` + | ----------^^^^----------- help: try: `while Some(42).is_none()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:29:15 | LL | while let None = None::<()> {} - | ----------^^^^------------- help: try this: `while None::<()>.is_none()` + | ----------^^^^------------- help: try: `while None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:32:15 | LL | while let Some(_) = v.pop() { - | ----------^^^^^^^---------- help: try this: `while v.pop().is_some()` + | ----------^^^^^^^---------- help: try: `while v.pop().is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:40:5 @@ -49,7 +49,7 @@ LL | / match Some(42) { LL | | Some(_) => true, LL | | None => false, LL | | }; - | |_____^ help: try this: `Some(42).is_some()` + | |_____^ help: try: `Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:45:5 @@ -58,7 +58,7 @@ LL | / match None::<()> { LL | | Some(_) => false, LL | | None => true, LL | | }; - | |_____^ help: try this: `None::<()>.is_none()` + | |_____^ help: try: `None::<()>.is_none()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:50:13 @@ -68,55 +68,55 @@ LL | let _ = match None::<()> { LL | | Some(_) => false, LL | | None => true, LL | | }; - | |_____^ help: try this: `None::<()>.is_none()` + | |_____^ help: try: `None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:56:20 | LL | let _ = if let Some(_) = opt { true } else { false }; - | -------^^^^^^^------ help: try this: 
`if opt.is_some()` + | -------^^^^^^^------ help: try: `if opt.is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:62:20 | LL | let _ = if let Some(_) = gen_opt() { - | -------^^^^^^^------------ help: try this: `if gen_opt().is_some()` + | -------^^^^^^^------------ help: try: `if gen_opt().is_some()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:64:19 | LL | } else if let None = gen_opt() { - | -------^^^^------------ help: try this: `if gen_opt().is_none()` + | -------^^^^------------ help: try: `if gen_opt().is_none()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:70:12 | LL | if let Some(..) = gen_opt() {} - | -------^^^^^^^^------------ help: try this: `if gen_opt().is_some()` + | -------^^^^^^^^------------ help: try: `if gen_opt().is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:85:12 | LL | if let Some(_) = Some(42) {} - | -------^^^^^^^----------- help: try this: `if Some(42).is_some()` + | -------^^^^^^^----------- help: try: `if Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:87:12 | LL | if let None = None::<()> {} - | -------^^^^------------- help: try this: `if None::<()>.is_none()` + | -------^^^^------------- help: try: `if None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:89:15 | LL | while let Some(_) = Some(42) {} - | ----------^^^^^^^----------- help: try this: `while Some(42).is_some()` + | ----------^^^^^^^----------- help: try: `while Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:91:15 | LL | while let None = None::<()> {} - | ----------^^^^------------- help: try this: `while None::<()>.is_none()` + | ----------^^^^------------- help: try: `while None::<()>.is_none()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:93:5 @@ -125,7 +125,7 @@ LL | / match Some(42) { LL | | Some(_) => true, LL | | None => false, LL | | }; - | |_____^ help: try this: `Some(42).is_some()` + | |_____^ help: try: `Some(42).is_some()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:98:5 @@ -134,19 +134,19 @@ LL | / match None::<()> { LL | | Some(_) => false, LL | | None => true, LL | | }; - | |_____^ help: try this: `None::<()>.is_none()` + | |_____^ help: try: `None::<()>.is_none()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:106:12 | LL | if let None = *(&None::<()>) {} - | -------^^^^----------------- help: try this: `if (&None::<()>).is_none()` + | -------^^^^----------------- help: try: `if (&None::<()>).is_none()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:107:12 | LL | if let None = *&None::<()> {} - | -------^^^^--------------- help: try this: `if (&None::<()>).is_none()` + | -------^^^^--------------- help: try: `if (&None::<()>).is_none()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:113:5 @@ -155,7 +155,7 @@ LL | / match x { LL | | Some(_) => true, LL | | _ => false, LL | | }; - | 
|_____^ help: try this: `x.is_some()` + | |_____^ help: try: `x.is_some()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:118:5 @@ -164,7 +164,7 @@ LL | / match x { LL | | None => true, LL | | _ => false, LL | | }; - | |_____^ help: try this: `x.is_none()` + | |_____^ help: try: `x.is_none()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:123:5 @@ -173,7 +173,7 @@ LL | / match x { LL | | Some(_) => false, LL | | _ => true, LL | | }; - | |_____^ help: try this: `x.is_none()` + | |_____^ help: try: `x.is_none()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:128:5 @@ -182,19 +182,19 @@ LL | / match x { LL | | None => false, LL | | _ => true, LL | | }; - | |_____^ help: try this: `x.is_some()` + | |_____^ help: try: `x.is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_option.rs:143:13 | LL | let _ = matches!(x, Some(_)); - | ^^^^^^^^^^^^^^^^^^^^ help: try this: `x.is_some()` + | ^^^^^^^^^^^^^^^^^^^^ help: try: `x.is_some()` error: redundant pattern matching, consider using `is_none()` --> $DIR/redundant_pattern_matching_option.rs:145:13 | LL | let _ = matches!(x, None); - | ^^^^^^^^^^^^^^^^^ help: try this: `x.is_none()` + | ^^^^^^^^^^^^^^^^^ help: try: `x.is_none()` error: aborting due to 28 previous errors diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr index b89fde35fcf..28d3606c4fb 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_poll.stderr @@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:17:12 | LL | if let Pending = Pending::<()> {} - | -------^^^^^^^---------------- help: try this: `if Pending::<()>.is_pending()` + | -------^^^^^^^---------------- help: try: `if Pending::<()>.is_pending()` | = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings` @@ -10,31 +10,31 @@ error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:19:12 | LL | if let Ready(_) = Ready(42) {} - | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()` + | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:21:12 | LL | if let Ready(_) = Ready(42) { - | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()` + | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:27:15 | LL | while let Ready(_) = Ready(42) {} - | ----------^^^^^^^^------------ help: try this: `while Ready(42).is_ready()` + | ----------^^^^^^^^------------ help: try: `while Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:29:15 | LL | while let Pending = Ready(42) {} - | ----------^^^^^^^------------ help: try this: `while Ready(42).is_pending()` + | ----------^^^^^^^------------ help: try: `while Ready(42).is_pending()` error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:31:15 | LL | 
while let Pending = Pending::<()> {} - | ----------^^^^^^^---------------- help: try this: `while Pending::<()>.is_pending()` + | ----------^^^^^^^---------------- help: try: `while Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:37:5 @@ -43,7 +43,7 @@ LL | / match Ready(42) { LL | | Ready(_) => true, LL | | Pending => false, LL | | }; - | |_____^ help: try this: `Ready(42).is_ready()` + | |_____^ help: try: `Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:42:5 @@ -52,7 +52,7 @@ LL | / match Pending::<()> { LL | | Ready(_) => false, LL | | Pending => true, LL | | }; - | |_____^ help: try this: `Pending::<()>.is_pending()` + | |_____^ help: try: `Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:47:13 @@ -62,49 +62,49 @@ LL | let _ = match Pending::<()> { LL | | Ready(_) => false, LL | | Pending => true, LL | | }; - | |_____^ help: try this: `Pending::<()>.is_pending()` + | |_____^ help: try: `Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:53:20 | LL | let _ = if let Ready(_) = poll { true } else { false }; - | -------^^^^^^^^------- help: try this: `if poll.is_ready()` + | -------^^^^^^^^------- help: try: `if poll.is_ready()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:57:20 | LL | let _ = if let Ready(_) = gen_poll() { - | -------^^^^^^^^------------- help: try this: `if gen_poll().is_ready()` + | -------^^^^^^^^------------- help: try: `if gen_poll().is_ready()` error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:59:19 | LL | } else if let Pending = gen_poll() { - | -------^^^^^^^------------- help: try this: `if gen_poll().is_pending()` + | -------^^^^^^^------------- help: try: `if gen_poll().is_pending()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:75:12 | LL | if let Ready(_) = Ready(42) {} - | -------^^^^^^^^------------ help: try this: `if Ready(42).is_ready()` + | -------^^^^^^^^------------ help: try: `if Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:77:12 | LL | if let Pending = Pending::<()> {} - | -------^^^^^^^---------------- help: try this: `if Pending::<()>.is_pending()` + | -------^^^^^^^---------------- help: try: `if Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:79:15 | LL | while let Ready(_) = Ready(42) {} - | ----------^^^^^^^^------------ help: try this: `while Ready(42).is_ready()` + | ----------^^^^^^^^------------ help: try: `while Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:81:15 | LL | while let Pending = Pending::<()> {} - | ----------^^^^^^^---------------- help: try this: `while Pending::<()>.is_pending()` + | ----------^^^^^^^---------------- help: try: `while Pending::<()>.is_pending()` error: redundant pattern matching, consider using `is_ready()` --> $DIR/redundant_pattern_matching_poll.rs:83:5 @@ -113,7 +113,7 @@ LL | / match Ready(42) { LL | | Ready(_) => true, LL | | Pending 
=> false, LL | | }; - | |_____^ help: try this: `Ready(42).is_ready()` + | |_____^ help: try: `Ready(42).is_ready()` error: redundant pattern matching, consider using `is_pending()` --> $DIR/redundant_pattern_matching_poll.rs:88:5 @@ -122,7 +122,7 @@ LL | / match Pending::<()> { LL | | Ready(_) => false, LL | | Pending => true, LL | | }; - | |_____^ help: try this: `Pending::<()>.is_pending()` + | |_____^ help: try: `Pending::<()>.is_pending()` error: aborting due to 18 previous errors diff --git a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr index f6ce666bb4f..2b1ce9f5465 100644 --- a/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr +++ b/src/tools/clippy/tests/ui/redundant_pattern_matching_result.stderr @@ -2,7 +2,7 @@ error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:16:12 | LL | if let Ok(_) = &result {} - | -------^^^^^---------- help: try this: `if result.is_ok()` + | -------^^^^^---------- help: try: `if result.is_ok()` | = note: `-D clippy::redundant-pattern-matching` implied by `-D warnings` @@ -10,25 +10,25 @@ error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:18:12 | LL | if let Ok(_) = Ok::<i32, i32>(42) {} - | -------^^^^^--------------------- help: try this: `if Ok::<i32, i32>(42).is_ok()` + | -------^^^^^--------------------- help: try: `if Ok::<i32, i32>(42).is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:20:12 | LL | if let Err(_) = Err::<i32, i32>(42) {} - | -------^^^^^^---------------------- help: try this: `if Err::<i32, i32>(42).is_err()` + | -------^^^^^^---------------------- help: try: `if Err::<i32, i32>(42).is_err()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:22:15 | LL | while let Ok(_) = Ok::<i32, i32>(10) {} - | ----------^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_ok()` + | ----------^^^^^--------------------- help: try: `while Ok::<i32, i32>(10).is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:24:15 | LL | while let Err(_) = Ok::<i32, i32>(10) {} - | ----------^^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_err()` + | ----------^^^^^^--------------------- help: try: `while Ok::<i32, i32>(10).is_err()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:34:5 @@ -37,7 +37,7 @@ LL | / match Ok::<i32, i32>(42) { LL | | Ok(_) => true, LL | | Err(_) => false, LL | | }; - | |_____^ help: try this: `Ok::<i32, i32>(42).is_ok()` + | |_____^ help: try: `Ok::<i32, i32>(42).is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:39:5 @@ -46,7 +46,7 @@ LL | / match Ok::<i32, i32>(42) { LL | | Ok(_) => false, LL | | Err(_) => true, LL | | }; - | |_____^ help: try this: `Ok::<i32, i32>(42).is_err()` + | |_____^ help: try: `Ok::<i32, i32>(42).is_err()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:44:5 @@ -55,7 +55,7 @@ LL | / match Err::<i32, i32>(42) { LL | | Ok(_) => false, LL | | Err(_) => true, LL | | }; - | |_____^ help: try this: `Err::<i32, i32>(42).is_err()` + | |_____^ help: try: `Err::<i32, i32>(42).is_err()` error: redundant 
pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:49:5 @@ -64,73 +64,73 @@ LL | / match Err::<i32, i32>(42) { LL | | Ok(_) => true, LL | | Err(_) => false, LL | | }; - | |_____^ help: try this: `Err::<i32, i32>(42).is_ok()` + | |_____^ help: try: `Err::<i32, i32>(42).is_ok()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:54:20 | LL | let _ = if let Ok(_) = Ok::<usize, ()>(4) { true } else { false }; - | -------^^^^^--------------------- help: try this: `if Ok::<usize, ()>(4).is_ok()` + | -------^^^^^--------------------- help: try: `if Ok::<usize, ()>(4).is_ok()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:62:20 | LL | let _ = if let Ok(_) = gen_res() { - | -------^^^^^------------ help: try this: `if gen_res().is_ok()` + | -------^^^^^------------ help: try: `if gen_res().is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:64:19 | LL | } else if let Err(_) = gen_res() { - | -------^^^^^^------------ help: try this: `if gen_res().is_err()` + | -------^^^^^^------------ help: try: `if gen_res().is_err()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_result.rs:87:19 | LL | while let Some(_) = r#try!(result_opt()) {} - | ----------^^^^^^^----------------------- help: try this: `while r#try!(result_opt()).is_some()` + | ----------^^^^^^^----------------------- help: try: `while r#try!(result_opt()).is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_result.rs:88:16 | LL | if let Some(_) = r#try!(result_opt()) {} - | -------^^^^^^^----------------------- help: try this: `if r#try!(result_opt()).is_some()` + | -------^^^^^^^----------------------- help: try: `if r#try!(result_opt()).is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_result.rs:94:12 | LL | if let Some(_) = m!() {} - | -------^^^^^^^------- help: try this: `if m!().is_some()` + | -------^^^^^^^------- help: try: `if m!().is_some()` error: redundant pattern matching, consider using `is_some()` --> $DIR/redundant_pattern_matching_result.rs:95:15 | LL | while let Some(_) = m!() {} - | ----------^^^^^^^------- help: try this: `while m!().is_some()` + | ----------^^^^^^^------- help: try: `while m!().is_some()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:113:12 | LL | if let Ok(_) = Ok::<i32, i32>(42) {} - | -------^^^^^--------------------- help: try this: `if Ok::<i32, i32>(42).is_ok()` + | -------^^^^^--------------------- help: try: `if Ok::<i32, i32>(42).is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:115:12 | LL | if let Err(_) = Err::<i32, i32>(42) {} - | -------^^^^^^---------------------- help: try this: `if Err::<i32, i32>(42).is_err()` + | -------^^^^^^---------------------- help: try: `if Err::<i32, i32>(42).is_err()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:117:15 | LL | while let Ok(_) = Ok::<i32, i32>(10) {} - | ----------^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_ok()` + | ----------^^^^^--------------------- help: try: `while Ok::<i32, i32>(10).is_ok()` error: redundant pattern matching, consider using `is_err()` 
--> $DIR/redundant_pattern_matching_result.rs:119:15 | LL | while let Err(_) = Ok::<i32, i32>(10) {} - | ----------^^^^^^--------------------- help: try this: `while Ok::<i32, i32>(10).is_err()` + | ----------^^^^^^--------------------- help: try: `while Ok::<i32, i32>(10).is_err()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:121:5 @@ -139,7 +139,7 @@ LL | / match Ok::<i32, i32>(42) { LL | | Ok(_) => true, LL | | Err(_) => false, LL | | }; - | |_____^ help: try this: `Ok::<i32, i32>(42).is_ok()` + | |_____^ help: try: `Ok::<i32, i32>(42).is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:126:5 @@ -148,7 +148,7 @@ LL | / match Err::<i32, i32>(42) { LL | | Ok(_) => false, LL | | Err(_) => true, LL | | }; - | |_____^ help: try this: `Err::<i32, i32>(42).is_err()` + | |_____^ help: try: `Err::<i32, i32>(42).is_err()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:136:5 @@ -157,7 +157,7 @@ LL | / match x { LL | | Ok(_) => true, LL | | _ => false, LL | | }; - | |_____^ help: try this: `x.is_ok()` + | |_____^ help: try: `x.is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:141:5 @@ -166,7 +166,7 @@ LL | / match x { LL | | Ok(_) => false, LL | | _ => true, LL | | }; - | |_____^ help: try this: `x.is_err()` + | |_____^ help: try: `x.is_err()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:146:5 @@ -175,7 +175,7 @@ LL | / match x { LL | | Err(_) => true, LL | | _ => false, LL | | }; - | |_____^ help: try this: `x.is_err()` + | |_____^ help: try: `x.is_err()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:151:5 @@ -184,19 +184,19 @@ LL | / match x { LL | | Err(_) => false, LL | | _ => true, LL | | }; - | |_____^ help: try this: `x.is_ok()` + | |_____^ help: try: `x.is_ok()` error: redundant pattern matching, consider using `is_ok()` --> $DIR/redundant_pattern_matching_result.rs:172:13 | LL | let _ = matches!(x, Ok(_)); - | ^^^^^^^^^^^^^^^^^^ help: try this: `x.is_ok()` + | ^^^^^^^^^^^^^^^^^^ help: try: `x.is_ok()` error: redundant pattern matching, consider using `is_err()` --> $DIR/redundant_pattern_matching_result.rs:174:13 | LL | let _ = matches!(x, Err(_)); - | ^^^^^^^^^^^^^^^^^^^ help: try this: `x.is_err()` + | ^^^^^^^^^^^^^^^^^^^ help: try: `x.is_err()` error: aborting due to 28 previous errors diff --git a/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr b/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr index eb36cd516a2..016feb103df 100644 --- a/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr +++ b/src/tools/clippy/tests/ui/ref_binding_to_reference.stderr @@ -5,7 +5,7 @@ LL | Some(ref x) => x, | ^^^^^ | = note: `-D clippy::ref-binding-to-reference` implied by `-D warnings` -help: try this +help: try | LL | Some(x) => &x, | ~ ~~ @@ -16,7 +16,7 @@ error: this pattern creates a reference to a reference LL | Some(ref x) => { | ^^^^^ | -help: try this +help: try | LL ~ Some(x) => { LL | f1(x); @@ -30,7 +30,7 @@ error: this pattern creates a reference to a reference LL | Some(ref x) => m2!(x), | ^^^^^ | -help: try this +help: try | LL | Some(x) => m2!(&x), | ~ ~~ @@ -41,7 +41,7 @@ error: this pattern creates a reference to a reference LL | let _ = |&ref x: &&String| { | ^^^^^ | -help: try this +help: try | 
LL ~ let _ = |&x: &&String| { LL ~ let _: &&String = &x; @@ -53,7 +53,7 @@ error: this pattern creates a reference to a reference LL | fn f2<'a>(&ref x: &&'a String) -> &'a String { | ^^^^^ | -help: try this +help: try | LL ~ fn f2<'a>(&x: &&'a String) -> &'a String { LL ~ let _: &&String = &x; @@ -66,7 +66,7 @@ error: this pattern creates a reference to a reference LL | fn f(&ref x: &&String) { | ^^^^^ | -help: try this +help: try | LL ~ fn f(&x: &&String) { LL ~ let _: &&String = &x; @@ -78,7 +78,7 @@ error: this pattern creates a reference to a reference LL | fn f(&ref x: &&String) { | ^^^^^ | -help: try this +help: try | LL ~ fn f(&x: &&String) { LL ~ let _: &&String = &x; diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr index 782febd5264..ad941fa8bcc 100644 --- a/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr +++ b/src/tools/clippy/tests/ui/result_map_unit_fn_fixable.stderr @@ -4,7 +4,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns LL | x.field.map(do_nothing); | ^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(x_field) = x.field { do_nothing(x_field) }` + | help: try: `if let Ok(x_field) = x.field { do_nothing(x_field) }` | = note: `-D clippy::result-map-unit-fn` implied by `-D warnings` @@ -14,7 +14,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns LL | x.field.map(do_nothing); | ^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(x_field) = x.field { do_nothing(x_field) }` + | help: try: `if let Ok(x_field) = x.field { do_nothing(x_field) }` error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:39:5 @@ -22,7 +22,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns LL | x.field.map(diverge); | ^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(x_field) = x.field { diverge(x_field) }` + | help: try: `if let Ok(x_field) = x.field { diverge(x_field) }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:45:5 @@ -30,7 +30,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| x.do_result_nothing(value + captured)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { x.do_result_nothing(value + captured) }` + | help: try: `if let Ok(value) = x.field { x.do_result_nothing(value + captured) }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:47:5 @@ -38,7 +38,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { x.do_result_plus_one(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { x.do_result_plus_one(value + captured); }` + | help: try: `if let Ok(value) = x.field { x.do_result_plus_one(value + captured); }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:50:5 @@ -46,7 +46,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| do_nothing(value + captured)); | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured) }` + | help: try: `if let Ok(value) = x.field { do_nothing(value + captured) }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:52:5 @@ -54,7 +54,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { do_nothing(value + captured) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured) }` + | help: try: `if let Ok(value) = x.field { do_nothing(value + captured) }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:54:5 @@ -62,7 +62,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { do_nothing(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured); }` + | help: try: `if let Ok(value) = x.field { do_nothing(value + captured); }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:56:5 @@ -70,7 +70,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { { do_nothing(value + captured); } }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { do_nothing(value + captured); }` + | help: try: `if let Ok(value) = x.field { do_nothing(value + captured); }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:59:5 @@ -78,7 +78,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| diverge(value + captured)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { diverge(value + captured) }` + | help: try: `if let Ok(value) = x.field { diverge(value + captured) }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:61:5 @@ -86,7 +86,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { diverge(value + captured) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { diverge(value + captured) }` + | help: try: `if let Ok(value) = x.field { diverge(value + captured) }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:63:5 @@ -94,7 +94,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { diverge(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { diverge(value + captured); }` + | help: try: `if let Ok(value) = x.field { diverge(value + captured); }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:65:5 @@ -102,7 +102,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that 
returns LL | x.field.map(|value| { { diverge(value + captured); } }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { diverge(value + captured); }` + | help: try: `if let Ok(value) = x.field { diverge(value + captured); }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:70:5 @@ -110,7 +110,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { let y = plus_one(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { let y = plus_one(value + captured); }` + | help: try: `if let Ok(value) = x.field { let y = plus_one(value + captured); }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:72:5 @@ -118,7 +118,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { plus_one(value + captured); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { plus_one(value + captured); }` + | help: try: `if let Ok(value) = x.field { plus_one(value + captured); }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:74:5 @@ -126,7 +126,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { { plus_one(value + captured); } }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { plus_one(value + captured); }` + | help: try: `if let Ok(value) = x.field { plus_one(value + captured); }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:77:5 @@ -134,7 +134,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|ref value| { do_nothing(value + captured) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(ref value) = x.field { do_nothing(value + captured) }` + | help: try: `if let Ok(ref value) = x.field { do_nothing(value + captured) }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_fixable.rs:79:5 @@ -142,7 +142,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| println!("{:?}", value)); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { println!("{:?}", value) }` + | help: try: `if let Ok(value) = x.field { println!("{:?}", value) }` error: aborting due to 18 previous errors diff --git a/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr index d0e534f6356..75ec1ba8024 100644 --- a/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr +++ b/src/tools/clippy/tests/ui/result_map_unit_fn_unfixable.stderr @@ -4,7 +4,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { do_nothing(value); do_nothing(value) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field 
{ ... }` + | help: try: `if let Ok(value) = x.field { ... }` | = note: `-D clippy::result-map-unit-fn` implied by `-D warnings` @@ -14,7 +14,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| if value > 0 { do_nothing(value); do_nothing(value) }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { ... }` + | help: try: `if let Ok(value) = x.field { ... }` error: called `map(f)` on an `Result` value where `f` is a closure that returns the unit type `()` --> $DIR/result_map_unit_fn_unfixable.rs:29:5 @@ -23,7 +23,7 @@ LL | // x.field.map(|value| { LL | || do_nothing(value); LL | || do_nothing(value) LL | || }); - | ||______^- help: try this: `if let Ok(value) = x.field { ... }` + | ||______^- help: try: `if let Ok(value) = x.field { ... }` | |______| | @@ -33,7 +33,7 @@ error: called `map(f)` on an `Result` value where `f` is a closure that returns LL | x.field.map(|value| { do_nothing(value); do_nothing(value); }); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(value) = x.field { ... }` + | help: try: `if let Ok(value) = x.field { ... }` error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()` --> $DIR/result_map_unit_fn_unfixable.rs:37:5 @@ -41,7 +41,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns LL | "12".parse::<i32>().map(diverge); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(a) = "12".parse::<i32>() { diverge(a) }` + | help: try: `if let Ok(a) = "12".parse::<i32>() { diverge(a) }` error: called `map(f)` on an `Result` value where `f` is a function that returns the unit type `()` --> $DIR/result_map_unit_fn_unfixable.rs:43:5 @@ -49,7 +49,7 @@ error: called `map(f)` on an `Result` value where `f` is a function that returns LL | y.map(do_nothing); | ^^^^^^^^^^^^^^^^^- | | - | help: try this: `if let Ok(_y) = y { do_nothing(_y) }` + | help: try: `if let Ok(_y) = y { do_nothing(_y) }` error: aborting due to 6 previous errors diff --git a/src/tools/clippy/tests/ui/self_assignment.rs b/src/tools/clippy/tests/ui/self_assignment.rs index ec3ae120942..d6682cc63dc 100644 --- a/src/tools/clippy/tests/ui/self_assignment.rs +++ b/src/tools/clippy/tests/ui/self_assignment.rs @@ -1,5 +1,5 @@ #![warn(clippy::self_assignment)] -#![allow(clippy::useless_vec)] +#![allow(clippy::useless_vec, clippy::needless_pass_by_ref_mut)] pub struct S<'a> { a: i32, diff --git a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs index 4ab7dbab59c..8e7f1d862cf 100644 --- a/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs +++ b/src/tools/clippy/tests/ui/semicolon_if_nothing_returned.rs @@ -116,5 +116,7 @@ fn function_returning_option() -> Option<i32> { // No warning fn let_else_stmts() { - let Some(x) = function_returning_option() else { return; }; + let Some(x) = function_returning_option() else { + return; + }; } diff --git a/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr index 10bfea68ff5..2ae9fa34d14 100644 --- a/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr +++ b/src/tools/clippy/tests/ui/should_impl_trait/method_list_2.stderr @@ -39,6 +39,15 @@ LL | | } | = help: consider implementing the trait `std::hash::Hash` or choosing a less ambiguous 
method name +error: this argument is a mutable reference, but not used mutably + --> $DIR/method_list_2.rs:38:31 + | +LL | pub fn hash(&self, state: &mut T) { + | ^^^^^^ help: consider changing to: `&T` + | + = warning: changing this function will impact semver compatibility + = note: `-D clippy::needless-pass-by-ref-mut` implied by `-D warnings` + error: method `index` can be confused for the standard trait method `std::ops::Index::index` --> $DIR/method_list_2.rs:42:5 | @@ -149,5 +158,5 @@ LL | | } | = help: consider implementing the trait `std::ops::Sub` or choosing a less ambiguous method name -error: aborting due to 15 previous errors +error: aborting due to 16 previous errors diff --git a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs index 8c48b21f188..17df9f88fff 100644 --- a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs +++ b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.rs @@ -7,8 +7,7 @@ use std::num::ParseIntError; use std::ops::Deref; use std::sync::atomic::{AtomicU64, Ordering}; -use std::sync::RwLock; -use std::sync::{Mutex, MutexGuard}; +use std::sync::{Mutex, MutexGuard, RwLock}; struct State {} diff --git a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr index 75063a8c987..b56ace200a8 100644 --- a/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr +++ b/src/tools/clippy/tests/ui/significant_drop_in_scrutinee.stderr @@ -1,5 +1,5 @@ error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:56:11 + --> $DIR/significant_drop_in_scrutinee.rs:55:11 | LL | match mutex.lock().unwrap().foo() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -19,7 +19,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:142:11 + --> $DIR/significant_drop_in_scrutinee.rs:141:11 | LL | match s.lock_m().get_the_value() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -38,7 +38,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:163:11 + --> $DIR/significant_drop_in_scrutinee.rs:162:11 | LL | match s.lock_m_m().get_the_value() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -57,7 +57,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:211:11 + --> $DIR/significant_drop_in_scrutinee.rs:210:11 | LL | match counter.temp_increment().len() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -73,7 +73,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:234:16 + --> $DIR/significant_drop_in_scrutinee.rs:233:16 | LL | match (mutex1.lock().unwrap().s.len(), true) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -92,7 +92,7 @@ LL ~ match (value, true) { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:243:22 + --> $DIR/significant_drop_in_scrutinee.rs:242:22 | LL | match (true, mutex1.lock().unwrap().s.len(), true) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -111,7 +111,7 @@ LL ~ match 
(true, value, true) { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:253:16 + --> $DIR/significant_drop_in_scrutinee.rs:252:16 | LL | match (mutex1.lock().unwrap().s.len(), true, mutex2.lock().unwrap().s.len()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -132,7 +132,7 @@ LL ~ match (value, true, mutex2.lock().unwrap().s.len()) { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:253:54 + --> $DIR/significant_drop_in_scrutinee.rs:252:54 | LL | match (mutex1.lock().unwrap().s.len(), true, mutex2.lock().unwrap().s.len()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -153,7 +153,7 @@ LL ~ match (mutex1.lock().unwrap().s.len(), true, value) { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:264:15 + --> $DIR/significant_drop_in_scrutinee.rs:263:15 | LL | match mutex3.lock().unwrap().s.as_str() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -169,7 +169,7 @@ LL | }; = note: this might lead to deadlocks or other unexpected behavior error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:274:22 + --> $DIR/significant_drop_in_scrutinee.rs:273:22 | LL | match (true, mutex3.lock().unwrap().s.as_str()) { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -185,7 +185,7 @@ LL | }; = note: this might lead to deadlocks or other unexpected behavior error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:293:11 + --> $DIR/significant_drop_in_scrutinee.rs:292:11 | LL | match mutex.lock().unwrap().s.len() > 1 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -204,7 +204,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:300:11 + --> $DIR/significant_drop_in_scrutinee.rs:299:11 | LL | match 1 < mutex.lock().unwrap().s.len() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -223,7 +223,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:318:11 + --> $DIR/significant_drop_in_scrutinee.rs:317:11 | LL | match mutex1.lock().unwrap().s.len() < mutex2.lock().unwrap().s.len() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -244,7 +244,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:329:11 + --> $DIR/significant_drop_in_scrutinee.rs:328:11 | LL | match mutex1.lock().unwrap().s.len() >= mutex2.lock().unwrap().s.len() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -265,7 +265,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:364:11 + --> $DIR/significant_drop_in_scrutinee.rs:363:11 | LL | match get_mutex_guard().s.len() > 1 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -284,7 +284,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the 
`match` expression - --> $DIR/significant_drop_in_scrutinee.rs:381:11 + --> $DIR/significant_drop_in_scrutinee.rs:380:11 | LL | match match i { | ___________^ @@ -316,7 +316,7 @@ LL ~ match value | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:407:11 + --> $DIR/significant_drop_in_scrutinee.rs:406:11 | LL | match if i > 1 { | ___________^ @@ -349,7 +349,7 @@ LL ~ match value | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:461:11 + --> $DIR/significant_drop_in_scrutinee.rs:460:11 | LL | match s.lock().deref().deref() { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -367,7 +367,7 @@ LL ~ match value { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:489:11 + --> $DIR/significant_drop_in_scrutinee.rs:488:11 | LL | match s.lock().deref().deref() { | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -380,7 +380,7 @@ LL | }; = note: this might lead to deadlocks or other unexpected behavior error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:508:11 + --> $DIR/significant_drop_in_scrutinee.rs:507:11 | LL | match mutex.lock().unwrap().i = i { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -399,7 +399,7 @@ LL ~ match () { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:514:11 + --> $DIR/significant_drop_in_scrutinee.rs:513:11 | LL | match i = mutex.lock().unwrap().i { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -418,7 +418,7 @@ LL ~ match () { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:520:11 + --> $DIR/significant_drop_in_scrutinee.rs:519:11 | LL | match mutex.lock().unwrap().i += 1 { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -437,7 +437,7 @@ LL ~ match () { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:526:11 + --> $DIR/significant_drop_in_scrutinee.rs:525:11 | LL | match i += mutex.lock().unwrap().i { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -456,7 +456,7 @@ LL ~ match () { | error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:589:11 + --> $DIR/significant_drop_in_scrutinee.rs:588:11 | LL | match rwlock.read().unwrap().to_number() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -467,7 +467,7 @@ LL | }; = note: this might lead to deadlocks or other unexpected behavior error: temporary with significant `Drop` in `for` loop condition will live until the end of the `for` expression - --> $DIR/significant_drop_in_scrutinee.rs:599:14 + --> $DIR/significant_drop_in_scrutinee.rs:598:14 | LL | for s in rwlock.read().unwrap().iter() { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -478,7 +478,7 @@ LL | } = note: this might lead to deadlocks or other unexpected behavior error: temporary with significant `Drop` in `match` scrutinee will live until the end of the `match` expression - --> $DIR/significant_drop_in_scrutinee.rs:614:11 + --> $DIR/significant_drop_in_scrutinee.rs:613:11 | LL | match mutex.lock().unwrap().foo() { | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.fixed b/src/tools/clippy/tests/ui/significant_drop_tightening.fixed index 7b848ead784..eb8524167c4 100644 --- a/src/tools/clippy/tests/ui/significant_drop_tightening.fixed +++ b/src/tools/clippy/tests/ui/significant_drop_tightening.fixed @@ -28,6 +28,29 @@ pub fn issue_10413() { } } +pub fn issue_11128() { + use std::mem::drop as unlock; + + struct Foo { + droppable: Option<Vec<i32>>, + mutex: Mutex<Vec<i32>>, + } + + impl Drop for Foo { + fn drop(&mut self) { + if let Some(droppable) = self.droppable.take() { + let lock = self.mutex.lock().unwrap(); + let idx_opt = lock.iter().copied().find(|el| Some(el) == droppable.first()); + if let Some(idx) = idx_opt { + let local_droppable = vec![lock.first().copied().unwrap_or_default()]; + unlock(lock); + drop(local_droppable); + } + } + } + } +} + pub fn path_return_can_be_ignored() -> i32 { let mutex = Mutex::new(1); let lock = mutex.lock().unwrap(); diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.rs b/src/tools/clippy/tests/ui/significant_drop_tightening.rs index 36f77cf1bdb..f7fa65ea922 100644 --- a/src/tools/clippy/tests/ui/significant_drop_tightening.rs +++ b/src/tools/clippy/tests/ui/significant_drop_tightening.rs @@ -27,6 +27,29 @@ pub fn issue_10413() { } } +pub fn issue_11128() { + use std::mem::drop as unlock; + + struct Foo { + droppable: Option<Vec<i32>>, + mutex: Mutex<Vec<i32>>, + } + + impl Drop for Foo { + fn drop(&mut self) { + if let Some(droppable) = self.droppable.take() { + let lock = self.mutex.lock().unwrap(); + let idx_opt = lock.iter().copied().find(|el| Some(el) == droppable.first()); + if let Some(idx) = idx_opt { + let local_droppable = vec![lock.first().copied().unwrap_or_default()]; + unlock(lock); + drop(local_droppable); + } + } + } + } +} + pub fn path_return_can_be_ignored() -> i32 { let mutex = Mutex::new(1); let lock = mutex.lock().unwrap(); diff --git a/src/tools/clippy/tests/ui/significant_drop_tightening.stderr b/src/tools/clippy/tests/ui/significant_drop_tightening.stderr index 3bdac0b0a6b..ca4fede17c9 100644 --- a/src/tools/clippy/tests/ui/significant_drop_tightening.stderr +++ b/src/tools/clippy/tests/ui/significant_drop_tightening.stderr @@ -23,7 +23,7 @@ LL + drop(lock); | error: temporary with significant `Drop` can be early dropped - --> $DIR/significant_drop_tightening.rs:56:13 + --> $DIR/significant_drop_tightening.rs:79:13 | LL | / { LL | | let mutex = Mutex::new(1i32); @@ -43,7 +43,7 @@ LL + drop(lock); | error: temporary with significant `Drop` can be early dropped - --> $DIR/significant_drop_tightening.rs:77:13 + --> $DIR/significant_drop_tightening.rs:100:13 | LL | / { LL | | let mutex = Mutex::new(1i32); @@ -67,7 +67,7 @@ LL + | error: temporary with significant `Drop` can be early dropped - --> $DIR/significant_drop_tightening.rs:83:17 + --> $DIR/significant_drop_tightening.rs:106:17 | LL | / { LL | | let mutex = Mutex::new(vec![1i32]); diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.fixed b/src/tools/clippy/tests/ui/single_component_path_imports.fixed index d4d2cbbe57a..b6b6b0288c5 100644 --- a/src/tools/clippy/tests/ui/single_component_path_imports.fixed +++ b/src/tools/clippy/tests/ui/single_component_path_imports.fixed @@ -4,8 +4,12 @@ use core; + + use serde as edres; + pub use serde; + use std; macro_rules! 
m { diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.rs b/src/tools/clippy/tests/ui/single_component_path_imports.rs index 80d72115f43..a8c4d899085 100644 --- a/src/tools/clippy/tests/ui/single_component_path_imports.rs +++ b/src/tools/clippy/tests/ui/single_component_path_imports.rs @@ -3,9 +3,13 @@ #![allow(unused_imports)] use core; + use regex; + use serde as edres; + pub use serde; + use std; macro_rules! m { diff --git a/src/tools/clippy/tests/ui/single_component_path_imports.stderr b/src/tools/clippy/tests/ui/single_component_path_imports.stderr index d69a86470a5..853a2fe0e7b 100644 --- a/src/tools/clippy/tests/ui/single_component_path_imports.stderr +++ b/src/tools/clippy/tests/ui/single_component_path_imports.stderr @@ -1,5 +1,5 @@ error: this import is redundant - --> $DIR/single_component_path_imports.rs:6:1 + --> $DIR/single_component_path_imports.rs:7:1 | LL | use regex; | ^^^^^^^^^^ help: remove it entirely @@ -7,7 +7,7 @@ LL | use regex; = note: `-D clippy::single-component-path-imports` implied by `-D warnings` error: this import is redundant - --> $DIR/single_component_path_imports.rs:29:5 + --> $DIR/single_component_path_imports.rs:33:5 | LL | use regex; | ^^^^^^^^^^ help: remove it entirely diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs index c75beb74786..d6243c19f55 100644 --- a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs +++ b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.rs @@ -2,7 +2,9 @@ #![allow(unused_imports)] use regex; + use serde as edres; + pub use serde; fn main() { diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr index 330f285202d..ff148355e12 100644 --- a/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr +++ b/src/tools/clippy/tests/ui/single_component_path_imports_nested_first.stderr @@ -7,7 +7,7 @@ LL | use regex; = note: `-D clippy::single-component-path-imports` implied by `-D warnings` error: this import is redundant - --> $DIR/single_component_path_imports_nested_first.rs:13:10 + --> $DIR/single_component_path_imports_nested_first.rs:15:10 | LL | use {regex, serde}; | ^^^^^ @@ -15,7 +15,7 @@ LL | use {regex, serde}; = help: remove this import error: this import is redundant - --> $DIR/single_component_path_imports_nested_first.rs:13:17 + --> $DIR/single_component_path_imports_nested_first.rs:15:17 | LL | use {regex, serde}; | ^^^^^ diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs b/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs index 48e8e530261..5723d480a2e 100644 --- a/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs +++ b/src/tools/clippy/tests/ui/single_component_path_imports_self_after.rs @@ -2,6 +2,7 @@ #![allow(unused_imports)] use self::regex::{Regex as xeger, RegexSet as tesxeger}; +#[rustfmt::skip] pub use self::{ regex::{Regex, RegexSet}, some_mod::SomeType, diff --git a/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs b/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs index 4fb0cf40b6e..8a4fbf0dc5b 100644 --- a/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs +++ b/src/tools/clippy/tests/ui/single_component_path_imports_self_before.rs @@ -4,6 +4,7 @@ use regex; use 
self::regex::{Regex as xeger, RegexSet as tesxeger}; +#[rustfmt::skip] pub use self::{ regex::{Regex, RegexSet}, some_mod::SomeType, diff --git a/src/tools/clippy/tests/ui/single_match.stderr b/src/tools/clippy/tests/ui/single_match.stderr index ef901513240..76f7e789589 100644 --- a/src/tools/clippy/tests/ui/single_match.stderr +++ b/src/tools/clippy/tests/ui/single_match.stderr @@ -10,7 +10,7 @@ LL | | }; | |_____^ | = note: `-D clippy::single-match` implied by `-D warnings` -help: try this +help: try | LL ~ if let Some(y) = x { LL + println!("{:?}", y); @@ -27,7 +27,7 @@ LL | | // is expanded before we can do anything. LL | | Some(y) => println!("{:?}", y), LL | | _ => (), LL | | } - | |_____^ help: try this: `if let Some(y) = x { println!("{:?}", y) }` + | |_____^ help: try: `if let Some(y) = x { println!("{:?}", y) }` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:31:5 @@ -36,7 +36,7 @@ LL | / match z { LL | | (2..=3, 7..=9) => dummy(), LL | | _ => {}, LL | | }; - | |_____^ help: try this: `if let (2..=3, 7..=9) = z { dummy() }` + | |_____^ help: try: `if let (2..=3, 7..=9) = z { dummy() }` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:60:5 @@ -45,7 +45,7 @@ LL | / match x { LL | | Some(y) => dummy(), LL | | None => (), LL | | }; - | |_____^ help: try this: `if let Some(y) = x { dummy() }` + | |_____^ help: try: `if let Some(y) = x { dummy() }` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:65:5 @@ -54,7 +54,7 @@ LL | / match y { LL | | Ok(y) => dummy(), LL | | Err(..) => (), LL | | }; - | |_____^ help: try this: `if let Ok(y) = y { dummy() }` + | |_____^ help: try: `if let Ok(y) = y { dummy() }` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:72:5 @@ -63,7 +63,7 @@ LL | / match c { LL | | Cow::Borrowed(..) => dummy(), LL | | Cow::Owned(..) => (), LL | | }; - | |_____^ help: try this: `if let Cow::Borrowed(..) = c { dummy() }` + | |_____^ help: try: `if let Cow::Borrowed(..) = c { dummy() }` error: you seem to be trying to use `match` for an equality check. Consider using `if` --> $DIR/single_match.rs:93:5 @@ -72,7 +72,7 @@ LL | / match x { LL | | "test" => println!(), LL | | _ => (), LL | | } - | |_____^ help: try this: `if x == "test" { println!() }` + | |_____^ help: try: `if x == "test" { println!() }` error: you seem to be trying to use `match` for an equality check. Consider using `if` --> $DIR/single_match.rs:106:5 @@ -81,7 +81,7 @@ LL | / match x { LL | | Foo::A => println!(), LL | | _ => (), LL | | } - | |_____^ help: try this: `if x == Foo::A { println!() }` + | |_____^ help: try: `if x == Foo::A { println!() }` error: you seem to be trying to use `match` for an equality check. Consider using `if` --> $DIR/single_match.rs:112:5 @@ -90,7 +90,7 @@ LL | / match x { LL | | FOO_C => println!(), LL | | _ => (), LL | | } - | |_____^ help: try this: `if x == FOO_C { println!() }` + | |_____^ help: try: `if x == FOO_C { println!() }` error: you seem to be trying to use `match` for an equality check. 
Consider using `if` --> $DIR/single_match.rs:117:5 @@ -99,7 +99,7 @@ LL | / match &&x { LL | | Foo::A => println!(), LL | | _ => (), LL | | } - | |_____^ help: try this: `if x == Foo::A { println!() }` + | |_____^ help: try: `if x == Foo::A { println!() }` error: you seem to be trying to use `match` for an equality check. Consider using `if` --> $DIR/single_match.rs:123:5 @@ -108,7 +108,7 @@ LL | / match &x { LL | | Foo::A => println!(), LL | | _ => (), LL | | } - | |_____^ help: try this: `if x == &Foo::A { println!() }` + | |_____^ help: try: `if x == &Foo::A { println!() }` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:140:5 @@ -117,7 +117,7 @@ LL | / match x { LL | | Bar::A => println!(), LL | | _ => (), LL | | } - | |_____^ help: try this: `if let Bar::A = x { println!() }` + | |_____^ help: try: `if let Bar::A = x { println!() }` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:148:5 @@ -126,7 +126,7 @@ LL | / match x { LL | | None => println!(), LL | | _ => (), LL | | }; - | |_____^ help: try this: `if let None = x { println!() }` + | |_____^ help: try: `if let None = x { println!() }` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:170:5 @@ -135,7 +135,7 @@ LL | / match x { LL | | (Some(_), _) => {}, LL | | (None, _) => {}, LL | | } - | |_____^ help: try this: `if let (Some(_), _) = x {}` + | |_____^ help: try: `if let (Some(_), _) = x {}` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:176:5 @@ -144,7 +144,7 @@ LL | / match x { LL | | (Some(E::V), _) => todo!(), LL | | (_, _) => {}, LL | | } - | |_____^ help: try this: `if let (Some(E::V), _) = x { todo!() }` + | |_____^ help: try: `if let (Some(E::V), _) = x { todo!() }` error: you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let` --> $DIR/single_match.rs:182:5 @@ -153,7 +153,7 @@ LL | / match (Some(42), Some(E::V), Some(42)) { LL | | (.., Some(E::V), _) => {}, LL | | (..) => {}, LL | | } - | |_____^ help: try this: `if let (.., Some(E::V), _) = (Some(42), Some(E::V), Some(42)) {}` + | |_____^ help: try: `if let (.., Some(E::V), _) = (Some(42), Some(E::V), Some(42)) {}` error: you seem to be trying to use `match` for destructuring a single pattern. 
Consider using `if let` --> $DIR/single_match.rs:254:5 @@ -167,7 +167,7 @@ LL | | _ => {}, LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Some(v) = bar { unsafe { LL + let r = &v as *const i32; @@ -187,7 +187,7 @@ LL | | _ => {}, LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Some(v) = bar { LL + unsafe { diff --git a/src/tools/clippy/tests/ui/single_match_else.stderr b/src/tools/clippy/tests/ui/single_match_else.stderr index 228236f3bb8..5e7d4062efe 100644 --- a/src/tools/clippy/tests/ui/single_match_else.stderr +++ b/src/tools/clippy/tests/ui/single_match_else.stderr @@ -12,7 +12,7 @@ LL | | }; | |_____^ | = note: `-D clippy::single-match-else` implied by `-D warnings` -help: try this +help: try | LL ~ let _ = if let ExprNode::ExprAddrOf = ExprNode::Butterflies { Some(&NODE) } else { LL + let x = 5; @@ -32,7 +32,7 @@ LL | | }, LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Some(a) = Some(1) { println!("${:?}", a) } else { LL + println!("else block"); @@ -52,7 +52,7 @@ LL | | }, LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Some(a) = Some(1) { println!("${:?}", a) } else { LL + println!("else block"); @@ -72,7 +72,7 @@ LL | | } LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Ok(a) = Result::<i32, Infallible>::Ok(1) { println!("${:?}", a) } else { LL + println!("else block"); @@ -92,7 +92,7 @@ LL | | } LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Cow::Owned(a) = Cow::from("moo") { println!("${:?}", a) } else { LL + println!("else block"); @@ -112,7 +112,7 @@ LL | | }, LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Some(v) = bar { unsafe { LL + let r = &v as *const i32; @@ -135,7 +135,7 @@ LL | | }, LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Some(v) = bar { LL + println!("Some"); @@ -159,7 +159,7 @@ LL | | }, LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Some(v) = bar { unsafe { LL + let r = &v as *const i32; @@ -183,7 +183,7 @@ LL | | }, LL | | } | |_____^ | -help: try this +help: try | LL ~ if let Some(v) = bar { LL + unsafe { diff --git a/src/tools/clippy/tests/ui/slow_vector_initialization.stderr b/src/tools/clippy/tests/ui/slow_vector_initialization.stderr index cb3ce3e95a7..22376680a8e 100644 --- a/src/tools/clippy/tests/ui/slow_vector_initialization.stderr +++ b/src/tools/clippy/tests/ui/slow_vector_initialization.stderr @@ -72,5 +72,13 @@ LL | vec1 = Vec::with_capacity(10); LL | vec1.resize(10, 0); | ^^^^^^^^^^^^^^^^^^ -error: aborting due to 9 previous errors +error: this argument is a mutable reference, but not used mutably + --> $DIR/slow_vector_initialization.rs:62:18 + | +LL | fn do_stuff(vec: &mut [u8]) {} + | ^^^^^^^^^ help: consider changing to: `&[u8]` + | + = note: `-D clippy::needless-pass-by-ref-mut` implied by `-D warnings` + +error: aborting due to 10 previous errors diff --git a/src/tools/clippy/tests/ui/string_extend.stderr b/src/tools/clippy/tests/ui/string_extend.stderr index b35c77fd961..34b43290147 100644 --- a/src/tools/clippy/tests/ui/string_extend.stderr +++ b/src/tools/clippy/tests/ui/string_extend.stderr @@ -2,7 +2,7 @@ error: calling `.extend(_.chars())` --> $DIR/string_extend.rs:18:5 | LL | s.extend(abc.chars()); - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str(abc)` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `s.push_str(abc)` | = note: `-D clippy::string-extend-chars` implied by `-D warnings` @@ -10,19 +10,19 @@ error: calling `.extend(_.chars())` --> $DIR/string_extend.rs:21:5 | LL | s.extend("abc".chars()); - | 
^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str("abc")` + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.push_str("abc")` error: calling `.extend(_.chars())` --> $DIR/string_extend.rs:24:5 | LL | s.extend(def.chars()); - | ^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str(&def)` + | ^^^^^^^^^^^^^^^^^^^^^ help: try: `s.push_str(&def)` error: calling `.extend(_.chars())` --> $DIR/string_extend.rs:34:5 | LL | s.extend(abc[0..2].chars()); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `s.push_str(&abc[0..2])` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `s.push_str(&abc[0..2])` error: aborting due to 4 previous errors diff --git a/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr b/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr index 296268a5f1d..fcd17f68940 100644 --- a/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr +++ b/src/tools/clippy/tests/ui/strlen_on_c_strings.stderr @@ -2,7 +2,7 @@ error: using `libc::strlen` on a `CString` or `CStr` value --> $DIR/strlen_on_c_strings.rs:15:13 | LL | let _ = unsafe { libc::strlen(cstring.as_ptr()) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstring.as_bytes().len()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstring.as_bytes().len()` | = note: `-D clippy::strlen-on-c-strings` implied by `-D warnings` @@ -10,37 +10,37 @@ error: using `libc::strlen` on a `CString` or `CStr` value --> $DIR/strlen_on_c_strings.rs:19:13 | LL | let _ = unsafe { libc::strlen(cstr.as_ptr()) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstr.to_bytes().len()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstr.to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value --> $DIR/strlen_on_c_strings.rs:21:13 | LL | let _ = unsafe { strlen(cstr.as_ptr()) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `cstr.to_bytes().len()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `cstr.to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value --> $DIR/strlen_on_c_strings.rs:24:22 | LL | let _ = unsafe { strlen((*pcstr).as_ptr()) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(*pcstr).to_bytes().len()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(*pcstr).to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value --> $DIR/strlen_on_c_strings.rs:29:22 | LL | let _ = unsafe { strlen(unsafe_identity(cstr).as_ptr()) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unsafe_identity(cstr).to_bytes().len()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unsafe_identity(cstr).to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value --> $DIR/strlen_on_c_strings.rs:30:13 | LL | let _ = unsafe { strlen(unsafe { unsafe_identity(cstr) }.as_ptr()) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unsafe { unsafe_identity(cstr) }.to_bytes().len()` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unsafe { unsafe_identity(cstr) }.to_bytes().len()` error: using `libc::strlen` on a `CString` or `CStr` value --> $DIR/strlen_on_c_strings.rs:33:22 | LL | let _ = unsafe { strlen(f(cstr).as_ptr()) }; - | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `f(cstr).to_bytes().len()` + | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `f(cstr).to_bytes().len()` error: aborting due to 7 previous errors diff --git a/src/tools/clippy/tests/ui/to_digit_is_some.stderr b/src/tools/clippy/tests/ui/to_digit_is_some.stderr index 10a1b393a39..c4718825dc2 100644 --- 
a/src/tools/clippy/tests/ui/to_digit_is_some.stderr +++ b/src/tools/clippy/tests/ui/to_digit_is_some.stderr @@ -2,7 +2,7 @@ error: use of `.to_digit(..).is_some()` --> $DIR/to_digit_is_some.rs:9:13 | LL | let _ = d.to_digit(8).is_some(); - | ^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `d.is_digit(8)` + | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `d.is_digit(8)` | = note: `-D clippy::to-digit-is-some` implied by `-D warnings` @@ -10,7 +10,7 @@ error: use of `.to_digit(..).is_some()` --> $DIR/to_digit_is_some.rs:10:13 | LL | let _ = char::to_digit(c, 8).is_some(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `char::is_digit(c, 8)` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `char::is_digit(c, 8)` error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs index 48615583156..86f5cc937f4 100644 --- a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs +++ b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.rs @@ -5,7 +5,8 @@ clippy::disallowed_names, clippy::needless_lifetimes, clippy::redundant_field_names, - clippy::uninlined_format_args + clippy::uninlined_format_args, + clippy::needless_pass_by_ref_mut )] #[derive(Copy, Clone)] diff --git a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr index 8c5cfa8a0f1..2af668537f5 100644 --- a/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr +++ b/src/tools/clippy/tests/ui/trivially_copy_pass_by_ref.stderr @@ -1,5 +1,5 @@ error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:51:11 + --> $DIR/trivially_copy_pass_by_ref.rs:52:11 | LL | fn bad(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `u32` @@ -11,103 +11,103 @@ LL | #![deny(clippy::trivially_copy_pass_by_ref)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:51:20 + --> $DIR/trivially_copy_pass_by_ref.rs:52:20 | LL | fn bad(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `Foo` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:51:29 + --> $DIR/trivially_copy_pass_by_ref.rs:52:29 | LL | fn bad(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `Baz` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:58:12 + --> $DIR/trivially_copy_pass_by_ref.rs:59:12 | LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {} | ^^^^^ help: consider passing by value instead: `self` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:58:22 + --> $DIR/trivially_copy_pass_by_ref.rs:59:22 | LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `u32` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:58:31 + --> $DIR/trivially_copy_pass_by_ref.rs:59:31 | LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value 
instead: `Foo` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:58:40 + --> $DIR/trivially_copy_pass_by_ref.rs:59:40 | LL | fn bad(&self, x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `Baz` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:60:16 + --> $DIR/trivially_copy_pass_by_ref.rs:61:16 | LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `u32` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:60:25 + --> $DIR/trivially_copy_pass_by_ref.rs:61:25 | LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `Foo` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:60:34 + --> $DIR/trivially_copy_pass_by_ref.rs:61:34 | LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `Baz` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:62:35 + --> $DIR/trivially_copy_pass_by_ref.rs:63:35 | LL | fn bad_issue7518(self, other: &Self) {} | ^^^^^ help: consider passing by value instead: `Self` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:74:16 + --> $DIR/trivially_copy_pass_by_ref.rs:75:16 | LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `u32` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:74:25 + --> $DIR/trivially_copy_pass_by_ref.rs:75:25 | LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `Foo` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:74:34 + --> $DIR/trivially_copy_pass_by_ref.rs:75:34 | LL | fn bad2(x: &u32, y: &Foo, z: &Baz) {} | ^^^^ help: consider passing by value instead: `Baz` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:78:34 + --> $DIR/trivially_copy_pass_by_ref.rs:79:34 | LL | fn trait_method(&self, _foo: &Foo); | ^^^^ help: consider passing by value instead: `Foo` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:110:21 + --> $DIR/trivially_copy_pass_by_ref.rs:111:21 | LL | fn foo_never(x: &i32) { | ^^^^ help: consider passing by value instead: `i32` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> $DIR/trivially_copy_pass_by_ref.rs:115:15 + --> $DIR/trivially_copy_pass_by_ref.rs:116:15 | LL | fn foo(x: &i32) { | ^^^^ help: consider passing by value instead: `i32` error: this argument (N byte) is passed by reference, but would be more efficient if passed by value (limit: N byte) - --> 
$DIR/trivially_copy_pass_by_ref.rs:142:37 + --> $DIR/trivially_copy_pass_by_ref.rs:143:37 | LL | fn _unrelated_lifetimes<'a, 'b>(_x: &'a u32, y: &'b u32) -> &'b u32 { | ^^^^^^^ help: consider passing by value instead: `u32` diff --git a/src/tools/clippy/tests/ui/try_err.stderr b/src/tools/clippy/tests/ui/try_err.stderr index 4ad0e2e56a4..79f7b70224a 100644 --- a/src/tools/clippy/tests/ui/try_err.stderr +++ b/src/tools/clippy/tests/ui/try_err.stderr @@ -2,7 +2,7 @@ error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:19:9 | LL | Err(err)?; - | ^^^^^^^^^ help: try this: `return Err(err)` + | ^^^^^^^^^ help: try: `return Err(err)` | note: the lint level is defined here --> $DIR/try_err.rs:4:9 @@ -14,25 +14,25 @@ error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:29:9 | LL | Err(err)?; - | ^^^^^^^^^ help: try this: `return Err(err.into())` + | ^^^^^^^^^ help: try: `return Err(err.into())` error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:49:17 | LL | Err(err)?; - | ^^^^^^^^^ help: try this: `return Err(err)` + | ^^^^^^^^^ help: try: `return Err(err)` error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:68:17 | LL | Err(err)?; - | ^^^^^^^^^ help: try this: `return Err(err.into())` + | ^^^^^^^^^ help: try: `return Err(err.into())` error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:88:23 | LL | Err(_) => Err(1)?, - | ^^^^^^^ help: try this: `return Err(1)` + | ^^^^^^^ help: try: `return Err(1)` | = note: this error originates in the macro `__inline_mac_fn_calling_macro` (in Nightly builds, run with -Z macro-backtrace for more info) @@ -40,7 +40,7 @@ error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:95:23 | LL | Err(_) => Err(inline!(1))?, - | ^^^^^^^^^^^^^^^^ help: try this: `return Err(inline!(1))` + | ^^^^^^^^^^^^^^^^ help: try: `return Err(inline!(1))` | = note: this error originates in the macro `__inline_mac_fn_calling_macro` (in Nightly builds, run with -Z macro-backtrace for more info) @@ -48,31 +48,31 @@ error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:122:9 | LL | Err(inline!(inline!(String::from("aasdfasdfasdfa"))))?; - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Err(inline!(inline!(String::from("aasdfasdfasdfa"))))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `return Err(inline!(inline!(String::from("aasdfasdfasdfa"))))` error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:129:9 | LL | Err(io::ErrorKind::WriteZero)? - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Err(io::ErrorKind::WriteZero.into()))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `return Poll::Ready(Err(io::ErrorKind::WriteZero.into()))` error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:131:9 | LL | Err(io::Error::new(io::ErrorKind::InvalidInput, "error"))? - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Err(io::Error::new(io::ErrorKind::InvalidInput, "error")))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `return Poll::Ready(Err(io::Error::new(io::ErrorKind::InvalidInput, "error")))` error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:139:9 | LL | Err(io::ErrorKind::NotFound)? 
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `return Poll::Ready(Some(Err(io::ErrorKind::NotFound.into())))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `return Poll::Ready(Some(Err(io::ErrorKind::NotFound.into())))` error: returning an `Err(_)` with the `?` operator --> $DIR/try_err.rs:148:16 | LL | return Err(42)?; - | ^^^^^^^^ help: try this: `Err(42)` + | ^^^^^^^^ help: try: `Err(42)` error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/type_id_on_box.fixed b/src/tools/clippy/tests/ui/type_id_on_box.fixed new file mode 100644 index 00000000000..615d809c897 --- /dev/null +++ b/src/tools/clippy/tests/ui/type_id_on_box.fixed @@ -0,0 +1,40 @@ +//@run-rustfix + +#![warn(clippy::type_id_on_box)] + +use std::any::{Any, TypeId}; +use std::ops::Deref; + +type SomeBox = Box<dyn Any>; + +struct BadBox(Box<dyn Any>); + +impl Deref for BadBox { + type Target = Box<dyn Any>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +fn existential() -> impl Any { + Box::new(1) as Box<dyn Any> +} + +fn main() { + let any_box: Box<dyn Any> = Box::new(0usize); + let _ = (*any_box).type_id(); + let _ = TypeId::of::<Box<dyn Any>>(); // Don't lint. We explicitly say "do this instead" if this is intentional + let _ = (*any_box).type_id(); + let any_box: &Box<dyn Any> = &(Box::new(0usize) as Box<dyn Any>); + let _ = (**any_box).type_id(); // 2 derefs are needed here to get to the `dyn Any` + + let b = existential(); + let _ = b.type_id(); // Don't lint. + + let b: SomeBox = Box::new(0usize); + let _ = (*b).type_id(); + + let b = BadBox(Box::new(0usize)); + let _ = b.type_id(); // Don't lint. This is a call to `<BadBox as Any>::type_id`. Not `std::boxed::Box`! +} diff --git a/src/tools/clippy/tests/ui/type_id_on_box.rs b/src/tools/clippy/tests/ui/type_id_on_box.rs new file mode 100644 index 00000000000..74b6c74ae5f --- /dev/null +++ b/src/tools/clippy/tests/ui/type_id_on_box.rs @@ -0,0 +1,40 @@ +//@run-rustfix + +#![warn(clippy::type_id_on_box)] + +use std::any::{Any, TypeId}; +use std::ops::Deref; + +type SomeBox = Box<dyn Any>; + +struct BadBox(Box<dyn Any>); + +impl Deref for BadBox { + type Target = Box<dyn Any>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +fn existential() -> impl Any { + Box::new(1) as Box<dyn Any> +} + +fn main() { + let any_box: Box<dyn Any> = Box::new(0usize); + let _ = any_box.type_id(); + let _ = TypeId::of::<Box<dyn Any>>(); // Don't lint. We explicitly say "do this instead" if this is intentional + let _ = (*any_box).type_id(); + let any_box: &Box<dyn Any> = &(Box::new(0usize) as Box<dyn Any>); + let _ = any_box.type_id(); // 2 derefs are needed here to get to the `dyn Any` + + let b = existential(); + let _ = b.type_id(); // Don't lint. + + let b: SomeBox = Box::new(0usize); + let _ = b.type_id(); + + let b = BadBox(Box::new(0usize)); + let _ = b.type_id(); // Don't lint. This is a call to `<BadBox as Any>::type_id`. Not `std::boxed::Box`! 
+} diff --git a/src/tools/clippy/tests/ui/type_id_on_box.stderr b/src/tools/clippy/tests/ui/type_id_on_box.stderr new file mode 100644 index 00000000000..1525328c0d0 --- /dev/null +++ b/src/tools/clippy/tests/ui/type_id_on_box.stderr @@ -0,0 +1,36 @@ +error: calling `.type_id()` on a `Box<dyn Any>` + --> $DIR/type_id_on_box.rs:26:13 + | +LL | let _ = any_box.type_id(); + | -------^^^^^^^^^^ + | | + | help: consider dereferencing first: `(*any_box)` + | + = note: this returns the type id of the literal type `Box<dyn Any>` instead of the type id of the boxed value, which is most likely not what you want + = note: if this is intentional, use `TypeId::of::<Box<dyn Any>>()` instead, which makes it more clear + = note: `-D clippy::type-id-on-box` implied by `-D warnings` + +error: calling `.type_id()` on a `Box<dyn Any>` + --> $DIR/type_id_on_box.rs:30:13 + | +LL | let _ = any_box.type_id(); // 2 derefs are needed here to get to the `dyn Any` + | -------^^^^^^^^^^ + | | + | help: consider dereferencing first: `(**any_box)` + | + = note: this returns the type id of the literal type `Box<dyn Any>` instead of the type id of the boxed value, which is most likely not what you want + = note: if this is intentional, use `TypeId::of::<Box<dyn Any>>()` instead, which makes it more clear + +error: calling `.type_id()` on a `Box<dyn Any>` + --> $DIR/type_id_on_box.rs:36:13 + | +LL | let _ = b.type_id(); + | -^^^^^^^^^^ + | | + | help: consider dereferencing first: `(*b)` + | + = note: this returns the type id of the literal type `Box<dyn Any>` instead of the type id of the boxed value, which is most likely not what you want + = note: if this is intentional, use `TypeId::of::<Box<dyn Any>>()` instead, which makes it more clear + +error: aborting due to 3 previous errors + diff --git a/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs new file mode 100644 index 00000000000..0e027f6042e --- /dev/null +++ b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.rs @@ -0,0 +1,22 @@ +#![warn(clippy::unnecessary_cast)] + +fn main() { + let _ = std::ptr::null() as *const u8; +} + +mod issue11113 { + #[repr(C)] + struct Vtbl { + query: unsafe extern "system" fn(), + } + + struct TearOff { + object: *mut std::ffi::c_void, + } + + impl TearOff { + unsafe fn query(&self) { + ((*(*(self.object as *mut *mut _) as *mut Vtbl)).query)() + } + } +} diff --git a/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr new file mode 100644 index 00000000000..eecf245686a --- /dev/null +++ b/src/tools/clippy/tests/ui/unnecessary_cast_unfixable.stderr @@ -0,0 +1,16 @@ +error: casting raw pointers to the same type and constness is unnecessary (`*const u8` -> `*const u8`) + --> $DIR/unnecessary_cast_unfixable.rs:4:13 + | +LL | let _ = std::ptr::null() as *const u8; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `std::ptr::null()` + | + = note: `-D clippy::unnecessary-cast` implied by `-D warnings` + +error: casting raw pointers to the same type and constness is unnecessary (`*mut issue11113::Vtbl` -> `*mut issue11113::Vtbl`) + --> $DIR/unnecessary_cast_unfixable.rs:19:16 + | +LL | ((*(*(self.object as *mut *mut _) as *mut Vtbl)).query)() + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `*(self.object as *mut *mut _)` + +error: aborting due to 2 previous errors + diff --git a/src/tools/clippy/tests/ui/unnecessary_clone.stderr b/src/tools/clippy/tests/ui/unnecessary_clone.stderr index 
5686ab6b453..23639f6d41a 100644 --- a/src/tools/clippy/tests/ui/unnecessary_clone.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_clone.stderr @@ -2,7 +2,7 @@ error: using `.clone()` on a ref-counted pointer --> $DIR/unnecessary_clone.rs:23:5 | LL | rc.clone(); - | ^^^^^^^^^^ help: try this: `Rc::<bool>::clone(&rc)` + | ^^^^^^^^^^ help: try: `Rc::<bool>::clone(&rc)` | = note: `-D clippy::clone-on-ref-ptr` implied by `-D warnings` @@ -10,25 +10,25 @@ error: using `.clone()` on a ref-counted pointer --> $DIR/unnecessary_clone.rs:26:5 | LL | arc.clone(); - | ^^^^^^^^^^^ help: try this: `Arc::<bool>::clone(&arc)` + | ^^^^^^^^^^^ help: try: `Arc::<bool>::clone(&arc)` error: using `.clone()` on a ref-counted pointer --> $DIR/unnecessary_clone.rs:29:5 | LL | rcweak.clone(); - | ^^^^^^^^^^^^^^ help: try this: `Weak::<bool>::clone(&rcweak)` + | ^^^^^^^^^^^^^^ help: try: `Weak::<bool>::clone(&rcweak)` error: using `.clone()` on a ref-counted pointer --> $DIR/unnecessary_clone.rs:32:5 | LL | arc_weak.clone(); - | ^^^^^^^^^^^^^^^^ help: try this: `Weak::<bool>::clone(&arc_weak)` + | ^^^^^^^^^^^^^^^^ help: try: `Weak::<bool>::clone(&arc_weak)` error: using `.clone()` on a ref-counted pointer --> $DIR/unnecessary_clone.rs:36:33 | LL | let _: Arc<dyn SomeTrait> = x.clone(); - | ^^^^^^^^^ help: try this: `Arc::<SomeImpl>::clone(&x)` + | ^^^^^^^^^ help: try: `Arc::<SomeImpl>::clone(&x)` error: using `clone` on type `T` which implements the `Copy` trait --> $DIR/unnecessary_clone.rs:40:5 @@ -54,7 +54,7 @@ error: using `.clone()` on a ref-counted pointer --> $DIR/unnecessary_clone.rs:95:14 | LL | Some(try_opt!(Some(rc)).clone()) - | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `Rc::<u8>::clone(&try_opt!(Some(rc)))` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Rc::<u8>::clone(&try_opt!(Some(rc)))` error: aborting due to 9 previous errors diff --git a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed index 630a1bea3c8..276cd800b89 100644 --- a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed +++ b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.fixed @@ -68,6 +68,26 @@ fn unwrap_methods_result() { 1; } +fn unwrap_from_binding() { + macro_rules! from_macro { + () => { + Some("") + }; + } + let val = from_macro!(); + let _ = val.unwrap_or(""); +} + +fn unwrap_unchecked() { + let _ = 1; + let _ = unsafe { 1 + *(&1 as *const i32) }; // needs to keep the unsafe block + let _ = 1 + 1; + let _ = 1; + let _ = unsafe { 1 + *(&1 as *const i32) }; + let _ = 1 + 1; + let _ = 123; +} + fn main() { unwrap_option_some(); unwrap_option_none(); @@ -75,4 +95,5 @@ fn main() { unwrap_result_err(); unwrap_methods_option(); unwrap_methods_result(); + unwrap_unchecked(); } diff --git a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs index 14f92cb370f..3065778d779 100644 --- a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs +++ b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.rs @@ -68,6 +68,26 @@ fn unwrap_methods_result() { Ok::<_, ()>(1).unwrap_or_else(|_| 2); } +fn unwrap_from_binding() { + macro_rules! 
from_macro { + () => { + Some("") + }; + } + let val = from_macro!(); + let _ = val.unwrap_or(""); +} + +fn unwrap_unchecked() { + let _ = unsafe { Some(1).unwrap_unchecked() }; + let _ = unsafe { Some(1).unwrap_unchecked() + *(&1 as *const i32) }; // needs to keep the unsafe block + let _ = unsafe { Some(1).unwrap_unchecked() } + 1; + let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() }; + let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() + *(&1 as *const i32) }; + let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() } + 1; + let _ = unsafe { Err::<(), i32>(123).unwrap_err_unchecked() }; +} + fn main() { unwrap_option_some(); unwrap_option_none(); @@ -75,4 +95,5 @@ fn main() { unwrap_result_err(); unwrap_methods_option(); unwrap_methods_result(); + unwrap_unchecked(); } diff --git a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr index 0c71ee05323..5823313b736 100644 --- a/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr +++ b/src/tools/clippy/tests/ui/unnecessary_literal_unwrap.stderr @@ -409,5 +409,89 @@ LL - Ok::<_, ()>(1).unwrap_or_else(|_| 2); LL + 1; | -error: aborting due to 36 previous errors +error: used `unwrap_unchecked()` on `Some` value + --> $DIR/unnecessary_literal_unwrap.rs:82:22 + | +LL | let _ = unsafe { Some(1).unwrap_unchecked() }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: remove the `Some` and `unwrap_unchecked()` + | +LL - let _ = unsafe { Some(1).unwrap_unchecked() }; +LL + let _ = 1; + | + +error: used `unwrap_unchecked()` on `Some` value + --> $DIR/unnecessary_literal_unwrap.rs:83:22 + | +LL | let _ = unsafe { Some(1).unwrap_unchecked() + *(&1 as *const i32) }; // needs to keep the unsafe block + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: remove the `Some` and `unwrap_unchecked()` + | +LL - let _ = unsafe { Some(1).unwrap_unchecked() + *(&1 as *const i32) }; // needs to keep the unsafe block +LL + let _ = unsafe { 1 + *(&1 as *const i32) }; // needs to keep the unsafe block + | + +error: used `unwrap_unchecked()` on `Some` value + --> $DIR/unnecessary_literal_unwrap.rs:84:22 + | +LL | let _ = unsafe { Some(1).unwrap_unchecked() } + 1; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: remove the `Some` and `unwrap_unchecked()` + | +LL - let _ = unsafe { Some(1).unwrap_unchecked() } + 1; +LL + let _ = 1 + 1; + | + +error: used `unwrap_unchecked()` on `Ok` value + --> $DIR/unnecessary_literal_unwrap.rs:85:22 + | +LL | let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: remove the `Ok` and `unwrap_unchecked()` + | +LL - let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() }; +LL + let _ = 1; + | + +error: used `unwrap_unchecked()` on `Ok` value + --> $DIR/unnecessary_literal_unwrap.rs:86:22 + | +LL | let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() + *(&1 as *const i32) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: remove the `Ok` and `unwrap_unchecked()` + | +LL - let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() + *(&1 as *const i32) }; +LL + let _ = unsafe { 1 + *(&1 as *const i32) }; + | + +error: used `unwrap_unchecked()` on `Ok` value + --> $DIR/unnecessary_literal_unwrap.rs:87:22 + | +LL | let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() } + 1; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: remove the `Ok` and `unwrap_unchecked()` + | +LL - let _ = unsafe { Ok::<_, ()>(1).unwrap_unchecked() } + 1; +LL + let _ = 1 + 1; + | + +error: used `unwrap_err_unchecked()` on `Err` value + --> $DIR/unnecessary_literal_unwrap.rs:88:22 + | 
+LL | let _ = unsafe { Err::<(), i32>(123).unwrap_err_unchecked() }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | +help: remove the `Err` and `unwrap_err_unchecked()` + | +LL - let _ = unsafe { Err::<(), i32>(123).unwrap_err_unchecked() }; +LL + let _ = 123; + | + +error: aborting due to 43 previous errors diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed index 592a53f3a81..cb7562351e8 100644 --- a/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed +++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.fixed @@ -477,7 +477,8 @@ mod issue_10021 { mod issue_10033 { #![allow(dead_code)] - use std::{fmt::Display, ops::Deref}; + use std::fmt::Display; + use std::ops::Deref; fn _main() { let f = Foo; diff --git a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs index f2e48b1c4a6..f82ddb2d25d 100644 --- a/src/tools/clippy/tests/ui/unnecessary_to_owned.rs +++ b/src/tools/clippy/tests/ui/unnecessary_to_owned.rs @@ -477,7 +477,8 @@ mod issue_10021 { mod issue_10033 { #![allow(dead_code)] - use std::{fmt::Display, ops::Deref}; + use std::fmt::Display; + use std::ops::Deref; fn _main() { let f = Foo; diff --git a/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs b/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs index d29888ac62f..04f6ef29a9a 100644 --- a/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs +++ b/src/tools/clippy/tests/ui/unsafe_removed_from_name.rs @@ -8,9 +8,13 @@ use std::cell::UnsafeCell as TotallySafeCellAgain; // Shouldn't error use std::cell::RefCell as ProbablyNotUnsafe; + use std::cell::RefCell as RefCellThatCantBeUnsafe; + use std::cell::UnsafeCell as SuperDangerousUnsafeCell; + use std::cell::UnsafeCell as Dangerunsafe; + use std::cell::UnsafeCell as Bombsawayunsafe; mod mod_with_some_unsafe_things { @@ -20,8 +24,12 @@ mod mod_with_some_unsafe_things { use mod_with_some_unsafe_things::Unsafe as LieAboutModSafety; +// merged imports +use mod_with_some_unsafe_things::{Unsafe as A, Unsafe as B}; + // Shouldn't error use mod_with_some_unsafe_things::Safe as IPromiseItsSafeThisTime; + use mod_with_some_unsafe_things::Unsafe as SuperUnsafeModThing; #[allow(clippy::unsafe_removed_from_name)] diff --git a/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr b/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr index 4f871cbe41b..090d917bd38 100644 --- a/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr +++ b/src/tools/clippy/tests/ui/unsafe_removed_from_name.stderr @@ -13,10 +13,22 @@ LL | use std::cell::UnsafeCell as TotallySafeCellAgain; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: removed `unsafe` from the name of `Unsafe` in use as `LieAboutModSafety` - --> $DIR/unsafe_removed_from_name.rs:21:1 + --> $DIR/unsafe_removed_from_name.rs:25:1 | LL | use mod_with_some_unsafe_things::Unsafe as LieAboutModSafety; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 3 previous errors +error: removed `unsafe` from the name of `Unsafe` in use as `A` + --> $DIR/unsafe_removed_from_name.rs:28:1 + | +LL | use mod_with_some_unsafe_things::{Unsafe as A, Unsafe as B}; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: removed `unsafe` from the name of `Unsafe` in use as `B` + --> $DIR/unsafe_removed_from_name.rs:28:1 + | +LL | use mod_with_some_unsafe_things::{Unsafe as A, Unsafe as B}; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: 
aborting due to 5 previous errors diff --git a/src/tools/clippy/tests/ui/unused_io_amount.rs b/src/tools/clippy/tests/ui/unused_io_amount.rs index 8d3e094b759..e9d1eeb3161 100644 --- a/src/tools/clippy/tests/ui/unused_io_amount.rs +++ b/src/tools/clippy/tests/ui/unused_io_amount.rs @@ -1,4 +1,4 @@ -#![allow(dead_code)] +#![allow(dead_code, clippy::needless_pass_by_ref_mut)] #![warn(clippy::unused_io_amount)] extern crate futures; diff --git a/src/tools/clippy/tests/ui/unused_peekable.rs b/src/tools/clippy/tests/ui/unused_peekable.rs index 7374dfdf92e..b227f8660f5 100644 --- a/src/tools/clippy/tests/ui/unused_peekable.rs +++ b/src/tools/clippy/tests/ui/unused_peekable.rs @@ -1,8 +1,7 @@ #![warn(clippy::unused_peekable)] #![allow(clippy::no_effect)] -use std::iter::Empty; -use std::iter::Peekable; +use std::iter::{Empty, Peekable}; fn main() { invalid(); diff --git a/src/tools/clippy/tests/ui/unused_peekable.stderr b/src/tools/clippy/tests/ui/unused_peekable.stderr index 54788f2fa2f..d969232fdf3 100644 --- a/src/tools/clippy/tests/ui/unused_peekable.stderr +++ b/src/tools/clippy/tests/ui/unused_peekable.stderr @@ -1,5 +1,5 @@ error: `peek` never called on `Peekable` iterator - --> $DIR/unused_peekable.rs:14:9 + --> $DIR/unused_peekable.rs:13:9 | LL | let peekable = std::iter::empty::<u32>().peekable(); | ^^^^^^^^ @@ -8,7 +8,7 @@ LL | let peekable = std::iter::empty::<u32>().peekable(); = note: `-D clippy::unused-peekable` implied by `-D warnings` error: `peek` never called on `Peekable` iterator - --> $DIR/unused_peekable.rs:18:9 + --> $DIR/unused_peekable.rs:17:9 | LL | let new_local = old_local; | ^^^^^^^^^ @@ -16,7 +16,7 @@ LL | let new_local = old_local; = help: consider removing the call to `peekable` error: `peek` never called on `Peekable` iterator - --> $DIR/unused_peekable.rs:22:9 + --> $DIR/unused_peekable.rs:21:9 | LL | let by_mut_ref = &mut by_mut_ref_test; | ^^^^^^^^^^ @@ -24,7 +24,7 @@ LL | let by_mut_ref = &mut by_mut_ref_test; = help: consider removing the call to `peekable` error: `peek` never called on `Peekable` iterator - --> $DIR/unused_peekable.rs:29:9 + --> $DIR/unused_peekable.rs:28:9 | LL | let peekable_from_fn = returns_peekable(); | ^^^^^^^^^^^^^^^^ @@ -32,7 +32,7 @@ LL | let peekable_from_fn = returns_peekable(); = help: consider removing the call to `peekable` error: `peek` never called on `Peekable` iterator - --> $DIR/unused_peekable.rs:32:13 + --> $DIR/unused_peekable.rs:31:13 | LL | let mut peekable_using_iterator_method = std::iter::empty::<u32>().peekable(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -40,7 +40,7 @@ LL | let mut peekable_using_iterator_method = std::iter::empty::<u32>().peek = help: consider removing the call to `peekable` error: `peek` never called on `Peekable` iterator - --> $DIR/unused_peekable.rs:37:9 + --> $DIR/unused_peekable.rs:36:9 | LL | let passed_along_ref = std::iter::empty::<u32>().peekable(); | ^^^^^^^^^^^^^^^^ @@ -48,7 +48,7 @@ LL | let passed_along_ref = std::iter::empty::<u32>().peekable(); = help: consider removing the call to `peekable` error: `peek` never called on `Peekable` iterator - --> $DIR/unused_peekable.rs:42:9 + --> $DIR/unused_peekable.rs:41:9 | LL | let _by_ref = by_ref_test.by_ref(); | ^^^^^^^ @@ -56,7 +56,7 @@ LL | let _by_ref = by_ref_test.by_ref(); = help: consider removing the call to `peekable` error: `peek` never called on `Peekable` iterator - --> $DIR/unused_peekable.rs:44:13 + --> $DIR/unused_peekable.rs:43:13 | LL | let mut peekable_in_for_loop = std::iter::empty::<u32>().peekable(); | ^^^^^^^^^^^^^^^^^^^^ 
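Illustrative sketch (not part of the diff): several of the tests above now allow or expect the new `clippy::needless_pass_by_ref_mut` lint, which fires when a `&mut` parameter is only ever read, as in the `fn do_stuff(vec: &mut [u8])` case flagged in slow_vector_initialization.stderr. The function and variable names below are hypothetical.

#![warn(clippy::needless_pass_by_ref_mut)]

// Lint fires: `vec` is never mutated, so `&mut [u8]` can be `&[u8]`.
fn sum(vec: &mut [u8]) -> u32 {
    vec.iter().map(|&b| u32::from(b)).sum()
}

// No lint: the slice really is mutated here.
fn zero(vec: &mut [u8]) {
    vec.fill(0);
}

fn main() {
    let mut data = vec![1u8, 2, 3];
    let _ = sum(&mut data);
    zero(&mut data);
}
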
diff --git a/src/tools/clippy/tests/ui/unwrap_or.stderr b/src/tools/clippy/tests/ui/unwrap_or.stderr index cf720eaaf05..e384bbbb015 100644 --- a/src/tools/clippy/tests/ui/unwrap_or.stderr +++ b/src/tools/clippy/tests/ui/unwrap_or.stderr @@ -2,7 +2,7 @@ error: use of `unwrap_or` followed by a function call --> $DIR/unwrap_or.rs:5:47 | LL | let s = Some(String::from("test string")).unwrap_or("Fail".to_string()).len(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| "Fail".to_string())` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| "Fail".to_string())` | = note: `-D clippy::or-fun-call` implied by `-D warnings` @@ -10,7 +10,7 @@ error: use of `unwrap_or` followed by a function call --> $DIR/unwrap_or.rs:9:47 | LL | let s = Some(String::from("test string")).unwrap_or("Fail".to_string()).len(); - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try this: `unwrap_or_else(|| "Fail".to_string())` + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `unwrap_or_else(|| "Fail".to_string())` error: aborting due to 2 previous errors diff --git a/src/tools/clippy/tests/ui/useless_asref.fixed b/src/tools/clippy/tests/ui/useless_asref.fixed index 490d36ae6d6..e42731f9bcf 100644 --- a/src/tools/clippy/tests/ui/useless_asref.fixed +++ b/src/tools/clippy/tests/ui/useless_asref.fixed @@ -1,6 +1,10 @@ //@run-rustfix #![deny(clippy::useless_asref)] -#![allow(clippy::explicit_auto_deref, clippy::uninlined_format_args)] +#![allow( + clippy::explicit_auto_deref, + clippy::uninlined_format_args, + clippy::needless_pass_by_ref_mut +)] use std::fmt::Debug; diff --git a/src/tools/clippy/tests/ui/useless_asref.rs b/src/tools/clippy/tests/ui/useless_asref.rs index f2681af924d..50c9990bb04 100644 --- a/src/tools/clippy/tests/ui/useless_asref.rs +++ b/src/tools/clippy/tests/ui/useless_asref.rs @@ -1,6 +1,10 @@ //@run-rustfix #![deny(clippy::useless_asref)] -#![allow(clippy::explicit_auto_deref, clippy::uninlined_format_args)] +#![allow( + clippy::explicit_auto_deref, + clippy::uninlined_format_args, + clippy::needless_pass_by_ref_mut +)] use std::fmt::Debug; diff --git a/src/tools/clippy/tests/ui/useless_asref.stderr b/src/tools/clippy/tests/ui/useless_asref.stderr index 67ce8b64e0e..c97851ac6ea 100644 --- a/src/tools/clippy/tests/ui/useless_asref.stderr +++ b/src/tools/clippy/tests/ui/useless_asref.stderr @@ -1,8 +1,8 @@ error: this call to `as_ref` does nothing - --> $DIR/useless_asref.rs:43:18 + --> $DIR/useless_asref.rs:47:18 | LL | foo_rstr(rstr.as_ref()); - | ^^^^^^^^^^^^^ help: try this: `rstr` + | ^^^^^^^^^^^^^ help: try: `rstr` | note: the lint level is defined here --> $DIR/useless_asref.rs:2:9 @@ -11,64 +11,64 @@ LL | #![deny(clippy::useless_asref)] | ^^^^^^^^^^^^^^^^^^^^^ error: this call to `as_ref` does nothing - --> $DIR/useless_asref.rs:45:20 + --> $DIR/useless_asref.rs:49:20 | LL | foo_rslice(rslice.as_ref()); - | ^^^^^^^^^^^^^^^ help: try this: `rslice` + | ^^^^^^^^^^^^^^^ help: try: `rslice` error: this call to `as_mut` does nothing - --> $DIR/useless_asref.rs:49:21 + --> $DIR/useless_asref.rs:53:21 | LL | foo_mrslice(mrslice.as_mut()); - | ^^^^^^^^^^^^^^^^ help: try this: `mrslice` + | ^^^^^^^^^^^^^^^^ help: try: `mrslice` error: this call to `as_ref` does nothing - --> $DIR/useless_asref.rs:51:20 + --> $DIR/useless_asref.rs:55:20 | LL | foo_rslice(mrslice.as_ref()); - | ^^^^^^^^^^^^^^^^ help: try this: `mrslice` + | ^^^^^^^^^^^^^^^^ help: try: `mrslice` error: this call to `as_ref` does nothing - --> $DIR/useless_asref.rs:58:20 + --> $DIR/useless_asref.rs:62:20 | LL | 
foo_rslice(rrrrrslice.as_ref()); - | ^^^^^^^^^^^^^^^^^^^ help: try this: `rrrrrslice` + | ^^^^^^^^^^^^^^^^^^^ help: try: `rrrrrslice` error: this call to `as_ref` does nothing - --> $DIR/useless_asref.rs:60:18 + --> $DIR/useless_asref.rs:64:18 | LL | foo_rstr(rrrrrstr.as_ref()); - | ^^^^^^^^^^^^^^^^^ help: try this: `rrrrrstr` + | ^^^^^^^^^^^^^^^^^ help: try: `rrrrrstr` error: this call to `as_mut` does nothing - --> $DIR/useless_asref.rs:65:21 + --> $DIR/useless_asref.rs:69:21 | LL | foo_mrslice(mrrrrrslice.as_mut()); - | ^^^^^^^^^^^^^^^^^^^^ help: try this: `mrrrrrslice` + | ^^^^^^^^^^^^^^^^^^^^ help: try: `mrrrrrslice` error: this call to `as_ref` does nothing - --> $DIR/useless_asref.rs:67:20 + --> $DIR/useless_asref.rs:71:20 | LL | foo_rslice(mrrrrrslice.as_ref()); - | ^^^^^^^^^^^^^^^^^^^^ help: try this: `mrrrrrslice` + | ^^^^^^^^^^^^^^^^^^^^ help: try: `mrrrrrslice` error: this call to `as_ref` does nothing - --> $DIR/useless_asref.rs:71:16 + --> $DIR/useless_asref.rs:75:16 | LL | foo_rrrrmr((&&&&MoreRef).as_ref()); - | ^^^^^^^^^^^^^^^^^^^^^^ help: try this: `(&&&&MoreRef)` + | ^^^^^^^^^^^^^^^^^^^^^^ help: try: `(&&&&MoreRef)` error: this call to `as_mut` does nothing - --> $DIR/useless_asref.rs:121:13 + --> $DIR/useless_asref.rs:125:13 | LL | foo_mrt(mrt.as_mut()); - | ^^^^^^^^^^^^ help: try this: `mrt` + | ^^^^^^^^^^^^ help: try: `mrt` error: this call to `as_ref` does nothing - --> $DIR/useless_asref.rs:123:12 + --> $DIR/useless_asref.rs:127:12 | LL | foo_rt(mrt.as_ref()); - | ^^^^^^^^^^^^ help: try this: `mrt` + | ^^^^^^^^^^^^ help: try: `mrt` error: aborting due to 11 previous errors diff --git a/src/tools/clippy/tests/ui/vec.fixed b/src/tools/clippy/tests/ui/vec.fixed index fcdc917c1b1..7a7d0026f79 100644 --- a/src/tools/clippy/tests/ui/vec.fixed +++ b/src/tools/clippy/tests/ui/vec.fixed @@ -115,6 +115,46 @@ fn main() { let _x = vec![1; 201]; } +fn issue11075() { + macro_rules! repro { + ($e:expr) => { + stringify!($e) + }; + } + for _string in [repro!(true), repro!(null)] { + unimplemented!(); + } + + macro_rules! in_macro { + ($e:expr, $vec:expr, $vec2:expr) => {{ + vec![1; 2].fill(3); + vec![1, 2].fill(3); + for _ in vec![1, 2] {} + for _ in vec![1; 2] {} + for _ in vec![$e, $e] {} + for _ in vec![$e; 2] {} + for _ in $vec {} + for _ in $vec2 {} + }}; + } + + in_macro!(1, [1, 2], [1; 2]); + + macro_rules! from_macro { + () => { + vec![1, 2, 3] + }; + } + macro_rules! from_macro_repeat { + () => { + vec![1; 3] + }; + } + + for _ in from_macro!() {} + for _ in from_macro_repeat!() {} +} + #[clippy::msrv = "1.53"] fn above() { for a in [1, 2, 3] { diff --git a/src/tools/clippy/tests/ui/vec.rs b/src/tools/clippy/tests/ui/vec.rs index 0404d8cdb84..cbe7685b453 100644 --- a/src/tools/clippy/tests/ui/vec.rs +++ b/src/tools/clippy/tests/ui/vec.rs @@ -115,6 +115,46 @@ fn main() { let _x = vec![1; 201]; } +fn issue11075() { + macro_rules! repro { + ($e:expr) => { + stringify!($e) + }; + } + for _string in vec![repro!(true), repro!(null)] { + unimplemented!(); + } + + macro_rules! in_macro { + ($e:expr, $vec:expr, $vec2:expr) => {{ + vec![1; 2].fill(3); + vec![1, 2].fill(3); + for _ in vec![1, 2] {} + for _ in vec![1; 2] {} + for _ in vec![$e, $e] {} + for _ in vec![$e; 2] {} + for _ in $vec {} + for _ in $vec2 {} + }}; + } + + in_macro!(1, vec![1, 2], vec![1; 2]); + + macro_rules! from_macro { + () => { + vec![1, 2, 3] + }; + } + macro_rules! 
from_macro_repeat { + () => { + vec![1; 3] + }; + } + + for _ in from_macro!() {} + for _ in from_macro_repeat!() {} +} + #[clippy::msrv = "1.53"] fn above() { for a in vec![1, 2, 3] { diff --git a/src/tools/clippy/tests/ui/vec.stderr b/src/tools/clippy/tests/ui/vec.stderr index 33d565b2d52..8f6d2a1df80 100644 --- a/src/tools/clippy/tests/ui/vec.stderr +++ b/src/tools/clippy/tests/ui/vec.stderr @@ -85,16 +85,34 @@ LL | for _ in vec![1, 2, 3] {} | ^^^^^^^^^^^^^ help: you can use an array directly: `[1, 2, 3]` error: useless use of `vec!` - --> $DIR/vec.rs:120:14 + --> $DIR/vec.rs:124:20 + | +LL | for _string in vec![repro!(true), repro!(null)] { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can use an array directly: `[repro!(true), repro!(null)]` + +error: useless use of `vec!` + --> $DIR/vec.rs:141:18 + | +LL | in_macro!(1, vec![1, 2], vec![1; 2]); + | ^^^^^^^^^^ help: you can use an array directly: `[1, 2]` + +error: useless use of `vec!` + --> $DIR/vec.rs:141:30 + | +LL | in_macro!(1, vec![1, 2], vec![1; 2]); + | ^^^^^^^^^^ help: you can use an array directly: `[1; 2]` + +error: useless use of `vec!` + --> $DIR/vec.rs:160:14 | LL | for a in vec![1, 2, 3] { | ^^^^^^^^^^^^^ help: you can use an array directly: `[1, 2, 3]` error: useless use of `vec!` - --> $DIR/vec.rs:124:14 + --> $DIR/vec.rs:164:14 | LL | for a in vec![String::new(), String::new()] { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: you can use an array directly: `[String::new(), String::new()]` -error: aborting due to 16 previous errors +error: aborting due to 19 previous errors diff --git a/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr index 30d29aa4e77..5b88ae4ab66 100644 --- a/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr +++ b/src/tools/clippy/tests/ui/wildcard_enum_match_arm.stderr @@ -2,7 +2,7 @@ error: wildcard match will also match any future added variants --> $DIR/wildcard_enum_match_arm.rs:40:9 | LL | _ => eprintln!("Not red"), - | ^ help: try this: `Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan` + | ^ help: try: `Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan` | note: the lint level is defined here --> $DIR/wildcard_enum_match_arm.rs:3:9 @@ -14,31 +14,31 @@ error: wildcard match will also match any future added variants --> $DIR/wildcard_enum_match_arm.rs:44:9 | LL | _not_red => eprintln!("Not red"), - | ^^^^^^^^ help: try this: `_not_red @ Color::Green | _not_red @ Color::Blue | _not_red @ Color::Rgb(..) | _not_red @ Color::Cyan` + | ^^^^^^^^ help: try: `_not_red @ Color::Green | _not_red @ Color::Blue | _not_red @ Color::Rgb(..) | _not_red @ Color::Cyan` error: wildcard match will also match any future added variants --> $DIR/wildcard_enum_match_arm.rs:48:9 | LL | not_red => format!("{:?}", not_red), - | ^^^^^^^ help: try this: `not_red @ Color::Green | not_red @ Color::Blue | not_red @ Color::Rgb(..) | not_red @ Color::Cyan` + | ^^^^^^^ help: try: `not_red @ Color::Green | not_red @ Color::Blue | not_red @ Color::Rgb(..) | not_red @ Color::Cyan` error: wildcard match will also match any future added variants --> $DIR/wildcard_enum_match_arm.rs:64:9 | LL | _ => "No red", - | ^ help: try this: `Color::Red | Color::Green | Color::Blue | Color::Rgb(..) | Color::Cyan` + | ^ help: try: `Color::Red | Color::Green | Color::Blue | Color::Rgb(..) 
| Color::Cyan` error: wildcard matches known variants and will also match future added variants --> $DIR/wildcard_enum_match_arm.rs:81:9 | LL | _ => {}, - | ^ help: try this: `ErrorKind::PermissionDenied | _` + | ^ help: try: `ErrorKind::PermissionDenied | _` error: wildcard match will also match any future added variants --> $DIR/wildcard_enum_match_arm.rs:99:13 | LL | _ => (), - | ^ help: try this: `Enum::B | Enum::__Private` + | ^ help: try: `Enum::B | Enum::__Private` error: aborting due to 6 previous errors diff --git a/src/tools/clippy/tests/ui/wildcard_imports.fixed b/src/tools/clippy/tests/ui/wildcard_imports.fixed index 2961b062ec3..67173f40654 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports.fixed +++ b/src/tools/clippy/tests/ui/wildcard_imports.fixed @@ -112,6 +112,7 @@ mod in_fn_test { } fn test_inner_nested() { + #[rustfmt::skip] use self::{inner::inner_foo, inner2::inner_bar}; inner_foo(); diff --git a/src/tools/clippy/tests/ui/wildcard_imports.rs b/src/tools/clippy/tests/ui/wildcard_imports.rs index 28508a2538b..8223b693018 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports.rs +++ b/src/tools/clippy/tests/ui/wildcard_imports.rs @@ -112,6 +112,7 @@ mod in_fn_test { } fn test_inner_nested() { + #[rustfmt::skip] use self::{inner::*, inner2::*}; inner_foo(); diff --git a/src/tools/clippy/tests/ui/wildcard_imports.stderr b/src/tools/clippy/tests/ui/wildcard_imports.stderr index c96b3041a00..f7baf234c2f 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports.stderr +++ b/src/tools/clippy/tests/ui/wildcard_imports.stderr @@ -55,37 +55,37 @@ LL | use wildcard_imports_helper::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:115:20 + --> $DIR/wildcard_imports.rs:116:20 | LL | use self::{inner::*, inner2::*}; | ^^^^^^^^ help: try: `inner::inner_foo` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:115:30 + --> $DIR/wildcard_imports.rs:116:30 | LL | use self::{inner::*, inner2::*}; | ^^^^^^^^^ help: try: `inner2::inner_bar` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:122:13 + --> $DIR/wildcard_imports.rs:123:13 | LL | use wildcard_imports_helper::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:151:9 + --> $DIR/wildcard_imports.rs:152:9 | LL | use crate::in_fn_test::*; | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:160:9 + --> $DIR/wildcard_imports.rs:161:9 | LL | use crate:: in_fn_test:: * ; | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate:: in_fn_test::exported` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:161:9 + --> $DIR/wildcard_imports.rs:162:9 | LL | use crate:: fn_mod:: | _________^ @@ -93,37 +93,37 @@ LL | | *; | |_________^ help: try: `crate:: fn_mod::foo` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:172:13 + --> $DIR/wildcard_imports.rs:173:13 | LL | use super::*; | ^^^^^^^^ help: try: `super::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:207:17 + --> $DIR/wildcard_imports.rs:208:17 | LL | use super::*; | ^^^^^^^^ help: try: `super::insidefoo` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:215:13 + --> $DIR/wildcard_imports.rs:216:13 | LL | use crate::super_imports::*; | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: 
`crate::super_imports::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:224:17 + --> $DIR/wildcard_imports.rs:225:17 | LL | use super::super::*; | ^^^^^^^^^^^^^^^ help: try: `super::super::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:233:13 + --> $DIR/wildcard_imports.rs:234:13 | LL | use super::super::super_imports::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `super::super::super_imports::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports.rs:241:13 + --> $DIR/wildcard_imports.rs:242:13 | LL | use super::*; | ^^^^^^^^ help: try: `super::foofoo` diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed index 3aea013fb3a..8a63375676e 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.fixed @@ -106,6 +106,7 @@ mod in_fn_test { } fn test_inner_nested() { + #[rustfmt::skip] use self::{inner::inner_foo, inner2::inner_bar}; inner_foo(); diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr index acca9f651b4..af9ae6e786c 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2018.stderr @@ -55,37 +55,37 @@ LL | use wildcard_imports_helper::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:109:20 + --> $DIR/wildcard_imports_2021.rs:110:20 | LL | use self::{inner::*, inner2::*}; | ^^^^^^^^ help: try: `inner::inner_foo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:109:30 + --> $DIR/wildcard_imports_2021.rs:110:30 | LL | use self::{inner::*, inner2::*}; | ^^^^^^^^^ help: try: `inner2::inner_bar` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:116:13 + --> $DIR/wildcard_imports_2021.rs:117:13 | LL | use wildcard_imports_helper::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:145:9 + --> $DIR/wildcard_imports_2021.rs:146:9 | LL | use crate::in_fn_test::*; | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:154:9 + --> $DIR/wildcard_imports_2021.rs:155:9 | LL | use crate:: in_fn_test:: * ; | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate:: in_fn_test::exported` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:155:9 + --> $DIR/wildcard_imports_2021.rs:156:9 | LL | use crate:: fn_mod:: | _________^ @@ -93,37 +93,37 @@ LL | | *; | |_________^ help: try: `crate:: fn_mod::foo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:166:13 + --> $DIR/wildcard_imports_2021.rs:167:13 | LL | use super::*; | ^^^^^^^^ help: try: `super::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:201:17 + --> $DIR/wildcard_imports_2021.rs:202:17 | LL | use super::*; | ^^^^^^^^ help: try: `super::insidefoo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:209:13 + --> $DIR/wildcard_imports_2021.rs:210:13 | LL | use crate::super_imports::*; | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::super_imports::foofoo` error: usage of 
wildcard import - --> $DIR/wildcard_imports_2021.rs:218:17 + --> $DIR/wildcard_imports_2021.rs:219:17 | LL | use super::super::*; | ^^^^^^^^^^^^^^^ help: try: `super::super::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:227:13 + --> $DIR/wildcard_imports_2021.rs:228:13 | LL | use super::super::super_imports::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `super::super::super_imports::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:235:13 + --> $DIR/wildcard_imports_2021.rs:236:13 | LL | use super::*; | ^^^^^^^^ help: try: `super::foofoo` diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed index 3aea013fb3a..8a63375676e 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.fixed @@ -106,6 +106,7 @@ mod in_fn_test { } fn test_inner_nested() { + #[rustfmt::skip] use self::{inner::inner_foo, inner2::inner_bar}; inner_foo(); diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr index acca9f651b4..af9ae6e786c 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.edition2021.stderr @@ -55,37 +55,37 @@ LL | use wildcard_imports_helper::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternA, extern_foo}` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:109:20 + --> $DIR/wildcard_imports_2021.rs:110:20 | LL | use self::{inner::*, inner2::*}; | ^^^^^^^^ help: try: `inner::inner_foo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:109:30 + --> $DIR/wildcard_imports_2021.rs:110:30 | LL | use self::{inner::*, inner2::*}; | ^^^^^^^^^ help: try: `inner2::inner_bar` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:116:13 + --> $DIR/wildcard_imports_2021.rs:117:13 | LL | use wildcard_imports_helper::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `wildcard_imports_helper::{ExternExportedEnum, ExternExportedStruct, extern_exported}` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:145:9 + --> $DIR/wildcard_imports_2021.rs:146:9 | LL | use crate::in_fn_test::*; | ^^^^^^^^^^^^^^^^^^^^ help: try: `crate::in_fn_test::{ExportedEnum, ExportedStruct, exported}` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:154:9 + --> $DIR/wildcard_imports_2021.rs:155:9 | LL | use crate:: in_fn_test:: * ; | ^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate:: in_fn_test::exported` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:155:9 + --> $DIR/wildcard_imports_2021.rs:156:9 | LL | use crate:: fn_mod:: | _________^ @@ -93,37 +93,37 @@ LL | | *; | |_________^ help: try: `crate:: fn_mod::foo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:166:13 + --> $DIR/wildcard_imports_2021.rs:167:13 | LL | use super::*; | ^^^^^^^^ help: try: `super::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:201:17 + --> $DIR/wildcard_imports_2021.rs:202:17 | LL | use super::*; | ^^^^^^^^ help: try: `super::insidefoo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:209:13 + --> $DIR/wildcard_imports_2021.rs:210:13 | LL | use crate::super_imports::*; | ^^^^^^^^^^^^^^^^^^^^^^^ help: try: `crate::super_imports::foofoo` error: usage of wildcard import - 
--> $DIR/wildcard_imports_2021.rs:218:17 + --> $DIR/wildcard_imports_2021.rs:219:17 | LL | use super::super::*; | ^^^^^^^^^^^^^^^ help: try: `super::super::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:227:13 + --> $DIR/wildcard_imports_2021.rs:228:13 | LL | use super::super::super_imports::*; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `super::super::super_imports::foofoo` error: usage of wildcard import - --> $DIR/wildcard_imports_2021.rs:235:13 + --> $DIR/wildcard_imports_2021.rs:236:13 | LL | use super::*; | ^^^^^^^^ help: try: `super::foofoo` diff --git a/src/tools/clippy/tests/ui/wildcard_imports_2021.rs b/src/tools/clippy/tests/ui/wildcard_imports_2021.rs index 40c2d07527d..52cd2c82854 100644 --- a/src/tools/clippy/tests/ui/wildcard_imports_2021.rs +++ b/src/tools/clippy/tests/ui/wildcard_imports_2021.rs @@ -106,6 +106,7 @@ mod in_fn_test { } fn test_inner_nested() { + #[rustfmt::skip] use self::{inner::*, inner2::*}; inner_foo(); diff --git a/src/tools/clippy/tests/ui/write_literal.stderr b/src/tools/clippy/tests/ui/write_literal.stderr index 1e306ae28a2..8b72c8bd282 100644 --- a/src/tools/clippy/tests/ui/write_literal.stderr +++ b/src/tools/clippy/tests/ui/write_literal.stderr @@ -5,7 +5,7 @@ LL | write!(v, "Hello {}", "world"); | ^^^^^^^ | = note: `-D clippy::write-literal` implied by `-D warnings` -help: try this +help: try | LL - write!(v, "Hello {}", "world"); LL + write!(v, "Hello world"); @@ -17,7 +17,7 @@ error: literal with an empty format string LL | writeln!(v, "Hello {} {}", world, "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "Hello {} {}", world, "world"); LL + writeln!(v, "Hello {} world", world); @@ -29,7 +29,7 @@ error: literal with an empty format string LL | writeln!(v, "Hello {}", "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "Hello {}", "world"); LL + writeln!(v, "Hello world"); @@ -41,7 +41,7 @@ error: literal with an empty format string LL | writeln!(v, "{} {:.4}", "a literal", 5); | ^^^^^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "{} {:.4}", "a literal", 5); LL + writeln!(v, "a literal {:.4}", 5); @@ -53,7 +53,7 @@ error: literal with an empty format string LL | writeln!(v, "{0} {1}", "hello", "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "{0} {1}", "hello", "world"); LL + writeln!(v, "hello {1}", "world"); @@ -65,7 +65,7 @@ error: literal with an empty format string LL | writeln!(v, "{0} {1}", "hello", "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "{0} {1}", "hello", "world"); LL + writeln!(v, "{0} world", "hello"); @@ -77,7 +77,7 @@ error: literal with an empty format string LL | writeln!(v, "{1} {0}", "hello", "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "{1} {0}", "hello", "world"); LL + writeln!(v, "world {0}", "hello"); @@ -89,7 +89,7 @@ error: literal with an empty format string LL | writeln!(v, "{1} {0}", "hello", "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "{1} {0}", "hello", "world"); LL + writeln!(v, "{1} hello", "world"); @@ -101,7 +101,7 @@ error: literal with an empty format string LL | writeln!(v, "{foo} {bar}", foo = "hello", bar = "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "{foo} {bar}", foo = "hello", bar = "world"); LL + writeln!(v, "hello {bar}", bar = "world"); @@ -113,7 +113,7 @@ error: literal with an empty format string LL | writeln!(v, "{foo} {bar}", foo = "hello", bar = "world"); | ^^^^^^^ | -help: try this +help: try | LL - 
writeln!(v, "{foo} {bar}", foo = "hello", bar = "world"); LL + writeln!(v, "{foo} world", foo = "hello"); @@ -125,7 +125,7 @@ error: literal with an empty format string LL | writeln!(v, "{bar} {foo}", foo = "hello", bar = "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "{bar} {foo}", foo = "hello", bar = "world"); LL + writeln!(v, "world {foo}", foo = "hello"); @@ -137,7 +137,7 @@ error: literal with an empty format string LL | writeln!(v, "{bar} {foo}", foo = "hello", bar = "world"); | ^^^^^^^ | -help: try this +help: try | LL - writeln!(v, "{bar} {foo}", foo = "hello", bar = "world"); LL + writeln!(v, "{bar} hello", bar = "world"); diff --git a/src/tools/clippy/tests/ui/write_literal_2.stderr b/src/tools/clippy/tests/ui/write_literal_2.stderr index 18591250aad..c30ec385b35 100644 --- a/src/tools/clippy/tests/ui/write_literal_2.stderr +++ b/src/tools/clippy/tests/ui/write_literal_2.stderr @@ -13,7 +13,7 @@ LL | writeln!(v, "{}", "{hello}"); | ^^^^^^^^^ | = note: `-D clippy::write-literal` implied by `-D warnings` -help: try this +help: try | LL - writeln!(v, "{}", "{hello}"); LL + writeln!(v, "{{hello}}"); @@ -25,7 +25,7 @@ error: literal with an empty format string LL | writeln!(v, r"{}", r"{hello}"); | ^^^^^^^^^^ | -help: try this +help: try | LL - writeln!(v, r"{}", r"{hello}"); LL + writeln!(v, r"{{hello}}"); @@ -37,7 +37,7 @@ error: literal with an empty format string LL | writeln!(v, "{}", '\''); | ^^^^ | -help: try this +help: try | LL - writeln!(v, "{}", '\''); LL + writeln!(v, "'"); @@ -49,7 +49,7 @@ error: literal with an empty format string LL | writeln!(v, "{}", '"'); | ^^^ | -help: try this +help: try | LL - writeln!(v, "{}", '"'); LL + writeln!(v, "\""); @@ -67,7 +67,7 @@ error: literal with an empty format string LL | writeln!(v, r"{}", '\''); | ^^^^ | -help: try this +help: try | LL - writeln!(v, r"{}", '\''); LL + writeln!(v, r"'"); @@ -80,7 +80,7 @@ LL | / "hello \ LL | | world!" | |_______________^ | -help: try this +help: try | LL ~ "some hello \ LL ~ world!" 
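For context on the `write_literal` suggestions being reworded above ("try this" -> "try"): the lint fires when a string literal is passed as an argument for a `{}` placeholder, and its suggestion inlines the literal into the format string. A minimal sketch of that pattern follows; writing into a `Vec<u8>` (which implements `Write`) is an assumption made only to keep the example self-contained and runnable.

```rust
use std::io::Write;

fn main() -> std::io::Result<()> {
    let mut v: Vec<u8> = Vec::new();

    // Triggers `clippy::write_literal`: "world" is a literal passed for the
    // `{}` placeholder instead of being part of the format string.
    write!(v, "Hello {}", "world")?;

    // What the lint's `help: try` suggestion produces: the literal is inlined.
    write!(v, "Hello world")?;

    Ok(())
}
```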
@@ -92,7 +92,7 @@ error: literal with an empty format string LL | "1", "2", "3", | ^^^ | -help: try this +help: try | LL ~ "some 1\ LL ~ {} \\ {}", "2", "3", @@ -104,7 +104,7 @@ error: literal with an empty format string LL | "1", "2", "3", | ^^^ | -help: try this +help: try | LL ~ 2 \\ {}", LL ~ "1", "3", @@ -116,7 +116,7 @@ error: literal with an empty format string LL | "1", "2", "3", | ^^^ | -help: try this +help: try | LL ~ {} \\ 3", LL ~ "1", "2", @@ -128,7 +128,7 @@ error: literal with an empty format string LL | writeln!(v, "{}", "\\"); | ^^^^ | -help: try this +help: try | LL - writeln!(v, "{}", "\\"); LL + writeln!(v, "\\"); @@ -140,7 +140,7 @@ error: literal with an empty format string LL | writeln!(v, r"{}", "\\"); | ^^^^ | -help: try this +help: try | LL - writeln!(v, r"{}", "\\"); LL + writeln!(v, r"\"); @@ -152,7 +152,7 @@ error: literal with an empty format string LL | writeln!(v, r#"{}"#, "\\"); | ^^^^ | -help: try this +help: try | LL - writeln!(v, r#"{}"#, "\\"); LL + writeln!(v, r#"\"#); @@ -164,7 +164,7 @@ error: literal with an empty format string LL | writeln!(v, "{}", r"\"); | ^^^^ | -help: try this +help: try | LL - writeln!(v, "{}", r"\"); LL + writeln!(v, "\\"); @@ -176,7 +176,7 @@ error: literal with an empty format string LL | writeln!(v, "{}", "\r"); | ^^^^ | -help: try this +help: try | LL - writeln!(v, "{}", "\r"); LL + writeln!(v, "\r"); diff --git a/src/tools/clippy/util/fetch_prs_between.sh b/src/tools/clippy/util/fetch_prs_between.sh index 6865abf971b..fa7560b6929 100755 --- a/src/tools/clippy/util/fetch_prs_between.sh +++ b/src/tools/clippy/util/fetch_prs_between.sh @@ -6,15 +6,20 @@ # If you want to use this to update the Clippy changelog, be sure to manually # exclude the non-user facing changes like 'rustup' PRs, typo fixes, etc. 
-first=$1 -last=$2 +set -e IFS=' ' -for pr in $(git log --oneline --grep "Merge #" --grep "Merge pull request" --grep "Auto merge of" --grep "Rollup merge of" "$first...$last" | sort -rn | uniq); do +for pr in $(git log --oneline --merges --first-parent "$1...$2"); do id=$(echo "$pr" | rg -o '#[0-9]{3,5}' | cut -c 2-) commit=$(echo "$pr" | cut -d' ' -f 1) message=$(git --no-pager show --pretty=medium "$commit") + + if [[ -z "$newest_pr" ]]; then + newest_pr="$id" + fi + oldest_pr="$id" + if [[ -n $(echo "$message" | rg "^[\s]{4}changelog: [nN]one\.*$") ]]; then continue fi @@ -25,3 +30,14 @@ for pr in $(git log --oneline --grep "Merge #" --grep "Merge pull request" --gre echo "---------------------------------------------------------" echo done + +newest_merged_at="$(gh pr view -R rust-lang/rust-clippy --json mergedAt $newest_pr -q .mergedAt)" +oldest_merged_at="$(gh pr view -R rust-lang/rust-clippy --json mergedAt $oldest_pr -q .mergedAt)" + +query="merged:$oldest_merged_at..$newest_merged_at base:master" +encoded_query="$(echo $query | sed 's/ /+/g; s/:/%3A/g')" + +pr_link="https://github.com/rust-lang/rust-clippy/pulls?q=$encoded_query" +count="$(gh api -X GET search/issues -f "q=$query repo:rust-lang/rust-clippy" -q .total_count)" + +echo "[View all $count merged pull requests]($pr_link)" diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 17b1d2b1120..3bef3be2a53 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -743333f3dd90721461c09387ec73d09c080d5f5f +33a2c2487ac5d9927830ea4c1844335c6b9f77db diff --git a/src/tools/miri/src/borrow_tracker/mod.rs b/src/tools/miri/src/borrow_tracker/mod.rs index a2cf7c80950..fcfa8f64570 100644 --- a/src/tools/miri/src/borrow_tracker/mod.rs +++ b/src/tools/miri/src/borrow_tracker/mod.rs @@ -74,7 +74,7 @@ pub struct FrameState { impl VisitTags for FrameState { fn visit_tags(&self, _visit: &mut dyn FnMut(BorTag)) { - // `protected_tags` are fine to GC. + // `protected_tags` are already recorded by `GlobalStateInner`. } } @@ -108,9 +108,12 @@ pub struct GlobalStateInner { } impl VisitTags for GlobalStateInner { - fn visit_tags(&self, _visit: &mut dyn FnMut(BorTag)) { - // The only candidate is base_ptr_tags, and that does not need visiting since we don't ever - // GC the bottommost tag. + fn visit_tags(&self, visit: &mut dyn FnMut(BorTag)) { + for &tag in self.protected_tags.keys() { + visit(tag); + } + // The only other candidate is base_ptr_tags, and that does not need visiting since we don't ever + // GC the bottommost/root tag. } } diff --git a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs index e22b352e740..15a7d72edf1 100644 --- a/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/stacked_borrows/mod.rs @@ -996,7 +996,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> { /// Protect a place so that it cannot be used any more for the duration of the current function /// call. - /// + /// /// This is used to ensure soundness of in-place function argument/return passing. 
fn sb_protect_place(&mut self, place: &MPlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> { let this = self.eval_context_mut(); diff --git a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs index 274a4a0aaba..2afd45829bd 100644 --- a/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs +++ b/src/tools/miri/src/borrow_tracker/tree_borrows/mod.rs @@ -497,7 +497,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> { /// Protect a place so that it cannot be used any more for the duration of the current function /// call. - /// + /// /// This is used to ensure soundness of in-place function argument/return passing. fn tb_protect_place(&mut self, place: &MPlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> { let this = self.eval_context_mut(); diff --git a/src/tools/miri/src/lib.rs b/src/tools/miri/src/lib.rs index 4a093d7bcc6..5327c2f24e0 100644 --- a/src/tools/miri/src/lib.rs +++ b/src/tools/miri/src/lib.rs @@ -43,19 +43,20 @@ // Needed for rustdoc from bootstrap (with `-Znormalize-docs`). #![recursion_limit = "256"] +extern crate either; // the one from rustc + extern crate rustc_apfloat; extern crate rustc_ast; -extern crate rustc_errors; -#[macro_use] -extern crate rustc_middle; extern crate rustc_const_eval; extern crate rustc_data_structures; +extern crate rustc_errors; extern crate rustc_hir; extern crate rustc_index; +#[macro_use] +extern crate rustc_middle; extern crate rustc_session; extern crate rustc_span; extern crate rustc_target; -extern crate either; // the one from rustc // Necessary to pull in object code as the rest of the rustc crates are shipped only as rmeta // files. diff --git a/src/tools/miri/src/machine.rs b/src/tools/miri/src/machine.rs index 5ebd1fae492..a8f89d56f6d 100644 --- a/src/tools/miri/src/machine.rs +++ b/src/tools/miri/src/machine.rs @@ -1097,9 +1097,8 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for MiriMachine<'mir, 'tcx> { ptr: Pointer<Self::Provenance>, ) -> InterpResult<'tcx> { match ptr.provenance { - Provenance::Concrete { alloc_id, tag } => { - intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, tag) - } + Provenance::Concrete { alloc_id, tag } => + intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, tag), Provenance::Wildcard => { // No need to do anything for wildcard pointers as // their provenances have already been previously exposed. diff --git a/src/tools/miri/tests/fail/dangling_pointers/dangling_zst_deref.rs b/src/tools/miri/tests/fail/dangling_pointers/dangling_zst_deref.rs index 534d7d5f42f..e749eb896e2 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dangling_zst_deref.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/dangling_zst_deref.rs @@ -1,6 +1,5 @@ // Make sure we find these even with many checks disabled. -// Some optimizations remove ZST accesses, thus masking this UB. 
-//@compile-flags: -Zmir-opt-level=0 -Zmiri-disable-alignment-check -Zmiri-disable-stacked-borrows -Zmiri-disable-validation +//@compile-flags: -Zmiri-disable-alignment-check -Zmiri-disable-stacked-borrows -Zmiri-disable-validation fn main() { let p = { diff --git a/src/tools/miri/tests/fail/dangling_pointers/dyn_size.rs b/src/tools/miri/tests/fail/dangling_pointers/dyn_size.rs index 54f353ebebe..87ca8a6077c 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/dyn_size.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/dyn_size.rs @@ -1,5 +1,5 @@ -// should find the bug even without these, but gets masked by optimizations -//@compile-flags: -Zmiri-disable-validation -Zmiri-disable-stacked-borrows -Zmir-opt-level=0 +// should find the bug even without these +//@compile-flags: -Zmiri-disable-validation -Zmiri-disable-stacked-borrows struct SliceWithHead(u8, [u8]); diff --git a/src/tools/miri/tests/fail/dangling_pointers/maybe_null_pointer_deref_zst.rs b/src/tools/miri/tests/fail/dangling_pointers/maybe_null_pointer_deref_zst.rs index a48a3189db2..73d0b120680 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/maybe_null_pointer_deref_zst.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/maybe_null_pointer_deref_zst.rs @@ -1,6 +1,3 @@ -// Some optimizations remove ZST accesses, thus masking this UB. -//@compile-flags: -Zmir-opt-level=0 - fn main() { // This pointer *could* be NULL so we cannot load from it, not even at ZST let ptr = (&0u8 as *const u8).wrapping_sub(0x800) as *const (); diff --git a/src/tools/miri/tests/fail/dangling_pointers/maybe_null_pointer_write_zst.rs b/src/tools/miri/tests/fail/dangling_pointers/maybe_null_pointer_write_zst.rs index 449c65d218a..5537207ae42 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/maybe_null_pointer_write_zst.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/maybe_null_pointer_write_zst.rs @@ -1,6 +1,3 @@ -// Some optimizations remove ZST accesses, thus masking this UB. -//@compile-flags: -Zmir-opt-level=0 - fn main() { // This pointer *could* be NULL so we cannot load from it, not even at ZST. // Not using the () type here, as writes of that type do not even have MIR generated. diff --git a/src/tools/miri/tests/fail/dangling_pointers/null_pointer_deref_zst.rs b/src/tools/miri/tests/fail/dangling_pointers/null_pointer_deref_zst.rs index d6a607c61cb..4cb805db095 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/null_pointer_deref_zst.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/null_pointer_deref_zst.rs @@ -1,6 +1,3 @@ -// Some optimizations remove ZST accesses, thus masking this UB. -//@compile-flags: -Zmir-opt-level=0 - #[allow(deref_nullptr)] fn main() { let x: () = unsafe { *std::ptr::null() }; //~ ERROR: dereferencing pointer failed: null pointer is a dangling pointer diff --git a/src/tools/miri/tests/fail/dangling_pointers/null_pointer_write_zst.rs b/src/tools/miri/tests/fail/dangling_pointers/null_pointer_write_zst.rs index 21344208130..ec34c631a46 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/null_pointer_write_zst.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/null_pointer_write_zst.rs @@ -1,6 +1,3 @@ -// Some optimizations remove ZST accesses, thus masking this UB. -//@compile-flags: -Zmir-opt-level=0 - #[allow(deref_nullptr)] fn main() { // Not using the () type here, as writes of that type do not even have MIR generated. 
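The Miri test changes above all drop `-Zmir-opt-level=0` from the ZST dangling/null-pointer tests; the flag was only there because MIR optimizations used to remove zero-sized accesses and mask the UB. A minimal sketch of the kind of program these tests exercise (it mirrors `null_pointer_deref_zst.rs` shown above; running it under Miri, e.g. `cargo miri run`, reports the dereference as undefined behavior even though the read is zero-sized):

```rust
#[allow(deref_nullptr)]
fn main() {
    // A zero-sized read still requires a valid (non-null, non-dangling)
    // pointer, so Miri flags this dereference as undefined behavior.
    let _x: () = unsafe { *std::ptr::null::<()>() };
}
```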
diff --git a/src/tools/miri/tests/fail/dangling_pointers/stack_temporary.rs b/src/tools/miri/tests/fail/dangling_pointers/stack_temporary.rs index 1373773f68d..c193d5fe0b3 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/stack_temporary.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/stack_temporary.rs @@ -1,5 +1,5 @@ -// This should fail even without validation, but some MIR opts mask the error -//@compile-flags: -Zmiri-disable-validation -Zmir-opt-level=0 +// This should fail even without validation +//@compile-flags: -Zmiri-disable-validation unsafe fn make_ref<'a>(x: *mut i32) -> &'a mut i32 { &mut *x diff --git a/src/tools/miri/tests/fail/dangling_pointers/storage_dead_dangling.rs b/src/tools/miri/tests/fail/dangling_pointers/storage_dead_dangling.rs index 366930a831c..f9983f48c61 100644 --- a/src/tools/miri/tests/fail/dangling_pointers/storage_dead_dangling.rs +++ b/src/tools/miri/tests/fail/dangling_pointers/storage_dead_dangling.rs @@ -1,5 +1,5 @@ -// This should fail even without validation, but some MIR opts mask the error -//@compile-flags: -Zmiri-disable-validation -Zmir-opt-level=0 -Zmiri-permissive-provenance +// This should fail even without validation +//@compile-flags: -Zmiri-disable-validation -Zmiri-permissive-provenance static mut LEAK: usize = 0; diff --git a/src/tools/miri/tests/fail/data_race/read_write_race_stack.rs b/src/tools/miri/tests/fail/data_race/read_write_race_stack.rs index 40224ced12d..c3e5c401d87 100644 --- a/src/tools/miri/tests/fail/data_race/read_write_race_stack.rs +++ b/src/tools/miri/tests/fail/data_race/read_write_race_stack.rs @@ -1,7 +1,4 @@ -//@compile-flags: -Zmir-opt-level=0 -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows - -// Note: mir-opt-level set to 0 to prevent the read of stack_var in thread 1 -// from being optimized away and preventing the detection of the data-race. +//@compile-flags: -Zmiri-disable-weak-memory-emulation -Zmiri-preemption-rate=0 -Zmiri-disable-stacked-borrows use std::ptr::null_mut; use std::sync::atomic::{AtomicPtr, Ordering}; diff --git a/src/tools/miri/tests/fail/erroneous_const.rs b/src/tools/miri/tests/fail/erroneous_const.rs index d37837c7193..65f7aafc3cc 100644 --- a/src/tools/miri/tests/fail/erroneous_const.rs +++ b/src/tools/miri/tests/fail/erroneous_const.rs @@ -1,7 +1,5 @@ //! Make sure we detect erroneous constants post-monomorphization even when they are unused. //! (https://github.com/rust-lang/miri/issues/1382) -// Inlining changes the error location -//@compile-flags: -Zmir-opt-level=0 #![feature(never_type)] struct PrintName<T>(T); diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.rs b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.rs index 8eda913feb4..093b55759fd 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.rs +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.rs @@ -5,9 +5,9 @@ pub struct S(i32); #[custom_mir(dialect = "runtime", phase = "optimized")] fn main() { - // FIXME: the span is not great (probably caused by custom MIR) - mir! { //~ERROR: uninitialized + mir! { let unit: (); + let _observe: i32; { let non_copy = S(42); // This could change `non_copy` in-place @@ -15,7 +15,7 @@ fn main() { } after_call = { // So now we must not be allowed to observe non-copy again. 
- let _observe = non_copy.0; + _observe = non_copy.0; //~ERROR: uninitialized Return() } diff --git a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.stderr b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.stderr index 3ff7976c70b..5d9a3af0c8a 100644 --- a/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.stderr +++ b/src/tools/miri/tests/fail/function_calls/arg_inplace_observe_after.stderr @@ -1,20 +1,13 @@ error: Undefined Behavior: using uninitialized data, but this operation requires initialized memory --> $DIR/arg_inplace_observe_after.rs:LL:CC | -LL | / mir! { -LL | | let unit: (); -LL | | { -LL | | let non_copy = S(42); -... | -LL | | -LL | | } - | |_____^ using uninitialized data, but this operation requires initialized memory +LL | _observe = non_copy.0; + | ^^^^^^^^^^^^^^^^^^^^^ using uninitialized data, but this operation requires initialized memory | = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information = note: BACKTRACE: - = note: inside `main` at RUSTLIB/core/src/intrinsics/mir.rs:LL:CC - = note: this error originates in the macro `::core::intrinsics::mir::__internal_remove_let` which comes from the expansion of the macro `mir` (in Nightly builds, run with -Z macro-backtrace for more info) + = note: inside `main` at $DIR/arg_inplace_observe_after.rs:LL:CC note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace diff --git a/src/tools/miri/tests/fail/function_calls/target_feature.rs b/src/tools/miri/tests/fail/function_calls/target_feature.rs new file mode 100644 index 00000000000..be95241ea67 --- /dev/null +++ b/src/tools/miri/tests/fail/function_calls/target_feature.rs @@ -0,0 +1,11 @@ +//@only-target-x86_64: uses x86 target features + +fn main() { + assert!(!is_x86_feature_detected!("ssse3")); + unsafe { + ssse3_fn(); //~ ERROR: calling a function that requires unavailable target features: ssse3 + } +} + +#[target_feature(enable = "ssse3")] +unsafe fn ssse3_fn() {} diff --git a/src/tools/miri/tests/fail/function_calls/target_feature.stderr b/src/tools/miri/tests/fail/function_calls/target_feature.stderr new file mode 100644 index 00000000000..bddc6e7e89b --- /dev/null +++ b/src/tools/miri/tests/fail/function_calls/target_feature.stderr @@ -0,0 +1,15 @@ +error: Undefined Behavior: calling a function that requires unavailable target features: ssse3 + --> $DIR/target_feature.rs:LL:CC + | +LL | ssse3_fn(); + | ^^^^^^^^^^ calling a function that requires unavailable target features: ssse3 + | + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information + = note: BACKTRACE: + = note: inside `main` at $DIR/target_feature.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to previous error + diff --git a/src/tools/miri/tests/fail/unaligned_pointers/dyn_alignment.rs b/src/tools/miri/tests/fail/unaligned_pointers/dyn_alignment.rs index 555aa57de30..b5a9b2bf18e 100644 --- a/src/tools/miri/tests/fail/unaligned_pointers/dyn_alignment.rs +++ b/src/tools/miri/tests/fail/unaligned_pointers/dyn_alignment.rs @@ -1,5 +1,5 @@ // should find the bug even without, but gets masked by 
optimizations -//@compile-flags: -Zmiri-disable-stacked-borrows -Zmir-opt-level=0 -Cdebug-assertions=no +//@compile-flags: -Zmiri-disable-stacked-borrows -Cdebug-assertions=no //@normalize-stderr-test: "but found [0-9]+" -> "but found $$ALIGN" #[repr(align(256))] diff --git a/src/tools/miri/tests/fail/unaligned_pointers/unaligned_ptr_zst.rs b/src/tools/miri/tests/fail/unaligned_pointers/unaligned_ptr_zst.rs index 04dbe3fd8d4..289536287a9 100644 --- a/src/tools/miri/tests/fail/unaligned_pointers/unaligned_ptr_zst.rs +++ b/src/tools/miri/tests/fail/unaligned_pointers/unaligned_ptr_zst.rs @@ -1,6 +1,5 @@ // This should fail even without validation -// Some optimizations remove ZST accesses, thus masking this UB. -//@compile-flags: -Zmir-opt-level=0 -Zmiri-disable-validation -Cdebug-assertions=no +//@compile-flags: -Zmiri-disable-validation -Cdebug-assertions=no fn main() { // Try many times as this might work by chance. diff --git a/src/tools/miri/tests/fail/validity/nonzero.rs b/src/tools/miri/tests/fail/validity/nonzero.rs index 384c94a5569..7cba90bc15d 100644 --- a/src/tools/miri/tests/fail/validity/nonzero.rs +++ b/src/tools/miri/tests/fail/validity/nonzero.rs @@ -1,5 +1,3 @@ -// gets masked by optimizations -//@compile-flags: -Zmir-opt-level=0 #![feature(rustc_attrs)] #![allow(unused_attributes)] diff --git a/src/tools/miri/tests/fail/zst2.rs b/src/tools/miri/tests/fail/zst2.rs index 82470866f17..1d3e8ea9d00 100644 --- a/src/tools/miri/tests/fail/zst2.rs +++ b/src/tools/miri/tests/fail/zst2.rs @@ -1,6 +1,3 @@ -// Some optimizations remove ZST accesses, thus masking this UB. -//@compile-flags: -Zmir-opt-level=0 - fn main() { // Not using the () type here, as writes of that type do not even have MIR generated. // Also not assigning directly as that's array initialization, not assignment. diff --git a/src/tools/miri/tests/fail/zst3.rs b/src/tools/miri/tests/fail/zst3.rs index a511f38998f..454bef25f22 100644 --- a/src/tools/miri/tests/fail/zst3.rs +++ b/src/tools/miri/tests/fail/zst3.rs @@ -1,6 +1,3 @@ -// Some optimizations remove ZST accesses, thus masking this UB. -//@compile-flags: -Zmir-opt-level=0 - fn main() { // Not using the () type here, as writes of that type do not even have MIR generated. // Also not assigning directly as that's array initialization, not assignment. 
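The new `target_feature` fail/pass tests added in this range check both sides of the same rule: calling a `#[target_feature]` function is undefined behavior unless the feature is actually available at the call site. A minimal sketch of the sound calling pattern is below; it is x86_64-only, the empty `ssse3_fn` body mirrors the tests, and the guard-then-call shape is the usual usage pattern rather than something lifted verbatim from the test files.

```rust
#[target_feature(enable = "ssse3")]
unsafe fn ssse3_fn() {
    // A real implementation would use SSSE3 intrinsics here.
}

fn main() {
    // Only call the feature-gated function when the CPU actually supports the
    // feature; calling it otherwise is UB (this is what the fail test checks).
    if is_x86_feature_detected!("ssse3") {
        // SAFETY: the runtime check above guarantees SSSE3 is available.
        unsafe { ssse3_fn() };
    } else {
        println!("ssse3 not detected; skipping");
    }
}
```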
diff --git a/src/tools/miri/tests/pass/function_calls/target_feature.rs b/src/tools/miri/tests/pass/function_calls/target_feature.rs new file mode 100644 index 00000000000..0be86ba3731 --- /dev/null +++ b/src/tools/miri/tests/pass/function_calls/target_feature.rs @@ -0,0 +1,12 @@ +//@only-target-x86_64: uses x86 target features +//@compile-flags: -C target-feature=+ssse3 + +fn main() { + assert!(is_x86_feature_detected!("ssse3")); + unsafe { + ssse3_fn(); + } +} + +#[target_feature(enable = "ssse3")] +unsafe fn ssse3_fn() {} diff --git a/src/tools/opt-dist/src/main.rs b/src/tools/opt-dist/src/main.rs index 39fa7e1fb2a..6fc3c3b64a8 100644 --- a/src/tools/opt-dist/src/main.rs +++ b/src/tools/opt-dist/src/main.rs @@ -7,7 +7,10 @@ use crate::tests::run_tests; use crate::timer::Timer; use crate::training::{gather_llvm_bolt_profiles, gather_llvm_profiles, gather_rustc_profiles}; use crate::utils::io::reset_directory; -use crate::utils::{clear_llvm_files, format_env_variables, print_free_disk_space}; +use crate::utils::{ + clear_llvm_files, format_env_variables, print_binary_sizes, print_free_disk_space, + with_log_group, +}; mod environment; mod exec; @@ -27,7 +30,8 @@ fn execute_pipeline( dist_args: Vec<String>, ) -> anyhow::Result<()> { reset_directory(&env.opt_artifacts())?; - env.prepare_rustc_perf()?; + + with_log_group("Building rustc-perf", || env.prepare_rustc_perf())?; // Stage 1: Build PGO instrumented rustc // We use a normal build of LLVM, because gathering PGO profiles for LLVM and `rustc` at the @@ -139,12 +143,17 @@ fn main() -> anyhow::Result<()> { .init(); let mut build_args: Vec<String> = std::env::args().skip(1).collect(); - log::info!("Running optimized build pipeline with args `{}`", build_args.join(" ")); - log::info!("Environment values\n{}", format_env_variables()); + println!("Running optimized build pipeline with args `{}`", build_args.join(" ")); - if let Ok(config) = std::fs::read_to_string("config.toml") { - log::info!("Contents of `config.toml`:\n{config}"); - } + with_log_group("Environment values", || { + println!("Environment values\n{}", format_env_variables()); + }); + + with_log_group("Printing config.toml", || { + if let Ok(config) = std::fs::read_to_string("config.toml") { + println!("Contents of `config.toml`:\n{config}"); + } + }); // Skip components that are not needed for try builds to speed them up if is_try_build() { @@ -170,6 +179,8 @@ fn main() -> anyhow::Result<()> { log::info!("Timer results\n{}", timer.format_stats()); print_free_disk_space()?; + result.context("Optimized build pipeline has failed")?; + print_binary_sizes(env.as_ref())?; - result.context("Optimized build pipeline has failed") + Ok(()) } diff --git a/src/tools/opt-dist/src/training.rs b/src/tools/opt-dist/src/training.rs index 10f4a603695..951bc6f9264 100644 --- a/src/tools/opt-dist/src/training.rs +++ b/src/tools/opt-dist/src/training.rs @@ -1,6 +1,7 @@ use crate::environment::Environment; use crate::exec::{cmd, CmdBuilder}; use crate::utils::io::{count_files, delete_directory}; +use crate::utils::with_log_group; use anyhow::Context; use camino::{Utf8Path, Utf8PathBuf}; use humansize::BINARY; @@ -108,9 +109,11 @@ pub fn gather_llvm_profiles( ) -> anyhow::Result<LlvmPGOProfile> { log::info!("Running benchmarks with PGO instrumented LLVM"); - init_compiler_benchmarks(env, &["Debug", "Opt"], &["Full"], LLVM_PGO_CRATES) - .run() - .context("Cannot gather LLVM PGO profiles")?; + with_log_group("Running benchmarks", || { + init_compiler_benchmarks(env, &["Debug", "Opt"], &["Full"], 
LLVM_PGO_CRATES) + .run() + .context("Cannot gather LLVM PGO profiles") + })?; let merged_profile = env.opt_artifacts().join("llvm-pgo.profdata"); log::info!("Merging LLVM PGO profiles to {merged_profile}"); @@ -141,10 +144,12 @@ pub fn gather_rustc_profiles( // Here we're profiling the `rustc` frontend, so we also include `Check`. // The benchmark set includes various stress tests that put the frontend under pressure. - init_compiler_benchmarks(env, &["Check", "Debug", "Opt"], &["All"], RUSTC_PGO_CRATES) - .env("LLVM_PROFILE_FILE", profile_template.as_str()) - .run() - .context("Cannot gather rustc PGO profiles")?; + with_log_group("Running benchmarks", || { + init_compiler_benchmarks(env, &["Check", "Debug", "Opt"], &["All"], RUSTC_PGO_CRATES) + .env("LLVM_PROFILE_FILE", profile_template.as_str()) + .run() + .context("Cannot gather rustc PGO profiles") + })?; let merged_profile = env.opt_artifacts().join("rustc-pgo.profdata"); log::info!("Merging Rustc PGO profiles to {merged_profile}"); @@ -164,9 +169,11 @@ pub struct LlvmBoltProfile(pub Utf8PathBuf); pub fn gather_llvm_bolt_profiles(env: &dyn Environment) -> anyhow::Result<LlvmBoltProfile> { log::info!("Running benchmarks with BOLT instrumented LLVM"); - init_compiler_benchmarks(env, &["Check", "Debug", "Opt"], &["Full"], LLVM_BOLT_CRATES) - .run() - .context("Cannot gather LLVM BOLT profiles")?; + with_log_group("Running benchmarks", || { + init_compiler_benchmarks(env, &["Check", "Debug", "Opt"], &["Full"], LLVM_BOLT_CRATES) + .run() + .context("Cannot gather LLVM BOLT profiles") + })?; let merged_profile = env.opt_artifacts().join("bolt.profdata"); let profile_root = Utf8PathBuf::from("/tmp/prof.fdata"); @@ -178,10 +185,12 @@ pub fn gather_llvm_bolt_profiles(env: &dyn Environment) -> anyhow::Result<LlvmBo let mut merge_args = vec!["merge-fdata"]; merge_args.extend(profiles.iter().map(|p| p.to_str().unwrap())); - cmd(&merge_args) - .redirect_output(merged_profile.clone()) - .run() - .context("Cannot merge BOLT profiles")?; + with_log_group("Merging BOLT profiles", || { + cmd(&merge_args) + .redirect_output(merged_profile.clone()) + .run() + .context("Cannot merge BOLT profiles") + })?; log::info!("LLVM BOLT statistics"); log::info!( diff --git a/src/tools/opt-dist/src/utils/io.rs b/src/tools/opt-dist/src/utils/io.rs index 43546e5fcfa..aab078067af 100644 --- a/src/tools/opt-dist/src/utils/io.rs +++ b/src/tools/opt-dist/src/utils/io.rs @@ -1,5 +1,5 @@ use anyhow::Context; -use camino::Utf8Path; +use camino::{Utf8Path, Utf8PathBuf}; use fs_extra::dir::CopyOptions; use std::fs::File; @@ -46,3 +46,17 @@ pub fn unpack_archive(path: &Utf8Path, dest_dir: &Utf8Path) -> anyhow::Result<() archive.unpack(dest_dir.as_std_path())?; Ok(()) } + +/// Returns paths in the given `dir` (non-recursively), optionally with the given `suffix`. +/// The `suffix` should contain the leading dot. +pub fn get_files_from_dir( + dir: &Utf8Path, + suffix: Option<&str>, +) -> anyhow::Result<Vec<Utf8PathBuf>> { + let path = format!("{dir}/*{}", suffix.unwrap_or("")); + + Ok(glob::glob(&path)? + .into_iter() + .map(|p| p.map(|p| Utf8PathBuf::from_path_buf(p).unwrap())) + .collect::<Result<Vec<_>, _>>()?) 
+} diff --git a/src/tools/opt-dist/src/utils/mod.rs b/src/tools/opt-dist/src/utils/mod.rs index 9305d4989aa..9a3df15e302 100644 --- a/src/tools/opt-dist/src/utils/mod.rs +++ b/src/tools/opt-dist/src/utils/mod.rs @@ -1,8 +1,8 @@ pub mod io; use crate::environment::Environment; -use crate::utils::io::delete_directory; -use humansize::BINARY; +use crate::utils::io::{delete_directory, get_files_from_dir}; +use humansize::{format_size, BINARY}; use sysinfo::{DiskExt, RefreshKind, System, SystemExt}; pub fn format_env_variables() -> String { @@ -25,6 +25,28 @@ pub fn print_free_disk_space() -> anyhow::Result<()> { Ok(()) } +pub fn print_binary_sizes(env: &dyn Environment) -> anyhow::Result<()> { + use std::fmt::Write; + + let root = env.build_artifacts().join("stage2"); + + let mut files = get_files_from_dir(&root.join("bin"), None)?; + files.extend(get_files_from_dir(&root.join("lib"), Some(".so"))?); + files.sort_unstable(); + + let mut output = String::new(); + for file in files { + let size = std::fs::metadata(file.as_std_path())?.len(); + let size_formatted = format_size(size, BINARY); + let name = format!("{}:", file.file_name().unwrap()); + writeln!(output, "{name:<50}{size_formatted:>10}")?; + } + + log::info!("Rustc artifact size\n{output}"); + + Ok(()) +} + pub fn clear_llvm_files(env: &dyn Environment) -> anyhow::Result<()> { // Bootstrap currently doesn't support rebuilding LLVM when PGO options // change (or any other llvm-related options); so just clear out the relevant @@ -34,3 +56,20 @@ pub fn clear_llvm_files(env: &dyn Environment) -> anyhow::Result<()> { delete_directory(&env.build_artifacts().join("lld"))?; Ok(()) } + +/// Wraps all output produced within the `func` closure in a CI output group, if we're running in +/// CI. +pub fn with_log_group<F: FnOnce() -> R, R>(group: &str, func: F) -> R { + if is_in_ci() { + println!("::group::{group}"); + let result = func(); + println!("::endgroup::"); + result + } else { + func() + } +} + +fn is_in_ci() -> bool { + std::env::var("GITHUB_ACTIONS").is_ok() +} diff --git a/src/tools/rust-analyzer/.editorconfig b/src/tools/rust-analyzer/.editorconfig index 314f79d3f90..f00ade5fd82 100644 --- a/src/tools/rust-analyzer/.editorconfig +++ b/src/tools/rust-analyzer/.editorconfig @@ -7,13 +7,10 @@ trim_trailing_whitespace = true end_of_line = lf insert_final_newline = true indent_style = space - -[*.{rs,toml}] indent_size = 4 -[*.ts] -indent_size = 4 -[*.js] -indent_size = 4 -[*.json] -indent_size = 4 +[*.md] +indent_size = 2 + +[*.{yml, yaml}] +indent_size = 2 diff --git a/src/tools/rust-analyzer/.github/workflows/autopublish.yaml b/src/tools/rust-analyzer/.github/workflows/autopublish.yaml index 15cedab1272..310a8a5be7a 100644 --- a/src/tools/rust-analyzer/.github/workflows/autopublish.yaml +++ b/src/tools/rust-analyzer/.github/workflows/autopublish.yaml @@ -49,8 +49,8 @@ jobs: cargo workspaces rename --from project-model project_model cargo workspaces rename --from test-utils test_utils cargo workspaces rename --from text-edit text_edit - cargo workspaces rename ra_ap_%n # Remove library crates from the workspaces so we don't auto-publish them as well sed -i 's/ "lib\/\*",//' ./Cargo.toml + cargo workspaces rename ra_ap_%n find crates/rust-analyzer -type f -name '*.rs' -exec sed -i 's/rust_analyzer/ra_ap_rust_analyzer/g' {} + cargo workspaces publish --yes --force '*' --exact --no-git-commit --allow-dirty --skip-published custom 0.0.$(($RUN_NUMBER + 133)) diff --git a/src/tools/rust-analyzer/.github/workflows/metrics.yaml 
b/src/tools/rust-analyzer/.github/workflows/metrics.yaml index 3fe2fc917a3..260e45ff517 100644 --- a/src/tools/rust-analyzer/.github/workflows/metrics.yaml +++ b/src/tools/rust-analyzer/.github/workflows/metrics.yaml @@ -1,8 +1,8 @@ name: metrics on: push: - branches: - - master + branches: + - master env: CARGO_INCREMENTAL: 0 @@ -11,20 +11,135 @@ env: RUSTUP_MAX_RETRIES: 10 jobs: - metrics: + setup_cargo: if: github.repository == 'rust-lang/rust-analyzer' runs-on: ubuntu-latest + steps: + - name: Install Rust toolchain + run: | + rustup update --no-self-update stable + rustup component add rustfmt rust-src + rustup default stable + - name: Cache cargo + uses: actions/cache@v3 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + key: ${{ runner.os }}-cargo-${{ github.sha }} + + build_metrics: + runs-on: ubuntu-latest + needs: setup_cargo + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Restore cargo cache + uses: actions/cache@v3 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + key: ${{ runner.os }}-cargo-${{ github.sha }} + + - name: Collect build metrics + run: cargo xtask metrics build + + - name: Cache target + uses: actions/cache@v3 + with: + path: target/ + key: ${{ runner.os }}-target-${{ github.sha }} + + - name: Upload build metrics + uses: actions/upload-artifact@v3 + with: + name: build-${{ github.sha }} + path: target/build.json + if-no-files-found: error + + other_metrics: + strategy: + matrix: + names: [self, ripgrep, webrender, diesel] + runs-on: ubuntu-latest + needs: [setup_cargo, build_metrics] + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Restore cargo cache + uses: actions/cache@v3 + with: + path: | + ~/.cargo/bin/ + ~/.cargo/registry/index/ + ~/.cargo/registry/cache/ + ~/.cargo/git/db/ + key: ${{ runner.os }}-cargo-${{ github.sha }} + + - name: Restore target cache + uses: actions/cache@v3 + with: + path: target/ + key: ${{ runner.os }}-target-${{ github.sha }} + - name: Collect metrics + run: cargo xtask metrics ${{ matrix.names }} + + - name: Upload metrics + uses: actions/upload-artifact@v3 + with: + name: ${{ matrix.names }}-${{ github.sha }} + path: target/${{ matrix.names }}.json + if-no-files-found: error + + generate_final_metrics: + runs-on: ubuntu-latest + needs: [build_metrics, other_metrics] steps: - - name: Checkout repository - uses: actions/checkout@v3 - - - name: Install Rust toolchain - run: | - rustup update --no-self-update stable - rustup component add rustfmt rust-src - - - name: Collect metrics - run: cargo xtask metrics - env: - METRICS_TOKEN: ${{ secrets.METRICS_TOKEN }} + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Download build metrics + uses: actions/download-artifact@v3 + with: + name: build-${{ github.sha }} + + - name: Download self metrics + uses: actions/download-artifact@v3 + with: + name: self-${{ github.sha }} + + - name: Download ripgrep metrics + uses: actions/download-artifact@v3 + with: + name: ripgrep-${{ github.sha }} + + - name: Download webrender metrics + uses: actions/download-artifact@v3 + with: + name: webrender-${{ github.sha }} + + - name: Download diesel metrics + uses: actions/download-artifact@v3 + with: + name: diesel-${{ github.sha }} + + - name: Combine json + run: | + git clone --depth 1 https://$METRICS_TOKEN@github.com/rust-analyzer/metrics.git + jq -s ".[0] * .[1] * .[2] * .[3] * .[4]" build.json self.json 
ripgrep.json webrender.json diesel.json -c >> metrics/metrics.json + cd metrics + git add . + git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈 + git push origin master + env: + METRICS_TOKEN: ${{ secrets.METRICS_TOKEN }} diff --git a/src/tools/rust-analyzer/Cargo.lock b/src/tools/rust-analyzer/Cargo.lock index 13cb25f7b03..f8806794979 100644 --- a/src/tools/rust-analyzer/Cargo.lock +++ b/src/tools/rust-analyzer/Cargo.lock @@ -28,9 +28,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.70" +version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4" +checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" [[package]] name = "anymap" @@ -46,9 +46,9 @@ checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e" [[package]] name = "arrayvec" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" [[package]] name = "atty" @@ -77,8 +77,8 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide", - "object", + "miniz_oxide 0.6.2", + "object 0.30.4", "rustc-demangle", ] @@ -87,7 +87,7 @@ name = "base-db" version = "0.0.0" dependencies = [ "cfg", - "la-arena", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "profile", "rustc-hash", "salsa", @@ -107,9 +107,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.1.0" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c70beb79cbb5ce9c4f8e20849978f34225931f665bb49efa6982875a4d5facb3" +checksum = "6dbe3c979c178231552ecba20214a8272df4e09f232a87aef4320cf06539aded" [[package]] name = "byteorder" @@ -177,21 +177,21 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.91.0" +version = "0.92.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c59178fded594fe78c47b841520e5a4399d00fe15fffee19b945958a878cd02d" +checksum = "ff5053a8a42dbff5279a82423946fc56dc1253b76cf211b2b3c14b3aad4e1281" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.18", "synstructure", ] [[package]] name = "chalk-ir" -version = "0.91.0" +version = "0.92.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8824be92876823b828d551bb792f79eb1f69c69d1948abf69fccbf84e448e57b" +checksum = "8a56de2146a8ed0fcd54f4bd50db852f1de4eac9e1efe568494f106c21b77d2a" dependencies = [ "bitflags 1.3.2", "chalk-derive", @@ -200,9 +200,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.91.0" +version = "0.92.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e110d1260809c238072d1c8ef84060e39983e8ea9d4c6f74b19b0ebbf8904dc" +checksum = "5cc09e6e9531f3544989ef89b189e80fbc7ad9e2f73f1c5e03ddc9ffb0527463" dependencies = [ "chalk-derive", "chalk-ir", @@ -213,14 +213,14 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.91.0" +version = "0.92.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12200b19abf4b0633095f7bd099f3ef609d314754b6adb358c68cc04d10589e5" +checksum = "b392e02b4c81ec76d3748da839fc70a5539b83d27c9030668463d34d5110b860" dependencies = [ 
"chalk-derive", "chalk-ir", "ena", - "indexmap", + "indexmap 1.9.3", "itertools", "petgraph", "rustc-hash", @@ -286,22 +286,22 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.14" +version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset", + "memoffset 0.9.0", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ "cfg-if", ] @@ -313,7 +313,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" dependencies = [ "cfg-if", - "hashbrown", + "hashbrown 0.12.3", "lock_api", "once_cell", "parking_lot_core 0.9.6", @@ -321,13 +321,13 @@ dependencies = [ [[package]] name = "derive_arbitrary" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cdeb9ec472d588e539a818b2dee436825730da08ad0017c4b1a17676bdc8b7" +checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.18", ] [[package]] @@ -364,6 +364,12 @@ dependencies = [ ] [[package]] +name = "equivalent" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88bffebc5d80432c9b140ee17875ff173a8ab62faad5b257da912bd2f6c1c0a1" + +[[package]] name = "expect-test" version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -382,7 +388,7 @@ dependencies = [ "cfg-if", "libc", "redox_syscall", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -393,12 +399,12 @@ checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" [[package]] name = "flate2" -version = "1.0.25" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8a2db397cb1c8772f31494cb8917e48cd1e64f0fa7efac59fbd741a0a8ce841" +checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" dependencies = [ "crc32fast", - "miniz_oxide", + "miniz_oxide 0.7.1", ] [[package]] @@ -419,9 +425,9 @@ dependencies = [ [[package]] name = "form_urlencoded" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ "percent-encoding", ] @@ -443,9 +449,9 @@ checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a" [[package]] name = "gimli" -version = "0.27.2" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "hashbrown" @@ -454,6 +460,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] +name = "hashbrown" +version = 
"0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" + +[[package]] name = "heck" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -509,7 +521,7 @@ dependencies = [ "anymap", "arrayvec", "base-db", - "bitflags 2.1.0", + "bitflags 2.3.2", "cfg", "cov-mark", "dashmap", @@ -517,14 +529,14 @@ dependencies = [ "either", "expect-test", "fst", - "hashbrown", + "hashbrown 0.12.3", "hir-expand", "hkalbasi-rustc-ap-rustc_abi", "hkalbasi-rustc-ap-rustc_index", - "indexmap", + "indexmap 2.0.0", "intern", "itertools", - "la-arena", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "limit", "mbe", "once_cell", @@ -548,10 +560,10 @@ dependencies = [ "cov-mark", "either", "expect-test", - "hashbrown", + "hashbrown 0.12.3", "intern", "itertools", - "la-arena", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "limit", "mbe", "profile", @@ -570,7 +582,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", - "bitflags 2.1.0", + "bitflags 2.3.2", "chalk-derive", "chalk-ir", "chalk-recursive", @@ -584,10 +596,11 @@ dependencies = [ "hkalbasi-rustc-ap-rustc_index", "intern", "itertools", - "la-arena", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "limit", "nohash-hasher", "once_cell", + "oorandom", "profile", "project-model", "rustc-hash", @@ -626,11 +639,11 @@ dependencies = [ [[package]] name = "home" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" dependencies = [ - "winapi", + "windows-sys 0.48.0", ] [[package]] @@ -715,10 +728,10 @@ dependencies = [ "expect-test", "fst", "hir", - "indexmap", + "indexmap 2.0.0", "itertools", "limit", - "line-index", + "line-index 0.1.0-pre.1", "memchr", "nohash-hasher", "once_cell", @@ -748,6 +761,7 @@ dependencies = [ "hir", "ide-db", "itertools", + "once_cell", "profile", "serde_json", "sourcegen", @@ -777,9 +791,9 @@ dependencies = [ [[package]] name = "idna" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -792,7 +806,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", ] [[package]] @@ -829,7 +853,7 @@ name = "intern" version = "0.0.0" dependencies = [ "dashmap", - "hashbrown", + "hashbrown 0.12.3", "once_cell", "rustc-hash", "triomphe", @@ -878,7 +902,13 @@ dependencies = [ [[package]] name = "la-arena" -version = "0.3.0" +version = "0.3.1" + +[[package]] +name = "la-arena" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3752f229dcc5a481d60f385fa479ff46818033d881d2d801aa27dffcfb5e8306" [[package]] name = 
"lazy_static" @@ -888,25 +918,25 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.141" +version = "0.2.146" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3304a64d199bb964be99741b7a14d26972741915b3649639149b2479bb46f4b5" +checksum = "f92be4933c13fd498862a9e02a3055f8a8d9c039ce33db97306fd5a6caa7f29b" [[package]] name = "libloading" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +checksum = "d580318f95776505201b28cf98eb1fa5e4be3b689633ba6a3e6cd880ff22d8cb" dependencies = [ "cfg-if", - "winapi", + "windows-sys 0.48.0", ] [[package]] name = "libmimalloc-sys" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a558e3d911bc3c7bfc8c78bc580b404d6e51c1cefbf656e176a94b49b0df40" +checksum = "f4ac0e912c8ef1b735e92369695618dc5b1819f5a7bf3f167301a3ba1cea515e" dependencies = [ "cc", "libc", @@ -919,16 +949,43 @@ version = "0.0.0" [[package]] name = "line-index" version = "0.1.0-pre.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cad96769710c1745e11d4f940a8ff36000ade4bbada4285b001cb8aa2f745ce" dependencies = [ "nohash-hasher", "text-size", ] [[package]] +name = "line-index" +version = "0.1.0" +dependencies = [ + "nohash-hasher", + "text-size", +] + +[[package]] +name = "load-cargo" +version = "0.0.0" +dependencies = [ + "anyhow", + "crossbeam-channel", + "ide", + "ide-db", + "itertools", + "proc-macro-api", + "project-model", + "tracing", + "tt", + "vfs", + "vfs-notify", +] + +[[package]] name = "lock_api" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ "autocfg", "scopeguard", @@ -936,16 +993,25 @@ dependencies = [ [[package]] name = "log" -version = "0.4.17" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" + +[[package]] +name = "lsp-server" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" +checksum = "3711e4d6f491dc9edc0f1df80e204f38206775ac92c1241e89b79229a850bc00" dependencies = [ - "cfg-if", + "crossbeam-channel", + "log", + "serde", + "serde_json", ] [[package]] name = "lsp-server" -version = "0.7.0" +version = "0.7.2" dependencies = [ "crossbeam-channel", "log", @@ -968,15 +1034,6 @@ dependencies = [ ] [[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata", -] - -[[package]] name = "mbe" version = "0.0.0" dependencies = [ @@ -1016,10 +1073,19 @@ dependencies = [ ] [[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] name = "mimalloc" -version = "0.1.36" +version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3d88dad3f985ec267a3fcb7a1726f5cb1a7e8cad8b646e70a84f967210df23da" +checksum = "4e2894987a3459f3ffb755608bd82188f8ed00d0ae077f1edea29c068d639d98" dependencies = [ "libmimalloc-sys", ] @@ -1034,6 +1100,15 @@ dependencies = [ ] [[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] name = "mio" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -1042,7 +1117,7 @@ dependencies = [ "libc", "log", "wasi", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1051,7 +1126,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123" dependencies = [ - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1087,7 +1162,7 @@ dependencies = [ "libc", "mio", "walkdir", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1112,18 +1187,27 @@ dependencies = [ [[package]] name = "object" -version = "0.30.3" +version = "0.30.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea86265d3d3dcb6a27fc51bd29a4bf387fae9d2986b823079d4986af253eb439" +checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" +dependencies = [ + "memchr", +] + +[[package]] +name = "object" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "oorandom" @@ -1182,7 +1266,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -1209,9 +1293,9 @@ version = "0.0.0" [[package]] name = "percent-encoding" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "perf-event" @@ -1239,7 +1323,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7" dependencies = [ "fixedbitset", - "indexmap", + "indexmap 1.9.3", ] [[package]] @@ -1253,7 +1337,7 @@ name = "proc-macro-api" version = "0.0.0" dependencies = [ "memmap2", - "object", + "object 0.31.1", "paths", "profile", "serde", @@ -1273,7 +1357,7 @@ dependencies = [ "libloading", "mbe", "memmap2", - "object", + "object 0.31.1", "paths", "proc-macro-api", "proc-macro-test", @@ -1317,7 +1401,7 @@ version = "0.0.0" dependencies = [ "cfg-if", "countme", - "la-arena", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc", "once_cell", "perf-event", @@ -1335,7 +1419,7 @@ dependencies = [ "cfg", "expect-test", "itertools", - "la-arena", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "paths", "profile", "rustc-hash", @@ -1370,9 +1454,9 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.9.2" +version = "0.9.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63" +checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998" dependencies = [ "bitflags 1.3.2", "memchr", @@ -1390,9 +1474,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.26" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" +checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" dependencies = [ "proc-macro2", ] @@ -1439,38 +1523,14 @@ dependencies = [ ] [[package]] -name = "regex" -version = "1.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" -dependencies = [ - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - -[[package]] name = "rowan" version = "0.15.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf" dependencies = [ "countme", - "hashbrown", - "memoffset", + "hashbrown 0.12.3", + "memoffset 0.8.0", "rustc-hash", "text-size", ] @@ -1494,7 +1554,8 @@ dependencies = [ "ide-db", "ide-ssr", "itertools", - "lsp-server", + "load-cargo", + "lsp-server 0.7.1", "lsp-types", "mbe", "mimalloc", @@ -1512,12 +1573,10 @@ dependencies = [ "scip", "serde", "serde_json", - "serde_repr", "sourcegen", "stdx", "syntax", "test-utils", - "thiserror", "tikv-jemallocator", "toolchain", "tracing", @@ -1525,7 +1584,6 @@ dependencies = [ "tracing-subscriber", "tracing-tree", "triomphe", - "tt", "vfs", "vfs-notify", "winapi", @@ -1535,9 +1593,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4a36c42d1873f9a77c53bde094f9664d9891bc604a45b4798fd2c389ed12e5b" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" [[package]] name = "rustc-hash" @@ -1558,7 +1616,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b223dccb46c32753144d0b51290da7230bb4aedcd8379d6b4c9a474c18bf17a" dependencies = [ "crossbeam-utils", - "indexmap", + "indexmap 1.9.3", "lock_api", "log", "oorandom", @@ -1641,11 +1699,11 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "bdf3bf93142acad5821c99197022e170842cdbc1c30482b98750c688c640842a" dependencies = [ - "indexmap", + "indexmap 1.9.3", "itoa", "ryu", "serde", @@ -1653,13 +1711,13 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395627de918015623b32e7669714206363a7fc00382bf477e72c1f7533e8eafc" +checksum = "bcec881020c684085e55a25f7fd888954d56609ef363479dc5a1305eb0d40cab" dependencies = [ 
"proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.18", ] [[package]] @@ -1731,9 +1789,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.15" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" dependencies = [ "proc-macro2", "quote", @@ -1748,7 +1806,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06" dependencies = [ "proc-macro2", "quote", - "syn 2.0.15", + "syn 2.0.18", "unicode-xid", ] @@ -1759,7 +1817,7 @@ dependencies = [ "cov-mark", "either", "expect-test", - "indexmap", + "indexmap 2.0.0", "itertools", "once_cell", "parser", @@ -1806,22 +1864,22 @@ checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" [[package]] name = "thiserror" -version = "1.0.39" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5ab016db510546d856297882807df8da66a16fb8c4101cb8b30054b0d5b2d9c" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.39" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5420d42e90af0c38c3290abcca25b9b3bdf379fc9f55c528f53a269d9c9a267e" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.18", ] [[package]] @@ -1867,9 +1925,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.20" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890" +checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd" dependencies = [ "serde", "time-core", @@ -1877,9 +1935,9 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "tinyvec" @@ -1917,20 +1975,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.23" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.18", ] [[package]] name = "tracing-core" -version = "0.1.30" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" dependencies = [ "once_cell", "valuable", @@ -1949,25 +2007,21 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" dependencies = [ - "matchers", - "once_cell", - "regex", "sharded-slab", "thread_local", - "tracing", "tracing-core", 
"tracing-log", ] [[package]] name = "tracing-tree" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "758e983ab7c54fee18403994507e7f212b9005e957ce7984996fac8d11facedb" +checksum = "4f9742d8df709837409dbb22aa25dd7769c260406f20ff48a2320b80a4a6aed0" dependencies = [ "atty", "nu-ansi-term", @@ -2060,9 +2114,9 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" +checksum = "b15811caf2415fb889178633e7724bad2509101cde276048e013b9def5e51fa0" [[package]] name = "unicode-normalization" @@ -2087,9 +2141,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "url" -version = "2.3.1" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", "idna", @@ -2114,7 +2168,7 @@ name = "vfs" version = "0.0.0" dependencies = [ "fst", - "indexmap", + "indexmap 2.0.0", "nohash-hasher", "paths", "rustc-hash", @@ -2187,13 +2241,37 @@ version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", ] [[package]] @@ -2203,42 +2281,84 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] name = "windows_aarch64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] name = "windows_i686_gnu" version = "0.42.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] name = "windows_i686_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] name = "windows_x86_64_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] name = "windows_x86_64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" + +[[package]] name = "write-json" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" @@ -2289,9 +2409,9 @@ dependencies = [ [[package]] name = "zip" -version = "0.6.4" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef" +checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" dependencies = [ "byteorder", "crc32fast", diff --git a/src/tools/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/Cargo.toml index 3050cf764a4..f6a50bfa6b2 100644 --- a/src/tools/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/Cargo.toml @@ -4,7 +4,7 @@ exclude = ["crates/proc-macro-test/imp"] resolver = "2" [workspace.package] -rust-version = "1.66" +rust-version = "1.70" edition = "2021" license = "MIT OR Apache-2.0" authors = ["rust-analyzer team"] @@ -35,6 +35,10 @@ debug = 0 # chalk-ir = { path = "../chalk/chalk-ir" } # chalk-recursive = { path = "../chalk/chalk-recursive" } # chalk-derive = { path = "../chalk/chalk-derive" } +# line-index = { path = "lib/line-index" } +# la-arena = { path = "lib/la-arena" } +# lsp-server = { path = "lib/lsp-server" } + # ungrammar = { path = "../ungrammar" } @@ -57,13 +61,13 @@ ide-diagnostics = { path = "./crates/ide-diagnostics", version = "0.0.0" } ide-ssr = { path = "./crates/ide-ssr", version = "0.0.0" } intern = { path = "./crates/intern", version = "0.0.0" } limit = { path = "./crates/limit", version = "0.0.0" } +load-cargo = { path = 
"./crates/load-cargo", version = "0.0.0" } mbe = { path = "./crates/mbe", version = "0.0.0" } parser = { path = "./crates/parser", version = "0.0.0" } paths = { path = "./crates/paths", version = "0.0.0" } proc-macro-api = { path = "./crates/proc-macro-api", version = "0.0.0" } proc-macro-srv = { path = "./crates/proc-macro-srv", version = "0.0.0" } proc-macro-srv-cli = { path = "./crates/proc-macro-srv-cli", version = "0.0.0" } -proc-macro-test = { path = "./crates/proc-macro-test", version = "0.0.0" } profile = { path = "./crates/profile", version = "0.0.0" } project-model = { path = "./crates/project-model", version = "0.0.0" } sourcegen = { path = "./crates/sourcegen", version = "0.0.0" } @@ -75,7 +79,14 @@ toolchain = { path = "./crates/toolchain", version = "0.0.0" } tt = { path = "./crates/tt", version = "0.0.0" } vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" } vfs = { path = "./crates/vfs", version = "0.0.0" } -line-index = { version = "0.1.0-pre.1", path = "./lib/line-index" } + +# local crates that aren't published to crates.io. These should not have versions. +proc-macro-test = { path = "./crates/proc-macro-test" } + +# In-tree crates that are published separately and follow semver. See lib/README.md +line-index = { version = "0.1.0-pre.1" } +la-arena = { version = "0.3.1" } +lsp-server = { version = "0.7.1" } # non-local crates smallvec = { version = "1.10.0", features = [ @@ -86,9 +97,10 @@ smallvec = { version = "1.10.0", features = [ smol_str = "0.2.0" nohash-hasher = "0.2.0" text-size = "1.1.0" -# the following crates are pinned to prevent us from pulling in syn 2 until all our dependencies have moved -serde = { version = "=1.0.156", features = ["derive"] } -serde_json = "1.0.94" +serde = { version = "1.0.156", features = ["derive"] } +serde_json = "1.0.96" triomphe = { version = "0.1.8", default-features = false, features = ["std"] } +# can't upgrade due to dashmap depending on 0.12.3 currently +hashbrown = { version = "0.12.3", features = ["inline-more"], default-features = false } rustc_lexer = { version = "0.1.0", package = "ra-ap-rustc_lexer" } diff --git a/src/tools/rust-analyzer/crates/base-db/Cargo.toml b/src/tools/rust-analyzer/crates/base-db/Cargo.toml index 6001772c86e..171c113a950 100644 --- a/src/tools/rust-analyzer/crates/base-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/base-db/Cargo.toml @@ -17,7 +17,7 @@ rustc-hash = "1.1.0" triomphe.workspace = true -la-arena = { version = "0.3.0", path = "../../lib/la-arena" } +la-arena.workspace = true # local deps cfg.workspace = true diff --git a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs index d3abc3870b7..323ee4260e4 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/fixture.rs @@ -26,7 +26,7 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static { let fixture = ChangeFixture::parse(ra_fixture); let mut db = Self::default(); fixture.change.apply(&mut db); - assert_eq!(fixture.files.len(), 1); + assert_eq!(fixture.files.len(), 1, "Multiple file found in the fixture"); (db, fixture.files[0]) } @@ -102,6 +102,8 @@ pub struct ChangeFixture { pub change: Change, } +const SOURCE_ROOT_PREFIX: &str = "/"; + impl ChangeFixture { pub fn parse(ra_fixture: &str) -> ChangeFixture { Self::parse_with_proc_macros(ra_fixture, Vec::new()) @@ -131,7 +133,6 @@ impl ChangeFixture { let mut file_set = FileSet::default(); let mut current_source_root_kind = SourceRootKind::Local; 
- let source_root_prefix = "/".to_string(); let mut file_id = FileId(0); let mut roots = Vec::new(); @@ -151,19 +152,23 @@ impl ChangeFixture { entry.text.clone() }; - let meta = FileMeta::from(entry); - assert!(meta.path.starts_with(&source_root_prefix)); + let meta = FileMeta::from_fixture(entry, current_source_root_kind); + assert!(meta.path.starts_with(SOURCE_ROOT_PREFIX)); if !meta.deps.is_empty() { assert!(meta.krate.is_some(), "can't specify deps without naming the crate") } - if let Some(kind) = &meta.introduce_new_source_root { - let root = match current_source_root_kind { + if let Some(kind) = meta.introduce_new_source_root { + assert!( + meta.krate.is_some(), + "new_source_root meta doesn't make sense without crate meta" + ); + let prev_kind = mem::replace(&mut current_source_root_kind, kind); + let prev_root = match prev_kind { SourceRootKind::Local => SourceRoot::new_local(mem::take(&mut file_set)), SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)), }; - roots.push(root); - current_source_root_kind = *kind; + roots.push(prev_root); } if let Some((krate, origin, version)) = meta.krate { @@ -185,7 +190,7 @@ impl ChangeFixture { Some(toolchain), ); let prev = crates.insert(crate_name.clone(), crate_id); - assert!(prev.is_none()); + assert!(prev.is_none(), "multiple crates with same name: {}", crate_name); for dep in meta.deps { let prelude = meta.extern_prelude.contains(&dep); let dep = CrateName::normalize_dashes(&dep); @@ -219,7 +224,7 @@ impl ChangeFixture { false, CrateOrigin::Local { repo: None, name: None }, default_target_data_layout - .map(|x| x.into()) + .map(|it| it.into()) .ok_or_else(|| "target_data_layout unset".into()), Some(toolchain), ); @@ -442,51 +447,74 @@ struct FileMeta { target_data_layout: Option<String>, } -fn parse_crate(crate_str: String) -> (String, CrateOrigin, Option<String>) { - if let Some((a, b)) = crate_str.split_once('@') { - let (version, origin) = match b.split_once(':') { - Some(("CratesIo", data)) => match data.split_once(',') { - Some((version, url)) => { - (version, CrateOrigin::Local { repo: Some(url.to_owned()), name: None }) - } - _ => panic!("Bad crates.io parameter: {data}"), - }, - _ => panic!("Bad string for crate origin: {b}"), - }; - (a.to_owned(), origin, Some(version.to_string())) - } else { - let crate_origin = match LangCrateOrigin::from(&*crate_str) { - LangCrateOrigin::Other => CrateOrigin::Local { repo: None, name: None }, - origin => CrateOrigin::Lang(origin), - }; - (crate_str, crate_origin, None) - } -} - -impl From<Fixture> for FileMeta { - fn from(f: Fixture) -> FileMeta { +impl FileMeta { + fn from_fixture(f: Fixture, current_source_root_kind: SourceRootKind) -> Self { let mut cfg = CfgOptions::default(); - f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into())); - f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into())); + for (k, v) in f.cfgs { + if let Some(v) = v { + cfg.insert_key_value(k.into(), v.into()); + } else { + cfg.insert_atom(k.into()); + } + } + + let introduce_new_source_root = f.introduce_new_source_root.map(|kind| match &*kind { + "local" => SourceRootKind::Local, + "library" => SourceRootKind::Library, + invalid => panic!("invalid source root kind '{invalid}'"), + }); + let current_source_root_kind = + introduce_new_source_root.unwrap_or(current_source_root_kind); + let deps = f.deps; - FileMeta { + Self { path: f.path, - krate: f.krate.map(parse_crate), + krate: f.krate.map(|it| parse_crate(it, current_source_root_kind, f.library)), 
extern_prelude: f.extern_prelude.unwrap_or_else(|| deps.clone()), deps, cfg, - edition: f.edition.as_ref().map_or(Edition::CURRENT, |v| Edition::from_str(v).unwrap()), + edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()), env: f.env.into_iter().collect(), - introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind { - "local" => SourceRootKind::Local, - "library" => SourceRootKind::Library, - invalid => panic!("invalid source root kind '{invalid}'"), - }), + introduce_new_source_root, target_data_layout: f.target_data_layout, } } } +fn parse_crate( + crate_str: String, + current_source_root_kind: SourceRootKind, + explicit_non_workspace_member: bool, +) -> (String, CrateOrigin, Option<String>) { + // syntax: + // "my_awesome_crate" + // "my_awesome_crate@0.0.1,http://example.com" + let (name, repo, version) = if let Some((name, remain)) = crate_str.split_once('@') { + let (version, repo) = + remain.split_once(',').expect("crate meta: found '@' without version and url"); + (name.to_owned(), Some(repo.to_owned()), Some(version.to_owned())) + } else { + (crate_str, None, None) + }; + + let non_workspace_member = explicit_non_workspace_member + || matches!(current_source_root_kind, SourceRootKind::Library); + + let origin = match LangCrateOrigin::from(&*name) { + LangCrateOrigin::Other => { + let name = name.clone(); + if non_workspace_member { + CrateOrigin::Library { repo, name } + } else { + CrateOrigin::Local { repo, name: Some(name) } + } + } + origin => CrateOrigin::Lang(origin), + }; + + (name, origin, version) +} + // Identity mapping #[derive(Debug)] struct IdentityProcMacroExpander; diff --git a/src/tools/rust-analyzer/crates/base-db/src/input.rs b/src/tools/rust-analyzer/crates/base-db/src/input.rs index f2e523675bc..c47799f1320 100644 --- a/src/tools/rust-analyzer/crates/base-db/src/input.rs +++ b/src/tools/rust-analyzer/crates/base-db/src/input.rs @@ -138,12 +138,12 @@ impl ops::Deref for CrateName { } } -/// Origin of the crates. It is used in emitting monikers. +/// Origin of the crates. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum CrateOrigin { - /// Crates that are from the rustc workspace + /// Crates that are from the rustc workspace. Rustc { name: String }, - /// Crates that are workspace members, + /// Crates that are workspace members. Local { repo: Option<String>, name: Option<String> }, /// Crates that are non member libraries. Library { repo: Option<String>, name: String }, diff --git a/src/tools/rust-analyzer/crates/cfg/Cargo.toml b/src/tools/rust-analyzer/crates/cfg/Cargo.toml index 0880bc239d8..ed380897278 100644 --- a/src/tools/rust-analyzer/crates/cfg/Cargo.toml +++ b/src/tools/rust-analyzer/crates/cfg/Cargo.toml @@ -18,13 +18,13 @@ rustc-hash = "1.1.0" tt.workspace = true [dev-dependencies] -expect-test = "1.4.0" +expect-test = "1.4.1" oorandom = "11.1.3" # We depend on both individually instead of using `features = ["derive"]` to microoptimize the # build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr` # supports `arbitrary`. This way, we avoid feature unification. 
-arbitrary = "1.2.2" -derive_arbitrary = "1.2.2" +arbitrary = "1.3.0" +derive_arbitrary = "1.3.1" # local deps mbe.workspace = true diff --git a/src/tools/rust-analyzer/crates/cfg/src/lib.rs b/src/tools/rust-analyzer/crates/cfg/src/lib.rs index 495119d5519..183b9b7d278 100644 --- a/src/tools/rust-analyzer/crates/cfg/src/lib.rs +++ b/src/tools/rust-analyzer/crates/cfg/src/lib.rs @@ -69,7 +69,7 @@ impl CfgOptions { } pub fn get_cfg_keys(&self) -> impl Iterator<Item = &SmolStr> { - self.enabled.iter().map(|x| match x { + self.enabled.iter().map(|it| match it { CfgAtom::Flag(key) => key, CfgAtom::KeyValue { key, .. } => key, }) @@ -79,7 +79,7 @@ impl CfgOptions { &'a self, cfg_key: &'a str, ) -> impl Iterator<Item = &'a SmolStr> + 'a { - self.enabled.iter().filter_map(move |x| match x { + self.enabled.iter().filter_map(move |it| match it { CfgAtom::KeyValue { key, value } if cfg_key == key => Some(value), _ => None, }) diff --git a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml index 3f6671b1c43..e7f7adc7847 100644 --- a/src/tools/rust-analyzer/crates/flycheck/Cargo.toml +++ b/src/tools/rust-analyzer/crates/flycheck/Cargo.toml @@ -12,9 +12,9 @@ rust-version.workspace = true doctest = false [dependencies] -crossbeam-channel = "0.5.5" +crossbeam-channel = "0.5.8" tracing = "0.1.37" -cargo_metadata = "0.15.0" +cargo_metadata = "0.15.4" rustc-hash = "1.1.0" serde_json.workspace = true serde.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml index 83c7051646e..30307deb79b 100644 --- a/src/tools/rust-analyzer/crates/hir-def/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-def/Cargo.toml @@ -21,14 +21,14 @@ dashmap = { version = "=5.4.0", features = ["raw-api"] } drop_bomb = "0.1.5" either = "1.7.0" fst = { version = "0.4.7", default-features = false } -hashbrown = { version = "0.12.1", default-features = false } -indexmap = "1.9.1" +indexmap = "2.0.0" itertools = "0.10.5" -la-arena = { version = "0.3.0", path = "../../lib/la-arena" } +la-arena.workspace = true once_cell = "1.17.0" rustc-hash = "1.1.0" -smallvec.workspace = true tracing = "0.1.35" +smallvec.workspace = true +hashbrown.workspace = true triomphe.workspace = true rustc_abi = { version = "0.0.20221221", package = "hkalbasi-rustc-ap-rustc_abi", default-features = false } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs index bab3bbc2329..c29446d8235 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/attr.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/attr.rs @@ -137,13 +137,16 @@ impl Attrs { let cfg_options = &crate_graph[krate].cfg_options; - let Some(variant) = enum_.variants.clone().filter(|variant| { - let attrs = item_tree.attrs(db, krate, (*variant).into()); - attrs.is_cfg_enabled(cfg_options) - }) - .zip(0u32..) - .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx))) - .map(|(variant, _idx)| variant) + let Some(variant) = enum_ + .variants + .clone() + .filter(|variant| { + let attrs = item_tree.attrs(db, krate, (*variant).into()); + attrs.is_cfg_enabled(cfg_options) + }) + .zip(0u32..) 
+ .find(|(_variant, idx)| it.local_id == Idx::from_raw(RawIdx::from(*idx))) + .map(|(variant, _idx)| variant) else { return Arc::new(res); }; @@ -272,6 +275,25 @@ impl Attrs { self.by_key("proc_macro_derive").exists() } + pub fn is_test(&self) -> bool { + self.iter().any(|it| { + it.path() + .segments() + .iter() + .rev() + .zip(["core", "prelude", "v1", "test"].iter().rev()) + .all(|it| it.0.as_str() == Some(it.1)) + }) + } + + pub fn is_ignore(&self) -> bool { + self.by_key("ignore").exists() + } + + pub fn is_bench(&self) -> bool { + self.by_key("bench").exists() + } + pub fn is_unstable(&self) -> bool { self.by_key("unstable").exists() } @@ -282,7 +304,7 @@ use std::slice::Iter as SliceIter; pub enum DocAtom { /// eg. `#[doc(hidden)]` Flag(SmolStr), - /// eg. `#[doc(alias = "x")]` + /// eg. `#[doc(alias = "it")]` /// /// Note that a key can have multiple values that are all considered "active" at the same time. /// For example, `#[doc(alias = "x")]` and `#[doc(alias = "y")]`. @@ -462,6 +484,7 @@ impl AttrsWithOwner { } }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), + AttrDefId::ExternCrateId(it) => attrs_from_item_tree_loc(db, it), }; let attrs = raw_attrs.filter(db.upcast(), def.krate(db)); @@ -546,6 +569,7 @@ impl AttrsWithOwner { .map(|source| ast::AnyHasAttrs::new(source[id.local_id].clone())), }, AttrDefId::ExternBlockId(id) => any_has_attrs(db, id), + AttrDefId::ExternCrateId(id) => any_has_attrs(db, id), }; AttrSourceMap::new(owner.as_ref().map(|node| node as &dyn HasAttrs)) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body.rs b/src/tools/rust-analyzer/crates/hir-def/src/body.rs index 94dc39b1175..f8d492d0e52 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body.rs @@ -273,10 +273,10 @@ impl Body { pub fn is_binding_upvar(&self, binding: BindingId, relative_to: ExprId) -> bool { match self.binding_owners.get(&binding) { - Some(x) => { + Some(it) => { // We assign expression ids in a way that outer closures will receive // a lower id - x.into_raw() < relative_to.into_raw() + it.into_raw() < relative_to.into_raw() } None => true, } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs index b375ec63a67..152c02743f7 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/lower.rs @@ -297,11 +297,11 @@ impl ExprCollector<'_> { let (result_expr_id, prev_binding_owner) = this.initialize_binding_owner(syntax_ptr); let inner_expr = this.collect_block(e); - let x = this.db.intern_anonymous_const(ConstBlockLoc { + let it = this.db.intern_anonymous_const(ConstBlockLoc { parent: this.owner, root: inner_expr, }); - this.body.exprs[result_expr_id] = Expr::Const(x); + this.body.exprs[result_expr_id] = Expr::Const(it); this.current_binding_owner = prev_binding_owner; result_expr_id }) @@ -324,10 +324,10 @@ impl ExprCollector<'_> { ast::Expr::CallExpr(e) => { let is_rustc_box = { let attrs = e.attrs(); - attrs.filter_map(|x| x.as_simple_atom()).any(|x| x == "rustc_box") + attrs.filter_map(|it| it.as_simple_atom()).any(|it| it == "rustc_box") }; if is_rustc_box { - let expr = self.collect_expr_opt(e.arg_list().and_then(|x| x.args().next())); + let expr = self.collect_expr_opt(e.arg_list().and_then(|it| it.args().next())); self.alloc_expr(Expr::Box { expr }, syntax_ptr) } else { let callee = self.collect_expr_opt(e.expr()); @@ -781,7 +781,7 @@ impl 
ExprCollector<'_> { pat: self.alloc_pat_desugared(some_pat), guard: None, expr: self.with_opt_labeled_rib(label, |this| { - this.collect_expr_opt(e.loop_body().map(|x| x.into())) + this.collect_expr_opt(e.loop_body().map(|it| it.into())) }), }; let iter_name = Name::generate_new_name(); @@ -874,10 +874,10 @@ impl ExprCollector<'_> { }), guard: None, expr: { - let x = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone()); + let it = self.alloc_expr(Expr::Path(Path::from(break_name)), syntax_ptr.clone()); let callee = self.alloc_expr(Expr::Path(try_from_residual), syntax_ptr.clone()); let result = self.alloc_expr( - Expr::Call { callee, args: Box::new([x]), is_assignee_expr: false }, + Expr::Call { callee, args: Box::new([it]), is_assignee_expr: false }, syntax_ptr.clone(), ); self.alloc_expr( @@ -1240,12 +1240,12 @@ impl ExprCollector<'_> { pats.push(self.collect_pat(first, binding_list)); binding_list.reject_new = true; for rest in it { - for (_, x) in binding_list.is_used.iter_mut() { - *x = false; + for (_, it) in binding_list.is_used.iter_mut() { + *it = false; } pats.push(self.collect_pat(rest, binding_list)); - for (&id, &x) in binding_list.is_used.iter() { - if !x { + for (&id, &is_used) in binding_list.is_used.iter() { + if !is_used { self.body.bindings[id].problems = Some(BindingProblems::NotBoundAcrossAll); } @@ -1352,9 +1352,9 @@ impl ExprCollector<'_> { // FIXME: implement in a way that also builds source map and calculates assoc resolutions in type inference. ast::Pat::RangePat(p) => { let mut range_part_lower = |p: Option<ast::Pat>| { - p.and_then(|x| match &x { - ast::Pat::LiteralPat(x) => { - Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(x)?.0))) + p.and_then(|it| match &it { + ast::Pat::LiteralPat(it) => { + Some(Box::new(LiteralOrConst::Literal(pat_literal_to_hir(it)?.0))) } ast::Pat::IdentPat(p) => { let name = @@ -1451,9 +1451,7 @@ impl ExprCollector<'_> { &self, lifetime: Option<ast::Lifetime>, ) -> Result<Option<LabelId>, BodyDiagnostic> { - let Some(lifetime) = lifetime else { - return Ok(None) - }; + let Some(lifetime) = lifetime else { return Ok(None) }; let name = Name::new_lifetime(&lifetime); for (rib_idx, rib) in self.label_ribs.iter().enumerate().rev() { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs index cd6df0e6325..0c6cf0b49a2 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/pretty.rs @@ -105,7 +105,7 @@ struct Printer<'a> { needs_indent: bool, } -impl<'a> Write for Printer<'a> { +impl Write for Printer<'_> { fn write_str(&mut self, s: &str) -> fmt::Result { for line in s.split_inclusive('\n') { if self.needs_indent { @@ -125,7 +125,7 @@ impl<'a> Write for Printer<'a> { } } -impl<'a> Printer<'a> { +impl Printer<'_> { fn indented(&mut self, f: impl FnOnce(&mut Self)) { self.indent_level += 1; wln!(self); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs index edee2c7ff96..d5582011645 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests.rs @@ -3,12 +3,12 @@ mod block; use base_db::{fixture::WithFixture, SourceDatabase}; use expect_test::Expect; -use crate::ModuleDefId; +use crate::{test_db::TestDB, ModuleDefId}; use super::*; fn lower(ra_fixture: &str) -> Arc<Body> { - let db = 
crate::test_db::TestDB::with_files(ra_fixture); + let db = TestDB::with_files(ra_fixture); let krate = db.crate_graph().iter().next().unwrap(); let def_map = db.crate_def_map(krate); @@ -25,15 +25,15 @@ fn lower(ra_fixture: &str) -> Arc<Body> { db.body(fn_def.unwrap().into()) } -fn block_def_map_at(ra_fixture: &str) -> String { - let (db, position) = crate::test_db::TestDB::with_position(ra_fixture); +fn def_map_at(ra_fixture: &str) -> String { + let (db, position) = TestDB::with_position(ra_fixture); let module = db.module_at_position(position); module.def_map(&db).dump(&db) } fn check_block_scopes_at(ra_fixture: &str, expect: Expect) { - let (db, position) = crate::test_db::TestDB::with_position(ra_fixture); + let (db, position) = TestDB::with_position(ra_fixture); let module = db.module_at_position(position); let actual = module.def_map(&db).dump_block_scopes(&db); @@ -41,7 +41,7 @@ fn check_block_scopes_at(ra_fixture: &str, expect: Expect) { } fn check_at(ra_fixture: &str, expect: Expect) { - let actual = block_def_map_at(ra_fixture); + let actual = def_map_at(ra_fixture); expect.assert_eq(&actual); } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs index 6e77744f215..4e015a7fbbb 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/body/tests/block.rs @@ -134,6 +134,47 @@ struct Struct {} } #[test] +fn super_imports_2() { + check_at( + r#" +fn outer() { + mod m { + struct ResolveMe {} + fn middle() { + mod m2 { + fn inner() { + use super::ResolveMe; + $0 + } + } + } + } +} +"#, + expect![[r#" + block scope + ResolveMe: t + + block scope + m2: t + + block scope::m2 + inner: v + + block scope + m: t + + block scope::m + ResolveMe: t + middle: v + + crate + outer: v + "#]], + ); +} + +#[test] fn nested_module_scoping() { check_block_scopes_at( r#" @@ -156,6 +197,42 @@ fn f() { } #[test] +fn self_imports() { + check_at( + r#" +fn f() { + mod m { + struct ResolveMe {} + fn g() { + fn h() { + use self::ResolveMe; + $0 + } + } + } +} +"#, + expect![[r#" + block scope + ResolveMe: t + + block scope + h: v + + block scope + m: t + + block scope::m + ResolveMe: t + g: v + + crate + f: v + "#]], + ); +} + +#[test] fn legacy_macro_items() { // Checks that legacy-scoped `macro_rules!` from parent namespaces are resolved and expanded // correctly. 
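A side note on the `Attrs::is_test` helper introduced in the hir-def `attr.rs` hunk above: it treats an attribute as a `#[test]` marker when its path, read from the last segment backwards, lines up with `core::prelude::v1::test`. The following is only a minimal standalone sketch of that suffix comparison, using plain `&str` segments and an illustrative function name rather than rust-analyzer's `Name`-based API:

fn is_test_attr_path(segments: &[&str]) -> bool {
    // Walk the path and the expected `core::prelude::v1::test` path from the
    // end; `zip` stops at the shorter side, so a bare `test` also matches.
    segments
        .iter()
        .rev()
        .zip(["core", "prelude", "v1", "test"].iter().rev())
        .all(|(seg, expected)| seg == expected)
}

fn main() {
    assert!(is_test_attr_path(&["test"]));
    assert!(is_test_attr_path(&["core", "prelude", "v1", "test"]));
    assert!(!is_test_attr_path(&["tokio", "test"]));
}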
diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data.rs b/src/tools/rust-analyzer/crates/hir-def/src/data.rs index 40e6a430878..54fe9a2e844 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data.rs @@ -24,11 +24,12 @@ use crate::{ proc_macro::{parse_macro_name_and_helper_attrs, ProcMacroKind}, DefMap, MacroSubNs, }, + path::ImportAlias, type_ref::{TraitRef, TypeBound, TypeRef}, visibility::RawVisibility, - AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId, - Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId, - StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc, + AssocItemId, AstIdWithPath, ConstId, ConstLoc, ExternCrateId, FunctionId, FunctionLoc, + HasModule, ImplId, Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, + ProcMacroId, StaticId, TraitAliasId, TraitId, TypeAliasId, TypeAliasLoc, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -424,6 +425,7 @@ impl MacroRulesData { Arc::new(MacroRulesData { name: makro.name.clone(), macro_export }) } } + #[derive(Debug, Clone, PartialEq, Eq)] pub struct ProcMacroData { pub name: Name, @@ -461,6 +463,30 @@ impl ProcMacroData { } #[derive(Debug, Clone, PartialEq, Eq)] +pub struct ExternCrateDeclData { + pub name: Name, + pub alias: Option<ImportAlias>, + pub visibility: RawVisibility, +} + +impl ExternCrateDeclData { + pub(crate) fn extern_crate_decl_data_query( + db: &dyn DefDatabase, + extern_crate: ExternCrateId, + ) -> Arc<ExternCrateDeclData> { + let loc = extern_crate.lookup(db); + let item_tree = loc.id.item_tree(db); + let extern_crate = &item_tree[loc.id.value]; + + Arc::new(Self { + name: extern_crate.name.clone(), + visibility: item_tree[extern_crate.visibility].clone(), + alias: extern_crate.alias.clone(), + }) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] pub struct ConstData { /// `None` for `const _: () = ();` pub name: Option<Name>, @@ -573,7 +599,7 @@ impl<'a> AssocItemCollector<'a> { if !attrs.is_cfg_enabled(self.expander.cfg_options()) { self.diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id.local_id, - InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).upcast()), + InFile::new(self.expander.current_file_id(), item.ast_id(item_tree).erase()), attrs.cfg().unwrap(), self.expander.cfg_options().clone(), )); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs index 6db5abccc92..c8df3f3f96a 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/data/adt.rs @@ -18,7 +18,6 @@ use triomphe::Arc; use crate::{ builtin_type::{BuiltinInt, BuiltinUint}, db::DefDatabase, - expander::CfgExpander, item_tree::{AttrOwner, Field, FieldAstId, Fields, ItemTree, ModItem, RawVisibilityId}, lang_item::LangItem, lower::LowerCtx, @@ -29,8 +28,8 @@ use crate::{ tt::{Delimiter, DelimiterKind, Leaf, Subtree, TokenTree}, type_ref::TypeRef, visibility::RawVisibility, - EnumId, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, UnionId, - VariantId, + EnumId, EnumLoc, LocalEnumVariantId, LocalFieldId, LocalModuleId, Lookup, ModuleId, StructId, + UnionId, VariantId, }; /// Note that we use `StructData` for unions as well! 
@@ -76,6 +75,7 @@ pub struct EnumData { pub struct EnumVariantData { pub name: Name, pub variant_data: Arc<VariantData>, + pub tree_id: la_arena::Idx<crate::item_tree::Variant>, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -147,6 +147,7 @@ fn parse_repr_tt(tt: &Subtree) -> Option<ReprOptions> { } "C" => ReprFlags::IS_C, "transparent" => ReprFlags::IS_TRANSPARENT, + "simd" => ReprFlags::IS_SIMD, repr => { if let Some(builtin) = BuiltinInt::from_suffix(repr) .map(Either::Left) @@ -325,11 +326,12 @@ impl EnumData { variants.alloc(EnumVariantData { name: var.name.clone(), variant_data: Arc::new(var_data), + tree_id, }); } else { diagnostics.push(DefDiagnostic::unconfigured_code( loc.container.local_id, - InFile::new(loc.id.file_id(), var.ast_id.upcast()), + InFile::new(loc.id.file_id(), var.ast_id.erase()), attrs.cfg().unwrap(), cfg_options.clone(), )) @@ -367,9 +369,10 @@ impl HasChildSource<LocalEnumVariantId> for EnumId { &self, db: &dyn DefDatabase, ) -> InFile<ArenaMap<LocalEnumVariantId, Self::Value>> { - let src = self.lookup(db).source(db); + let loc = &self.lookup(db); + let src = loc.source(db); let mut trace = Trace::new_for_map(); - lower_enum(db, &mut trace, &src, self.lookup(db).container); + lower_enum(db, &mut trace, &src, loc); src.with_value(trace.into_map()) } } @@ -378,31 +381,58 @@ fn lower_enum( db: &dyn DefDatabase, trace: &mut Trace<EnumVariantData, ast::Variant>, ast: &InFile<ast::Enum>, - module_id: ModuleId, + loc: &EnumLoc, ) { - let expander = CfgExpander::new(db, ast.file_id, module_id.krate); + let item_tree = loc.id.item_tree(db); + let krate = loc.container.krate; + + let item_tree_variants = item_tree[loc.id.value].variants.clone(); + + let cfg_options = &db.crate_graph()[krate].cfg_options; let variants = ast .value .variant_list() .into_iter() .flat_map(|it| it.variants()) - .filter(|var| expander.is_cfg_enabled(db, var)); - for var in variants { + .zip(item_tree_variants) + .filter(|&(_, item_tree_id)| { + item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) + }); + for (var, item_tree_id) in variants { trace.alloc( || var.clone(), || EnumVariantData { name: var.name().map_or_else(Name::missing, |it| it.as_name()), - variant_data: Arc::new(VariantData::new(db, ast.with_value(var.kind()), module_id)), + variant_data: Arc::new(VariantData::new( + db, + ast.with_value(var.kind()), + loc.container, + &item_tree, + item_tree_id, + )), + tree_id: item_tree_id, }, ); } } impl VariantData { - fn new(db: &dyn DefDatabase, flavor: InFile<ast::StructKind>, module_id: ModuleId) -> Self { - let mut expander = CfgExpander::new(db, flavor.file_id, module_id.krate); + fn new( + db: &dyn DefDatabase, + flavor: InFile<ast::StructKind>, + module_id: ModuleId, + item_tree: &ItemTree, + variant: la_arena::Idx<crate::item_tree::Variant>, + ) -> Self { let mut trace = Trace::new_for_arena(); - match lower_struct(db, &mut expander, &mut trace, &flavor) { + match lower_struct( + db, + &mut trace, + &flavor, + module_id.krate, + item_tree, + &item_tree[variant].fields, + ) { StructKind::Tuple => VariantData::Tuple(trace.into_arena()), StructKind::Record => VariantData::Record(trace.into_arena()), StructKind::Unit => VariantData::Unit, @@ -434,28 +464,43 @@ impl HasChildSource<LocalFieldId> for VariantId { type Value = Either<ast::TupleField, ast::RecordField>; fn child_source(&self, db: &dyn DefDatabase) -> InFile<ArenaMap<LocalFieldId, Self::Value>> { - let (src, module_id) = match self { + let item_tree; + let (src, fields, container) = match *self { 
VariantId::EnumVariantId(it) => { // I don't really like the fact that we call into parent source // here, this might add to more queries then necessary. + let lookup = it.parent.lookup(db); + item_tree = lookup.id.item_tree(db); let src = it.parent.child_source(db); - (src.map(|map| map[it.local_id].kind()), it.parent.lookup(db).container) + let tree_id = db.enum_data(it.parent).variants[it.local_id].tree_id; + let fields = &item_tree[tree_id].fields; + (src.map(|map| map[it.local_id].kind()), fields, lookup.container) } VariantId::StructId(it) => { - (it.lookup(db).source(db).map(|it| it.kind()), it.lookup(db).container) + let lookup = it.lookup(db); + item_tree = lookup.id.item_tree(db); + ( + lookup.source(db).map(|it| it.kind()), + &item_tree[lookup.id.value].fields, + lookup.container, + ) + } + VariantId::UnionId(it) => { + let lookup = it.lookup(db); + item_tree = lookup.id.item_tree(db); + ( + lookup.source(db).map(|it| { + it.record_field_list() + .map(ast::StructKind::Record) + .unwrap_or(ast::StructKind::Unit) + }), + &item_tree[lookup.id.value].fields, + lookup.container, + ) } - VariantId::UnionId(it) => ( - it.lookup(db).source(db).map(|it| { - it.record_field_list() - .map(ast::StructKind::Record) - .unwrap_or(ast::StructKind::Unit) - }), - it.lookup(db).container, - ), }; - let mut expander = CfgExpander::new(db, src.file_id, module_id.krate); let mut trace = Trace::new_for_map(); - lower_struct(db, &mut expander, &mut trace, &src); + lower_struct(db, &mut trace, &src, container.krate, &item_tree, fields); src.with_value(trace.into_map()) } } @@ -469,16 +514,19 @@ pub enum StructKind { fn lower_struct( db: &dyn DefDatabase, - expander: &mut CfgExpander, trace: &mut Trace<FieldData, Either<ast::TupleField, ast::RecordField>>, ast: &InFile<ast::StructKind>, + krate: CrateId, + item_tree: &ItemTree, + fields: &Fields, ) -> StructKind { - let ctx = LowerCtx::new(db, &expander.hygiene(), ast.file_id); + let ctx = LowerCtx::with_file_id(db, ast.file_id); - match &ast.value { - ast::StructKind::Tuple(fl) => { - for (i, fd) in fl.fields().enumerate() { - if !expander.is_cfg_enabled(db, &fd) { + match (&ast.value, fields) { + (ast::StructKind::Tuple(fl), Fields::Tuple(fields)) => { + let cfg_options = &db.crate_graph()[krate].cfg_options; + for ((i, fd), item_tree_id) in fl.fields().enumerate().zip(fields.clone()) { + if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) { continue; } @@ -493,9 +541,10 @@ fn lower_struct( } StructKind::Tuple } - ast::StructKind::Record(fl) => { - for fd in fl.fields() { - if !expander.is_cfg_enabled(db, &fd) { + (ast::StructKind::Record(fl), Fields::Record(fields)) => { + let cfg_options = &db.crate_graph()[krate].cfg_options; + for (fd, item_tree_id) in fl.fields().zip(fields.clone()) { + if !item_tree.attrs(db, krate, item_tree_id.into()).is_cfg_enabled(cfg_options) { continue; } @@ -510,7 +559,7 @@ fn lower_struct( } StructKind::Record } - ast::StructKind::Unit => StructKind::Unit, + _ => StructKind::Unit, } } @@ -539,8 +588,8 @@ fn lower_fields( InFile::new( current_file_id, match field.ast_id { - FieldAstId::Record(it) => it.upcast(), - FieldAstId::Tuple(it) => it.upcast(), + FieldAstId::Record(it) => it.erase(), + FieldAstId::Tuple(it) => it.erase(), }, ), attrs.cfg().unwrap(), @@ -563,8 +612,8 @@ fn lower_fields( InFile::new( current_file_id, match field.ast_id { - FieldAstId::Record(it) => it.upcast(), - FieldAstId::Tuple(it) => it.upcast(), + FieldAstId::Record(it) => it.erase(), + FieldAstId::Tuple(it) => 
it.erase(), }, ), attrs.cfg().unwrap(), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/db.rs b/src/tools/rust-analyzer/crates/hir-def/src/db.rs index 04ec47f84ca..82e6dfb30c8 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/db.rs @@ -12,27 +12,31 @@ use crate::{ body::{scope::ExprScopes, Body, BodySourceMap}, data::{ adt::{EnumData, StructData}, - ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData, - TraitAliasData, TraitData, TypeAliasData, + ConstData, ExternCrateDeclData, FunctionData, ImplData, Macro2Data, MacroRulesData, + ProcMacroData, StaticData, TraitAliasData, TraitData, TypeAliasData, }, generics::GenericParams, import_map::ImportMap, item_tree::{AttrOwner, ItemTree}, - lang_item::{LangItem, LangItemTarget, LangItems}, + lang_item::{self, LangItem, LangItemTarget, LangItems}, nameres::{diagnostics::DefDiagnostic, DefMap}, visibility::{self, Visibility}, AttrDefId, BlockId, BlockLoc, ConstBlockId, ConstBlockLoc, ConstId, ConstLoc, DefWithBodyId, - EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, - ImplLoc, InTypeConstId, InTypeConstLoc, LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc, - MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, - StructLoc, TraitAliasId, TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, - UnionLoc, VariantId, + EnumId, EnumLoc, ExternBlockId, ExternBlockLoc, ExternCrateId, ExternCrateLoc, FunctionId, + FunctionLoc, GenericDefId, ImplId, ImplLoc, ImportId, ImportLoc, InTypeConstId, InTypeConstLoc, + LocalEnumVariantId, LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, + ProcMacroId, ProcMacroLoc, StaticId, StaticLoc, StructId, StructLoc, TraitAliasId, + TraitAliasLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc, VariantId, }; #[salsa::query_group(InternDatabaseStorage)] pub trait InternDatabase: SourceDatabase { // region: items #[salsa::interned] + fn intern_import(&self, loc: ImportLoc) -> ImportId; + #[salsa::interned] + fn intern_extern_crate(&self, loc: ExternCrateLoc) -> ExternCrateId; + #[salsa::interned] fn intern_function(&self, loc: FunctionLoc) -> FunctionId; #[salsa::interned] fn intern_struct(&self, loc: StructLoc) -> StructId; @@ -160,6 +164,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba #[salsa::invoke(ProcMacroData::proc_macro_data_query)] fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>; + #[salsa::invoke(ExternCrateDeclData::extern_crate_decl_data_query)] + fn extern_crate_decl_data(&self, extern_crate: ExternCrateId) -> Arc<ExternCrateDeclData>; + // endregion:data #[salsa::invoke(Body::body_with_source_map_query)] @@ -197,6 +204,9 @@ pub trait DefDatabase: InternDatabase + ExpandDatabase + Upcast<dyn ExpandDataba #[salsa::invoke(AttrsWithOwner::attrs_query)] fn attrs(&self, def: AttrDefId) -> Attrs; + #[salsa::invoke(lang_item::lang_attr_query)] + fn lang_attr(&self, def: AttrDefId) -> Option<LangItem>; + #[salsa::transparent] #[salsa::invoke(AttrsWithOwner::attrs_with_owner)] fn attrs_with_owner(&self, def: AttrDefId) -> AttrsWithOwner; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs index f30be6b64e3..4197d010608 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/dyn_map/keys.rs 
@@ -8,8 +8,8 @@ use syntax::{ast, AstNode, AstPtr}; use crate::{ dyn_map::{DynMap, Policy}, - ConstId, EnumId, EnumVariantId, FieldId, FunctionId, ImplId, LifetimeParamId, Macro2Id, - MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, + ConstId, EnumId, EnumVariantId, ExternCrateId, FieldId, FunctionId, ImplId, LifetimeParamId, + Macro2Id, MacroRulesId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, TypeOrConstParamId, UnionId, }; @@ -25,6 +25,7 @@ pub const TRAIT_ALIAS: Key<ast::TraitAlias, TraitAliasId> = Key::new(); pub const STRUCT: Key<ast::Struct, StructId> = Key::new(); pub const UNION: Key<ast::Union, UnionId> = Key::new(); pub const ENUM: Key<ast::Enum, EnumId> = Key::new(); +pub const EXTERN_CRATE: Key<ast::ExternCrate, ExternCrateId> = Key::new(); pub const VARIANT: Key<ast::Variant, EnumVariantId> = Key::new(); pub const TUPLE_FIELD: Key<ast::TupleField, FieldId> = Key::new(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs index a588827c8d3..cc85bd893ac 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/expander.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/expander.rs @@ -15,18 +15,11 @@ use crate::{ MacroId, ModuleId, }; -/// A subset of Expander that only deals with cfg attributes. We only need it to -/// avoid cyclic queries in crate def map during enum processing. #[derive(Debug)] -pub(crate) struct CfgExpander { +pub struct Expander { cfg_options: CfgOptions, hygiene: Hygiene, krate: CrateId, -} - -#[derive(Debug)] -pub struct Expander { - cfg_expander: CfgExpander, pub(crate) current_file_id: HirFileId, pub(crate) module: ModuleId, /// `recursion_depth == usize::MAX` indicates that the recursion limit has been reached. 
@@ -34,41 +27,23 @@ pub struct Expander { recursion_limit: Limit, } -impl CfgExpander { - pub(crate) fn new( - db: &dyn DefDatabase, - current_file_id: HirFileId, - krate: CrateId, - ) -> CfgExpander { - let hygiene = Hygiene::new(db.upcast(), current_file_id); - let cfg_options = db.crate_graph()[krate].cfg_options.clone(); - CfgExpander { cfg_options, hygiene, krate } - } - - pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene)) - } - - pub(crate) fn is_cfg_enabled(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> bool { - let attrs = self.parse_attrs(db, owner); - attrs.is_cfg_enabled(&self.cfg_options) - } - - pub(crate) fn hygiene(&self) -> &Hygiene { - &self.hygiene - } -} - impl Expander { pub fn new(db: &dyn DefDatabase, current_file_id: HirFileId, module: ModuleId) -> Expander { - let cfg_expander = CfgExpander::new(db, current_file_id, module.krate); let recursion_limit = db.recursion_limit(module.krate); #[cfg(not(test))] let recursion_limit = Limit::new(recursion_limit as usize); // Without this, `body::tests::your_stack_belongs_to_me` stack-overflows in debug #[cfg(test)] let recursion_limit = Limit::new(std::cmp::min(32, recursion_limit as usize)); - Expander { cfg_expander, current_file_id, module, recursion_depth: 0, recursion_limit } + Expander { + current_file_id, + module, + recursion_depth: 0, + recursion_limit, + cfg_options: db.crate_graph()[module.krate].cfg_options.clone(), + hygiene: Hygiene::new(db.upcast(), current_file_id), + krate: module.krate, + } } pub fn enter_expand<T: ast::AstNode>( @@ -120,7 +95,7 @@ impl Expander { } pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) { - self.cfg_expander.hygiene = Hygiene::new(db.upcast(), mark.file_id); + self.hygiene = Hygiene::new(db.upcast(), mark.file_id); self.current_file_id = mark.file_id; if self.recursion_depth == u32::MAX { // Recursion limit has been reached somewhere in the macro expansion tree. 
Reset the @@ -135,7 +110,7 @@ impl Expander { } pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> { - LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id) + LowerCtx::new(db, &self.hygiene, self.current_file_id) } pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> { @@ -143,11 +118,11 @@ impl Expander { } pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs { - self.cfg_expander.parse_attrs(db, owner) + Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene)) } pub(crate) fn cfg_options(&self) -> &CfgOptions { - &self.cfg_expander.cfg_options + &self.cfg_options } pub fn current_file_id(&self) -> HirFileId { @@ -155,7 +130,7 @@ impl Expander { } pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> { - let ctx = LowerCtx::new(db, &self.cfg_expander.hygiene, self.current_file_id); + let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id); Path::from_src(path, &ctx) } @@ -194,7 +169,7 @@ impl Expander { let parse = value.cast::<T>()?; self.recursion_depth += 1; - self.cfg_expander.hygiene = Hygiene::new(db.upcast(), file_id); + self.hygiene = Hygiene::new(db.upcast(), file_id); let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); let mark = Mark { file_id: old_file_id, bomb: DropBomb::new("expansion mark dropped") }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs index 8c49ae1c4af..df2af4c89b0 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/find_path.rs @@ -360,7 +360,7 @@ fn calculate_best_path( prefer_no_std, )?; cov_mark::hit!(partially_imported); - path.push_segment(info.path.segments.last()?.clone()); + path.push_segment(info.name.clone()); Some(path) }) }); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs index f19c3f028f4..d7d44e41388 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/generics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/generics.rs @@ -67,21 +67,21 @@ pub enum TypeOrConstParamData { impl TypeOrConstParamData { pub fn name(&self) -> Option<&Name> { match self { - TypeOrConstParamData::TypeParamData(x) => x.name.as_ref(), - TypeOrConstParamData::ConstParamData(x) => Some(&x.name), + TypeOrConstParamData::TypeParamData(it) => it.name.as_ref(), + TypeOrConstParamData::ConstParamData(it) => Some(&it.name), } } pub fn has_default(&self) -> bool { match self { - TypeOrConstParamData::TypeParamData(x) => x.default.is_some(), - TypeOrConstParamData::ConstParamData(x) => x.has_default, + TypeOrConstParamData::TypeParamData(it) => it.default.is_some(), + TypeOrConstParamData::ConstParamData(it) => it.has_default, } } pub fn type_param(&self) -> Option<&TypeParamData> { match self { - TypeOrConstParamData::TypeParamData(x) => Some(x), + TypeOrConstParamData::TypeParamData(it) => Some(it), TypeOrConstParamData::ConstParamData(_) => None, } } @@ -89,14 +89,14 @@ impl TypeOrConstParamData { pub fn const_param(&self) -> Option<&ConstParamData> { match self { TypeOrConstParamData::TypeParamData(_) => None, - TypeOrConstParamData::ConstParamData(x) => Some(x), + TypeOrConstParamData::ConstParamData(it) => Some(it), } } pub fn is_trait_self(&self) -> bool { match self { - TypeOrConstParamData::TypeParamData(x) => { - x.provenance == TypeParamProvenance::TraitSelf + 
TypeOrConstParamData::TypeParamData(it) => { + it.provenance == TypeParamProvenance::TraitSelf } TypeOrConstParamData::ConstParamData(_) => false, } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs index fa1f4933a26..57f023ef35d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/hir/type_ref.rs @@ -425,8 +425,8 @@ impl ConstRef { } match expr { ast::Expr::PathExpr(p) if is_path_ident(&p) => { - match p.path().and_then(|x| x.segment()).and_then(|x| x.name_ref()) { - Some(x) => Self::Path(x.as_name()), + match p.path().and_then(|it| it.segment()).and_then(|it| it.name_ref()) { + Some(it) => Self::Path(it.as_name()), None => Self::Scalar(LiteralConstRef::Unknown), } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs index 48532655e04..4b2e5041a12 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/import_map.rs @@ -1,13 +1,14 @@ //! A map of all publicly exported items in a crate. +use std::collections::hash_map::Entry; use std::{fmt, hash::BuildHasherDefault}; use base_db::CrateId; use fst::{self, Streamer}; use hir_expand::name::Name; -use indexmap::{map::Entry, IndexMap}; +use indexmap::IndexMap; use itertools::Itertools; -use rustc_hash::{FxHashSet, FxHasher}; +use rustc_hash::{FxHashMap, FxHashSet, FxHasher}; use triomphe::Arc; use crate::{ @@ -17,52 +18,23 @@ use crate::{ type FxIndexMap<K, V> = IndexMap<K, V, BuildHasherDefault<FxHasher>>; +// FIXME: Support aliases: an item may be exported under multiple names, so `ImportInfo` should +// have `Vec<(Name, ModuleId)>` instead of `(Name, ModuleId)`. /// Item import details stored in the `ImportMap`. #[derive(Debug, Clone, Eq, PartialEq)] pub struct ImportInfo { - /// A path that can be used to import the item, relative to the crate's root. - pub path: ImportPath, + /// A name that can be used to import the item, relative to the crate's root. + pub name: Name, /// The module containing this item. pub container: ModuleId, /// Whether the import is a trait associated item or not. pub is_trait_assoc_item: bool, } -#[derive(Debug, Clone, Eq, PartialEq)] -pub struct ImportPath { - pub segments: Vec<Name>, -} - -impl ImportPath { - pub fn display<'a>(&'a self, db: &'a dyn DefDatabase) -> impl fmt::Display + 'a { - struct Display<'a> { - db: &'a dyn DefDatabase, - path: &'a ImportPath, - } - impl fmt::Display for Display<'_> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Display::fmt( - &self.path.segments.iter().map(|it| it.display(self.db.upcast())).format("::"), - f, - ) - } - } - Display { db, path: self } - } - - fn len(&self) -> usize { - self.segments.len() - } -} - -/// A map from publicly exported items to the path needed to import/name them from a downstream -/// crate. +/// A map from publicly exported items to its name. /// /// Reexports of items are taken into account, ie. if something is exported under multiple /// names, the one with the shortest import path will be used. -/// -/// Note that all paths are relative to the containing crate's root, so the crate name still needs -/// to be prepended to the `ModPath` before the path is valid. 
#[derive(Default)] pub struct ImportMap { map: FxIndexMap<ItemInNs, ImportInfo>, @@ -70,122 +42,58 @@ pub struct ImportMap { /// List of keys stored in `map`, sorted lexicographically by their `ModPath`. Indexed by the /// values returned by running `fst`. /// - /// Since a path can refer to multiple items due to namespacing, we store all items with the - /// same path right after each other. This allows us to find all items after the FST gives us + /// Since a name can refer to multiple items due to namespacing, we store all items with the + /// same name right after each other. This allows us to find all items after the FST gives us /// the index of the first one. importables: Vec<ItemInNs>, fst: fst::Map<Vec<u8>>, } impl ImportMap { - pub fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> { + pub(crate) fn import_map_query(db: &dyn DefDatabase, krate: CrateId) -> Arc<Self> { let _p = profile::span("import_map_query"); - let mut import_map = collect_import_map(db, krate); + let map = collect_import_map(db, krate); - let mut importables = import_map - .map + let mut importables: Vec<_> = map .iter() - .map(|(item, info)| (item, fst_path(db, &info.path))) - .collect::<Vec<_>>(); - importables.sort_by(|(_, fst_path), (_, fst_path2)| fst_path.cmp(fst_path2)); + // We've only collected items, whose name cannot be tuple field. + .map(|(&item, info)| (item, info.name.as_str().unwrap().to_ascii_lowercase())) + .collect(); + importables.sort_by(|(_, lhs_name), (_, rhs_name)| lhs_name.cmp(rhs_name)); // Build the FST, taking care not to insert duplicate values. - let mut builder = fst::MapBuilder::memory(); - let mut last_batch_start = 0; - - for idx in 0..importables.len() { - let key = &importables[last_batch_start].1; - if let Some((_, fst_path)) = importables.get(idx + 1) { - if key == fst_path { - continue; - } - } - - let _ = builder.insert(key, last_batch_start as u64); - - last_batch_start = idx + 1; + let iter = importables.iter().enumerate().dedup_by(|lhs, rhs| lhs.1 .1 == rhs.1 .1); + for (start_idx, (_, name)) in iter { + let _ = builder.insert(name, start_idx as u64); } - import_map.fst = builder.into_map(); - import_map.importables = importables.iter().map(|&(&item, _)| item).collect(); - - Arc::new(import_map) - } - - /// Returns the `ModPath` needed to import/mention `item`, relative to this crate's root. 
- pub fn path_of(&self, item: ItemInNs) -> Option<&ImportPath> { - self.import_info_for(item).map(|it| &it.path) + Arc::new(ImportMap { + map, + fst: builder.into_map(), + importables: importables.into_iter().map(|(item, _)| item).collect(), + }) } pub fn import_info_for(&self, item: ItemInNs) -> Option<&ImportInfo> { self.map.get(&item) } - - #[cfg(test)] - fn fmt_for_test(&self, db: &dyn DefDatabase) -> String { - let mut importable_paths: Vec<_> = self - .map - .iter() - .map(|(item, info)| { - let ns = match item { - ItemInNs::Types(_) => "t", - ItemInNs::Values(_) => "v", - ItemInNs::Macros(_) => "m", - }; - format!("- {} ({ns})", info.path.display(db)) - }) - .collect(); - - importable_paths.sort(); - importable_paths.join("\n") - } - - fn collect_trait_assoc_items( - &mut self, - db: &dyn DefDatabase, - tr: TraitId, - is_type_in_ns: bool, - original_import_info: &ImportInfo, - ) { - let _p = profile::span("collect_trait_assoc_items"); - for (assoc_item_name, item) in &db.trait_data(tr).items { - let module_def_id = match item { - AssocItemId::FunctionId(f) => ModuleDefId::from(*f), - AssocItemId::ConstId(c) => ModuleDefId::from(*c), - // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias` - // qualifier, ergo no need to store it for imports in import_map - AssocItemId::TypeAliasId(_) => { - cov_mark::hit!(type_aliases_ignored); - continue; - } - }; - let assoc_item = if is_type_in_ns { - ItemInNs::Types(module_def_id) - } else { - ItemInNs::Values(module_def_id) - }; - - let mut assoc_item_info = original_import_info.clone(); - assoc_item_info.path.segments.push(assoc_item_name.to_owned()); - assoc_item_info.is_trait_assoc_item = true; - self.map.insert(assoc_item, assoc_item_info); - } - } } -fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap { +fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> FxIndexMap<ItemInNs, ImportInfo> { let _p = profile::span("collect_import_map"); let def_map = db.crate_def_map(krate); - let mut import_map = ImportMap::default(); + let mut map = FxIndexMap::default(); // We look only into modules that are public(ly reexported), starting with the crate root. - let empty = ImportPath { segments: vec![] }; let root = def_map.module_id(DefMap::ROOT); - let mut worklist = vec![(root, empty)]; - while let Some((module, mod_path)) = worklist.pop() { + let mut worklist = vec![(root, 0)]; + // Records items' minimum module depth. 
+ let mut depth_map = FxHashMap::default(); + + while let Some((module, depth)) = worklist.pop() { let ext_def_map; let mod_data = if module.krate == krate { &def_map[module.local_id] @@ -201,52 +109,83 @@ fn collect_import_map(db: &dyn DefDatabase, krate: CrateId) -> ImportMap { }); for (name, per_ns) in visible_items { - let mk_path = || { - let mut path = mod_path.clone(); - path.segments.push(name.clone()); - path - }; - for item in per_ns.iter_items() { - let path = mk_path(); - let path_len = path.len(); - let import_info = - ImportInfo { path, container: module, is_trait_assoc_item: false }; - - if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() { - import_map.collect_trait_assoc_items( - db, - tr, - matches!(item, ItemInNs::Types(_)), - &import_info, - ); - } + let import_info = ImportInfo { + name: name.clone(), + container: module, + is_trait_assoc_item: false, + }; - match import_map.map.entry(item) { + match depth_map.entry(item) { Entry::Vacant(entry) => { - entry.insert(import_info); + entry.insert(depth); } Entry::Occupied(mut entry) => { - // If the new path is shorter, prefer that one. - if path_len < entry.get().path.len() { - *entry.get_mut() = import_info; + if depth < *entry.get() { + entry.insert(depth); } else { continue; } } } - // If we've just added a path to a module, descend into it. We might traverse - // modules multiple times, but only if the new path to it is shorter than the - // first (else we `continue` above). + if let Some(ModuleDefId::TraitId(tr)) = item.as_module_def_id() { + collect_trait_assoc_items( + db, + &mut map, + tr, + matches!(item, ItemInNs::Types(_)), + &import_info, + ); + } + + map.insert(item, import_info); + + // If we've just added a module, descend into it. We might traverse modules + // multiple times, but only if the module depth is smaller (else we `continue` + // above). 
if let Some(ModuleDefId::ModuleId(mod_id)) = item.as_module_def_id() { - worklist.push((mod_id, mk_path())); + worklist.push((mod_id, depth + 1)); } } } } - import_map + map +} + +fn collect_trait_assoc_items( + db: &dyn DefDatabase, + map: &mut FxIndexMap<ItemInNs, ImportInfo>, + tr: TraitId, + is_type_in_ns: bool, + trait_import_info: &ImportInfo, +) { + let _p = profile::span("collect_trait_assoc_items"); + for (assoc_item_name, item) in &db.trait_data(tr).items { + let module_def_id = match item { + AssocItemId::FunctionId(f) => ModuleDefId::from(*f), + AssocItemId::ConstId(c) => ModuleDefId::from(*c), + // cannot use associated type aliases directly: need a `<Struct as Trait>::TypeAlias` + // qualifier, ergo no need to store it for imports in import_map + AssocItemId::TypeAliasId(_) => { + cov_mark::hit!(type_aliases_ignored); + continue; + } + }; + let assoc_item = if is_type_in_ns { + ItemInNs::Types(module_def_id) + } else { + ItemInNs::Values(module_def_id) + }; + + let assoc_item_info = ImportInfo { + container: trait_import_info.container, + name: assoc_item_name.clone(), + is_trait_assoc_item: true, + }; + map.insert(assoc_item, assoc_item_info); + } } impl PartialEq for ImportMap { @@ -260,7 +199,7 @@ impl Eq for ImportMap {} impl fmt::Debug for ImportMap { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let mut importable_paths: Vec<_> = self + let mut importable_names: Vec<_> = self .map .iter() .map(|(item, _)| match item { @@ -270,56 +209,40 @@ impl fmt::Debug for ImportMap { }) .collect(); - importable_paths.sort(); - f.write_str(&importable_paths.join("\n")) + importable_names.sort(); + f.write_str(&importable_names.join("\n")) } } -fn fst_path(db: &dyn DefDatabase, path: &ImportPath) -> String { - let _p = profile::span("fst_path"); - let mut s = path.display(db).to_string(); - s.make_ascii_lowercase(); - s -} - -#[derive(Debug, Eq, PartialEq, Hash)] -pub enum ImportKind { - Module, - Function, - Adt, - EnumVariant, - Const, - Static, - Trait, - TraitAlias, - TypeAlias, - BuiltinType, - AssociatedItem, - Macro, -} - /// A way to match import map contents against the search query. #[derive(Debug)] -pub enum SearchMode { +enum SearchMode { /// Import map entry should strictly match the query string. - Equals, - /// Import map entry should contain the query string. - Contains, + Exact, /// Import map entry should contain all letters from the query string, /// in the same order, but not necessary adjacent. Fuzzy, } +/// Three possible ways to search for the name in associated and/or other items. +#[derive(Debug, Clone, Copy)] +pub enum AssocSearchMode { + /// Search for the name in both associated and other items. + Include, + /// Search for the name in other items only. + Exclude, + /// Search for the name in the associated items only. + AssocItemsOnly, +} + #[derive(Debug)] pub struct Query { query: String, lowercased: String, - name_only: bool, - assoc_items_only: bool, search_mode: SearchMode, + assoc_mode: AssocSearchMode, case_sensitive: bool, limit: usize, - exclude_import_kinds: FxHashSet<ImportKind>, } impl Query { @@ -328,30 +251,21 @@ impl Query { Self { query, lowercased, - name_only: false, - assoc_items_only: false, - search_mode: SearchMode::Contains, + search_mode: SearchMode::Exact, + assoc_mode: AssocSearchMode::Include, case_sensitive: false, - limit: usize::max_value(), - exclude_import_kinds: FxHashSet::default(), + limit: usize::MAX, } } - /// Matches entries' names only, ignoring the rest of - /// the qualifier. 
- /// Example: for `std::marker::PhantomData`, the name is `PhantomData`. - pub fn name_only(self) -> Self { - Self { name_only: true, ..self } - } - - /// Matches only the entries that are associated items, ignoring the rest. - pub fn assoc_items_only(self) -> Self { - Self { assoc_items_only: true, ..self } + /// Fuzzy finds items instead of exact matching. + pub fn fuzzy(self) -> Self { + Self { search_mode: SearchMode::Fuzzy, ..self } } - /// Specifies the way to search for the entries using the query. - pub fn search_mode(self, search_mode: SearchMode) -> Self { - Self { search_mode, ..self } + /// Specifies whether we want to include associated items in the result. + pub fn assoc_search_mode(self, assoc_mode: AssocSearchMode) -> Self { + Self { assoc_mode, ..self } } /// Limits the returned number of items to `limit`. @@ -364,12 +278,6 @@ impl Query { Self { case_sensitive: true, ..self } } - /// Do not include imports of the specified kind in the search results. - pub fn exclude_import_kind(mut self, import_kind: ImportKind) -> Self { - self.exclude_import_kinds.insert(import_kind); - self - } - fn import_matches( &self, db: &dyn DefDatabase, @@ -377,49 +285,36 @@ impl Query { enforce_lowercase: bool, ) -> bool { let _p = profile::span("import_map::Query::import_matches"); - if import.is_trait_assoc_item { - if self.exclude_import_kinds.contains(&ImportKind::AssociatedItem) { - return false; - } - } else if self.assoc_items_only { - return false; + match (import.is_trait_assoc_item, self.assoc_mode) { + (true, AssocSearchMode::Exclude) => return false, + (false, AssocSearchMode::AssocItemsOnly) => return false, + _ => {} } - let mut input = if import.is_trait_assoc_item || self.name_only { - import.path.segments.last().unwrap().display(db.upcast()).to_string() - } else { - import.path.display(db).to_string() - }; - if enforce_lowercase || !self.case_sensitive { + let mut input = import.name.display(db.upcast()).to_string(); + let case_insensitive = enforce_lowercase || !self.case_sensitive; + if case_insensitive { input.make_ascii_lowercase(); } - let query_string = - if !enforce_lowercase && self.case_sensitive { &self.query } else { &self.lowercased }; + let query_string = if case_insensitive { &self.lowercased } else { &self.query }; match self.search_mode { - SearchMode::Equals => &input == query_string, - SearchMode::Contains => input.contains(query_string), + SearchMode::Exact => &input == query_string, SearchMode::Fuzzy => { - let mut unchecked_query_chars = query_string.chars(); - let mut mismatching_query_char = unchecked_query_chars.next(); - - for input_char in input.chars() { - match mismatching_query_char { - None => return true, - Some(matching_query_char) if matching_query_char == input_char => { - mismatching_query_char = unchecked_query_chars.next(); - } - _ => (), + let mut input_chars = input.chars(); + for query_char in query_string.chars() { + if input_chars.find(|&it| it == query_char).is_none() { + return false; } } - mismatching_query_char.is_none() + true } } } } -/// Searches dependencies of `krate` for an importable path matching `query`. +/// Searches dependencies of `krate` for an importable name matching `query`. /// /// This returns a list of items that could be imported from dependencies of `krate`. 
pub fn search_dependencies( @@ -442,65 +337,44 @@ pub fn search_dependencies( let mut stream = op.union(); - let mut all_indexed_values = FxHashSet::default(); - while let Some((_, indexed_values)) = stream.next() { - all_indexed_values.extend(indexed_values.iter().copied()); - } - let mut res = FxHashSet::default(); - for indexed_value in all_indexed_values { - let import_map = &import_maps[indexed_value.index]; - let importables = &import_map.importables[indexed_value.value as usize..]; + while let Some((_, indexed_values)) = stream.next() { + for indexed_value in indexed_values { + let import_map = &import_maps[indexed_value.index]; + let importables = &import_map.importables[indexed_value.value as usize..]; - let common_importable_data = &import_map.map[&importables[0]]; - if !query.import_matches(db, common_importable_data, true) { - continue; - } + let common_importable_data = &import_map.map[&importables[0]]; + if !query.import_matches(db, common_importable_data, true) { + continue; + } - // Path shared by the importable items in this group. - let common_importables_path_fst = fst_path(db, &common_importable_data.path); - // Add the items from this `ModPath` group. Those are all subsequent items in - // `importables` whose paths match `path`. - let iter = importables - .iter() - .copied() - .take_while(|item| { - common_importables_path_fst == fst_path(db, &import_map.map[item].path) - }) - .filter(|&item| match item_import_kind(item) { - Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind), - None => true, - }) - .filter(|item| { - !query.case_sensitive // we've already checked the common importables path case-insensitively + // Name shared by the importable items in this group. + let common_importable_name = + common_importable_data.name.to_smol_str().to_ascii_lowercase(); + // Add the items from this name group. Those are all subsequent items in + // `importables` whose name match `common_importable_name`. + let iter = importables + .iter() + .copied() + .take_while(|item| { + common_importable_name + == import_map.map[item].name.to_smol_str().to_ascii_lowercase() + }) + .filter(|item| { + !query.case_sensitive // we've already checked the common importables name case-insensitively || query.import_matches(db, &import_map.map[item], false) - }); - res.extend(iter); + }); + res.extend(iter); - if res.len() >= query.limit { - return res; + if res.len() >= query.limit { + return res; + } } } res } -fn item_import_kind(item: ItemInNs) -> Option<ImportKind> { - Some(match item.as_module_def_id()? 
{ - ModuleDefId::ModuleId(_) => ImportKind::Module, - ModuleDefId::FunctionId(_) => ImportKind::Function, - ModuleDefId::AdtId(_) => ImportKind::Adt, - ModuleDefId::EnumVariantId(_) => ImportKind::EnumVariant, - ModuleDefId::ConstId(_) => ImportKind::Const, - ModuleDefId::StaticId(_) => ImportKind::Static, - ModuleDefId::TraitId(_) => ImportKind::Trait, - ModuleDefId::TraitAliasId(_) => ImportKind::TraitAlias, - ModuleDefId::TypeAliasId(_) => ImportKind::TypeAlias, - ModuleDefId::BuiltinType(_) => ImportKind::BuiltinType, - ModuleDefId::MacroId(_) => ImportKind::Macro, - }) -} - #[cfg(test)] mod tests { use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; @@ -510,16 +384,39 @@ mod tests { use super::*; + impl ImportMap { + fn fmt_for_test(&self, db: &dyn DefDatabase) -> String { + let mut importable_paths: Vec<_> = self + .map + .iter() + .map(|(item, info)| { + let path = render_path(db, info); + let ns = match item { + ItemInNs::Types(_) => "t", + ItemInNs::Values(_) => "v", + ItemInNs::Macros(_) => "m", + }; + format!("- {path} ({ns})") + }) + .collect(); + + importable_paths.sort(); + importable_paths.join("\n") + } + } + fn check_search(ra_fixture: &str, crate_name: &str, query: Query, expect: Expect) { let db = TestDB::with_files(ra_fixture); let crate_graph = db.crate_graph(); let krate = crate_graph .iter() - .find(|krate| { - crate_graph[*krate].display_name.as_ref().map(|n| n.to_string()) - == Some(crate_name.to_string()) + .find(|&krate| { + crate_graph[krate] + .display_name + .as_ref() + .is_some_and(|it| &**it.crate_name() == crate_name) }) - .unwrap(); + .expect("could not find crate"); let actual = search_dependencies(db.upcast(), krate, query) .into_iter() @@ -530,7 +427,7 @@ mod tests { let (path, mark) = match assoc_item_path(&db, &dependency_imports, dependency) { Some(assoc_item_path) => (assoc_item_path, "a"), None => ( - dependency_imports.path_of(dependency)?.display(&db).to_string(), + render_path(&db, dependency_imports.import_info_for(dependency)?), match dependency { ItemInNs::Types(ModuleDefId::FunctionId(_)) | ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f", @@ -560,57 +457,25 @@ mod tests { dependency_imports: &ImportMap, dependency: ItemInNs, ) -> Option<String> { - let dependency_assoc_item_id = match dependency { - ItemInNs::Types(ModuleDefId::FunctionId(id)) - | ItemInNs::Values(ModuleDefId::FunctionId(id)) => AssocItemId::from(id), - ItemInNs::Types(ModuleDefId::ConstId(id)) - | ItemInNs::Values(ModuleDefId::ConstId(id)) => AssocItemId::from(id), - ItemInNs::Types(ModuleDefId::TypeAliasId(id)) - | ItemInNs::Values(ModuleDefId::TypeAliasId(id)) => AssocItemId::from(id), + let (dependency_assoc_item_id, container) = match dependency.as_module_def_id()? { + ModuleDefId::FunctionId(id) => (AssocItemId::from(id), id.lookup(db).container), + ModuleDefId::ConstId(id) => (AssocItemId::from(id), id.lookup(db).container), + ModuleDefId::TypeAliasId(id) => (AssocItemId::from(id), id.lookup(db).container), _ => return None, }; - let trait_ = assoc_to_trait(db, dependency)?; - if let ModuleDefId::TraitId(tr) = trait_.as_module_def_id()? 
{ - let trait_data = db.trait_data(tr); - let assoc_item_name = - trait_data.items.iter().find_map(|(assoc_item_name, assoc_item_id)| { - if &dependency_assoc_item_id == assoc_item_id { - Some(assoc_item_name) - } else { - None - } - })?; - return Some(format!( - "{}::{}", - dependency_imports.path_of(trait_)?.display(db), - assoc_item_name.display(db.upcast()) - )); - } - None - } - - fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> Option<ItemInNs> { - let assoc: AssocItemId = match item { - ItemInNs::Types(it) | ItemInNs::Values(it) => match it { - ModuleDefId::TypeAliasId(it) => it.into(), - ModuleDefId::FunctionId(it) => it.into(), - ModuleDefId::ConstId(it) => it.into(), - _ => return None, - }, - _ => return None, + let ItemContainerId::TraitId(trait_id) = container else { + return None; }; - let container = match assoc { - AssocItemId::FunctionId(it) => it.lookup(db).container, - AssocItemId::ConstId(it) => it.lookup(db).container, - AssocItemId::TypeAliasId(it) => it.lookup(db).container, - }; + let trait_info = dependency_imports.import_info_for(ItemInNs::Types(trait_id.into()))?; - match container { - ItemContainerId::TraitId(it) => Some(ItemInNs::Types(it.into())), - _ => None, - } + let trait_data = db.trait_data(trait_id); + let (assoc_item_name, _) = trait_data + .items + .iter() + .find(|(_, assoc_item_id)| &dependency_assoc_item_id == assoc_item_id)?; + Some(format!("{}::{}", render_path(db, trait_info), assoc_item_name.display(db.upcast()))) } fn check(ra_fixture: &str, expect: Expect) { @@ -633,6 +498,24 @@ mod tests { expect.assert_eq(&actual) } + fn render_path(db: &dyn DefDatabase, info: &ImportInfo) -> String { + let mut module = info.container; + let mut segments = vec![&info.name]; + + let def_map = module.def_map(db); + assert!(def_map.block_id().is_none(), "block local items should not be in `ImportMap`"); + + while let Some(parent) = module.containing_module(db) { + let parent_data = &def_map[parent.local_id]; + let (name, _) = + parent_data.children.iter().find(|(_, id)| **id == module.local_id).unwrap(); + segments.push(name); + module = parent; + } + + segments.iter().rev().map(|it| it.display(db.upcast())).join("::") + } + #[test] fn smoke() { check( @@ -749,6 +632,7 @@ mod tests { #[test] fn module_reexport() { // Reexporting modules from a dependency adds all contents to the import map. + // XXX: The rendered paths are relative to the defining crate. 
check( r" //- /main.rs crate:main deps:lib @@ -764,9 +648,9 @@ mod tests { - module::S (t) - module::S (v) main: + - module::S (t) + - module::S (v) - reexported_module (t) - - reexported_module::S (t) - - reexported_module::S (v) "#]], ); } @@ -868,10 +752,9 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy), + Query::new("fmt".to_string()).fuzzy(), expect![[r#" dep::fmt (t) - dep::fmt::Display (t) dep::fmt::Display::FMT_CONST (a) dep::fmt::Display::format_function (a) dep::fmt::Display::format_method (a) @@ -898,7 +781,9 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy).assoc_items_only(), + Query::new("fmt".to_string()) + .fuzzy() + .assoc_search_mode(AssocSearchMode::AssocItemsOnly), expect![[r#" dep::fmt::Display::FMT_CONST (a) dep::fmt::Display::format_function (a) @@ -909,23 +794,10 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()) - .search_mode(SearchMode::Fuzzy) - .exclude_import_kind(ImportKind::AssociatedItem), + Query::new("fmt".to_string()).fuzzy().assoc_search_mode(AssocSearchMode::Exclude), expect![[r#" - dep::fmt (t) - dep::fmt::Display (t) - "#]], - ); - - check_search( - ra_fixture, - "main", - Query::new("fmt".to_string()) - .search_mode(SearchMode::Fuzzy) - .assoc_items_only() - .exclude_import_kind(ImportKind::AssociatedItem), - expect![[r#""#]], + dep::fmt (t) + "#]], ); } @@ -958,13 +830,12 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy), + Query::new("fmt".to_string()).fuzzy(), expect![[r#" dep::Fmt (m) dep::Fmt (t) dep::Fmt (v) dep::fmt (t) - dep::fmt::Display (t) dep::fmt::Display::fmt (a) dep::format (f) "#]], @@ -973,26 +844,12 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()).search_mode(SearchMode::Equals), - expect![[r#" - dep::Fmt (m) - dep::Fmt (t) - dep::Fmt (v) - dep::fmt (t) - dep::fmt::Display::fmt (a) - "#]], - ); - - check_search( - ra_fixture, - "main", - Query::new("fmt".to_string()).search_mode(SearchMode::Contains), + Query::new("fmt".to_string()), expect![[r#" dep::Fmt (m) dep::Fmt (t) dep::Fmt (v) dep::fmt (t) - dep::fmt::Display (t) dep::fmt::Display::fmt (a) "#]], ); @@ -1033,7 +890,6 @@ mod tests { dep::Fmt (t) dep::Fmt (v) dep::fmt (t) - dep::fmt::Display (t) dep::fmt::Display::fmt (a) "#]], ); @@ -1041,7 +897,7 @@ mod tests { check_search( ra_fixture, "main", - Query::new("fmt".to_string()).name_only(), + Query::new("fmt".to_string()), expect![[r#" dep::Fmt (m) dep::Fmt (t) @@ -1106,43 +962,10 @@ mod tests { pub fn no() {} "#, "main", - Query::new("".to_string()).limit(2), - expect![[r#" - dep::Fmt (m) - dep::Fmt (t) - dep::Fmt (v) - dep::fmt (t) - "#]], - ); - } - - #[test] - fn search_exclusions() { - let ra_fixture = r#" - //- /main.rs crate:main deps:dep - //- /dep.rs crate:dep - - pub struct fmt; - pub struct FMT; - "#; - - check_search( - ra_fixture, - "main", - Query::new("FMT".to_string()), + Query::new("".to_string()).fuzzy().limit(1), expect![[r#" - dep::FMT (t) - dep::FMT (v) - dep::fmt (t) - dep::fmt (v) + dep::fmt::Display (t) "#]], ); - - check_search( - ra_fixture, - "main", - Query::new("FMT".to_string()).exclude_import_kind(ImportKind::Adt), - expect![[r#""#]], - ); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs index 2001fb29a9e..2ac1bcdc079 100644 --- 
a/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_scope.rs @@ -14,8 +14,8 @@ use stdx::format_to; use syntax::ast; use crate::{ - db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId, HasModule, - ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId, + db::DefDatabase, per_ns::PerNs, visibility::Visibility, AdtId, BuiltinType, ConstId, + ExternCrateId, HasModule, ImplId, LocalModuleId, MacroId, ModuleDefId, ModuleId, TraitId, }; #[derive(Copy, Clone, Debug)] @@ -50,6 +50,7 @@ pub struct ItemScope { unnamed_consts: Vec<ConstId>, /// Traits imported via `use Trait as _;`. unnamed_trait_imports: FxHashMap<TraitId, Visibility>, + extern_crate_decls: Vec<ExternCrateId>, /// Macros visible in current module in legacy textual scope /// /// For macros invoked by an unqualified identifier like `bar!()`, `legacy_macros` will be searched in first. @@ -188,7 +189,11 @@ impl ItemScope { } pub(crate) fn define_impl(&mut self, imp: ImplId) { - self.impls.push(imp) + self.impls.push(imp); + } + + pub(crate) fn define_extern_crate_decl(&mut self, extern_crate: ExternCrateId) { + self.extern_crate_decls.push(extern_crate); } pub(crate) fn define_unnamed_const(&mut self, konst: ConstId) { @@ -397,6 +402,7 @@ impl ItemScope { legacy_macros, attr_macros, derive_macros, + extern_crate_decls, } = self; types.shrink_to_fit(); values.shrink_to_fit(); @@ -409,6 +415,7 @@ impl ItemScope { legacy_macros.shrink_to_fit(); attr_macros.shrink_to_fit(); derive_macros.shrink_to_fit(); + extern_crate_decls.shrink_to_fit(); } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs index e74b71888c2..6f80bb6e07c 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree.rs @@ -46,7 +46,7 @@ use ast::{AstNode, HasName, StructKind}; use base_db::CrateId; use either::Either; use hir_expand::{ - ast_id_map::FileAstId, + ast_id_map::{AstIdNode, FileAstId}, attrs::RawAttrs, hygiene::Hygiene, name::{name, AsName, Name}, @@ -314,7 +314,7 @@ from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Id /// Trait implemented by all item nodes in the item tree. 
pub trait ItemTreeNode: Clone { - type Source: AstNode + Into<ast::Item>; + type Source: AstIdNode + Into<ast::Item>; fn ast_id(&self) -> FileAstId<Self::Source>; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs index e873316a578..ddf668d20b0 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/item_tree/pretty.rs @@ -52,7 +52,7 @@ struct Printer<'a> { needs_indent: bool, } -impl<'a> Printer<'a> { +impl Printer<'_> { fn indented(&mut self, f: impl FnOnce(&mut Self)) { self.indent_level += 1; wln!(self); @@ -572,7 +572,7 @@ impl<'a> Printer<'a> { } } -impl<'a> Write for Printer<'a> { +impl Write for Printer<'_> { fn write_str(&mut self, s: &str) -> fmt::Result { for line in s.split_inclusive('\n') { if self.needs_indent { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs index 0e9ac58fbaa..627479bb7c1 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lang_item.rs @@ -180,15 +180,15 @@ impl LangItems { T: Into<AttrDefId> + Copy, { let _p = profile::span("collect_lang_item"); - if let Some(lang_item) = lang_attr(db, item) { + if let Some(lang_item) = db.lang_attr(item.into()) { self.items.entry(lang_item).or_insert_with(|| constructor(item)); } } } -pub fn lang_attr(db: &dyn DefDatabase, item: impl Into<AttrDefId> + Copy) -> Option<LangItem> { - let attrs = db.attrs(item.into()); - attrs.by_key("lang").string_value().cloned().and_then(|it| LangItem::from_str(&it)) +pub(crate) fn lang_attr_query(db: &dyn DefDatabase, item: AttrDefId) -> Option<LangItem> { + let attrs = db.attrs(item); + attrs.by_key("lang").string_value().and_then(|it| LangItem::from_str(&it)) } pub enum GenericRequirement { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs index 9d8b57a0da9..1e74e2dfcb4 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lib.rs @@ -64,7 +64,7 @@ use std::{ use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind}; use hir_expand::{ - ast_id_map::FileAstId, + ast_id_map::{AstIdNode, FileAstId}, attrs::{Attr, AttrId, AttrInput}, builtin_attr_macro::BuiltinAttrExpander, builtin_derive_macro::BuiltinDeriveExpander, @@ -88,8 +88,8 @@ use crate::{ builtin_type::BuiltinType, data::adt::VariantData, item_tree::{ - Const, Enum, Function, Impl, ItemTreeId, ItemTreeNode, MacroDef, MacroRules, Static, - Struct, Trait, TraitAlias, TypeAlias, Union, + Const, Enum, ExternCrate, Function, Impl, Import, ItemTreeId, ItemTreeNode, MacroDef, + MacroRules, Static, Struct, Trait, TraitAlias, TypeAlias, Union, }, }; @@ -145,24 +145,28 @@ pub struct ModuleId { } impl ModuleId { - pub fn def_map(&self, db: &dyn db::DefDatabase) -> Arc<DefMap> { + pub fn def_map(self, db: &dyn db::DefDatabase) -> Arc<DefMap> { match self.block { Some(block) => db.block_def_map(block), None => db.crate_def_map(self.krate), } } - pub fn krate(&self) -> CrateId { + pub fn krate(self) -> CrateId { self.krate } - pub fn containing_module(&self, db: &dyn db::DefDatabase) -> Option<ModuleId> { + pub fn containing_module(self, db: &dyn db::DefDatabase) -> Option<ModuleId> { self.def_map(db).containing_module(self.local_id) } - pub fn containing_block(&self) -> Option<BlockId> { + pub fn containing_block(self) -> 
Option<BlockId> { self.block } + + pub fn is_block_module(self) -> bool { + self.block.is_some() && self.local_id == DefMap::ROOT + } } /// An ID of a module, **local** to a `DefMap`. @@ -314,6 +318,16 @@ type ImplLoc = ItemLoc<Impl>; impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl); #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct ImportId(salsa::InternId); +type ImportLoc = ItemLoc<Import>; +impl_intern!(ImportId, ImportLoc, intern_import, lookup_intern_import); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct ExternCrateId(salsa::InternId); +type ExternCrateLoc = ItemLoc<ExternCrate>; +impl_intern!(ExternCrateId, ExternCrateLoc, intern_extern_crate, lookup_intern_extern_crate); + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] pub struct ExternBlockId(salsa::InternId); type ExternBlockLoc = ItemLoc<ExternBlock>; impl_intern!(ExternBlockId, ExternBlockLoc, intern_extern_block, lookup_intern_extern_block); @@ -392,14 +406,14 @@ impl TypeParamId { impl TypeParamId { /// Caller should check if this toc id really belongs to a type - pub fn from_unchecked(x: TypeOrConstParamId) -> Self { - Self(x) + pub fn from_unchecked(it: TypeOrConstParamId) -> Self { + Self(it) } } impl From<TypeParamId> for TypeOrConstParamId { - fn from(x: TypeParamId) -> Self { - x.0 + fn from(it: TypeParamId) -> Self { + it.0 } } @@ -418,14 +432,14 @@ impl ConstParamId { impl ConstParamId { /// Caller should check if this toc id really belongs to a const - pub fn from_unchecked(x: TypeOrConstParamId) -> Self { - Self(x) + pub fn from_unchecked(it: TypeOrConstParamId) -> Self { + Self(it) } } impl From<ConstParamId> for TypeOrConstParamId { - fn from(x: ConstParamId) -> Self { - x.0 + fn from(it: ConstParamId) -> Self { + it.0 } } @@ -548,14 +562,14 @@ pub enum TypeOwnerId { impl TypeOwnerId { fn as_generic_def_id(self) -> Option<GenericDefId> { Some(match self { - TypeOwnerId::FunctionId(x) => GenericDefId::FunctionId(x), - TypeOwnerId::ConstId(x) => GenericDefId::ConstId(x), - TypeOwnerId::AdtId(x) => GenericDefId::AdtId(x), - TypeOwnerId::TraitId(x) => GenericDefId::TraitId(x), - TypeOwnerId::TraitAliasId(x) => GenericDefId::TraitAliasId(x), - TypeOwnerId::TypeAliasId(x) => GenericDefId::TypeAliasId(x), - TypeOwnerId::ImplId(x) => GenericDefId::ImplId(x), - TypeOwnerId::EnumVariantId(x) => GenericDefId::EnumVariantId(x), + TypeOwnerId::FunctionId(it) => GenericDefId::FunctionId(it), + TypeOwnerId::ConstId(it) => GenericDefId::ConstId(it), + TypeOwnerId::AdtId(it) => GenericDefId::AdtId(it), + TypeOwnerId::TraitId(it) => GenericDefId::TraitId(it), + TypeOwnerId::TraitAliasId(it) => GenericDefId::TraitAliasId(it), + TypeOwnerId::TypeAliasId(it) => GenericDefId::TypeAliasId(it), + TypeOwnerId::ImplId(it) => GenericDefId::ImplId(it), + TypeOwnerId::EnumVariantId(it) => GenericDefId::EnumVariantId(it), TypeOwnerId::InTypeConstId(_) | TypeOwnerId::ModuleId(_) | TypeOwnerId::StaticId(_) => { return None } @@ -578,15 +592,15 @@ impl_from!( for TypeOwnerId ); -// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. `{ let x: Type = _; }`) +// Every `DefWithBodyId` is a type owner, since bodies can contain type (e.g. 
`{ let it: Type = _; }`) impl From<DefWithBodyId> for TypeOwnerId { fn from(value: DefWithBodyId) -> Self { match value { - DefWithBodyId::FunctionId(x) => x.into(), - DefWithBodyId::StaticId(x) => x.into(), - DefWithBodyId::ConstId(x) => x.into(), - DefWithBodyId::InTypeConstId(x) => x.into(), - DefWithBodyId::VariantId(x) => x.into(), + DefWithBodyId::FunctionId(it) => it.into(), + DefWithBodyId::StaticId(it) => it.into(), + DefWithBodyId::ConstId(it) => it.into(), + DefWithBodyId::InTypeConstId(it) => it.into(), + DefWithBodyId::VariantId(it) => it.into(), } } } @@ -594,14 +608,14 @@ impl From<DefWithBodyId> for TypeOwnerId { impl From<GenericDefId> for TypeOwnerId { fn from(value: GenericDefId) -> Self { match value { - GenericDefId::FunctionId(x) => x.into(), - GenericDefId::AdtId(x) => x.into(), - GenericDefId::TraitId(x) => x.into(), - GenericDefId::TraitAliasId(x) => x.into(), - GenericDefId::TypeAliasId(x) => x.into(), - GenericDefId::ImplId(x) => x.into(), - GenericDefId::EnumVariantId(x) => x.into(), - GenericDefId::ConstId(x) => x.into(), + GenericDefId::FunctionId(it) => it.into(), + GenericDefId::AdtId(it) => it.into(), + GenericDefId::TraitId(it) => it.into(), + GenericDefId::TraitAliasId(it) => it.into(), + GenericDefId::TypeAliasId(it) => it.into(), + GenericDefId::ImplId(it) => it.into(), + GenericDefId::EnumVariantId(it) => it.into(), + GenericDefId::ConstId(it) => it.into(), } } } @@ -716,7 +730,7 @@ impl GeneralConstId { .const_data(const_id) .name .as_ref() - .and_then(|x| x.as_str()) + .and_then(|it| it.as_str()) .unwrap_or("_") .to_owned(), GeneralConstId::ConstBlockId(id) => format!("{{anonymous const {id:?}}}"), @@ -821,6 +835,7 @@ pub enum AttrDefId { ImplId(ImplId), GenericParamId(GenericParamId), ExternBlockId(ExternBlockId), + ExternCrateId(ExternCrateId), } impl_from!( @@ -835,7 +850,8 @@ impl_from!( TypeAliasId, MacroId(Macro2Id, MacroRulesId, ProcMacroId), ImplId, - GenericParamId + GenericParamId, + ExternCrateId for AttrDefId ); @@ -927,6 +943,12 @@ impl HasModule for AdtId { } } +impl HasModule for ExternCrateId { + fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { + self.lookup(db).container + } +} + impl HasModule for VariantId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { match self { @@ -950,17 +972,17 @@ impl HasModule for MacroId { impl HasModule for TypeOwnerId { fn module(&self, db: &dyn db::DefDatabase) -> ModuleId { match self { - TypeOwnerId::FunctionId(x) => x.lookup(db).module(db), - TypeOwnerId::StaticId(x) => x.lookup(db).module(db), - TypeOwnerId::ConstId(x) => x.lookup(db).module(db), - TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.module(db), - TypeOwnerId::AdtId(x) => x.module(db), - TypeOwnerId::TraitId(x) => x.lookup(db).container, - TypeOwnerId::TraitAliasId(x) => x.lookup(db).container, - TypeOwnerId::TypeAliasId(x) => x.lookup(db).module(db), - TypeOwnerId::ImplId(x) => x.lookup(db).container, - TypeOwnerId::EnumVariantId(x) => x.parent.lookup(db).container, - TypeOwnerId::ModuleId(x) => *x, + TypeOwnerId::FunctionId(it) => it.lookup(db).module(db), + TypeOwnerId::StaticId(it) => it.lookup(db).module(db), + TypeOwnerId::ConstId(it) => it.lookup(db).module(db), + TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.module(db), + TypeOwnerId::AdtId(it) => it.module(db), + TypeOwnerId::TraitId(it) => it.lookup(db).container, + TypeOwnerId::TraitAliasId(it) => it.lookup(db).container, + TypeOwnerId::TypeAliasId(it) => it.lookup(db).module(db), + TypeOwnerId::ImplId(it) => it.lookup(db).container, + 
TypeOwnerId::EnumVariantId(it) => it.parent.lookup(db).container, + TypeOwnerId::ModuleId(it) => *it, } } } @@ -1050,6 +1072,7 @@ impl AttrDefId { .krate } AttrDefId::MacroId(it) => it.module(db).krate, + AttrDefId::ExternCrateId(it) => it.lookup(db).container.krate, } } } @@ -1101,12 +1124,12 @@ impl AsMacroCall for InFile<&ast::MacroCall> { /// Helper wrapper for `AstId` with `ModPath` #[derive(Clone, Debug, Eq, PartialEq)] -struct AstIdWithPath<T: ast::AstNode> { +struct AstIdWithPath<T: AstIdNode> { ast_id: AstId<T>, path: path::ModPath, } -impl<T: ast::AstNode> AstIdWithPath<T> { +impl<T: AstIdNode> AstIdWithPath<T> { fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> { AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path } } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs index af623fd0e5d..e523c229179 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/lower.rs @@ -1,5 +1,9 @@ //! Context for lowering paths. -use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile}; +use hir_expand::{ + ast_id_map::{AstIdMap, AstIdNode}, + hygiene::Hygiene, + AstId, HirFileId, InFile, +}; use once_cell::unsync::OnceCell; use syntax::ast; use triomphe::Arc; @@ -37,7 +41,7 @@ impl<'a> LowerCtx<'a> { Path::from_src(ast, self) } - pub(crate) fn ast_id<N: syntax::AstNode>(&self, item: &N) -> Option<AstId<N>> { + pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> { let &(file_id, ref ast_id_map) = self.ast_id_map.as_ref()?; let ast_id_map = ast_id_map.get_or_init(|| self.db.ast_id_map(file_id)); Some(InFile::new(file_id, ast_id_map.ast_id(item))) diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs index f41f9719043..abd84c6a46d 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_derive_macro.rs @@ -279,6 +279,44 @@ impl < > core::cmp::Eq for Command< > where {}"#]], } #[test] +fn test_partial_eq_expand_with_derive_const() { + // FIXME: actually expand with const + check( + r#" +//- minicore: derive, eq +#[derive_const(PartialEq, Eq)] +enum Command { + Move { x: i32, y: i32 }, + Do(&'static str), + Jump, +} +"#, + expect![[r#" +#[derive_const(PartialEq, Eq)] +enum Command { + Move { x: i32, y: i32 }, + Do(&'static str), + Jump, +} + +impl < > core::cmp::PartialEq for Command< > where { + fn eq(&self , other: &Self ) -> bool { + match (self , other) { + (Command::Move { + x: x_self, y: y_self, + } + , Command::Move { + x: x_other, y: y_other, + } + )=>x_self.eq(x_other) && y_self.eq(y_other), (Command::Do(f0_self, ), Command::Do(f0_other, ))=>f0_self.eq(f0_other), (Command::Jump, Command::Jump)=>true , _unused=>false + } + } +} +impl < > core::cmp::Eq for Command< > where {}"#]], + ); +} + +#[test] fn test_partial_ord_expand() { check( r#" @@ -379,6 +417,44 @@ fn test_hash_expand() { use core::hash::Hash; #[derive(Hash)] +struct Foo { + x: i32, + y: u64, + z: (i32, u64), +} +"#, + expect![[r#" +use core::hash::Hash; + +#[derive(Hash)] +struct Foo { + x: i32, + y: u64, + z: (i32, u64), +} + +impl < > core::hash::Hash for Foo< > where { + fn hash<H: core::hash::Hasher>(&self , ra_expand_state: &mut H) { + match self { + Foo 
{ + x: x, y: y, z: z, + } + => { + x.hash(ra_expand_state); + y.hash(ra_expand_state); + z.hash(ra_expand_state); + } + , + } + } +}"#]], + ); + check( + r#" +//- minicore: derive, hash +use core::hash::Hash; + +#[derive(Hash)] enum Command { Move { x: i32, y: i32 }, Do(&'static str), diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 07d9baa5897..b232651db96 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -201,7 +201,7 @@ macro_rules! format_args { } fn main() { - ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(arg2), ::core::fmt::Debug::fmt), ]); + ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(arg1(a, b, c)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(arg2), ::core::fmt::Debug::fmt), ]); } "##]], ); @@ -235,11 +235,11 @@ macro_rules! format_args { fn main() { /* error: no rule matches input tokens */; - /* error: no rule matches input tokens */; - /* error: no rule matches input tokens */; - /* error: no rule matches input tokens */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(), ::core::fmt::Display::fmt), ]); - /* error: no rule matches input tokens */; - ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::Argument::new(&(5), ::core::fmt::Display::fmt), ]); + /* error: expected expression */; + /* error: expected expression, expected COMMA */; + /* error: expected expression */::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(), ::core::fmt::Display::fmt), ]); + /* error: expected expression, expected R_PAREN */; + ::core::fmt::Arguments::new_v1(&["", ], &[::core::fmt::ArgumentV1::new(&(5), ::core::fmt::Display::fmt), ]); } "##]], ); @@ -267,7 +267,7 @@ macro_rules! format_args { } fn main() { - ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(b), ::core::fmt::Debug::fmt), ]); + ::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a::<A, B>()), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]); } "##]], ); @@ -300,7 +300,7 @@ macro_rules! format_args { } fn main() { - ::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::Argument::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]); + ::core::fmt::Arguments::new_v1(&[r#""#, r#",mismatch,""#, r#"",""#, r#"""#, ], &[::core::fmt::ArgumentV1::new(&(location_csv_pat(db, &analysis, vfs, &sm, pat_id)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.expected.display(db)), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(mismatch.actual.display(db)), ::core::fmt::Display::fmt), ]); } "##]], ); @@ -334,7 +334,7 @@ macro_rules! 
format_args { } fn main() { - ::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::Argument::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(b), ::core::fmt::Debug::fmt), ]); + ::core::fmt::Arguments::new_v1(&["xxx", "y", "zzz", ], &[::core::fmt::ArgumentV1::new(&(2), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(b), ::core::fmt::Debug::fmt), ]); } "##]], ); @@ -364,8 +364,8 @@ macro_rules! format_args { fn main() { let _ = - /* error: no rule matches input tokens *//* parse error: expected field name or number */ -::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::Argument::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::Argument::new(&(), ::core::fmt::Debug::fmt), ]); + /* error: expected field name or number *//* parse error: expected field name or number */ +::core::fmt::Arguments::new_v1(&["", " ", ], &[::core::fmt::ArgumentV1::new(&(a.), ::core::fmt::Display::fmt), ::core::fmt::ArgumentV1::new(&(), ::core::fmt::Debug::fmt), ]); } "##]], ); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs index 553ffe3d0b8..b26f9867580 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mbe.rs @@ -98,6 +98,42 @@ fn#19 main#20(#21)#21 {#22 "##]], ); } + +#[test] +fn token_mapping_eager() { + check( + r#" +#[rustc_builtin_macro] +#[macro_export] +macro_rules! format_args {} + +macro_rules! identity { + ($expr:expr) => { $expr }; +} + +fn main(foo: ()) { + format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") +} + +"#, + expect![[r##" +#[rustc_builtin_macro] +#[macro_export] +macro_rules! format_args {} + +macro_rules! 
identity { + ($expr:expr) => { $expr }; +} + +fn main(foo: ()) { + // format_args/*+tokenids*/!("{} {} {}"#1,#3 format_args!("{}", 0#10),#12 foo#13,#14 identity!(10#18),#21 "bar"#22) +::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 " "#4294967295,#4294967295 " "#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(::core#4294967295::fmt#4294967295::Arguments#4294967295::new_v1#4294967295(&#4294967295[#4294967295""#4294967295,#4294967295 ]#4294967295,#4294967295 &#4294967295[::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#42949672950#10)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#4294967295foo#13)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::ArgumentV1#4294967295::new#4294967295(&#4294967295(#429496729510#18)#4294967295,#4294967295 ::core#4294967295::fmt#4294967295::Display#4294967295::fmt#4294967295)#4294967295,#4294967295 ]#4294967295)#4294967295 +} + +"##]], + ); +} + #[test] fn float_field_access_macro_input() { check( diff --git a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs index 4a62696df08..7a87e61c693 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -20,8 +20,8 @@ use ::mbe::TokenMap; use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; use expect_test::Expect; use hir_expand::{ - db::{ExpandDatabase, TokenExpander}, - AstId, InFile, MacroDefId, MacroDefKind, MacroFile, + db::{DeclarativeMacroExpander, ExpandDatabase}, + AstId, InFile, MacroFile, }; use stdx::format_to; use syntax::{ @@ -100,32 +100,29 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let call_offset = macro_.syntax().text_range().start().into(); let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_); let ast_id = AstId::new(source.file_id, file_ast_id.upcast()); - let kind = MacroDefKind::Declarative(ast_id); - let macro_def = db - .macro_def(MacroDefId { krate, kind, local_inner: false, allow_internal_unsafe: false }) - .unwrap(); - if let TokenExpander::DeclarativeMacro { mac, def_site_token_map } = &*macro_def { - let tt = match &macro_ { - ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(), - ast::Macro::MacroDef(_) => unimplemented!(""), - }; + let DeclarativeMacroExpander { mac, def_site_token_map } = + &*db.decl_macro_expander(krate, ast_id); + assert_eq!(mac.err(), None); + let tt = match &macro_ { + ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(), + ast::Macro::MacroDef(_) => unimplemented!(""), + }; - let tt_start = tt.syntax().text_range().start(); - tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each( - |token| { - let range = token.text_range().checked_sub(tt_start).unwrap(); - if let Some(id) = def_site_token_map.token_by_range(range) { - let offset = (range.end() + tt_start).into(); - text_edits.push((offset..offset, format!("#{}", id.0))); - } - }, - ); - text_edits.push((
- call_offset..call_offset, - format!("// call ids will be shifted by {:?}\n", mac.shift()), - )); - } + let tt_start = tt.syntax().text_range().start(); + tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each( + |token| { + let range = token.text_range().checked_sub(tt_start).unwrap(); + if let Some(id) = def_site_token_map.token_by_range(range) { + let offset = (range.end() + tt_start).into(); + text_edits.push((offset..offset, format!("#{}", id.0))); + } + }, + ); + text_edits.push(( + call_offset..call_offset, + format!("// call ids will be shifted by {:?}\n", mac.shift()), + )); } for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { @@ -190,7 +187,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream let range: Range<usize> = range.into(); if show_token_ids { - if let Some((tree, map, _)) = arg.as_deref() { + if let Some((tree, map, _)) = arg.value.as_deref() { let tt_range = call.token_tree().unwrap().syntax().text_range(); let mut ranges = Vec::new(); extract_id_ranges(&mut ranges, map, tree); @@ -239,7 +236,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream for impl_id in def_map[local_id].scope.impls() { let src = impl_id.lookup(&db).source(&db); - if src.file_id.is_builtin_derive(&db).is_some() { + if src.file_id.is_builtin_derive(&db) { let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None); format_to!(expanded_text, "\n{}", pp) } diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs index 0ab1bd8490c..86818ce26dd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres.rs @@ -60,7 +60,7 @@ mod tests; use std::{cmp::Ord, ops::Deref}; use base_db::{CrateId, Edition, FileId, ProcMacroKind}; -use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId}; +use hir_expand::{name::Name, HirFileId, InFile, MacroCallId, MacroDefId}; use itertools::Itertools; use la_arena::Arena; use profile::Count; @@ -196,6 +196,10 @@ impl BlockRelativeModuleId { fn into_module(self, krate: CrateId) -> ModuleId { ModuleId { krate, block: self.block, local_id: self.local_id } } + + fn is_block_module(self) -> bool { + self.block.is_some() && self.local_id == DefMap::ROOT + } } impl std::ops::Index<LocalModuleId> for DefMap { @@ -278,7 +282,9 @@ pub struct ModuleData { pub origin: ModuleOrigin, /// Declared visibility of this module. pub visibility: Visibility, - /// Always [`None`] for block modules + /// Parent module in the same `DefMap`. + /// + /// [`None`] for block modules because they are always its `DefMap`'s root. pub parent: Option<LocalModuleId>, pub children: FxHashMap<Name, LocalModuleId>, pub scope: ItemScope, @@ -626,6 +632,17 @@ impl ModuleData { self.origin.definition_source(db) } + /// Same as [`definition_source`] but only returns the file id to prevent parsing the ASt. + pub fn definition_source_file_id(&self) -> HirFileId { + match self.origin { + ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { + definition.into() + } + ModuleOrigin::Inline { definition, .. } => definition.file_id, + ModuleOrigin::BlockExpr { block } => block.file_id, + } + } + /// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`. /// `None` for the crate root or block. 
pub fn declaration_source(&self, db: &dyn DefDatabase) -> Option<InFile<ast::Module>> { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs index 62fb3c7882c..c048716d740 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/collector.rs @@ -52,10 +52,10 @@ use crate::{ tt, visibility::{RawVisibility, Visibility}, AdtId, AstId, AstIdWithPath, ConstLoc, CrateRootModuleId, EnumLoc, EnumVariantId, - ExternBlockLoc, FunctionId, FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, - Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId, - ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, TraitAliasLoc, TraitLoc, - TypeAliasLoc, UnionLoc, UnresolvedMacro, + ExternBlockLoc, ExternCrateLoc, FunctionId, FunctionLoc, ImplLoc, ImportLoc, Intern, + ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc, MacroExpander, MacroId, MacroRulesId, + MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId, ProcMacroLoc, StaticLoc, StructLoc, + TraitAliasLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro, }; static GLOB_RECURSION_LIMIT: Limit = Limit::new(100); @@ -156,10 +156,9 @@ struct Import { alias: Option<ImportAlias>, visibility: RawVisibility, kind: ImportKind, + source: ImportSource, is_prelude: bool, - is_extern_crate: bool, is_macro_use: bool, - source: ImportSource, } impl Import { @@ -168,26 +167,23 @@ impl Import { krate: CrateId, tree: &ItemTree, id: ItemTreeId<item_tree::Import>, - ) -> Vec<Self> { + mut cb: impl FnMut(Self), + ) { let it = &tree[id.value]; let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into()); let visibility = &tree[it.visibility]; let is_prelude = attrs.by_key("prelude_import").exists(); - - let mut res = Vec::new(); it.use_tree.expand(|idx, path, kind, alias| { - res.push(Self { + cb(Self { path, alias, visibility: visibility.clone(), kind, is_prelude, - is_extern_crate: false, is_macro_use: false, source: ImportSource::Import { id, use_tree: idx }, }); }); - res } fn from_extern_crate( @@ -205,7 +201,6 @@ impl Import { visibility: visibility.clone(), kind: ImportKind::Plain, is_prelude: false, - is_extern_crate: true, is_macro_use: attrs.by_key("macro_use").exists(), source: ImportSource::ExternCrate(id), } @@ -776,7 +771,7 @@ impl DefCollector<'_> { let _p = profile::span("resolve_import") .detail(|| format!("{}", import.path.display(self.db.upcast()))); tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition); - if import.is_extern_crate { + if matches!(import.source, ImportSource::ExternCrate { .. }) { let name = import .path .as_ident() @@ -813,11 +808,8 @@ impl DefCollector<'_> { } } - // Check whether all namespace is resolved - if def.take_types().is_some() - && def.take_values().is_some() - && def.take_macros().is_some() - { + // Check whether all namespaces are resolved. 
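A rough illustration of the per-namespace bookkeeping that the `is_full()` check just below summarizes (hypothetical user code): one name can live in the type, value and macro namespaces independently, so an import only counts as fully resolved once every namespace has been settled.

mod m {
    pub struct Unit; // occupies both the type namespace and the value namespace
}

use m::Unit; // a single `use` can resolve in more than one namespace at once

fn main() {
    let v: Unit = Unit; // `Unit` as a type and `Unit` as a value (its constructor)
    let _ = v;
}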
+ if def.is_full() { PartialResolvedImport::Resolved(def) } else { PartialResolvedImport::Indeterminate(def) @@ -826,7 +818,7 @@ impl DefCollector<'_> { } fn resolve_extern_crate(&self, name: &Name) -> Option<CrateRootModuleId> { - if *name == name!(self) { + if *name == name![self] { cov_mark::hit!(extern_crate_self_as); Some(self.def_map.crate_root()) } else { @@ -867,7 +859,7 @@ impl DefCollector<'_> { tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 - if import.is_extern_crate + if matches!(import.source, ImportSource::ExternCrate { .. }) && self.def_map.block.is_none() && module_id == DefMap::ROOT { @@ -1585,21 +1577,34 @@ impl ModCollector<'_, '_> { match item { ModItem::Mod(m) => self.collect_module(m, &attrs), ModItem::Import(import_id) => { - let imports = Import::from_use( + let _import_id = ImportLoc { + container: module, + id: ItemTreeId::new(self.tree_id, import_id), + } + .intern(db); + Import::from_use( db, krate, self.item_tree, ItemTreeId::new(self.tree_id, import_id), - ); - self.def_collector.unresolved_imports.extend(imports.into_iter().map( - |import| ImportDirective { - module_id: self.module_id, - import, - status: PartialResolvedImport::Unresolved, + |import| { + self.def_collector.unresolved_imports.push(ImportDirective { + module_id: self.module_id, + import, + status: PartialResolvedImport::Unresolved, + }); }, - )); + ) } ModItem::ExternCrate(import_id) => { + let extern_crate_id = ExternCrateLoc { + container: module, + id: ItemTreeId::new(self.tree_id, import_id), + } + .intern(db); + self.def_collector.def_map.modules[self.module_id] + .scope + .define_extern_crate_decl(extern_crate_id); self.def_collector.unresolved_imports.push(ImportDirective { module_id: self.module_id, import: Import::from_extern_crate( @@ -2230,8 +2235,12 @@ impl ModCollector<'_, '_> { } fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) { - let Some((source, target)) = Self::borrow_modules(self.def_collector.def_map.modules.as_mut(), module_id, self.module_id) else { - return + let Some((source, target)) = Self::borrow_modules( + self.def_collector.def_map.modules.as_mut(), + module_id, + self.module_id, + ) else { + return; }; for (name, macs) in source.scope.legacy_macros() { @@ -2271,7 +2280,7 @@ impl ModCollector<'_, '_> { fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) { let ast_id = item.ast_id(self.item_tree); - let ast_id = InFile::new(self.file_id(), ast_id.upcast()); + let ast_id = InFile::new(self.file_id(), ast_id.erase()); self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code( self.module_id, ast_id, diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs index 18b424255cd..e82e97b628e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs @@ -2,12 +2,9 @@ use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; -use hir_expand::{attrs::AttrId, MacroCallKind}; +use hir_expand::{attrs::AttrId, ErasedAstId, MacroCallKind}; use la_arena::Idx; -use syntax::{ - ast::{self, AnyHasAttrs}, - SyntaxError, -}; +use syntax::{ast, SyntaxError}; use crate::{ item_tree::{self, ItemTreeId}, @@ -24,7 +21,7 @@ pub enum DefDiagnosticKind { UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: 
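A sketch of the `extern crate self` case handled in `resolve_extern_crate` above (the alias name is hypothetical): aliasing `self` simply makes the current crate reachable under another name, which the resolver maps straight back to the crate root.

extern crate self as this_crate;

pub struct Item;

fn main() {
    // Both paths name the same item: one through the alias, one directly.
    let _: this_crate::Item = Item;
}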
Idx<ast::UseTree> }, - UnconfiguredCode { ast: AstId<AnyHasAttrs>, cfg: CfgExpr, opts: CfgOptions }, + UnconfiguredCode { ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions }, UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId }, @@ -81,7 +78,7 @@ impl DefDiagnostic { pub fn unconfigured_code( container: LocalModuleId, - ast: AstId<ast::AnyHasAttrs>, + ast: ErasedAstId, cfg: CfgExpr, opts: CfgOptions, ) -> Self { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs index 5f6163175a7..de22ea10146 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/nameres/path_resolution.rs @@ -12,11 +12,12 @@ use base_db::Edition; use hir_expand::name::Name; +use triomphe::Arc; use crate::{ db::DefDatabase, item_scope::BUILTIN_SCOPE, - nameres::{sub_namespace_match, BuiltinShadowMode, DefMap, MacroSubNs}, + nameres::{sub_namespace_match, BlockInfo, BuiltinShadowMode, DefMap, MacroSubNs}, path::{ModPath, PathKind}, per_ns::PerNs, visibility::{RawVisibility, Visibility}, @@ -159,13 +160,15 @@ impl DefMap { (None, new) => new, }; - match &current_map.block { - Some(block) => { + match current_map.block { + Some(block) if original_module == Self::ROOT => { + // Block modules "inherit" names from its parent module. original_module = block.parent.local_id; arc = block.parent.def_map(db, current_map.krate); - current_map = &*arc; + current_map = &arc; } - None => return result, + // Proper (non-block) modules, including those in block `DefMap`s, don't. + _ => return result, } } } @@ -189,7 +192,7 @@ impl DefMap { )); let mut segments = path.segments().iter().enumerate(); - let mut curr_per_ns: PerNs = match path.kind { + let mut curr_per_ns = match path.kind { PathKind::DollarCrate(krate) => { if krate == self.krate { cov_mark::hit!(macro_dollar_crate_self); @@ -241,51 +244,54 @@ impl DefMap { ) } PathKind::Super(lvl) => { - let mut module = original_module; - for i in 0..lvl { - match self.modules[module].parent { - Some(it) => module = it, - None => match &self.block { - Some(block) => { - // Look up remaining path in parent `DefMap` - let new_path = ModPath::from_segments( - PathKind::Super(lvl - i), - path.segments().to_vec(), - ); - tracing::debug!( - "`super` path: {} -> {} in parent map", - path.display(db.upcast()), - new_path.display(db.upcast()) - ); - return block - .parent - .def_map(db, self.krate) - .resolve_path_fp_with_macro( - db, - mode, - block.parent.local_id, - &new_path, - shadow, - expected_macro_subns, - ); - } - None => { - tracing::debug!("super path in root module"); - return ResolvePathResult::empty(ReachedFixedPoint::Yes); - } - }, - } + let mut local_id = original_module; + let mut ext; + let mut def_map = self; + + // Adjust `local_id` to `self`, i.e. the nearest non-block module. + if def_map.module_id(local_id).is_block_module() { + (ext, local_id) = adjust_to_nearest_non_block_module(db, def_map, local_id); + def_map = &ext; } - // Resolve `self` to the containing crate-rooted module if we're a block - self.with_ancestor_maps(db, module, &mut |def_map, module| { - if def_map.block.is_some() { - None // keep ascending + // Go up the module tree but skip block modules as `super` always refers to the + // nearest non-block module. + for _ in 0..lvl { + // Loop invariant: at the beginning of each loop, `local_id` must refer to a + // non-block module.
+ if let Some(parent) = def_map.modules[local_id].parent { + local_id = parent; + if def_map.module_id(local_id).is_block_module() { + (ext, local_id) = + adjust_to_nearest_non_block_module(db, def_map, local_id); + def_map = &ext; + } } else { - Some(PerNs::types(def_map.module_id(module).into(), Visibility::Public)) + stdx::always!(def_map.block.is_none()); + tracing::debug!("super path in root module"); + return ResolvePathResult::empty(ReachedFixedPoint::Yes); } - }) - .expect("block DefMap not rooted in crate DefMap") + } + + let module = def_map.module_id(local_id); + stdx::never!(module.is_block_module()); + + if self.block != def_map.block { + // If we have a different `DefMap` from `self` (the orignal `DefMap` we started + // with), resolve the remaining path segments in that `DefMap`. + let path = + ModPath::from_segments(PathKind::Super(0), path.segments().iter().cloned()); + return def_map.resolve_path_fp_with_macro( + db, + mode, + local_id, + &path, + shadow, + expected_macro_subns, + ); + } + + PerNs::types(module.into(), Visibility::Public) } PathKind::Abs => { // 2018-style absolute path -- only extern prelude @@ -508,3 +514,27 @@ impl DefMap { } } } + +/// Given a block module, returns its nearest non-block module and the `DefMap` it blongs to. +fn adjust_to_nearest_non_block_module( + db: &dyn DefDatabase, + def_map: &DefMap, + mut local_id: LocalModuleId, +) -> (Arc<DefMap>, LocalModuleId) { + // INVARIANT: `local_id` in `def_map` must be a block module. + stdx::always!(def_map.module_id(local_id).is_block_module()); + + let mut ext; + // This needs to be a local variable due to our mighty lifetime. + let mut def_map = def_map; + loop { + let BlockInfo { parent, .. } = def_map.block.expect("block module without parent module"); + + ext = parent.def_map(db, def_map.krate); + def_map = &ext; + local_id = parent.local_id; + if !parent.is_block_module() { + return (ext, local_id); + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path.rs b/src/tools/rust-analyzer/crates/hir-def/src/path.rs index ff4ae69546c..06530cc7ebd 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path.rs @@ -45,7 +45,7 @@ pub enum Path { /// Invariant: the same len as `self.mod_path.segments` or `None` if all segments are `None`. generic_args: Option<Box<[Option<Interned<GenericArgs>>]>>, }, - /// A link to a lang item. It is used in desugaring of things like `x?`. We can show these + /// A link to a lang item. It is used in desugaring of things like `it?`. We can show these /// links via a normal path since they might be private and not accessible in the usage place. LangItem(LangItemTarget), } @@ -135,10 +135,7 @@ impl Path { pub fn segments(&self) -> PathSegments<'_> { let Path::Normal { mod_path, generic_args, .. 
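A sketch of the `super` behaviour that the rewritten `PathKind::Super` arm and `adjust_to_nearest_non_block_module` above implement (module names are hypothetical): a function body containing items is a block module, and `super` written inside it is counted from the nearest enclosing non-block module rather than from the block itself.

mod outer {
    pub struct Marker;

    pub mod inner {
        pub fn f() {
            // `self` here means `inner`, the nearest non-block module,
            // so `super` names `outer`, not the surrounding block.
            use super::Marker;
            let _ = Marker;
        }
    }
}

fn main() {
    outer::inner::f();
}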
} = self else { - return PathSegments { - segments: &[], - generic_args: None, - }; + return PathSegments { segments: &[], generic_args: None }; }; let s = PathSegments { segments: mod_path.segments(), generic_args: generic_args.as_deref() }; diff --git a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs index 1cb17ff0d26..abd817893cc 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/path/lower.rs @@ -74,8 +74,8 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo Some(trait_ref) => { let Path::Normal { mod_path, generic_args: path_generic_args, .. } = - Path::from_src(trait_ref.path()?, ctx)? else - { + Path::from_src(trait_ref.path()?, ctx)? + else { return None; }; let num_segments = mod_path.segments().len(); diff --git a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs index 0aead6f37f7..11d58a6ba09 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/pretty.rs @@ -12,8 +12,8 @@ use crate::{ }; pub(crate) fn print_path(db: &dyn ExpandDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result { - if let Path::LangItem(x) = path { - return write!(buf, "$lang_item::{x:?}"); + if let Path::LangItem(it) = path { + return write!(buf, "$lang_item::{it:?}"); } match path.type_anchor() { Some(anchor) => { diff --git a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs index 0d6f55411c1..10f5702845e 100644 --- a/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs +++ b/src/tools/rust-analyzer/crates/hir-def/src/resolver.rs @@ -22,10 +22,10 @@ use crate::{ per_ns::PerNs, visibility::{RawVisibility, Visibility}, AdtId, AssocItemId, ConstId, ConstParamId, CrateRootModuleId, DefWithBodyId, EnumId, - EnumVariantId, ExternBlockId, FunctionId, GenericDefId, GenericParamId, HasModule, ImplId, - ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, MacroRulesId, - ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, TypeAliasId, - TypeOrConstParamId, TypeOwnerId, TypeParamId, VariantId, + EnumVariantId, ExternBlockId, ExternCrateId, FunctionId, GenericDefId, GenericParamId, + HasModule, ImplId, ItemContainerId, LifetimeParamId, LocalModuleId, Lookup, Macro2Id, MacroId, + MacroRulesId, ModuleDefId, ModuleId, ProcMacroId, StaticId, StructId, TraitAliasId, TraitId, + TypeAliasId, TypeOrConstParamId, TypeOwnerId, TypeParamId, VariantId, }; #[derive(Debug, Clone)] @@ -186,12 +186,12 @@ impl Resolver { Path::LangItem(l) => { return Some(( match *l { - LangItemTarget::Union(x) => TypeNs::AdtId(x.into()), - LangItemTarget::TypeAlias(x) => TypeNs::TypeAliasId(x), - LangItemTarget::Struct(x) => TypeNs::AdtId(x.into()), - LangItemTarget::EnumVariant(x) => TypeNs::EnumVariantId(x), - LangItemTarget::EnumId(x) => TypeNs::AdtId(x.into()), - LangItemTarget::Trait(x) => TypeNs::TraitId(x), + LangItemTarget::Union(it) => TypeNs::AdtId(it.into()), + LangItemTarget::TypeAlias(it) => TypeNs::TypeAliasId(it), + LangItemTarget::Struct(it) => TypeNs::AdtId(it.into()), + LangItemTarget::EnumVariant(it) => TypeNs::EnumVariantId(it), + LangItemTarget::EnumId(it) => TypeNs::AdtId(it.into()), + LangItemTarget::Trait(it) => TypeNs::TraitId(it), LangItemTarget::Function(_) | 
LangItemTarget::ImplDef(_) | LangItemTarget::Static(_) => return None, @@ -273,10 +273,10 @@ impl Resolver { Path::Normal { mod_path, .. } => mod_path, Path::LangItem(l) => { return Some(ResolveValueResult::ValueNs(match *l { - LangItemTarget::Function(x) => ValueNs::FunctionId(x), - LangItemTarget::Static(x) => ValueNs::StaticId(x), - LangItemTarget::Struct(x) => ValueNs::StructId(x), - LangItemTarget::EnumVariant(x) => ValueNs::EnumVariantId(x), + LangItemTarget::Function(it) => ValueNs::FunctionId(it), + LangItemTarget::Static(it) => ValueNs::StaticId(it), + LangItemTarget::Struct(it) => ValueNs::StructId(it), + LangItemTarget::EnumVariant(it) => ValueNs::EnumVariantId(it), LangItemTarget::Union(_) | LangItemTarget::ImplDef(_) | LangItemTarget::TypeAlias(_) @@ -425,14 +425,14 @@ impl Resolver { /// The shadowing is accounted for: in /// /// ``` - /// let x = 92; + /// let it = 92; /// { - /// let x = 92; + /// let it = 92; /// $0 /// } /// ``` /// - /// there will be only one entry for `x` in the result. + /// there will be only one entry for `it` in the result. /// /// The result is ordered *roughly* from the innermost scope to the /// outermost: when the name is introduced in two namespaces in two scopes, @@ -1018,20 +1018,26 @@ impl HasResolver for ExternBlockId { } } +impl HasResolver for ExternCrateId { + fn resolver(self, db: &dyn DefDatabase) -> Resolver { + self.lookup(db).container.resolver(db) + } +} + impl HasResolver for TypeOwnerId { fn resolver(self, db: &dyn DefDatabase) -> Resolver { match self { - TypeOwnerId::FunctionId(x) => x.resolver(db), - TypeOwnerId::StaticId(x) => x.resolver(db), - TypeOwnerId::ConstId(x) => x.resolver(db), - TypeOwnerId::InTypeConstId(x) => x.lookup(db).owner.resolver(db), - TypeOwnerId::AdtId(x) => x.resolver(db), - TypeOwnerId::TraitId(x) => x.resolver(db), - TypeOwnerId::TraitAliasId(x) => x.resolver(db), - TypeOwnerId::TypeAliasId(x) => x.resolver(db), - TypeOwnerId::ImplId(x) => x.resolver(db), - TypeOwnerId::EnumVariantId(x) => x.resolver(db), - TypeOwnerId::ModuleId(x) => x.resolver(db), + TypeOwnerId::FunctionId(it) => it.resolver(db), + TypeOwnerId::StaticId(it) => it.resolver(db), + TypeOwnerId::ConstId(it) => it.resolver(db), + TypeOwnerId::InTypeConstId(it) => it.lookup(db).owner.resolver(db), + TypeOwnerId::AdtId(it) => it.resolver(db), + TypeOwnerId::TraitId(it) => it.resolver(db), + TypeOwnerId::TraitAliasId(it) => it.resolver(db), + TypeOwnerId::TypeAliasId(it) => it.resolver(db), + TypeOwnerId::ImplId(it) => it.resolver(db), + TypeOwnerId::EnumVariantId(it) => it.resolver(db), + TypeOwnerId::ModuleId(it) => it.resolver(db), } } } diff --git a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml index 40d8659f25b..1f27204c191 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-expand/Cargo.toml @@ -16,11 +16,9 @@ cov-mark = "2.0.0-pre.1" tracing = "0.1.35" either = "1.7.0" rustc-hash = "1.1.0" -la-arena = { version = "0.3.0", path = "../../lib/la-arena" } +la-arena.workspace = true itertools = "0.10.5" -hashbrown = { version = "0.12.1", features = [ - "inline-more", -], default-features = false } +hashbrown.workspace = true smallvec.workspace = true triomphe.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs index c2b0d5985e3..1906ed15bae 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs +++ 
b/src/tools/rust-analyzer/crates/hir-expand/src/ast_id_map.rs @@ -18,47 +18,89 @@ use rustc_hash::FxHasher; use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; /// `AstId` points to an AST node in a specific file. -pub struct FileAstId<N: AstNode> { +pub struct FileAstId<N: AstIdNode> { raw: ErasedFileAstId, covariant: PhantomData<fn() -> N>, } -impl<N: AstNode> Clone for FileAstId<N> { +impl<N: AstIdNode> Clone for FileAstId<N> { fn clone(&self) -> FileAstId<N> { *self } } -impl<N: AstNode> Copy for FileAstId<N> {} +impl<N: AstIdNode> Copy for FileAstId<N> {} -impl<N: AstNode> PartialEq for FileAstId<N> { +impl<N: AstIdNode> PartialEq for FileAstId<N> { fn eq(&self, other: &Self) -> bool { self.raw == other.raw } } -impl<N: AstNode> Eq for FileAstId<N> {} -impl<N: AstNode> Hash for FileAstId<N> { +impl<N: AstIdNode> Eq for FileAstId<N> {} +impl<N: AstIdNode> Hash for FileAstId<N> { fn hash<H: Hasher>(&self, hasher: &mut H) { self.raw.hash(hasher); } } -impl<N: AstNode> fmt::Debug for FileAstId<N> { +impl<N: AstIdNode> fmt::Debug for FileAstId<N> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "FileAstId::<{}>({})", type_name::<N>(), self.raw.into_raw()) } } -impl<N: AstNode> FileAstId<N> { +impl<N: AstIdNode> FileAstId<N> { // Can't make this a From implementation because of coherence - pub fn upcast<M: AstNode>(self) -> FileAstId<M> + pub fn upcast<M: AstIdNode>(self) -> FileAstId<M> where N: Into<M>, { FileAstId { raw: self.raw, covariant: PhantomData } } + + pub fn erase(self) -> ErasedFileAstId { + self.raw + } } -type ErasedFileAstId = Idx<SyntaxNodePtr>; +pub type ErasedFileAstId = Idx<SyntaxNodePtr>; + +pub trait AstIdNode: AstNode {} +macro_rules! register_ast_id_node { + (impl AstIdNode for $($ident:ident),+ ) => { + $( + impl AstIdNode for ast::$ident {} + )+ + fn should_alloc_id(kind: syntax::SyntaxKind) -> bool { + $( + ast::$ident::can_cast(kind) + )||+ + } + }; +} +register_ast_id_node! { + impl AstIdNode for + Item, + Adt, + Enum, + Struct, + Union, + Const, + ExternBlock, + ExternCrate, + Fn, + Impl, + Macro, + MacroDef, + MacroRules, + MacroCall, + Module, + Static, + Trait, + TraitAlias, + TypeAlias, + Use, + AssocItem, BlockExpr, Variant, RecordField, TupleField, ConstArg +} /// Maps items' `SyntaxNode`s to `ErasedFileAstId`s and back. #[derive(Default)] @@ -92,14 +134,7 @@ impl AstIdMap { // change parent's id. This means that, say, adding a new function to a // trait does not change ids of top-level items, which helps caching. 
bdfs(node, |it| { - let kind = it.kind(); - if ast::Item::can_cast(kind) - || ast::BlockExpr::can_cast(kind) - || ast::Variant::can_cast(kind) - || ast::RecordField::can_cast(kind) - || ast::TupleField::can_cast(kind) - || ast::ConstArg::can_cast(kind) - { + if should_alloc_id(it.kind()) { res.alloc(&it); true } else { @@ -120,15 +155,19 @@ impl AstIdMap { res } - pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> { + pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> { let raw = self.erased_ast_id(item.syntax()); FileAstId { raw, covariant: PhantomData } } - pub fn get<N: AstNode>(&self, id: FileAstId<N>) -> AstPtr<N> { + pub fn get<N: AstIdNode>(&self, id: FileAstId<N>) -> AstPtr<N> { AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap() } + pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr { + self.arena[id].clone() + } + fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId { let ptr = SyntaxNodePtr::new(item); let hash = hash_ptr(&ptr); diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs index 80695bc0656..4ee12e2f212 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_attr_macro.rs @@ -35,7 +35,7 @@ macro_rules! register_builtin { impl BuiltinAttrExpander { pub fn is_derive(self) -> bool { - matches!(self, BuiltinAttrExpander::Derive) + matches!(self, BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst) } pub fn is_test(self) -> bool { matches!(self, BuiltinAttrExpander::Test) @@ -50,6 +50,8 @@ register_builtin! { (cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand, (derive, Derive) => derive_attr_expand, + // derive const is equivalent to derive for our proposes. + (derive_const, DeriveConst) => derive_attr_expand, (global_allocator, GlobalAllocator) => dummy_attr_expand, (test, Test) => dummy_attr_expand, (test_case, TestCase) => dummy_attr_expand diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs index 3d1e272b900..ecc8b407a9c 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_derive_macro.rs @@ -12,9 +12,7 @@ use crate::{ name::{AsName, Name}, tt::{self, TokenId}, }; -use syntax::ast::{ - self, AstNode, FieldList, HasAttrs, HasGenericParams, HasModuleItem, HasName, HasTypeBounds, -}; +use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; use crate::{db::ExpandDatabase, name, quote, ExpandError, ExpandResult, MacroCallId}; @@ -30,12 +28,13 @@ macro_rules! 
register_builtin { &self, db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + token_map: &TokenMap, ) -> ExpandResult<tt::Subtree> { let expander = match *self { $( BuiltinDeriveExpander::$trait => $expand, )* }; - expander(db, id, tt) + expander(db, id, tt, token_map) } fn find_by_name(name: &name::Name) -> Option<Self> { @@ -72,12 +71,12 @@ enum VariantShape { } fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> { - (0..n).map(|x| Ident::new(format!("f{x}"), tt::TokenId::unspecified())) + (0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified())) } impl VariantShape { fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree { - self.as_pattern_map(path, |x| quote!(#x)) + self.as_pattern_map(path, |it| quote!(#it)) } fn field_names(&self) -> Vec<tt::Ident> { @@ -95,17 +94,17 @@ impl VariantShape { ) -> tt::Subtree { match self { VariantShape::Struct(fields) => { - let fields = fields.iter().map(|x| { - let mapped = field_map(x); - quote! { #x : #mapped , } + let fields = fields.iter().map(|it| { + let mapped = field_map(it); + quote! { #it : #mapped , } }); quote! { #path { ##fields } } } &VariantShape::Tuple(n) => { - let fields = tuple_field_iterator(n).map(|x| { - let mapped = field_map(&x); + let fields = tuple_field_iterator(n).map(|it| { + let mapped = field_map(&it); quote! { #mapped , } @@ -118,16 +117,16 @@ impl VariantShape { } } - fn from(value: Option<FieldList>, token_map: &TokenMap) -> Result<Self, ExpandError> { + fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> { let r = match value { None => VariantShape::Unit, - Some(FieldList::RecordFieldList(x)) => VariantShape::Struct( - x.fields() - .map(|x| x.name()) - .map(|x| name_to_token(token_map, x)) + Some(FieldList::RecordFieldList(it)) => VariantShape::Struct( + it.fields() + .map(|it| it.name()) + .map(|it| name_to_token(tm, it)) .collect::<Result<_, _>>()?, ), - Some(FieldList::TupleFieldList(x)) => VariantShape::Tuple(x.fields().count()), + Some(FieldList::TupleFieldList(it)) => VariantShape::Tuple(it.fields().count()), }; Ok(r) } @@ -141,7 +140,7 @@ enum AdtShape { impl AdtShape { fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> { - self.as_pattern_map(name, |x| quote!(#x)) + self.as_pattern_map(name, |it| quote!(#it)) } fn field_names(&self) -> Vec<Vec<tt::Ident>> { @@ -190,32 +189,19 @@ struct BasicAdtInfo { associated_types: Vec<tt::Subtree>, } -fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> { - let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems); - let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| { - debug!("derive node didn't parse"); - ExpandError::other("invalid item definition") - })?; - let item = macro_items.items().next().ok_or_else(|| { - debug!("no module item parsed"); - ExpandError::other("no item found") - })?; - let adt = ast::Adt::cast(item.syntax().clone()).ok_or_else(|| { - debug!("expected adt, found: {:?}", item); - ExpandError::other("expected struct, enum or union") - })?; +fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> { let (name, generic_param_list, shape) = match &adt { ast::Adt::Struct(it) => ( it.name(), it.generic_param_list(), - AdtShape::Struct(VariantShape::from(it.field_list(), &token_map)?), + AdtShape::Struct(VariantShape::from(tm, it.field_list())?), ), ast::Adt::Enum(it) => { let default_variant = it .variant_list() .into_iter() - .flat_map(|x| x.variants()) - 
.position(|x| x.attrs().any(|x| x.simple_name() == Some("default".into()))); + .flat_map(|it| it.variants()) + .position(|it| it.attrs().any(|it| it.simple_name() == Some("default".into()))); ( it.name(), it.generic_param_list(), @@ -224,11 +210,11 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> { variants: it .variant_list() .into_iter() - .flat_map(|x| x.variants()) - .map(|x| { + .flat_map(|it| it.variants()) + .map(|it| { Ok(( - name_to_token(&token_map, x.name())?, - VariantShape::from(x.field_list(), &token_map)?, + name_to_token(tm, it.name())?, + VariantShape::from(tm, it.field_list())?, )) }) .collect::<Result<_, ExpandError>>()?, @@ -246,16 +232,16 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> { let name = { let this = param.name(); match this { - Some(x) => { - param_type_set.insert(x.as_name()); - mbe::syntax_node_to_token_tree(x.syntax()).0 + Some(it) => { + param_type_set.insert(it.as_name()); + mbe::syntax_node_to_token_tree(it.syntax()).0 } None => tt::Subtree::empty(), } }; let bounds = match ¶m { - ast::TypeOrConstParam::Type(x) => { - x.type_bound_list().map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0) + ast::TypeOrConstParam::Type(it) => { + it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) } ast::TypeOrConstParam::Const(_) => None, }; @@ -296,9 +282,9 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, ExpandError> { let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); param_type_set.contains(&name).then_some(p) }) - .map(|x| mbe::syntax_node_to_token_tree(x.syntax()).0) + .map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) .collect(); - let name_token = name_to_token(&token_map, name)?; + let name_token = name_to_token(&tm, name)?; Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) } @@ -345,11 +331,12 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id /// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and /// therefore does not get bound by the derived trait. fn expand_simple_derive( - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, trait_path: tt::Subtree, make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree, ) -> ExpandResult<tt::Subtree> { - let info = match parse_adt(tt) { + let info = match parse_adt(tm, tt) { Ok(info) => info, Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), }; @@ -373,10 +360,10 @@ fn expand_simple_derive( }) .unzip(); - where_block.extend(info.associated_types.iter().map(|x| { - let x = x.clone(); + where_block.extend(info.associated_types.iter().map(|it| { + let it = it.clone(); let bound = trait_path.clone(); - quote! { #x : #bound , } + quote! { #it : #bound , } })); let name = info.name; @@ -405,19 +392,21 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree fn copy_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::marker::Copy }, |_| quote! {}) + expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {}) } fn clone_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::clone::Clone }, |adt| { + expand_simple_derive(tt, tm, quote! 
{ #krate::clone::Clone }, |adt| { if matches!(adt.shape, AdtShape::Union) { let star = tt::Punct { char: '*', @@ -444,7 +433,7 @@ fn clone_expand( } let name = &adt.name; let patterns = adt.shape.as_pattern(name); - let exprs = adt.shape.as_pattern_map(name, |x| quote! { #x .clone() }); + let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() }); let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| { let fat_arrow = fat_arrow(); quote! { @@ -479,10 +468,11 @@ fn and_and() -> ::tt::Subtree<TokenId> { fn default_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::default::Default }, |adt| { + expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| { let body = match &adt.shape { AdtShape::Struct(fields) => { let name = &adt.name; @@ -518,16 +508,17 @@ fn default_expand( fn debug_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::fmt::Debug }, |adt| { + expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| { let for_variant = |name: String, v: &VariantShape| match v { VariantShape::Struct(fields) => { - let for_fields = fields.iter().map(|x| { - let x_string = x.to_string(); + let for_fields = fields.iter().map(|it| { + let x_string = it.to_string(); quote! { - .field(#x_string, & #x) + .field(#x_string, & #it) } }); quote! { @@ -535,9 +526,9 @@ fn debug_expand( } } VariantShape::Tuple(n) => { - let for_fields = tuple_field_iterator(*n).map(|x| { + let for_fields = tuple_field_iterator(*n).map(|it| { quote! { - .field( & #x) + .field( & #it) } }); quote! { @@ -598,10 +589,11 @@ fn debug_expand( fn hash_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::hash::Hash }, |adt| { + expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| { if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here return quote! {}; @@ -621,7 +613,7 @@ fn hash_expand( let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map( |(pat, names)| { let expr = { - let it = names.iter().map(|x| quote! { #x . hash(ra_expand_state); }); + let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); }); quote! { { ##it } } @@ -632,9 +624,14 @@ fn hash_expand( } }, ); + let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) { + quote! { #krate::mem::discriminant(self).hash(ra_expand_state); } + } else { + quote! {} + }; quote! { fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) { - #krate::mem::discriminant(self).hash(ra_expand_state); + #check_discriminant match self { ##arms } @@ -646,19 +643,21 @@ fn hash_expand( fn eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::cmp::Eq }, |_| quote! {}) + expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! 
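A hand-written sketch of the behavioural change in `hash_expand` above (type names are hypothetical): with `check_discriminant`, only enums feed `mem::discriminant` into the hasher, while structs now hash their fields alone.

use std::hash::{Hash, Hasher};

struct Point { x: i32, y: i32 }

impl Hash for Point {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // struct: no discriminant, just the fields
        self.x.hash(state);
        self.y.hash(state);
    }
}

enum Shape { Dot, Square(i32) }

impl Hash for Shape {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // enum: discriminant first, then the live variant's fields
        std::mem::discriminant(self).hash(state);
        match self {
            Shape::Dot => {}
            Shape::Square(side) => side.hash(state),
        }
    }
}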
{}) } fn partial_eq_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::cmp::PartialEq }, |adt| { + expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| { if matches!(adt.shape, AdtShape::Union) { // FIXME: Return expand error here return quote! {}; @@ -674,9 +673,9 @@ fn partial_eq_expand( quote!(true) } [first, rest @ ..] => { - let rest = rest.iter().map(|x| { - let t1 = Ident::new(format!("{}_self", x.text), x.span); - let t2 = Ident::new(format!("{}_other", x.text), x.span); + let rest = rest.iter().map(|it| { + let t1 = Ident::new(format!("{}_self", it.text), it.span); + let t2 = Ident::new(format!("{}_other", it.text), it.span); let and_and = and_and(); quote!(#and_and #t1 .eq( #t2 )) }); @@ -708,12 +707,12 @@ fn self_and_other_patterns( adt: &BasicAdtInfo, name: &tt::Ident, ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) { - let self_patterns = adt.shape.as_pattern_map(name, |x| { - let t = Ident::new(format!("{}_self", x.text), x.span); + let self_patterns = adt.shape.as_pattern_map(name, |it| { + let t = Ident::new(format!("{}_self", it.text), it.span); quote!(#t) }); - let other_patterns = adt.shape.as_pattern_map(name, |x| { - let t = Ident::new(format!("{}_other", x.text), x.span); + let other_patterns = adt.shape.as_pattern_map(name, |it| { + let t = Ident::new(format!("{}_other", it.text), it.span); quote!(#t) }); (self_patterns, other_patterns) @@ -722,10 +721,11 @@ fn self_and_other_patterns( fn ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::cmp::Ord }, |adt| { + expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| { fn compare( krate: &tt::TokenTree, left: tt::Subtree, @@ -747,9 +747,6 @@ fn ord_expand( // FIXME: Return expand error here return quote!(); } - let left = quote!(#krate::intrinsics::discriminant_value(self)); - let right = quote!(#krate::intrinsics::discriminant_value(other)); - let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name); let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( |(pat1, pat2, fields)| { @@ -764,17 +761,17 @@ fn ord_expand( }, ); let fat_arrow = fat_arrow(); - let body = compare( - krate, - left, - right, - quote! { - match (self, other) { - ##arms - _unused #fat_arrow #krate::cmp::Ordering::Equal - } - }, - ); + let mut body = quote! { + match (self, other) { + ##arms + _unused #fat_arrow #krate::cmp::Ordering::Equal + } + }; + if matches!(&adt.shape, AdtShape::Enum { .. }) { + let left = quote!(#krate::intrinsics::discriminant_value(self)); + let right = quote!(#krate::intrinsics::discriminant_value(other)); + body = compare(krate, left, right, body); + } quote! { fn cmp(&self, other: &Self) -> #krate::cmp::Ordering { #body @@ -786,10 +783,11 @@ fn ord_expand( fn partial_ord_expand( db: &dyn ExpandDatabase, id: MacroCallId, - tt: &tt::Subtree, + tt: &ast::Adt, + tm: &TokenMap, ) -> ExpandResult<tt::Subtree> { let krate = &find_builtin_crate(db, id); - expand_simple_derive(tt, quote! { #krate::cmp::PartialOrd }, |adt| { + expand_simple_derive(tt, tm, quote! 
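A hand-written approximation of what `partial_eq_expand` above generates for a small enum (names are hypothetical): each variant is matched with `<field>_self` / `<field>_other` bindings and the field comparisons are chained with `&&`, with a catch-all arm returning `false`.

enum E {
    Unit,
    Pair(i32, i32),
}

impl PartialEq for E {
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (E::Unit, E::Unit) => true,
            (E::Pair(f0_self, f1_self), E::Pair(f0_other, f1_other)) => {
                f0_self.eq(f0_other) && f1_self.eq(f1_other)
            }
            _unused => false,
        }
    }
}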
{ #krate::cmp::PartialOrd }, |adt| { fn compare( krate: &tt::TokenTree, left: tt::Subtree, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs index a9f0c154b02..95c6baf42da 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/builtin_fn_macro.rs @@ -339,7 +339,7 @@ fn format_args_expand_general( parts.push(mem::take(&mut last_part)); let arg_tree = if argument.is_empty() { match args.next() { - Some(x) => x, + Some(it) => it, None => { err = Some(mbe::ExpandError::NoMatchingRule.into()); tt::Subtree::empty() @@ -361,7 +361,7 @@ fn format_args_expand_general( quote!(::core::fmt::Display::fmt) } }; - arg_tts.push(quote! { ::core::fmt::Argument::new(&(#arg_tree), #formatter), }); + arg_tts.push(quote! { ::core::fmt::ArgumentV1::new(&(#arg_tree), #formatter), }); } '}' => { if format_iter.peek() == Some(&'}') { @@ -378,11 +378,11 @@ fn format_args_expand_general( if !last_part.is_empty() { parts.push(last_part); } - let part_tts = parts.into_iter().map(|x| { + let part_tts = parts.into_iter().map(|it| { let text = if let Some(raw) = &raw_sharps { - format!("r{raw}\"{}\"{raw}", x).into() + format!("r{raw}\"{}\"{raw}", it).into() } else { - format!("\"{}\"", x).into() + format!("\"{}\"", it).into() }; let l = tt::Literal { span: tt::TokenId::unspecified(), text }; quote!(#l ,) @@ -574,7 +574,7 @@ fn concat_bytes_expand( syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()), syntax::SyntaxKind::BYTE_STRING => { let components = unquote_byte_string(lit).unwrap_or_default(); - components.into_iter().for_each(|x| bytes.push(x.to_string())); + components.into_iter().for_each(|it| bytes.push(it.to_string())); } _ => { err.get_or_insert(mbe::ExpandError::UnexpectedToken.into()); @@ -692,7 +692,7 @@ pub(crate) fn include_arg_to_tt( arg_id: MacroCallId, ) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> { let loc = db.lookup_intern_macro_call(arg_id); - let Some(EagerCallInfo {arg, arg_id: Some(arg_id), .. }) = loc.eager.as_deref() else { + let Some(EagerCallInfo { arg,arg_id, .. }) = loc.eager.as_deref() else { panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager); }; let path = parse_string(&arg.0)?; diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs index 78b2db7306b..309c0930d1a 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/db.rs @@ -1,9 +1,9 @@ //! Defines database & queries for macro expansion. 
-use base_db::{salsa, Edition, SourceDatabase}; +use base_db::{salsa, CrateId, Edition, SourceDatabase}; use either::Either; use limit::Limit; -use mbe::syntax_node_to_token_tree; +use mbe::{syntax_node_to_token_tree, ValueResult}; use rustc_hash::FxHashSet; use syntax::{ ast::{self, HasAttrs, HasDocComments}, @@ -13,7 +13,7 @@ use triomphe::Arc; use crate::{ ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, - builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, BuiltinAttrExpander, + builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander, @@ -28,61 +28,67 @@ use crate::{ static TOKEN_LIMIT: Limit = Limit::new(1_048_576); #[derive(Debug, Clone, Eq, PartialEq)] +/// Old-style `macro_rules` or the new macros 2.0 +pub struct DeclarativeMacroExpander { + pub mac: mbe::DeclarativeMacro, + pub def_site_token_map: mbe::TokenMap, +} + +impl DeclarativeMacroExpander { + pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> { + match self.mac.err() { + Some(e) => ExpandResult::new( + tt::Subtree::empty(), + ExpandError::other(format!("invalid macro definition: {e}")), + ), + None => self.mac.expand(tt).map_err(Into::into), + } + } + + pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId { + self.mac.map_id_down(token_id) + } + + pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { + self.mac.map_id_up(token_id) + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] pub enum TokenExpander { - /// Old-style `macro_rules` or the new macros 2.0 - DeclarativeMacro { mac: mbe::DeclarativeMacro, def_site_token_map: mbe::TokenMap }, + DeclarativeMacro(Arc<DeclarativeMacroExpander>), /// Stuff like `line!` and `file!`. - Builtin(BuiltinFnLikeExpander), + BuiltIn(BuiltinFnLikeExpander), /// Built-in eagerly expanded fn-like macros (`include!`, `concat!`, etc.) - BuiltinEager(EagerExpander), + BuiltInEager(EagerExpander), /// `global_allocator` and such. - BuiltinAttr(BuiltinAttrExpander), + BuiltInAttr(BuiltinAttrExpander), /// `derive(Copy)` and such. - BuiltinDerive(BuiltinDeriveExpander), + BuiltInDerive(BuiltinDeriveExpander), /// The thing we love the most here in rust-analyzer -- procedural macros. ProcMacro(ProcMacroExpander), } +// FIXME: Get rid of these methods impl TokenExpander { - fn expand( - &self, - db: &dyn ExpandDatabase, - id: MacroCallId, - tt: &tt::Subtree, - ) -> ExpandResult<tt::Subtree> { - match self { - TokenExpander::DeclarativeMacro { mac, .. } => mac.expand(tt).map_err(Into::into), - TokenExpander::Builtin(it) => it.expand(db, id, tt).map_err(Into::into), - TokenExpander::BuiltinEager(it) => it.expand(db, id, tt).map_err(Into::into), - TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt), - TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt), - TokenExpander::ProcMacro(_) => { - // We store the result in salsa db to prevent non-deterministic behavior in - // some proc-macro implementation - // See #4315 for details - db.expand_proc_macro(id) - } - } - } - pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { match self { - TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_down(id), - TokenExpander::Builtin(..) - | TokenExpander::BuiltinEager(..) - | TokenExpander::BuiltinAttr(..) - | TokenExpander::BuiltinDerive(..) 
+ TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id), + TokenExpander::BuiltIn(..) + | TokenExpander::BuiltInEager(..) + | TokenExpander::BuiltInAttr(..) + | TokenExpander::BuiltInDerive(..) | TokenExpander::ProcMacro(..) => id, } } pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) { match self { - TokenExpander::DeclarativeMacro { mac, .. } => mac.map_id_up(id), - TokenExpander::Builtin(..) - | TokenExpander::BuiltinEager(..) - | TokenExpander::BuiltinAttr(..) - | TokenExpander::BuiltinDerive(..) + TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id), + TokenExpander::BuiltIn(..) + | TokenExpander::BuiltInEager(..) + | TokenExpander::BuiltInAttr(..) + | TokenExpander::BuiltInDerive(..) | TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call), } } @@ -118,14 +124,26 @@ pub trait ExpandDatabase: SourceDatabase { fn macro_arg( &self, id: MacroCallId, - ) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>; + ) -> ValueResult< + Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>, + Arc<Box<[SyntaxError]>>, + >; /// Extracts syntax node, corresponding to a macro call. That's a firewall /// query, only typing in the macro call itself changes the returned /// subtree. - fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>; - /// Gets the expander for this macro. This compiles declarative macros, and - /// just fetches procedural ones. - fn macro_def(&self, id: MacroDefId) -> Result<Arc<TokenExpander>, mbe::ParseError>; + fn macro_arg_node( + &self, + id: MacroCallId, + ) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>; + /// Fetches the expander for this macro. + #[salsa::transparent] + fn macro_expander(&self, id: MacroDefId) -> TokenExpander; + /// Fetches (and compiles) the expander of this decl macro. + fn decl_macro_expander( + &self, + def_crate: CrateId, + id: AstId<ast::Macro>, + ) -> Arc<DeclarativeMacroExpander>; /// Expand macro call to a token tree. // This query is LRU cached @@ -141,8 +159,8 @@ pub trait ExpandDatabase: SourceDatabase { /// Special case of the previous query for procedural macros. We can't LRU /// proc macros, since they are not deterministic in general, and /// non-determinism breaks salsa in a very, very, very bad way. - /// @edwin0cheng heroically debugged this once! - fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>; + /// @edwin0cheng heroically debugged this once! See #4315 for details + fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>; /// Firewall query that returns the errors from the `parse_macro_expansion` query. 
fn parse_macro_expansion_error( &self, @@ -163,7 +181,6 @@ pub fn expand_speculative( token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let loc = db.lookup_intern_macro_call(actual_macro_call); - let macro_def = db.macro_def(loc.def).ok()?; let token_range = token_to_map.text_range(); // Build the subtree and token mapping for the speculative args @@ -221,7 +238,12 @@ pub fn expand_speculative( None => { let range = token_range.checked_sub(speculative_args.text_range().start())?; let token_id = spec_args_tmap.token_by_range(range)?; - macro_def.map_id_down(token_id) + match loc.def.kind { + MacroDefKind::Declarative(it) => { + db.decl_macro_expander(loc.krate, it).map_id_down(token_id) + } + _ => token_id, + } } }; @@ -235,7 +257,17 @@ pub fn expand_speculative( MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?) } - _ => macro_def.expand(db, actual_macro_call, &tt), + MacroDefKind::BuiltInDerive(expander, ..) => { + // this cast is a bit sus, can we avoid losing the typedness here? + let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); + expander.expand(db, actual_macro_call, &adt, &spec_args_tmap) + } + MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt), + MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), + MacroDefKind::BuiltInEager(it, _) => { + it.expand(db, actual_macro_call, &tt).map_err(Into::into) + } + MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt), }; let expand_to = macro_expand_to(db, actual_macro_call); @@ -297,17 +329,31 @@ fn parse_macro_expansion( ExpandResult { value: (parse, Arc::new(rev_token_map)), err } } +fn parse_macro_expansion_error( + db: &dyn ExpandDatabase, + macro_call_id: MacroCallId, +) -> ExpandResult<Box<[SyntaxError]>> { + db.parse_macro_expansion(MacroFile { macro_call_id }) + .map(|it| it.0.errors().to_vec().into_boxed_slice()) +} + fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, -) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> { +) -> ValueResult< + Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>, + Arc<Box<[SyntaxError]>>, +> { let loc = db.lookup_intern_macro_call(id); - if let Some(EagerCallInfo { arg, arg_id: Some(_), error: _ }) = loc.eager.as_deref() { - return Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))); + if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() { + return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())))); } - let arg = db.macro_arg_text(id)?; + let ValueResult { value, err } = db.macro_arg_node(id); + let Some(arg) = value else { + return ValueResult { value: None, err }; + }; let node = SyntaxNode::new_root(arg); let censor = censor_for_macro_input(&loc, &node); @@ -325,9 +371,16 @@ fn macro_arg( // proc macros expect their inputs without parentheses, MBEs expect it with them included tt.delimiter = tt::Delimiter::unspecified(); } - Some(Arc::new((tt, tmap, fixups.undo_info))) + let val = Some(Arc::new((tt, tmap, fixups.undo_info))); + match err { + Some(err) => ValueResult::new(val, err), + None => ValueResult::ok(val), + } } +/// Certain macro calls expect some nodes in the input to be preprocessed away, namely: +/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped +/// - attributes expect the invoking attribute to be stripped fn censor_for_macro_input(loc: 
&MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> { // FIXME: handle `cfg_attr` (|| { @@ -364,9 +417,44 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy .unwrap_or_default() } -fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> { +fn macro_arg_node( + db: &dyn ExpandDatabase, + id: MacroCallId, +) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> { + let err = || -> Arc<Box<[_]>> { + Arc::new(Box::new([SyntaxError::new_at_offset( + "invalid macro call".to_owned(), + syntax::TextSize::from(0), + )])) + }; let loc = db.lookup_intern_macro_call(id); - let arg = loc.kind.arg(db)?; + let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind { + let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() { + Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr).0) + } else { + loc.kind + .arg(db) + .and_then(|arg| ast::TokenTree::cast(arg.value)) + .map(|tt| tt.reparse_as_expr().to_syntax()) + }; + + match res { + Some(res) if res.errors().is_empty() => res.syntax_node(), + Some(res) => { + return ValueResult::new( + Some(res.syntax_node().green().into()), + // Box::<[_]>::from(res.errors()), not stable yet + Arc::new(res.errors().to_vec().into_boxed_slice()), + ); + } + None => return ValueResult::only_err(err()), + } + } else { + match loc.kind.arg(db) { + Some(res) => res.value, + None => return ValueResult::only_err(err()), + } + }; if matches!(loc.kind, MacroCallKind::FnLike { .. }) { let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); @@ -381,101 +469,146 @@ fn macro_arg_text(db: &dyn ExpandDatabase, id: MacroCallId) -> Option<GreenNode> // Some day, we'll have explicit recursion counters for all // recursive things, at which point this code might be removed. cov_mark::hit!(issue9358_bad_macro_stack_overflow); - return None; + return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new( + "unbalanced token tree".to_owned(), + arg.text_range(), + )]))); } } - if let Some(EagerCallInfo { arg, .. 
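An illustration of the stripping described in the comment above `censor_for_macro_input` (hypothetical user code): when a derive is expanded, the `#[derive(..)]` attributes up to the one currently being invoked are censored out of the item the expander sees, and for an attribute macro the invoking attribute itself is removed.

#[derive(Clone)]    // censored while expanding `Clone` (and while expanding `Debug`)
#[derive(Debug)]    // censored while expanding `Debug`
#[allow(dead_code)] // unrelated attributes are left in place
struct Censored {
    field: u8,
}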
}) = loc.eager.as_deref() { - Some( - mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::Expr) - .0 - .syntax_node() - .green() - .into(), - ) - } else { - Some(arg.green().into()) - } + ValueResult::ok(Some(arg.green().into())) } -fn macro_def( +fn decl_macro_expander( db: &dyn ExpandDatabase, - id: MacroDefId, -) -> Result<Arc<TokenExpander>, mbe::ParseError> { + def_crate: CrateId, + id: AstId<ast::Macro>, +) -> Arc<DeclarativeMacroExpander> { + let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; + let (mac, def_site_token_map) = match id.to_node(db) { + ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() { + Some(arg) => { + let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); + let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); + (mac, def_site_token_map) + } + None => ( + mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, + ), + Default::default(), + ), + }, + ast::Macro::MacroDef(macro_def) => match macro_def.body() { + Some(arg) => { + let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); + let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021); + (mac, def_site_token_map) + } + None => ( + mbe::DeclarativeMacro::from_err( + mbe::ParseError::Expected("expected a token tree".into()), + is_2021, + ), + Default::default(), + ), + }, + }; + Arc::new(DeclarativeMacroExpander { mac, def_site_token_map }) +} + +fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { match id.kind { MacroDefKind::Declarative(ast_id) => { - let is_2021 = db.crate_graph()[id.krate].edition >= Edition::Edition2021; - let (mac, def_site_token_map) = match ast_id.to_node(db) { - ast::Macro::MacroRules(macro_rules) => { - let arg = macro_rules - .token_tree() - .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?; - let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); - let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021)?; - (mac, def_site_token_map) - } - ast::Macro::MacroDef(macro_def) => { - let arg = macro_def - .body() - .ok_or_else(|| mbe::ParseError::Expected("expected a token tree".into()))?; - let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); - let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021)?; - (mac, def_site_token_map) - } - }; - Ok(Arc::new(TokenExpander::DeclarativeMacro { mac, def_site_token_map })) - } - MacroDefKind::BuiltIn(expander, _) => Ok(Arc::new(TokenExpander::Builtin(expander))), - MacroDefKind::BuiltInAttr(expander, _) => { - Ok(Arc::new(TokenExpander::BuiltinAttr(expander))) + TokenExpander::DeclarativeMacro(db.decl_macro_expander(id.krate, ast_id)) } - MacroDefKind::BuiltInDerive(expander, _) => { - Ok(Arc::new(TokenExpander::BuiltinDerive(expander))) - } - MacroDefKind::BuiltInEager(expander, ..) => { - Ok(Arc::new(TokenExpander::BuiltinEager(expander))) - } - MacroDefKind::ProcMacro(expander, ..) => Ok(Arc::new(TokenExpander::ProcMacro(expander))), + MacroDefKind::BuiltIn(expander, _) => TokenExpander::BuiltIn(expander), + MacroDefKind::BuiltInAttr(expander, _) => TokenExpander::BuiltInAttr(expander), + MacroDefKind::BuiltInDerive(expander, _) => TokenExpander::BuiltInDerive(expander), + MacroDefKind::BuiltInEager(expander, ..) => TokenExpander::BuiltInEager(expander), + MacroDefKind::ProcMacro(expander, ..) 
=> TokenExpander::ProcMacro(expander), } } fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> { let _p = profile::span("macro_expand"); let loc = db.lookup_intern_macro_call(id); - if let Some(EagerCallInfo { arg, arg_id: None, error }) = loc.eager.as_deref() { - // This is an input expansion for an eager macro. These are already pre-expanded - return ExpandResult { value: Arc::new(arg.0.clone()), err: error.clone() }; - } - let expander = match db.macro_def(loc.def) { - Ok(it) => it, - // FIXME: We should make sure to enforce a variant that invalid macro - // definitions do not get expanders that could reach this call path! - Err(err) => { - return ExpandResult { - value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, - token_trees: vec![], - }), - err: Some(ExpandError::other(format!("invalid macro definition: {err}"))), - } + + let ExpandResult { value: tt, mut err } = match loc.def.kind { + MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id), + MacroDefKind::BuiltInDerive(expander, ..) => { + let arg = db.macro_arg_node(id).value.unwrap(); + + let node = SyntaxNode::new_root(arg); + let censor = censor_for_macro_input(&loc, &node); + let mut fixups = fixup::fixup_syntax(&node); + fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); + let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications( + &node, + fixups.token_map, + fixups.next_id, + fixups.replace, + fixups.append, + ); + + // this cast is a bit sus, can we avoid losing the typedness here? + let adt = ast::Adt::cast(node).unwrap(); + let mut res = expander.expand(db, id, &adt, &tmap); + fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info); + res + } + _ => { + let ValueResult { value, err } = db.macro_arg(id); + let Some(macro_arg) = value else { + return ExpandResult { + value: Arc::new(tt::Subtree { + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: Vec::new(), + }), + // FIXME: We should make sure to enforce an invariant that invalid macro + // calls do not reach this call path! + err: Some(ExpandError::other("invalid token tree")), + }; + }; + + let (arg, arg_tm, undo_info) = &*macro_arg; + let mut res = match loc.def.kind { + MacroDefKind::Declarative(id) => { + db.decl_macro_expander(loc.def.krate, id).expand(arg.clone()) + } + MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into), + // This might look a bit odd, but we do not expand the inputs to eager macros here. + // Eager macros inputs are expanded, well, eagerly when we collect the macro calls. + // That kind of expansion uses the ast id map of an eager macros input though which goes through + // the HirFileId machinery. As eager macro inputs are assigned a macro file id that query + // will end up going through here again, whereas we want to just want to inspect the raw input. + // As such we just return the input subtree here. + MacroDefKind::BuiltInEager(..) 
if loc.eager.is_none() => { + let mut arg = arg.clone(); + fixup::reverse_fixups(&mut arg, arg_tm, undo_info); + + return ExpandResult { + value: Arc::new(arg), + err: err.map(|err| { + let mut buf = String::new(); + for err in &**err { + use std::fmt::Write; + _ = write!(buf, "{}, ", err); + } + buf.pop(); + buf.pop(); + ExpandError::other(buf) + }), + }; + } + MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into), + MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg), + _ => unreachable!(), + }; + fixup::reverse_fixups(&mut res.value, arg_tm, undo_info); + res } }; - let Some(macro_arg) = db.macro_arg(id) else { - return ExpandResult { - value: Arc::new( - tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, - token_trees: Vec::new(), - }, - ), - // FIXME: We should make sure to enforce a variant that invalid macro - // calls do not reach this call path! - err: Some(ExpandError::other( - "invalid token tree" - )), - }; - }; - let (arg_tt, arg_tm, undo_info) = &*macro_arg; - let ExpandResult { value: mut tt, mut err } = expander.expand(db, id, arg_tt); if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() { // FIXME: We should report both errors! @@ -483,48 +616,29 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt } // Set a hard limit for the expanded tt - let count = tt.count(); - if TOKEN_LIMIT.check(count).is_err() { - return ExpandResult { - value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::UNSPECIFIED, - token_trees: vec![], - }), - err: Some(ExpandError::other(format!( - "macro invocation exceeds token limit: produced {} tokens, limit is {}", - count, - TOKEN_LIMIT.inner(), - ))), - }; + if let Err(value) = check_tt_count(&tt) { + return value; } - fixup::reverse_fixups(&mut tt, arg_tm, undo_info); - ExpandResult { value: Arc::new(tt), err } } -fn parse_macro_expansion_error( - db: &dyn ExpandDatabase, - macro_call_id: MacroCallId, -) -> ExpandResult<Box<[SyntaxError]>> { - db.parse_macro_expansion(MacroFile { macro_call_id }) - .map(|it| it.0.errors().to_vec().into_boxed_slice()) -} - -fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> { +fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> { let loc = db.lookup_intern_macro_call(id); - let Some(macro_arg) = db.macro_arg(id) else { + let Some(macro_arg) = db.macro_arg(id).value else { return ExpandResult { - value: tt::Subtree { + value: Arc::new(tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new(), - }, - err: Some(ExpandError::other( - "invalid token tree" - )), + }), + // FIXME: We should make sure to enforce an invariant that invalid macro + // calls do not reach this call path! + err: Some(ExpandError::other("invalid token tree")), }; }; + let (arg_tt, arg_tm, undo_info) = &*macro_arg; + let expander = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) => expander, _ => unreachable!(), @@ -533,13 +647,23 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<t let attr_arg = match &loc.kind { MacroCallKind::Attr { attr_args, .. 
} => { let mut attr_args = attr_args.0.clone(); - mbe::Shift::new(¯o_arg.0).shift_all(&mut attr_args); + mbe::Shift::new(arg_tt).shift_all(&mut attr_args); Some(attr_args) } _ => None, }; - expander.expand(db, loc.def.krate, loc.krate, ¯o_arg.0, attr_arg.as_ref()) + let ExpandResult { value: mut tt, err } = + expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref()); + + // Set a hard limit for the expanded tt + if let Err(value) = check_tt_count(&tt) { + return value; + } + + fixup::reverse_fixups(&mut tt, arg_tm, undo_info); + + ExpandResult { value: Arc::new(tt), err } } fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> { @@ -563,3 +687,22 @@ fn token_tree_to_syntax_node( }; mbe::token_tree_to_syntax_node(tt, entry_point) } + +fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> { + let count = tt.count(); + if TOKEN_LIMIT.check(count).is_err() { + Err(ExpandResult { + value: Arc::new(tt::Subtree { + delimiter: tt::Delimiter::UNSPECIFIED, + token_trees: vec![], + }), + err: Some(ExpandError::other(format!( + "macro invocation exceeds token limit: produced {} tokens, limit is {}", + count, + TOKEN_LIMIT.inner(), + ))), + }) + } else { + Ok(()) + } +} diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs index 7ee3fd375f6..876813eab5d 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/eager.rs @@ -19,7 +19,8 @@ //! //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros> use base_db::CrateId; -use syntax::{ted, Parse, SyntaxNode}; +use rustc_hash::FxHashMap; +use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent}; use triomphe::Arc; use crate::{ @@ -38,19 +39,8 @@ pub fn expand_eager_macro_input( def: MacroDefId, resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> { - assert!(matches!(def.kind, MacroDefKind::BuiltInEager(..))); - let token_tree = macro_call.value.token_tree(); - - let Some(token_tree) = token_tree else { - return Ok(ExpandResult { value: None, err: - Some(ExpandError::other( - "invalid token tree" - )), - }); - }; - let (parsed_args, arg_token_map) = mbe::syntax_node_to_token_tree(token_tree.syntax()); - let ast_map = db.ast_id_map(macro_call.file_id); + // the expansion which the ast id map is built upon has no whitespace, so the offsets are wrong as macro_call is from the token tree that has whitespace! let call_id = InFile::new(macro_call.file_id, ast_map.ast_id(¯o_call.value)); let expand_to = ExpandTo::from_call_site(¯o_call.value); @@ -61,41 +51,69 @@ pub fn expand_eager_macro_input( let arg_id = db.intern_macro_call(MacroCallLoc { def, krate, - eager: Some(Box::new(EagerCallInfo { - arg: Arc::new((parsed_args, arg_token_map)), - arg_id: None, - error: None, - })), + eager: None, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, }); - let arg_as_expr = match db.macro_arg_text(arg_id) { - Some(it) => it, - None => { - return Ok(ExpandResult { - value: None, - err: Some(ExpandError::other("invalid token tree")), - }) - } + let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = + db.parse_macro_expansion(arg_id.as_macro_file()); + // we need this map here as the expansion of the eager input fake file loses whitespace ... 
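The `check_tt_count` helper added above turns the previously inlined token-limit check into a reusable guard that both `macro_expand` and `expand_proc_macro` call. Below is a minimal, self-contained sketch of that control-flow pattern; `Subtree`, `ExpandResult`, and the limit constant are simplified stand-ins rather than rust-analyzer's actual types, and only the "return the full fallback result as `Err` so callers can bail out with one `return value;`" shape is the point.

```rust
// Simplified stand-ins for the real tt::Subtree / ExpandResult / TOKEN_LIMIT.
const TOKEN_LIMIT: usize = 1_048_576;

#[derive(Debug, Default)]
struct Subtree {
    token_count: usize,
}

#[derive(Debug)]
struct ExpandResult<T> {
    value: T,
    err: Option<String>,
}

fn check_tt_count(tt: &Subtree) -> Result<(), ExpandResult<Subtree>> {
    if tt.token_count > TOKEN_LIMIT {
        Err(ExpandResult {
            // An empty expansion stands in for the real "give up" output.
            value: Subtree::default(),
            err: Some(format!(
                "macro invocation exceeds token limit: produced {} tokens, limit is {}",
                tt.token_count, TOKEN_LIMIT
            )),
        })
    } else {
        Ok(())
    }
}

fn expand(tt: Subtree) -> ExpandResult<Subtree> {
    // The guard hands back the complete error result, so every caller
    // can short-circuit with a single early return.
    if let Err(value) = check_tt_count(&tt) {
        return value;
    }
    ExpandResult { value: tt, err: None }
}

fn main() {
    let oversized = Subtree { token_count: TOKEN_LIMIT + 1 };
    assert!(expand(oversized).err.is_some());
}
```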
+ let mut ws_mapping = FxHashMap::default(); + if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() { + ws_mapping.extend(tm.entries().filter_map(|(id, range)| { + Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range)) + })); + } + + let ExpandResult { value: expanded_eager_input, err } = { + eager_macro_recur( + db, + &Hygiene::new(db, macro_call.file_id), + InFile::new(arg_id.as_file(), arg_exp.syntax_node()), + krate, + resolver, + )? }; - let ExpandResult { value: expanded_eager_input, err } = eager_macro_recur( - db, - &Hygiene::new(db, macro_call.file_id), - InFile::new(arg_id.as_file(), SyntaxNode::new_root(arg_as_expr)), - krate, - resolver, - )?; - let Some(expanded_eager_input) = expanded_eager_input else { - return Ok(ExpandResult { value: None, err }) + let err = parse_err.or(err); + + let Some((expanded_eager_input, mapping)) = expanded_eager_input else { + return Ok(ExpandResult { value: None, err }); }; - let (mut subtree, token_map) = mbe::syntax_node_to_token_tree(&expanded_eager_input); + + let og_tmap = mbe::syntax_node_to_token_map( + macro_call.value.token_tree().expect("macro_arg_text succeeded").syntax(), + ); + + let (mut subtree, expanded_eager_input_token_map) = + mbe::syntax_node_to_token_tree(&expanded_eager_input); + + // The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside + // so we need to remap them to the original input of the eager macro. + subtree.visit_ids(&|id| { + // Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix + + if let Some(range) = + expanded_eager_input_token_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE) + { + // remap from expanded eager input to eager input expansion + if let Some(og_range) = mapping.get(&range) { + // remap from eager input expansion to original eager input + if let Some(&og_range) = ws_mapping.get(og_range) { + if let Some(og_token) = og_tmap.token_by_range(og_range) { + return og_token; + } + } + } + } + tt::TokenId::UNSPECIFIED + }); subtree.delimiter = crate::tt::Delimiter::unspecified(); let loc = MacroCallLoc { def, krate, eager: Some(Box::new(EagerCallInfo { - arg: Arc::new((subtree, token_map)), - arg_id: Some(arg_id), + arg: Arc::new((subtree, og_tmap)), + arg_id, error: err.clone(), })), kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, @@ -109,19 +127,16 @@ fn lazy_expand( def: &MacroDefId, macro_call: InFile<ast::MacroCall>, krate: CrateId, -) -> ExpandResult<InFile<Parse<SyntaxNode>>> { +) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> { let ast_id = db.ast_id_map(macro_call.file_id).ast_id(¯o_call.value); let expand_to = ExpandTo::from_call_site(¯o_call.value); - let id = def.as_lazy_macro( - db, - krate, - MacroCallKind::FnLike { ast_id: macro_call.with_value(ast_id), expand_to }, - ); - + let ast_id = macro_call.with_value(ast_id); + let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }); let macro_file = id.as_macro_file(); - db.parse_macro_expansion(macro_file).map(|parse| InFile::new(macro_file.into(), parse.0)) + db.parse_macro_expansion(macro_file) + .map(|parse| (InFile::new(macro_file.into(), parse.0), parse.1)) } fn eager_macro_recur( @@ -130,18 +145,43 @@ fn eager_macro_recur( curr: InFile<SyntaxNode>, krate: CrateId, macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, -) -> Result<ExpandResult<Option<SyntaxNode>>, UnresolvedMacro> { +) -> 
Result<ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>>, UnresolvedMacro> { let original = curr.value.clone_for_update(); + let mut mapping = FxHashMap::default(); - let children = original.descendants().filter_map(ast::MacroCall::cast); let mut replacements = Vec::new(); // Note: We only report a single error inside of eager expansions let mut error = None; + let mut offset = 0i32; + let apply_offset = |it: TextSize, offset: i32| { + TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default()) + }; + let mut children = original.preorder_with_tokens(); // Collect replacement - for child in children { - let def = match child.path().and_then(|path| ModPath::from_src(db, path, hygiene)) { + while let Some(child) = children.next() { + let WalkEvent::Enter(child) = child else { continue }; + let call = match child { + syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) { + Some(it) => { + children.skip_subtree(); + it + } + None => continue, + }, + syntax::NodeOrToken::Token(t) => { + mapping.insert( + TextRange::new( + apply_offset(t.text_range().start(), offset), + apply_offset(t.text_range().end(), offset), + ), + t.text_range(), + ); + continue; + } + }; + let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) { Some(path) => macro_resolver(path.clone()).ok_or(UnresolvedMacro { path })?, None => { error = Some(ExpandError::other("malformed macro invocation")); @@ -153,7 +193,7 @@ fn eager_macro_recur( let ExpandResult { value, err } = match expand_eager_macro_input( db, krate, - curr.with_value(child.clone()), + curr.with_value(call.clone()), def, macro_resolver, ) { @@ -161,9 +201,22 @@ fn eager_macro_recur( Err(err) => return Err(err), }; match value { - Some(call) => { + Some(call_id) => { let ExpandResult { value, err: err2 } = - db.parse_macro_expansion(call.as_macro_file()); + db.parse_macro_expansion(call_id.as_macro_file()); + + let call_tt_start = + call.token_tree().unwrap().syntax().text_range().start(); + let call_start = apply_offset(call.syntax().text_range().start(), offset); + if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() { + mapping.extend(arg_map.entries().filter_map(|(tid, range)| { + value + .1 + .first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE) + .map(|r| (r + call_start, range + call_tt_start)) + })); + }; + ExpandResult { value: Some(value.0.syntax_node().clone_for_update()), err: err.or(err2), @@ -177,36 +230,61 @@ fn eager_macro_recur( | MacroDefKind::BuiltInAttr(..) | MacroDefKind::BuiltInDerive(..) | MacroDefKind::ProcMacro(..) 
=> { - let ExpandResult { value, err } = - lazy_expand(db, &def, curr.with_value(child.clone()), krate); + let ExpandResult { value: (parse, tm), err } = + lazy_expand(db, &def, curr.with_value(call.clone()), krate); + let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind { + Some(db.decl_macro_expander(def.krate, ast_id)) + } else { + None + }; // replace macro inside - let hygiene = Hygiene::new(db, value.file_id); + let hygiene = Hygiene::new(db, parse.file_id); let ExpandResult { value, err: error } = eager_macro_recur( db, &hygiene, // FIXME: We discard parse errors here - value.map(|it| it.syntax_node()), + parse.as_ref().map(|it| it.syntax_node()), krate, macro_resolver, )?; let err = err.or(error); - ExpandResult { value, err } + + let call_tt_start = call.token_tree().unwrap().syntax().text_range().start(); + let call_start = apply_offset(call.syntax().text_range().start(), offset); + if let Some((_tt, arg_map, _)) = parse + .file_id + .macro_file() + .and_then(|id| db.macro_arg(id.macro_call_id).value) + .as_deref() + { + mapping.extend(arg_map.entries().filter_map(|(tid, range)| { + tm.first_range_by_token( + decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid), + syntax::SyntaxKind::TOMBSTONE, + ) + .map(|r| (r + call_start, range + call_tt_start)) + })); + }; + // FIXME: Do we need to re-use _m here? + ExpandResult { value: value.map(|(n, _m)| n), err } } }; if err.is_some() { error = err; } // check if the whole original syntax is replaced - if child.syntax() == &original { - return Ok(ExpandResult { value, err: error }); + if call.syntax() == &original { + return Ok(ExpandResult { value: value.zip(Some(mapping)), err: error }); } if let Some(insert) = value { - replacements.push((child, insert)); + offset += u32::from(insert.text_range().len()) as i32 + - u32::from(call.syntax().text_range().len()) as i32; + replacements.push((call, insert)); } } replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new)); - Ok(ExpandResult { value: Some(original), err: error }) + Ok(ExpandResult { value: Some((original, mapping)), err: error }) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs index 00796e7c0db..e6e8d8c0299 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/fixup.rs @@ -26,7 +26,7 @@ pub(crate) struct SyntaxFixups { /// This is the information needed to reverse the fixups. #[derive(Debug, Default, PartialEq, Eq)] pub struct SyntaxFixupUndoInfo { - original: Vec<Subtree>, + original: Box<[Subtree]>, } const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0); @@ -272,7 +272,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { replace, token_map, next_id, - undo_info: SyntaxFixupUndoInfo { original }, + undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() }, } } @@ -472,13 +472,13 @@ fn foo () {match __ra_fixup {}} check( r#" fn foo() { - match x { + match it { } } "#, expect![[r#" -fn foo () {match x {}} +fn foo () {match it {}} "#]], ) } @@ -547,11 +547,11 @@ fn foo () {a . __ra_fixup ; bar () ;} check( r#" fn foo() { - let x = a + let it = a } "#, expect![[r#" -fn foo () {let x = a ;} +fn foo () {let it = a ;} "#]], ) } @@ -561,11 +561,11 @@ fn foo () {let x = a ;} check( r#" fn foo() { - let x = a. + let it = a. } "#, expect![[r#" -fn foo () {let x = a . __ra_fixup ;} +fn foo () {let it = a . 
__ra_fixup ;} "#]], ) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs index 10f8fe9cec4..54e74d50c87 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/hygiene.rs @@ -126,7 +126,7 @@ struct HygieneInfo { /// The start offset of the `macro_rules!` arguments or attribute input. attr_input_or_mac_def_start: Option<InFile<TextSize>>, - macro_def: Arc<TokenExpander>, + macro_def: TokenExpander, macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, macro_arg_shift: mbe::Shift, exp_map: Arc<mbe::TokenMap>, @@ -149,19 +149,15 @@ impl HygieneInfo { token_id = unshifted; (&attr_args.1, self.attr_input_or_mac_def_start?) } - None => ( - &self.macro_arg.1, - InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()), - ), + None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())), }, _ => match origin { - mbe::Origin::Call => ( - &self.macro_arg.1, - InFile::new(loc.kind.file_id(), loc.kind.arg(db)?.text_range().start()), - ), - mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def_start) { - (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => { - (def_site_token_map, *tt) + mbe::Origin::Call => { + (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())) + } + mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) { + (TokenExpander::DeclarativeMacro(expander), Some(tt)) => { + (&expander.def_site_token_map, *tt) } _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), }, @@ -198,9 +194,9 @@ fn make_hygiene_info( _ => None, }); - let macro_def = db.macro_def(loc.def).ok()?; + let macro_def = db.macro_expander(loc.def); let (_, exp_map) = db.parse_macro_expansion(macro_file).value; - let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| { + let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { Arc::new(( tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, Default::default(), diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs index e0c199328ef..9ed6c31ddde 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/lib.rs @@ -37,11 +37,11 @@ use either::Either; use syntax::{ algo::{self, skip_trivia_token}, ast::{self, AstNode, HasDocComments}, - Direction, SyntaxNode, SyntaxToken, + AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, }; use crate::{ - ast_id_map::FileAstId, + ast_id_map::{AstIdNode, ErasedFileAstId, FileAstId}, attrs::AttrId, builtin_attr_macro::BuiltinAttrExpander, builtin_derive_macro::BuiltinDeriveExpander, @@ -127,7 +127,8 @@ impl_intern_key!(MacroCallId); pub struct MacroCallLoc { pub def: MacroDefId, pub(crate) krate: CrateId, - /// Some if `def` is a builtin eager macro. + /// Some if this is a macro call for an eager macro. Note that this is `None` + /// for the eager input macro file. eager: Option<Box<EagerCallInfo>>, pub kind: MacroCallKind, } @@ -152,11 +153,10 @@ pub enum MacroDefKind { #[derive(Debug, Clone, PartialEq, Eq, Hash)] struct EagerCallInfo { - /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro! + /// The expanded argument of the eager macro. 
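The `SyntaxFixupUndoInfo` change above swaps `Vec<Subtree>` for `Box<[Subtree]>`: the undo info is never appended to after `fixup_syntax` builds it, so a boxed slice drops the unused capacity word and lets the allocation shrink to fit. A hedged sketch of that conversion with a stand-in element type:

```rust
// `String` is only a stand-in element type; the real field stores Subtrees.
#[derive(Debug, Default, PartialEq, Eq)]
struct UndoInfo {
    original: Box<[String]>,
}

fn build_undo_info(items: Vec<String>) -> UndoInfo {
    // into_boxed_slice() shrinks the buffer and discards the capacity field.
    UndoInfo { original: items.into_boxed_slice() }
}

fn main() {
    let info = build_undo_info(vec!["a".to_owned(), "b".to_owned()]);
    assert_eq!(info.original.len(), 2);
    // A boxed slice is a fat pointer (ptr + len), one word smaller than a Vec.
    assert_eq!(
        std::mem::size_of::<Box<[String]>>(),
        2 * std::mem::size_of::<usize>()
    );
}
```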
arg: Arc<(tt::Subtree, TokenMap)>, - /// call id of the eager macro's input file. If this is none, macro call containing this call info - /// is an eager macro's input, otherwise it is its output. - arg_id: Option<MacroCallId>, + /// Call id of the eager macro's input file (this is the macro file for its fully expanded input). + arg_id: MacroCallId, error: Option<ExpandError>, } @@ -221,11 +221,7 @@ impl HirFileId { HirFileIdRepr::FileId(id) => break id, HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id); - let is_include_expansion = loc.def.is_include() - && matches!( - loc.eager.as_deref(), - Some(EagerCallInfo { arg_id: Some(_), .. }) - ); + let is_include_expansion = loc.def.is_include() && loc.eager.is_some(); file_id = match is_include_expansion.then(|| db.include_expand(macro_call_id)) { Some(Ok((_, file))) => file.into(), _ => loc.kind.file_id(), @@ -270,57 +266,13 @@ impl HirFileId { /// Return expansion information if it is a macro-expansion file pub fn expansion_info(self, db: &dyn db::ExpandDatabase) -> Option<ExpansionInfo> { let macro_file = self.macro_file()?; - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - - let arg_tt = loc.kind.arg(db)?; - - let macro_def = db.macro_def(loc.def).ok()?; - let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; - let macro_arg = db.macro_arg(macro_file.macro_call_id).unwrap_or_else(|| { - Arc::new(( - tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, - Default::default(), - Default::default(), - )) - }); - - let def = loc.def.ast_id().left().and_then(|id| { - let def_tt = match id.to_node(db) { - ast::Macro::MacroRules(mac) => mac.token_tree()?, - ast::Macro::MacroDef(_) if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) => { - return None - } - ast::Macro::MacroDef(mac) => mac.body()?, - }; - Some(InFile::new(id.file_id, def_tt)) - }); - let attr_input_or_mac_def = def.or_else(|| match loc.kind { - MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { - // FIXME: handle `cfg_attr` - let tt = ast_id - .to_node(db) - .doc_comments_and_attrs() - .nth(invoc_attr_index.ast_index()) - .and_then(Either::left)? 
- .token_tree()?; - Some(InFile::new(ast_id.file_id, tt)) - } - _ => None, - }); - - Some(ExpansionInfo { - expanded: InFile::new(self, parse.syntax_node()), - arg: InFile::new(loc.kind.file_id(), arg_tt), - attr_input_or_mac_def, - macro_arg_shift: mbe::Shift::new(¯o_arg.0), - macro_arg, - macro_def, - exp_map, - }) + ExpansionInfo::new(db, macro_file) } - /// Indicate it is macro file generated for builtin derive - pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<ast::Attr>> { + pub fn as_builtin_derive_attr_node( + &self, + db: &dyn db::ExpandDatabase, + ) -> Option<InFile<ast::Attr>> { let macro_file = self.macro_file()?; let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); let attr = match loc.def.kind { @@ -333,8 +285,22 @@ impl HirFileId { pub fn is_custom_derive(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _)) + matches!( + db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind, + MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _) + ) + } + None => false, + } + } + + pub fn is_builtin_derive(&self, db: &dyn db::ExpandDatabase) -> bool { + match self.macro_file() { + Some(macro_file) => { + matches!( + db.lookup_intern_macro_call(macro_file.macro_call_id).def.kind, + MacroDefKind::BuiltInDerive(..) + ) } None => false, } @@ -344,8 +310,7 @@ impl HirFileId { pub fn is_include_macro(&self, db: &dyn db::ExpandDatabase) -> bool { match self.macro_file() { Some(macro_file) => { - let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - loc.def.is_include() + db.lookup_intern_macro_call(macro_file.macro_call_id).def.is_include() } _ => false, } @@ -355,7 +320,7 @@ impl HirFileId { match self.macro_file() { Some(macro_file) => { let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); - matches!(loc.eager.as_deref(), Some(EagerCallInfo { .. })) + matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) } _ => false, } @@ -536,9 +501,9 @@ impl MacroCallKind { }; let range = match kind { - MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(), - MacroCallKind::Derive { ast_id, .. } => ast_id.to_node(db).syntax().text_range(), - MacroCallKind::Attr { ast_id, .. } => ast_id.to_node(db).syntax().text_range(), + MacroCallKind::FnLike { ast_id, .. } => ast_id.to_ptr(db).text_range(), + MacroCallKind::Derive { ast_id, .. } => ast_id.to_ptr(db).text_range(), + MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).text_range(), }; FileRange { range, file_id } @@ -588,13 +553,18 @@ impl MacroCallKind { FileRange { range, file_id } } - fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<SyntaxNode> { + fn arg(&self, db: &dyn db::ExpandDatabase) -> Option<InFile<SyntaxNode>> { match self { - MacroCallKind::FnLike { ast_id, .. } => { - Some(ast_id.to_node(db).token_tree()?.syntax().clone()) + MacroCallKind::FnLike { ast_id, .. } => ast_id + .to_in_file_node(db) + .map(|it| Some(it.token_tree()?.syntax().clone())) + .transpose(), + MacroCallKind::Derive { ast_id, .. } => { + Some(ast_id.to_in_file_node(db).syntax().cloned()) + } + MacroCallKind::Attr { ast_id, .. } => { + Some(ast_id.to_in_file_node(db).syntax().cloned()) } - MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()), - MacroCallKind::Attr { ast_id, .. 
} => Some(ast_id.to_node(db).syntax().clone()), } } } @@ -612,13 +582,13 @@ impl MacroCallId { /// ExpansionInfo mainly describes how to map text range between src and expanded macro #[derive(Debug, Clone, PartialEq, Eq)] pub struct ExpansionInfo { - expanded: InFile<SyntaxNode>, + expanded: InMacroFile<SyntaxNode>, /// The argument TokenTree or item for attributes arg: InFile<SyntaxNode>, /// The `macro_rules!` or attribute input. attr_input_or_mac_def: Option<InFile<ast::TokenTree>>, - macro_def: Arc<TokenExpander>, + macro_def: TokenExpander, macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>, /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg /// and as such we need to shift tokens if they are part of an attributes input instead of their item. @@ -628,7 +598,7 @@ pub struct ExpansionInfo { impl ExpansionInfo { pub fn expanded(&self) -> InFile<SyntaxNode> { - self.expanded.clone() + self.expanded.clone().into() } pub fn call_node(&self) -> Option<InFile<SyntaxNode>> { @@ -659,7 +629,7 @@ impl ExpansionInfo { let token_id_in_attr_input = if let Some(item) = item { // check if we are mapping down in an attribute input // this is a special case as attributes can have two inputs - let call_id = self.expanded.file_id.macro_file()?.macro_call_id; + let call_id = self.expanded.file_id.macro_call_id; let loc = db.lookup_intern_macro_call(call_id); let token_range = token.value.text_range(); @@ -705,7 +675,7 @@ impl ExpansionInfo { let relative_range = token.value.text_range().checked_sub(self.arg.value.text_range().start())?; let token_id = self.macro_arg.1.token_by_range(relative_range)?; - // conditionally shift the id by a declaratives macro definition + // conditionally shift the id by a declarative macro definition self.macro_def.map_id_down(token_id) } }; @@ -715,7 +685,7 @@ impl ExpansionInfo { .ranges_by_token(token_id, token.value.kind()) .flat_map(move |range| self.expanded.value.covering_element(range).into_token()); - Some(tokens.map(move |token| self.expanded.with_value(token))) + Some(tokens.map(move |token| InFile::new(self.expanded.file_id.into(), token))) } /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion. @@ -724,18 +694,17 @@ impl ExpansionInfo { db: &dyn db::ExpandDatabase, token: InFile<&SyntaxToken>, ) -> Option<(InFile<SyntaxToken>, Origin)> { + assert_eq!(token.file_id, self.expanded.file_id.into()); // Fetch the id through its text range, let token_id = self.exp_map.token_by_range(token.value.text_range())?; // conditionally unshifting the id to accommodate for macro-rules def site let (mut token_id, origin) = self.macro_def.map_id_up(token_id); - let call_id = self.expanded.file_id.macro_file()?.macro_call_id; + let call_id = self.expanded.file_id.macro_call_id; let loc = db.lookup_intern_macro_call(call_id); // Special case: map tokens from `include!` expansions to the included file - if loc.def.is_include() - && matches!(loc.eager.as_deref(), Some(EagerCallInfo { arg_id: Some(_), .. 
})) - { + if loc.def.is_include() { if let Ok((tt_and_map, file_id)) = db.include_expand(call_id) { let range = tt_and_map.1.first_range_by_token(token_id, token.value.kind())?; let source = db.parse(file_id); @@ -765,9 +734,9 @@ impl ExpansionInfo { } _ => match origin { mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()), - mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) { - (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => { - (def_site_token_map, tt.syntax().cloned()) + mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def) { + (TokenExpander::DeclarativeMacro(expander), Some(tt)) => { + (&expander.def_site_token_map, tt.syntax().cloned()) } _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"), }, @@ -779,6 +748,58 @@ impl ExpansionInfo { tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; Some((tt.with_value(token), origin)) } + + fn new(db: &dyn db::ExpandDatabase, macro_file: MacroFile) -> Option<ExpansionInfo> { + let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id); + + let arg_tt = loc.kind.arg(db)?; + + let macro_def = db.macro_expander(loc.def); + let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; + let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; + + let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { + Arc::new(( + tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() }, + Default::default(), + Default::default(), + )) + }); + + let def = loc.def.ast_id().left().and_then(|id| { + let def_tt = match id.to_node(db) { + ast::Macro::MacroRules(mac) => mac.token_tree()?, + ast::Macro::MacroDef(_) if matches!(macro_def, TokenExpander::BuiltInAttr(_)) => { + return None + } + ast::Macro::MacroDef(mac) => mac.body()?, + }; + Some(InFile::new(id.file_id, def_tt)) + }); + let attr_input_or_mac_def = def.or_else(|| match loc.kind { + MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => { + // FIXME: handle `cfg_attr` + let tt = ast_id + .to_node(db) + .doc_comments_and_attrs() + .nth(invoc_attr_index.ast_index()) + .and_then(Either::left)? + .token_tree()?; + Some(InFile::new(ast_id.file_id, tt)) + } + _ => None, + }); + + Some(ExpansionInfo { + expanded, + arg: arg_tt, + attr_input_or_mac_def, + macro_arg_shift: mbe::Shift::new(¯o_arg.0), + macro_arg, + macro_def, + exp_map, + }) + } } /// `AstId` points to an AST node in any file. @@ -786,10 +807,26 @@ impl ExpansionInfo { /// It is stable across reparses, and can be used as salsa key/value. 
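The new `ExpansionInfo::new` constructor above keeps the old behaviour of substituting an empty subtree when `macro_arg` yields no value, rather than failing the whole lookup. A rough, self-contained sketch of that fallback pattern, using simplified stand-in types (not rust-analyzer's API):

```rust
use std::sync::Arc;

#[derive(Debug, Default, Clone)]
struct Subtree {
    token_trees: Vec<String>,
}

#[derive(Debug, Default, Clone)]
struct TokenMap;

// Stand-in for db.macro_arg(..).value, which may be absent for invalid calls.
fn macro_arg(available: bool) -> Option<Arc<(Subtree, TokenMap)>> {
    available.then(|| Arc::new((Subtree { token_trees: vec!["tok".to_owned()] }, TokenMap)))
}

fn expansion_arg(available: bool) -> Arc<(Subtree, TokenMap)> {
    // Fall back to an empty argument so the rest of the mapping machinery
    // still has something to work with.
    macro_arg(available).unwrap_or_else(|| Arc::new((Subtree::default(), TokenMap::default())))
}

fn main() {
    assert!(expansion_arg(false).0.token_trees.is_empty());
    assert_eq!(expansion_arg(true).0.token_trees.len(), 1);
}
```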
pub type AstId<N> = InFile<FileAstId<N>>; -impl<N: AstNode> AstId<N> { +impl<N: AstIdNode> AstId<N> { pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N { - let root = db.parse_or_expand(self.file_id); - db.ast_id_map(self.file_id).get(self.value).to_node(&root) + self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) + } + pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> InFile<N> { + InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))) + } + pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> { + db.ast_id_map(self.file_id).get(self.value) + } +} + +pub type ErasedAstId = InFile<ErasedFileAstId>; + +impl ErasedAstId { + pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode { + self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)) + } + pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr { + db.ast_id_map(self.file_id).get_raw(self.value) } } @@ -850,7 +887,7 @@ impl<L, R> InFile<Either<L, R>> { } } -impl<'a> InFile<&'a SyntaxNode> { +impl InFile<&SyntaxNode> { pub fn ancestors_with_macros( self, db: &dyn db::ExpandDatabase, @@ -1011,6 +1048,18 @@ impl InFile<SyntaxToken> { } } +#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)] +pub struct InMacroFile<T> { + pub file_id: MacroFile, + pub value: T, +} + +impl<T> From<InMacroFile<T>> for InFile<T> { + fn from(macro_file: InMacroFile<T>) -> Self { + InFile { file_id: macro_file.file_id.into(), value: macro_file.value } + } +} + fn ascend_node_border_tokens( db: &dyn db::ExpandDatabase, InFile { file_id, value: node }: InFile<&SyntaxNode>, diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs index 47a8ab7de77..69aa09c4a52 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/mod_path.rs @@ -126,7 +126,7 @@ struct Display<'a> { path: &'a ModPath, } -impl<'a> fmt::Display for Display<'a> { +impl fmt::Display for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { display_fmt_path(self.db, self.path, f, true) } @@ -137,7 +137,7 @@ struct UnescapedDisplay<'a> { path: &'a UnescapedModPath<'a>, } -impl<'a> fmt::Display for UnescapedDisplay<'a> { +impl fmt::Display for UnescapedDisplay<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { display_fmt_path(self.db, self.path.0, f, false) } diff --git a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs index f8dbb842775..7c179c0cf95 100644 --- a/src/tools/rust-analyzer/crates/hir-expand/src/name.rs +++ b/src/tools/rust-analyzer/crates/hir-expand/src/name.rs @@ -24,7 +24,7 @@ enum Repr { TupleField(usize), } -impl<'a> UnescapedName<'a> { +impl UnescapedName<'_> { /// Returns the textual representation of this name as a [`SmolStr`]. Prefer using this over /// [`ToString::to_string`] if possible as this conversion is cheaper in the general case. pub fn to_smol_str(&self) -> SmolStr { @@ -40,7 +40,7 @@ impl<'a> UnescapedName<'a> { } } - pub fn display(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a { + pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ { _ = db; UnescapedDisplay { name: self } } @@ -96,6 +96,15 @@ impl Name { Name::new_inline("[missing name]") } + /// Returns true if this is a fake name for things missing in the source code. See + /// [`missing()`][Self::missing] for details. 
+ /// + /// Use this method instead of comparing with `Self::missing()` as missing names + /// (ideally should) have a `gensym` semantics. + pub fn is_missing(&self) -> bool { + self == &Name::missing() + } + /// Generates a new name which is only equal to itself, by incrementing a counter. Due /// its implementation, it should not be used in things that salsa considers, like /// type names or field names, and it should be only used in names of local variables @@ -162,7 +171,7 @@ struct Display<'a> { name: &'a Name, } -impl<'a> fmt::Display for Display<'a> { +impl fmt::Display for Display<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.name.0 { Repr::Text(text) => fmt::Display::fmt(&text, f), @@ -175,7 +184,7 @@ struct UnescapedDisplay<'a> { name: &'a UnescapedName<'a>, } -impl<'a> fmt::Display for UnescapedDisplay<'a> { +impl fmt::Display for UnescapedDisplay<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.name.0 .0 { Repr::Text(text) => { @@ -282,8 +291,10 @@ pub mod known { alloc, iter, ops, + fmt, future, result, + string, boxed, option, prelude, @@ -311,6 +322,7 @@ pub mod known { RangeToInclusive, RangeTo, Range, + String, Neg, Not, None, @@ -321,6 +333,7 @@ pub mod known { iter_mut, len, is_empty, + as_str, new, // Builtin macros asm, @@ -334,6 +347,7 @@ pub mod known { core_panic, env, file, + format, format_args_nl, format_args, global_asm, @@ -365,6 +379,7 @@ pub mod known { cfg_eval, crate_type, derive, + derive_const, global_allocator, no_core, no_std, diff --git a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml index c8bea34507c..abc19d63abf 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir-ty/Cargo.toml @@ -19,14 +19,15 @@ bitflags = "2.1.0" smallvec.workspace = true ena = "0.14.0" either = "1.7.0" +oorandom = "11.1.3" tracing = "0.1.35" rustc-hash = "1.1.0" scoped-tls = "1.0.0" -chalk-solve = { version = "0.91.0", default-features = false } -chalk-ir = "0.91.0" -chalk-recursive = { version = "0.91.0", default-features = false } -chalk-derive = "0.91.0" -la-arena = { version = "0.3.0", path = "../../lib/la-arena" } +chalk-solve = { version = "0.92.0", default-features = false } +chalk-ir = "0.92.0" +chalk-recursive = { version = "0.92.0", default-features = false } +chalk-derive = "0.92.0" +la-arena.workspace = true once_cell = "1.17.0" triomphe.workspace = true nohash-hasher.workspace = true @@ -47,7 +48,6 @@ limit.workspace = true expect-test = "1.4.0" tracing = "0.1.35" tracing-subscriber = { version = "0.3.16", default-features = false, features = [ - "env-filter", "registry", ] } tracing-tree = "0.2.1" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs index 3860bccec8b..4625a3b01a3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/autoderef.rs @@ -36,7 +36,7 @@ pub fn autoderef( ) -> impl Iterator<Item = Ty> { let mut table = InferenceTable::new(db, env); let ty = table.instantiate_canonical(ty); - let mut autoderef = Autoderef::new(&mut table, ty); + let mut autoderef = Autoderef::new(&mut table, ty, false); let mut v = Vec::new(); while let Some((ty, _steps)) = autoderef.next() { // `ty` may contain unresolved inference variables. 
Since there's no chance they would be @@ -63,12 +63,13 @@ pub(crate) struct Autoderef<'a, 'db> { ty: Ty, at_start: bool, steps: Vec<(AutoderefKind, Ty)>, + explicit: bool, } impl<'a, 'db> Autoderef<'a, 'db> { - pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty) -> Self { + pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self { let ty = table.resolve_ty_shallow(&ty); - Autoderef { table, ty, at_start: true, steps: Vec::new() } + Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit } } pub(crate) fn step_count(&self) -> usize { @@ -97,7 +98,7 @@ impl Iterator for Autoderef<'_, '_> { return None; } - let (kind, new_ty) = autoderef_step(self.table, self.ty.clone())?; + let (kind, new_ty) = autoderef_step(self.table, self.ty.clone(), self.explicit)?; self.steps.push((kind, self.ty.clone())); self.ty = new_ty; @@ -109,8 +110,9 @@ impl Iterator for Autoderef<'_, '_> { pub(crate) fn autoderef_step( table: &mut InferenceTable<'_>, ty: Ty, + explicit: bool, ) -> Option<(AutoderefKind, Ty)> { - if let Some(derefed) = builtin_deref(table, &ty, false) { + if let Some(derefed) = builtin_deref(table, &ty, explicit) { Some((AutoderefKind::Builtin, table.resolve_ty_shallow(derefed))) } else { Some((AutoderefKind::Overloaded, deref_by_trait(table, ty)?)) @@ -124,7 +126,6 @@ pub(crate) fn builtin_deref<'ty>( ) -> Option<&'ty Ty> { match ty.kind(Interner) { TyKind::Ref(.., ty) => Some(ty), - // FIXME: Maybe accept this but diagnose if its not explicit? TyKind::Raw(.., ty) if explicit => Some(ty), &TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => { if crate::lang_items::is_box(table.db, adt) { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 5dd8e2719a2..f4fbace19e3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -5,13 +5,13 @@ use std::{iter, sync::Arc}; use tracing::debug; -use chalk_ir::{cast::Cast, fold::shift::Shift, CanonicalVarKinds}; +use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds}; use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; use base_db::CrateId; use hir_def::{ hir::Movability, - lang_item::{lang_attr, LangItem, LangItemTarget}, + lang_item::{LangItem, LangItemTarget}, AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, }; use hir_expand::name::name; @@ -46,7 +46,7 @@ pub(crate) type AssociatedTyValue = chalk_solve::rust_ir::AssociatedTyValue<Inte pub(crate) type FnDefDatum = chalk_solve::rust_ir::FnDefDatum<Interner>; pub(crate) type Variances = chalk_ir::Variances<Interner>; -impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { +impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> { fn associated_ty_data(&self, id: AssocTypeId) -> Arc<AssociatedTyDatum> { self.db.associated_ty_data(id) } @@ -60,9 +60,37 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { // FIXME: keep track of these Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None }) } - fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> { - // FIXME: keep track of this - chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner) + fn discriminant_type(&self, ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> { + if let chalk_ir::TyKind::Adt(id, _) = ty.kind(Interner) { + if let hir_def::AdtId::EnumId(e) = id.0 { + let enum_data 
= self.db.enum_data(e); + let ty = enum_data.repr.unwrap_or_default().discr_type(); + return chalk_ir::TyKind::Scalar(match ty { + hir_def::layout::IntegerType::Pointer(is_signed) => match is_signed { + true => chalk_ir::Scalar::Int(chalk_ir::IntTy::Isize), + false => chalk_ir::Scalar::Uint(chalk_ir::UintTy::Usize), + }, + hir_def::layout::IntegerType::Fixed(size, is_signed) => match is_signed { + true => chalk_ir::Scalar::Int(match size { + hir_def::layout::Integer::I8 => chalk_ir::IntTy::I8, + hir_def::layout::Integer::I16 => chalk_ir::IntTy::I16, + hir_def::layout::Integer::I32 => chalk_ir::IntTy::I32, + hir_def::layout::Integer::I64 => chalk_ir::IntTy::I64, + hir_def::layout::Integer::I128 => chalk_ir::IntTy::I128, + }), + false => chalk_ir::Scalar::Uint(match size { + hir_def::layout::Integer::I8 => chalk_ir::UintTy::U8, + hir_def::layout::Integer::I16 => chalk_ir::UintTy::U16, + hir_def::layout::Integer::I32 => chalk_ir::UintTy::U32, + hir_def::layout::Integer::I64 => chalk_ir::UintTy::U64, + hir_def::layout::Integer::I128 => chalk_ir::UintTy::U128, + }), + }, + }) + .intern(Interner); + } + } + chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U8)).intern(Interner) } fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> { self.db.impl_datum(self.krate, impl_id) @@ -565,7 +593,7 @@ pub(crate) fn trait_datum_query( let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars); let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect(); let trait_datum_bound = rust_ir::TraitDatumBound { where_clauses }; - let well_known = lang_attr(db.upcast(), trait_).and_then(well_known_trait_from_lang_item); + let well_known = db.lang_attr(trait_.into()).and_then(well_known_trait_from_lang_item); let trait_datum = TraitDatum { id: trait_id, binders: make_binders(db, &generic_params, trait_datum_bound), @@ -593,6 +621,7 @@ fn well_known_trait_from_lang_item(item: LangItem) -> Option<WellKnownTrait> { LangItem::Unsize => WellKnownTrait::Unsize, LangItem::Tuple => WellKnownTrait::Tuple, LangItem::PointeeTrait => WellKnownTrait::Pointee, + LangItem::FnPtrTrait => WellKnownTrait::FnPtr, _ => return None, }) } @@ -614,6 +643,7 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem { WellKnownTrait::Unpin => LangItem::Unpin, WellKnownTrait::Unsize => LangItem::Unsize, WellKnownTrait::Pointee => LangItem::PointeeTrait, + WellKnownTrait::FnPtr => LangItem::FnPtrTrait, } } @@ -844,28 +874,34 @@ pub(super) fn generic_predicate_to_inline_bound( } let args_no_self = trait_ref.substitution.as_slice(Interner)[1..] .iter() - .map(|ty| ty.clone().cast(Interner)) + .cloned() + .casted(Interner) .collect(); let trait_bound = rust_ir::TraitBound { trait_id: trait_ref.trait_id, args_no_self }; Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound))) } WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => { - let trait_ = projection_ty.trait_(db); - if projection_ty.self_type_parameter(db) != self_ty_shifted_in { + let generics = + generics(db.upcast(), from_assoc_type_id(projection_ty.associated_ty_id).into()); + let (assoc_args, trait_args) = + projection_ty.substitution.as_slice(Interner).split_at(generics.len_self()); + let (self_ty, args_no_self) = + trait_args.split_first().expect("projection without trait self type"); + if self_ty.assert_ty_ref(Interner) != &self_ty_shifted_in { return None; } - let args_no_self = projection_ty.substitution.as_slice(Interner)[1..] 
- .iter() - .map(|ty| ty.clone().cast(Interner)) - .collect(); + + let args_no_self = args_no_self.iter().cloned().casted(Interner).collect(); + let parameters = assoc_args.to_vec(); + let alias_eq_bound = rust_ir::AliasEqBound { value: ty.clone(), trait_bound: rust_ir::TraitBound { - trait_id: to_chalk_trait_id(trait_), + trait_id: to_chalk_trait_id(projection_ty.trait_(db)), args_no_self, }, associated_ty_id: projection_ty.associated_ty_id, - parameters: Vec::new(), // FIXME we don't support generic associated types yet + parameters, }; Some(chalk_ir::Binders::new( binders, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs index a8071591ada..c0b243ea248 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_ext.rs @@ -343,7 +343,8 @@ impl TyExt for Ty { fn is_copy(self, db: &dyn HirDatabase, owner: DefWithBodyId) -> bool { let crate_id = owner.module(db.upcast()).krate(); - let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|x| x.as_trait()) else { + let Some(copy_trait) = db.lang_item(crate_id, LangItem::Copy).and_then(|it| it.as_trait()) + else { return false; }; let trait_ref = TyBuilder::trait_ref(db, copy_trait).push(self).build(); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs index 262341c6e9e..4de90d40a7c 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval.rs @@ -88,7 +88,7 @@ pub(crate) fn path_to_const( ConstValue::Placeholder(to_placeholder_idx(db, p.into())) } ParamLoweringMode::Variable => match args.param_idx(p.into()) { - Some(x) => ConstValue::BoundVar(BoundVar::new(debruijn, x)), + Some(it) => ConstValue::BoundVar(BoundVar::new(debruijn, it)), None => { never!( "Generic list doesn't contain this param: {:?}, {:?}, {:?}", @@ -139,11 +139,11 @@ pub fn intern_const_ref( let bytes = match value { LiteralConstRef::Int(i) => { // FIXME: We should handle failure of layout better. 
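The `intern_const_ref` hunk above encodes integer literals as little-endian bytes truncated to the type's layout size, falling back to 16 bytes when the layout is unknown (as the FIXME notes). A small sketch of just that truncation step, with the layout lookup reduced to an `Option<usize>` assumption:

```rust
// Keep only `size` little-endian bytes of the 128-bit literal; `size` must be
// at most 16. `layout_size` stands in for the real layout query result.
fn encode_int_literal(value: i128, layout_size: Option<usize>) -> Vec<u8> {
    let size = layout_size.unwrap_or(16);
    value.to_le_bytes()[0..size].to_vec()
}

fn main() {
    // An i32 literal occupies 4 bytes and round-trips through the encoding.
    assert_eq!(encode_int_literal(-10, Some(4)), (-10i32).to_le_bytes().to_vec());
    // With no layout information, the full 16-byte representation is kept.
    assert_eq!(encode_int_literal(1, None).len(), 16);
}
```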
- let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16); + let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default()) } LiteralConstRef::UInt(i) => { - let size = layout.map(|x| x.size.bytes_usize()).unwrap_or(16); + let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16); ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default()) } LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()), @@ -171,7 +171,7 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> { chalk_ir::ConstValue::InferenceVar(_) => None, chalk_ir::ConstValue::Placeholder(_) => None, chalk_ir::ConstValue::Concrete(c) => match &c.interned { - ConstScalar::Bytes(x, _) => Some(u128::from_le_bytes(pad16(&x, false))), + ConstScalar::Bytes(it, _) => Some(u128::from_le_bytes(pad16(&it, false))), ConstScalar::UnevaluatedConst(c, subst) => { let ec = db.const_eval(*c, subst.clone()).ok()?; try_const_usize(db, &ec) @@ -228,7 +228,7 @@ pub(crate) fn const_eval_query( } GeneralConstId::InTypeConstId(c) => db.mir_body(c.into())?, }; - let c = interpret_mir(db, &body, false).0?; + let c = interpret_mir(db, body, false).0?; Ok(c) } @@ -241,7 +241,7 @@ pub(crate) fn const_eval_static_query( Substitution::empty(Interner), db.trait_environment_for_body(def.into()), )?; - let c = interpret_mir(db, &body, false).0?; + let c = interpret_mir(db, body, false).0?; Ok(c) } @@ -268,7 +268,7 @@ pub(crate) fn const_eval_discriminant_variant( Substitution::empty(Interner), db.trait_environment_for_body(def), )?; - let c = interpret_mir(db, &mir_body, false).0?; + let c = interpret_mir(db, mir_body, false).0?; let c = try_const_usize(db, &c).unwrap() as i128; Ok(c) } @@ -293,7 +293,7 @@ pub(crate) fn eval_to_const( } let infer = ctx.clone().resolve_all(); if let Ok(mir_body) = lower_to_mir(ctx.db, ctx.owner, &ctx.body, &infer, expr) { - if let Ok(result) = interpret_mir(db, &mir_body, true).0 { + if let Ok(result) = interpret_mir(db, Arc::new(mir_body), true).0 { return result; } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs index 0db1fefbfef..5bb327606d3 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests.rs @@ -1,10 +1,11 @@ use base_db::{fixture::WithFixture, FileId}; use chalk_ir::Substitution; use hir_def::db::DefDatabase; +use test_utils::skip_slow_tests; use crate::{ consteval::try_const_usize, db::HirDatabase, mir::pad16, test_db::TestDB, Const, ConstScalar, - Interner, + Interner, MemoryMap, }; use super::{ @@ -16,7 +17,7 @@ mod intrinsics; fn simplify(e: ConstEvalError) -> ConstEvalError { match e { - ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e, _, _)) => { + ConstEvalError::MirEvalError(MirEvalError::InFunction(e, _)) => { simplify(ConstEvalError::MirEvalError(*e)) } _ => e, @@ -36,7 +37,37 @@ fn check_fail(ra_fixture: &str, error: impl FnOnce(ConstEvalError) -> bool) { #[track_caller] fn check_number(ra_fixture: &str, answer: i128) { - let (db, file_id) = TestDB::with_single_file(ra_fixture); + check_answer(ra_fixture, |b, _| { + assert_eq!( + b, + &answer.to_le_bytes()[0..b.len()], + "Bytes differ. 
In decimal form: actual = {}, expected = {answer}", + i128::from_le_bytes(pad16(b, true)) + ); + }); +} + +#[track_caller] +fn check_str(ra_fixture: &str, answer: &str) { + check_answer(ra_fixture, |b, mm| { + let addr = usize::from_le_bytes(b[0..b.len() / 2].try_into().unwrap()); + let size = usize::from_le_bytes(b[b.len() / 2..].try_into().unwrap()); + let Some(bytes) = mm.get(addr, size) else { + panic!("string data missed in the memory map"); + }; + assert_eq!( + bytes, + answer.as_bytes(), + "Bytes differ. In string form: actual = {}, expected = {answer}", + String::from_utf8_lossy(bytes) + ); + }); +} + +#[track_caller] +fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) { + let (db, file_ids) = TestDB::with_many_files(ra_fixture); + let file_id = *file_ids.last().unwrap(); let r = match eval_goal(&db, file_id) { Ok(t) => t, Err(e) => { @@ -46,13 +77,8 @@ fn check_number(ra_fixture: &str, answer: i128) { }; match &r.data(Interner).value { chalk_ir::ConstValue::Concrete(c) => match &c.interned { - ConstScalar::Bytes(b, _) => { - assert_eq!( - b, - &answer.to_le_bytes()[0..b.len()], - "Bytes differ. In decimal form: actual = {}, expected = {answer}", - i128::from_le_bytes(pad16(b, true)) - ); + ConstScalar::Bytes(b, mm) => { + check(b, mm); } x => panic!("Expected number but found {:?}", x), }, @@ -87,7 +113,7 @@ fn eval_goal(db: &TestDB, file_id: FileId) -> Result<Const, ConstEvalError> { } _ => None, }) - .unwrap(); + .expect("No const named GOAL found in the test"); db.const_eval(const_id.into(), Substitution::empty(Interner)) } @@ -108,6 +134,7 @@ fn bit_op() { check_fail(r#"const GOAL: i8 = 1 << 8"#, |e| { e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_string())) }); + check_number(r#"const GOAL: i32 = 100000000i32 << 11"#, (100000000i32 << 11) as i128); } #[test] @@ -166,14 +193,21 @@ fn casts() { check_number( r#" //- minicore: coerce_unsized, index, slice + struct X { + unsize_field: [u8], + } + const GOAL: usize = { let a = [10, 20, 3, 15]; let x: &[i32] = &a; - let y: *const [i32] = x; - let z = y as *const [u8]; // slice fat pointer cast don't touch metadata - let q = z as *const str; - let p = q as *const [u8]; - let w = unsafe { &*z }; + let x: *const [i32] = x; + let x = x as *const [u8]; // slice fat pointer cast don't touch metadata + let x = x as *const str; + let x = x as *const X; + let x = x as *const [i16]; + let x = x as *const X; + let x = x as *const [u8]; + let w = unsafe { &*x }; w.len() }; "#, @@ -199,6 +233,30 @@ fn raw_pointer_equality() { } #[test] +fn alignment() { + check_answer( + r#" +//- minicore: transmute +use core::mem::transmute; +const GOAL: usize = { + let x: i64 = 2; + transmute(&x) +} + "#, + |b, _| assert_eq!(b[0] % 8, 0), + ); + check_answer( + r#" +//- minicore: transmute +use core::mem::transmute; +static X: i64 = 12; +const GOAL: usize = transmute(&X); + "#, + |b, _| assert_eq!(b[0] % 8, 0), + ); +} + +#[test] fn locals() { check_number( r#" @@ -1551,6 +1609,30 @@ fn closures() { } #[test] +fn manual_fn_trait_impl() { + check_number( + r#" +//- minicore: fn, copy +struct S(i32); + +impl FnOnce<(i32, i32)> for S { + type Output = i32; + + extern "rust-call" fn call_once(self, arg: (i32, i32)) -> i32 { + arg.0 + arg.1 + self.0 + } +} + +const GOAL: i32 = { + let s = S(1); + s(2, 3) +}; +"#, + 6, + ); +} + +#[test] fn closure_and_impl_fn() { check_number( r#" @@ -1663,6 +1745,18 @@ fn function_pointer() { ); check_number( r#" + fn add2(x: u8) -> u8 { + x + 2 + } + const GOAL: u8 = { + let 
plus2 = add2 as fn(u8) -> u8; + plus2(3) + }; + "#, + 5, + ); + check_number( + r#" //- minicore: coerce_unsized, index, slice fn add2(x: u8) -> u8 { x + 2 @@ -1850,6 +1944,38 @@ fn dyn_trait() { } #[test] +fn coerce_unsized() { + check_number( + r#" +//- minicore: coerce_unsized, deref_mut, slice, index, transmute, non_null +use core::ops::{Deref, DerefMut, CoerceUnsized}; +use core::{marker::Unsize, mem::transmute, ptr::NonNull}; + +struct ArcInner<T: ?Sized> { + strong: usize, + weak: usize, + data: T, +} + +pub struct Arc<T: ?Sized> { + inner: NonNull<ArcInner<T>>, +} + +impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {} + +const GOAL: usize = { + let x = transmute::<usize, Arc<[i32; 3]>>(12); + let y: Arc<[i32]> = x; + let z = transmute::<Arc<[i32]>, (usize, usize)>(y); + z.1 +}; + + "#, + 3, + ); +} + +#[test] fn boxes() { check_number( r#" @@ -1961,6 +2087,17 @@ fn array_and_index() { } #[test] +fn string() { + check_str( + r#" + //- minicore: coerce_unsized, index, slice + const GOAL: &str = "hello"; + "#, + "hello", + ); +} + +#[test] fn byte_string() { check_number( r#" @@ -2018,6 +2155,57 @@ fn consts() { "#, 6, ); + + check_number( + r#" + const F1: i32 = 2147483647; + const F2: i32 = F1 - 25; + const GOAL: i32 = F2; + "#, + 2147483622, + ); + + check_number( + r#" + const F1: i32 = -2147483648; + const F2: i32 = F1 + 18; + const GOAL: i32 = F2; + "#, + -2147483630, + ); + + check_number( + r#" + const F1: i32 = 10; + const F2: i32 = F1 - 20; + const GOAL: i32 = F2; + "#, + -10, + ); + + check_number( + r#" + const F1: i32 = 25; + const F2: i32 = F1 - 25; + const GOAL: i32 = F2; + "#, + 0, + ); + + check_number( + r#" + const A: i32 = -2147483648; + const GOAL: bool = A > 0; + "#, + 0, + ); + + check_number( + r#" + const GOAL: i64 = (-2147483648_i32) as i64; + "#, + -2147483648, + ); } #[test] @@ -2287,6 +2475,25 @@ fn const_trait_assoc() { ); check_number( r#" + //- /a/lib.rs crate:a + pub trait ToConst { + const VAL: usize; + } + pub const fn to_const<T: ToConst>() -> usize { + T::VAL + } + //- /main.rs crate:main deps:a + use a::{ToConst, to_const}; + struct U0; + impl ToConst for U0 { + const VAL: usize = 5; + } + const GOAL: usize = to_const::<U0>(); + "#, + 5, + ); + check_number( + r#" struct S<T>(*mut T); trait MySized: Sized { @@ -2311,21 +2518,11 @@ fn const_trait_assoc() { } #[test] -fn panic_messages() { - check_fail( - r#" - //- minicore: panic - const GOAL: u8 = { - let x: u16 = 2; - panic!("hello"); - }; - "#, - |e| e == ConstEvalError::MirEvalError(MirEvalError::Panic("hello".to_string())), - ); -} - -#[test] fn exec_limits() { + if skip_slow_tests() { + return; + } + check_fail( r#" const GOAL: usize = loop {}; @@ -2339,7 +2536,7 @@ fn exec_limits() { } const GOAL: i32 = f(0); "#, - |e| e == ConstEvalError::MirEvalError(MirEvalError::StackOverflow), + |e| e == ConstEvalError::MirEvalError(MirEvalError::ExecutionLimitExceeded), ); // Reasonable code should still work check_number( @@ -2363,6 +2560,28 @@ fn exec_limits() { } #[test] +fn memory_limit() { + check_fail( + r#" + extern "Rust" { + #[rustc_allocator] + fn __rust_alloc(size: usize, align: usize) -> *mut u8; + } + + const GOAL: u8 = unsafe { + __rust_alloc(30_000_000_000, 1); // 30GB + 2 + }; + "#, + |e| { + e == ConstEvalError::MirEvalError(MirEvalError::Panic( + "Memory allocation of 30000000000 bytes failed".to_string(), + )) + }, + ); +} + +#[test] fn type_error() { check_fail( r#" @@ -2377,6 +2596,37 @@ fn type_error() { } #[test] +fn unsized_field() { + check_number( + r#" 
+ //- minicore: coerce_unsized, index, slice, transmute + use core::mem::transmute; + + struct Slice([u8]); + struct Slice2(Slice); + + impl Slice2 { + fn as_inner(&self) -> &Slice { + &self.0 + } + + fn as_bytes(&self) -> &[u8] { + &self.as_inner().0 + } + } + + const GOAL: u8 = unsafe { + let x: &[u8] = &[1, 2, 3]; + let x: &Slice2 = transmute(x); + let x = x.as_bytes(); + x[0] + x[1] + x[2] + }; + "#, + 6, + ); +} + +#[test] fn unsized_local() { check_fail( r#" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs index e05d824dbac..9253e31d77b 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/consteval/tests/intrinsics.rs @@ -15,6 +15,171 @@ fn size_of() { } #[test] +fn size_of_val() { + check_number( + r#" + //- minicore: coerce_unsized + extern "rust-intrinsic" { + pub fn size_of_val<T: ?Sized>(_: *const T) -> usize; + } + + struct X(i32, u8); + + const GOAL: usize = size_of_val(&X(1, 2)); + "#, + 8, + ); + check_number( + r#" + //- minicore: coerce_unsized + extern "rust-intrinsic" { + pub fn size_of_val<T: ?Sized>(_: *const T) -> usize; + } + + const GOAL: usize = { + let it: &[i32] = &[1, 2, 3]; + size_of_val(it) + }; + "#, + 12, + ); + check_number( + r#" + //- minicore: coerce_unsized, transmute + use core::mem::transmute; + + extern "rust-intrinsic" { + pub fn size_of_val<T: ?Sized>(_: *const T) -> usize; + } + + struct X { + x: i64, + y: u8, + t: [i32], + } + + const GOAL: usize = unsafe { + let y: &X = transmute([0usize, 3]); + size_of_val(y) + }; + "#, + 24, + ); + check_number( + r#" + //- minicore: coerce_unsized, transmute + use core::mem::transmute; + + extern "rust-intrinsic" { + pub fn size_of_val<T: ?Sized>(_: *const T) -> usize; + } + + struct X { + x: i32, + y: i64, + t: [u8], + } + + const GOAL: usize = unsafe { + let y: &X = transmute([0usize, 15]); + size_of_val(y) + }; + "#, + 32, + ); + check_number( + r#" + //- minicore: coerce_unsized, fmt, builtin_impls + extern "rust-intrinsic" { + pub fn size_of_val<T: ?Sized>(_: *const T) -> usize; + } + + const GOAL: usize = { + let x: &i16 = &5; + let y: &dyn core::fmt::Debug = x; + let z: &dyn core::fmt::Debug = &y; + size_of_val(x) + size_of_val(y) * 10 + size_of_val(z) * 100 + }; + "#, + 1622, + ); + check_number( + r#" + //- minicore: coerce_unsized + extern "rust-intrinsic" { + pub fn size_of_val<T: ?Sized>(_: *const T) -> usize; + } + + const GOAL: usize = { + size_of_val("salam") + }; + "#, + 5, + ); +} + +#[test] +fn min_align_of_val() { + check_number( + r#" + //- minicore: coerce_unsized + extern "rust-intrinsic" { + pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize; + } + + struct X(i32, u8); + + const GOAL: usize = min_align_of_val(&X(1, 2)); + "#, + 4, + ); + check_number( + r#" + //- minicore: coerce_unsized + extern "rust-intrinsic" { + pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize; + } + + const GOAL: usize = { + let x: &[i32] = &[1, 2, 3]; + min_align_of_val(x) + }; + "#, + 4, + ); +} + +#[test] +fn type_name() { + check_str( + r#" + extern "rust-intrinsic" { + pub fn type_name<T: ?Sized>() -> &'static str; + } + + const GOAL: &str = type_name::<i32>(); + "#, + "i32", + ); + check_str( + r#" + extern "rust-intrinsic" { + pub fn type_name<T: ?Sized>() -> &'static str; + } + + mod mod1 { + pub mod mod2 { + pub struct Ty; + } + } + + const GOAL: &str = type_name::<mod1::mod2::Ty>(); + "#, + 
"mod1::mod2::Ty", + ); +} + +#[test] fn transmute() { check_number( r#" @@ -29,9 +194,28 @@ fn transmute() { } #[test] +fn read_via_copy() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn read_via_copy<T>(e: *const T) -> T; + pub fn volatile_load<T>(e: *const T) -> T; + } + + const GOAL: i32 = { + let x = 2; + read_via_copy(&x) + volatile_load(&x) + }; + "#, + 4, + ); +} + +#[test] fn const_eval_select() { check_number( r#" + //- minicore: fn extern "rust-intrinsic" { pub fn const_eval_select<ARG, F, G, RET>(arg: ARG, called_in_const: F, called_at_rt: G) -> RET where @@ -68,7 +252,7 @@ fn wrapping_add() { } #[test] -fn saturating_add() { +fn saturating() { check_number( r#" extern "rust-intrinsic" { @@ -82,6 +266,16 @@ fn saturating_add() { check_number( r#" extern "rust-intrinsic" { + pub fn saturating_sub<T>(a: T, b: T) -> T; + } + + const GOAL: bool = saturating_sub(5u8, 7) == 0 && saturating_sub(8u8, 4) == 4; + "#, + 1, + ); + check_number( + r#" + extern "rust-intrinsic" { pub fn saturating_add<T>(a: T, b: T) -> T; } @@ -112,6 +306,7 @@ fn allocator() { *ptr = 23; *ptr2 = 32; let ptr = __rust_realloc(ptr, 4, 1, 8); + let ptr = __rust_realloc(ptr, 8, 1, 3); let ptr2 = ((ptr as usize) + 1) as *mut u8; *ptr + *ptr2 }; @@ -160,6 +355,24 @@ fn needs_drop() { } #[test] +fn discriminant_value() { + check_number( + r#" + //- minicore: discriminant, option + use core::marker::DiscriminantKind; + extern "rust-intrinsic" { + pub fn discriminant_value<T>(v: &T) -> <T as DiscriminantKind>::Discriminant; + } + const GOAL: bool = { + discriminant_value(&Some(2i32)) == discriminant_value(&Some(5i32)) + && discriminant_value(&Some(2i32)) != discriminant_value(&None::<i32>) + }; + "#, + 1, + ); +} + +#[test] fn likely() { check_number( r#" @@ -328,6 +541,24 @@ fn copy_nonoverlapping() { } #[test] +fn write_bytes() { + check_number( + r#" + extern "rust-intrinsic" { + fn write_bytes<T>(dst: *mut T, val: u8, count: usize); + } + + const GOAL: i32 = unsafe { + let mut x = 2; + write_bytes(&mut x, 5, 1); + x + }; + "#, + 0x05050505, + ); +} + +#[test] fn copy() { check_number( r#" @@ -363,6 +594,20 @@ fn ctpop() { } #[test] +fn ctlz() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn ctlz<T: Copy>(x: T) -> T; + } + + const GOAL: u8 = ctlz(0b0001_1100_u8); + "#, + 3, + ); +} + +#[test] fn cttz() { check_number( r#" @@ -375,3 +620,85 @@ fn cttz() { 3, ); } + +#[test] +fn rotate() { + check_number( + r#" + extern "rust-intrinsic" { + pub fn rotate_left<T: Copy>(x: T, y: T) -> T; + } + + const GOAL: i64 = rotate_left(0xaa00000000006e1i64, 12); + "#, + 0x6e10aa, + ); + check_number( + r#" + extern "rust-intrinsic" { + pub fn rotate_right<T: Copy>(x: T, y: T) -> T; + } + + const GOAL: i64 = rotate_right(0x6e10aa, 12); + "#, + 0xaa00000000006e1, + ); + check_number( + r#" + extern "rust-intrinsic" { + pub fn rotate_left<T: Copy>(x: T, y: T) -> T; + } + + const GOAL: i8 = rotate_left(129, 2); + "#, + 6, + ); + check_number( + r#" + extern "rust-intrinsic" { + pub fn rotate_right<T: Copy>(x: T, y: T) -> T; + } + + const GOAL: i32 = rotate_right(10006016, 1020315); + "#, + 320192512, + ); +} + +#[test] +fn simd() { + check_number( + r#" + pub struct i8x16( + i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8, + ); + extern "platform-intrinsic" { + pub fn simd_bitmask<T, U>(x: T) -> U; + } + const GOAL: u16 = simd_bitmask(i8x16( + 0, 1, 0, 0, 2, 255, 100, 0, 50, 0, 1, 1, 0, 0, 0, 0 + )); + "#, + 0b0000110101110010, + ); + check_number( + r#" + pub struct i8x16( + 
i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8,i8, + ); + extern "platform-intrinsic" { + pub fn simd_lt<T, U>(x: T, y: T) -> U; + pub fn simd_bitmask<T, U>(x: T) -> U; + } + const GOAL: u16 = simd_bitmask(simd_lt::<i8x16, i8x16>( + i8x16( + -105, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 + ), + i8x16( + -4, -3, -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11 + ), + )); + "#, + 0xFFFF, + ); +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs index 9dd810f844d..14b719ea412 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/db.rs @@ -110,6 +110,14 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { #[salsa::invoke(crate::layout::target_data_layout_query)] fn target_data_layout(&self, krate: CrateId) -> Option<Arc<TargetDataLayout>>; + #[salsa::invoke(crate::method_resolution::lookup_impl_method_query)] + fn lookup_impl_method( + &self, + env: Arc<crate::TraitEnvironment>, + func: FunctionId, + fn_subst: Substitution, + ) -> (FunctionId, Substitution); + #[salsa::invoke(crate::lower::callable_item_sig)] fn callable_item_signature(&self, def: CallableDefId) -> PolyFnSig; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs index 4b147b99707..ef43ed5c463 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics.rs @@ -5,7 +5,7 @@ mod unsafe_check; mod decl_check; pub use crate::diagnostics::{ - decl_check::{incorrect_case, IncorrectCase}, + decl_check::{incorrect_case, CaseType, IncorrectCase}, expr::{ record_literal_missing_fields, record_pattern_missing_fields, BodyValidationDiagnostic, }, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs index 1233469b947..73c8ad3dd5a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/diagnostics/decl_check.rs @@ -57,11 +57,11 @@ pub fn incorrect_case( #[derive(Debug)] pub enum CaseType { - // `some_var` + /// `some_var` LowerSnakeCase, - // `SOME_CONST` + /// `SOME_CONST` UpperSnakeCase, - // `SomeStruct` + /// `SomeStruct` UpperCamelCase, } @@ -181,6 +181,7 @@ impl<'a> DeclValidator<'a> { AttrDefId::TraitAliasId(taid) => Some(taid.lookup(self.db.upcast()).container.into()), AttrDefId::ImplId(iid) => Some(iid.lookup(self.db.upcast()).container.into()), AttrDefId::ExternBlockId(id) => Some(id.lookup(self.db.upcast()).container.into()), + AttrDefId::ExternCrateId(id) => Some(id.lookup(self.db.upcast()).container.into()), // These warnings should not explore macro definitions at all AttrDefId::MacroId(_) => None, AttrDefId::AdtId(aid) => match aid { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs index c1df24d1729..8cffdef289e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/display.rs @@ -192,7 +192,7 @@ pub trait HirDisplay { } } -impl<'a> HirFormatter<'a> { +impl HirFormatter<'_> { pub fn write_joined<T: HirDisplay>( &mut self, iter: impl IntoIterator<Item = T>, @@ -342,7 +342,7 @@ impl<T: HirDisplay> HirDisplayWrapper<'_, T> { } } -impl<'a, T> fmt::Display for HirDisplayWrapper<'a, T> +impl<T> fmt::Display for HirDisplayWrapper<'_, T> where T: HirDisplay, { @@ -360,7 
+360,7 @@ where const TYPE_HINT_TRUNCATION: &str = "…"; -impl<T: HirDisplay> HirDisplay for &'_ T { +impl<T: HirDisplay> HirDisplay for &T { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { HirDisplay::hir_fmt(*self, f) } @@ -446,28 +446,6 @@ impl HirDisplay for Const { } } -pub struct HexifiedConst(pub Const); - -impl HirDisplay for HexifiedConst { - fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { - let data = &self.0.data(Interner); - if let TyKind::Scalar(s) = data.ty.kind(Interner) { - if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) { - if let ConstValue::Concrete(c) = &data.value { - if let ConstScalar::Bytes(b, m) = &c.interned { - let value = u128::from_le_bytes(pad16(b, false)); - if value >= 10 { - render_const_scalar(f, &b, m, &data.ty)?; - return write!(f, " ({:#X})", value); - } - } - } - } - } - self.0.hir_fmt(f) - } -} - fn render_const_scalar( f: &mut HirFormatter<'_>, b: &[u8], @@ -481,28 +459,28 @@ fn render_const_scalar( TyKind::Scalar(s) => match s { Scalar::Bool => write!(f, "{}", if b[0] == 0 { false } else { true }), Scalar::Char => { - let x = u128::from_le_bytes(pad16(b, false)) as u32; - let Ok(c) = char::try_from(x) else { + let it = u128::from_le_bytes(pad16(b, false)) as u32; + let Ok(c) = char::try_from(it) else { return f.write_str("<unicode-error>"); }; write!(f, "{c:?}") } Scalar::Int(_) => { - let x = i128::from_le_bytes(pad16(b, true)); - write!(f, "{x}") + let it = i128::from_le_bytes(pad16(b, true)); + write!(f, "{it}") } Scalar::Uint(_) => { - let x = u128::from_le_bytes(pad16(b, false)); - write!(f, "{x}") + let it = u128::from_le_bytes(pad16(b, false)); + write!(f, "{it}") } Scalar::Float(fl) => match fl { chalk_ir::FloatTy::F32 => { - let x = f32::from_le_bytes(b.try_into().unwrap()); - write!(f, "{x:?}") + let it = f32::from_le_bytes(b.try_into().unwrap()); + write!(f, "{it:?}") } chalk_ir::FloatTy::F64 => { - let x = f64::from_le_bytes(b.try_into().unwrap()); - write!(f, "{x:?}") + let it = f64::from_le_bytes(b.try_into().unwrap()); + write!(f, "{it:?}") } }, }, @@ -636,7 +614,8 @@ fn render_const_scalar( } hir_def::AdtId::EnumId(e) => { let Some((var_id, var_layout)) = - detect_variant_from_bytes(&layout, f.db, krate, b, e) else { + detect_variant_from_bytes(&layout, f.db, krate, b, e) + else { return f.write_str("<failed-to-detect-variant>"); }; let data = &f.db.enum_data(e).variants[var_id]; @@ -658,8 +637,8 @@ fn render_const_scalar( } TyKind::FnDef(..) 
=> ty.hir_fmt(f), TyKind::Function(_) | TyKind::Raw(_, _) => { - let x = u128::from_le_bytes(pad16(b, false)); - write!(f, "{:#X} as ", x)?; + let it = u128::from_le_bytes(pad16(b, false)); + write!(f, "{:#X} as ", it)?; ty.hir_fmt(f) } TyKind::Array(ty, len) => { @@ -735,7 +714,7 @@ fn render_variant_after_name( } write!(f, " }}")?; } else { - let mut it = it.map(|x| x.0); + let mut it = it.map(|it| it.0); write!(f, "(")?; if let Some(id) = it.next() { render_field(f, id)?; @@ -1277,19 +1256,20 @@ fn hir_fmt_generics( i: usize, parameters: &Substitution, ) -> bool { - if parameter.ty(Interner).map(|x| x.kind(Interner)) == Some(&TyKind::Error) + if parameter.ty(Interner).map(|it| it.kind(Interner)) + == Some(&TyKind::Error) { return true; } if let Some(ConstValue::Concrete(c)) = - parameter.constant(Interner).map(|x| &x.data(Interner).value) + parameter.constant(Interner).map(|it| &it.data(Interner).value) { if c.interned == ConstScalar::Unknown { return true; } } let default_parameter = match default_parameters.get(i) { - Some(x) => x, + Some(it) => it, None => return true, }; let actual_default = diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs index 1ac0837b5b2..0a617dae7d4 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer.rs @@ -290,7 +290,7 @@ impl Default for InternedStandardTypes { /// ``` /// /// Note that for a struct, the 'deep' unsizing of the struct is not recorded. -/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]> +/// E.g., `struct Foo<T> { it: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]> /// The autoderef and -ref are the same as in the above example, but the type /// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about /// the underlying conversions from `[i32; 4]` to `[i32]`. @@ -1172,7 +1172,7 @@ impl<'a> InferenceContext<'a> { unresolved: Option<usize>, path: &ModPath, ) -> (Ty, Option<VariantId>) { - let remaining = unresolved.map(|x| path.segments()[x..].len()).filter(|x| x > &0); + let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0); match remaining { None => { let variant = ty.as_adt().and_then(|(adt_id, _)| match adt_id { @@ -1232,7 +1232,9 @@ impl<'a> InferenceContext<'a> { .as_function()? .lookup(self.db.upcast()) .container - else { return None }; + else { + return None; + }; self.resolve_output_on(trait_) } @@ -1322,7 +1324,7 @@ impl Expectation { /// The primary use case is where the expected type is a fat pointer, /// like `&[isize]`. For example, consider the following statement: /// - /// let x: &[isize] = &[1, 2, 3]; + /// let it: &[isize] = &[1, 2, 3]; /// /// In this case, the expected type for the `&[1, 2, 3]` expression is /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs index ff64ae252bc..972e5321a47 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/closure.rs @@ -139,7 +139,7 @@ impl HirPlace { ) -> CaptureKind { match current_capture { CaptureKind::ByRef(BorrowKind::Mut { .. 
}) => { - if self.projections[len..].iter().any(|x| *x == ProjectionElem::Deref) { + if self.projections[len..].iter().any(|it| *it == ProjectionElem::Deref) { current_capture = CaptureKind::ByRef(BorrowKind::Unique); } } @@ -199,7 +199,7 @@ impl CapturedItem { .to_string(), VariantData::Tuple(fields) => fields .iter() - .position(|x| x.0 == f.local_id) + .position(|it| it.0 == f.local_id) .unwrap_or_default() .to_string(), VariantData::Unit => "[missing field]".to_string(), @@ -439,10 +439,10 @@ impl InferenceContext<'_> { } fn walk_expr(&mut self, tgt_expr: ExprId) { - if let Some(x) = self.result.expr_adjustments.get_mut(&tgt_expr) { + if let Some(it) = self.result.expr_adjustments.get_mut(&tgt_expr) { // FIXME: this take is completely unneeded, and just is here to make borrow checker // happy. Remove it if you can. - let x_taken = mem::take(x); + let x_taken = mem::take(it); self.walk_expr_with_adjust(tgt_expr, &x_taken); *self.result.expr_adjustments.get_mut(&tgt_expr).unwrap() = x_taken; } else { @@ -536,7 +536,7 @@ impl InferenceContext<'_> { if let &Some(expr) = spread { self.consume_expr(expr); } - self.consume_exprs(fields.iter().map(|x| x.expr)); + self.consume_exprs(fields.iter().map(|it| it.expr)); } Expr::Field { expr, name: _ } => self.select_from_expr(*expr), Expr::UnaryOp { expr, op: UnaryOp::Deref } => { @@ -548,7 +548,7 @@ impl InferenceContext<'_> { } else if let Some((f, _)) = self.result.method_resolution(tgt_expr) { let mutability = 'b: { if let Some(deref_trait) = - self.resolve_lang_item(LangItem::DerefMut).and_then(|x| x.as_trait()) + self.resolve_lang_item(LangItem::DerefMut).and_then(|it| it.as_trait()) { if let Some(deref_fn) = self.db.trait_data(deref_trait).method_by_name(&name![deref_mut]) @@ -615,8 +615,8 @@ impl InferenceContext<'_> { "We sort closures, so we should always have data for inner closures", ); let mut cc = mem::take(&mut self.current_captures); - cc.extend(captures.iter().filter(|x| self.is_upvar(&x.place)).map(|x| { - CapturedItemWithoutTy { place: x.place.clone(), kind: x.kind, span: x.span } + cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| { + CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span } })); self.current_captures = cc; } @@ -694,7 +694,7 @@ impl InferenceContext<'_> { }, }, } - if self.result.pat_adjustments.get(&p).map_or(false, |x| !x.is_empty()) { + if self.result.pat_adjustments.get(&p).map_or(false, |it| !it.is_empty()) { for_mut = BorrowKind::Unique; } self.body.walk_pats_shallow(p, |p| self.walk_pat_inner(p, update_result, for_mut)); @@ -706,9 +706,9 @@ impl InferenceContext<'_> { fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { let mut ty = None; - if let Some(x) = self.result.expr_adjustments.get(&e) { - if let Some(x) = x.last() { - ty = Some(x.target.clone()); + if let Some(it) = self.result.expr_adjustments.get(&e) { + if let Some(it) = it.last() { + ty = Some(it.target.clone()); } } ty.unwrap_or_else(|| self.expr_ty(e)) @@ -727,7 +727,7 @@ impl InferenceContext<'_> { // FIXME: We handle closure as a special case, since chalk consider every closure as copy. We // should probably let chalk know which closures are copy, but I don't know how doing it // without creating query cycles. 
- return self.result.closure_info.get(id).map(|x| x.1 == FnTrait::Fn).unwrap_or(true); + return self.result.closure_info.get(id).map(|it| it.1 == FnTrait::Fn).unwrap_or(true); } self.table.resolve_completely(ty).is_copy(self.db, self.owner) } @@ -748,7 +748,7 @@ impl InferenceContext<'_> { } fn minimize_captures(&mut self) { - self.current_captures.sort_by_key(|x| x.place.projections.len()); + self.current_captures.sort_by_key(|it| it.place.projections.len()); let mut hash_map = HashMap::<HirPlace, usize>::new(); let result = mem::take(&mut self.current_captures); for item in result { @@ -759,7 +759,7 @@ impl InferenceContext<'_> { break Some(*k); } match it.next() { - Some(x) => lookup_place.projections.push(x.clone()), + Some(it) => lookup_place.projections.push(it.clone()), None => break None, } }; @@ -780,7 +780,7 @@ impl InferenceContext<'_> { } fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) { - let cnt = self.result.pat_adjustments.get(&pat).map(|x| x.len()).unwrap_or_default(); + let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default(); place.projections = place .projections .iter() @@ -894,10 +894,10 @@ impl InferenceContext<'_> { fn closure_kind(&self) -> FnTrait { let mut r = FnTrait::Fn; - for x in &self.current_captures { + for it in &self.current_captures { r = cmp::min( r, - match &x.kind { + match &it.kind { CaptureKind::ByRef(BorrowKind::Unique | BorrowKind::Mut { .. }) => { FnTrait::FnMut } @@ -933,7 +933,7 @@ impl InferenceContext<'_> { } self.minimize_captures(); let result = mem::take(&mut self.current_captures); - let captures = result.into_iter().map(|x| x.with_ty(self)).collect::<Vec<_>>(); + let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>(); self.result.closure_info.insert(closure, (captures, closure_kind)); closure_kind } @@ -973,20 +973,20 @@ impl InferenceContext<'_> { fn sort_closures(&mut self) -> Vec<(ClosureId, Vec<(Ty, Ty, Vec<Ty>, ExprId)>)> { let mut deferred_closures = mem::take(&mut self.deferred_closures); let mut dependents_count: FxHashMap<ClosureId, usize> = - deferred_closures.keys().map(|x| (*x, 0)).collect(); + deferred_closures.keys().map(|it| (*it, 0)).collect(); for (_, deps) in &self.closure_dependencies { for dep in deps { *dependents_count.entry(*dep).or_default() += 1; } } let mut queue: Vec<_> = - deferred_closures.keys().copied().filter(|x| dependents_count[x] == 0).collect(); + deferred_closures.keys().copied().filter(|it| dependents_count[it] == 0).collect(); let mut result = vec![]; - while let Some(x) = queue.pop() { - if let Some(d) = deferred_closures.remove(&x) { - result.push((x, d)); + while let Some(it) = queue.pop() { + if let Some(d) = deferred_closures.remove(&it) { + result.push((it, d)); } - for dep in self.closure_dependencies.get(&x).into_iter().flat_map(|x| x.iter()) { + for dep in self.closure_dependencies.get(&it).into_iter().flat_map(|it| it.iter()) { let cnt = dependents_count.get_mut(dep).unwrap(); *cnt -= 1; if *cnt == 0 { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs index 05a476f632d..8e7e62c4961 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/coerce.rs @@ -220,7 +220,7 @@ pub(crate) fn coerce( Ok((adjustments, table.resolve_with_fallback(ty, &fallback))) } -impl<'a> InferenceContext<'a> { +impl InferenceContext<'_> { /// Unify two types, but may coerce the first one to the 
second one /// using "implicit coercion rules" if needed. pub(super) fn coerce( @@ -239,7 +239,7 @@ impl<'a> InferenceContext<'a> { } } -impl<'a> InferenceTable<'a> { +impl InferenceTable<'_> { /// Unify two types, but may coerce the first one to the second one /// using "implicit coercion rules" if needed. pub(crate) fn coerce( @@ -377,7 +377,7 @@ impl<'a> InferenceTable<'a> { let snapshot = self.snapshot(); - let mut autoderef = Autoderef::new(self, from_ty.clone()); + let mut autoderef = Autoderef::new(self, from_ty.clone(), false); let mut first_error = None; let mut found = None; diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs index 194471f0048..4b14345aa39 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/expr.rs @@ -50,7 +50,7 @@ use super::{ InferenceContext, InferenceDiagnostic, TypeMismatch, }; -impl<'a> InferenceContext<'a> { +impl InferenceContext<'_> { pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { let ty = self.infer_expr_inner(tgt_expr, expected); if let Some(expected_ty) = expected.only_has_type(&mut self.table) { @@ -316,7 +316,7 @@ impl<'a> InferenceContext<'a> { } Expr::Call { callee, args, .. } => { let callee_ty = self.infer_expr(*callee, &Expectation::none()); - let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone()); + let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone(), false); let (res, derefed_callee) = 'b: { // manual loop to be able to access `derefs.table` while let Some((callee_deref_ty, _)) = derefs.next() { @@ -928,7 +928,7 @@ impl<'a> InferenceContext<'a> { if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) { if adjustments .last() - .map(|x| matches!(x.kind, Adjust::Borrow(_))) + .map(|it| matches!(it.kind, Adjust::Borrow(_))) .unwrap_or(true) { // prefer reborrow to move @@ -1385,7 +1385,7 @@ impl<'a> InferenceContext<'a> { receiver_ty: &Ty, name: &Name, ) -> Option<(Ty, Option<FieldId>, Vec<Adjustment>, bool)> { - let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone()); + let mut autoderef = Autoderef::new(&mut self.table, receiver_ty.clone(), false); let mut private_field = None; let res = autoderef.by_ref().find_map(|(derefed_ty, _)| { let (field_id, parameters) = match derefed_ty.kind(Interner) { @@ -1449,6 +1449,13 @@ impl<'a> InferenceContext<'a> { fn infer_field_access(&mut self, tgt_expr: ExprId, receiver: ExprId, name: &Name) -> Ty { let receiver_ty = self.infer_expr_inner(receiver, &Expectation::none()); + + if name.is_missing() { + // Bail out early, don't even try to look up field. Also, we don't issue an unresolved + // field diagnostic because this is a syntax error rather than a semantic error. 
+ return self.err_ty(); + } + match self.lookup_field(&receiver_ty, name) { Some((ty, field_id, adjustments, is_public)) => { self.write_expr_adj(receiver, adjustments); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs index 46f2e1d7d12..f517bc2c09f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/mutability.rs @@ -12,7 +12,7 @@ use crate::{lower::lower_to_chalk_mutability, Adjust, Adjustment, AutoBorrow, Ov use super::InferenceContext; -impl<'a> InferenceContext<'a> { +impl InferenceContext<'_> { pub(crate) fn infer_mut_body(&mut self) { self.infer_mut_expr(self.body.body_expr, Mutability::Not); } @@ -73,12 +73,12 @@ impl<'a> InferenceContext<'a> { self.infer_mut_expr(c, Mutability::Not); self.infer_mut_expr(body, Mutability::Not); } - Expr::MethodCall { receiver: x, method_name: _, args, generic_args: _ } - | Expr::Call { callee: x, args, is_assignee_expr: _ } => { - self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*x))); + Expr::MethodCall { receiver: it, method_name: _, args, generic_args: _ } + | Expr::Call { callee: it, args, is_assignee_expr: _ } => { + self.infer_mut_not_expr_iter(args.iter().copied().chain(Some(*it))); } Expr::Match { expr, arms } => { - let m = self.pat_iter_bound_mutability(arms.iter().map(|x| x.pat)); + let m = self.pat_iter_bound_mutability(arms.iter().map(|it| it.pat)); self.infer_mut_expr(*expr, m); for arm in arms.iter() { self.infer_mut_expr(arm.expr, Mutability::Not); @@ -96,7 +96,7 @@ impl<'a> InferenceContext<'a> { } } Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { - self.infer_mut_not_expr_iter(fields.iter().map(|x| x.expr).chain(*spread)) + self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } &Expr::Index { base, index } => { if mutability == Mutability::Mut { @@ -204,8 +204,8 @@ impl<'a> InferenceContext<'a> { } /// Checks if the pat contains a `ref mut` binding. Such paths makes the context of bounded expressions - /// mutable. For example in `let (ref mut x0, ref x1) = *x;` we need to use `DerefMut` for `*x` but in - /// `let (ref x0, ref x1) = *x;` we should use `Deref`. + /// mutable. For example in `let (ref mut x0, ref x1) = *it;` we need to use `DerefMut` for `*it` but in + /// `let (ref x0, ref x1) = *it;` we should use `Deref`. fn pat_bound_mutability(&self, pat: PatId) -> Mutability { let mut r = Mutability::Not; self.body.walk_bindings_in_pat(pat, |b| { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs index 2480f8babac..5da0ab76b88 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/pat.rs @@ -56,7 +56,7 @@ impl PatLike for PatId { } } -impl<'a> InferenceContext<'a> { +impl InferenceContext<'_> { /// Infers type for tuple struct pattern or its corresponding assignee expression. /// /// Ellipses found in the original pattern or expression must be filtered out. 
@@ -306,7 +306,7 @@ impl<'a> InferenceContext<'a> { self.result .pat_adjustments .get(&pat) - .and_then(|x| x.first()) + .and_then(|it| it.first()) .unwrap_or(&self.result.type_of_pat[pat]) .clone() } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs index e33d8f1795e..385f39f5374 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/infer/unify.rs @@ -22,7 +22,7 @@ use crate::{ TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind, }; -impl<'a> InferenceContext<'a> { +impl InferenceContext<'_> { pub(super) fn canonicalize<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>( &mut self, t: T, @@ -91,7 +91,7 @@ pub(crate) fn unify( let mut table = InferenceTable::new(db, env); let vars = Substitution::from_iter( Interner, - tys.binders.iter(Interner).map(|x| match &x.kind { + tys.binders.iter(Interner).map(|it| match &it.kind { chalk_ir::VariableKind::Ty(_) => { GenericArgData::Ty(table.new_type_var()).intern(Interner) } @@ -547,7 +547,7 @@ impl<'a> InferenceTable<'a> { table: &'a mut InferenceTable<'b>, highest_known_var: InferenceVar, } - impl<'a, 'b> TypeFolder<Interner> for VarFudger<'a, 'b> { + impl TypeFolder<Interner> for VarFudger<'_, '_> { fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> { self } @@ -686,8 +686,8 @@ impl<'a> InferenceTable<'a> { let mut arg_tys = vec![]; let arg_ty = TyBuilder::tuple(num_args) - .fill(|x| { - let arg = match x { + .fill(|it| { + let arg = match it { ParamKind::Type => self.new_type_var(), ParamKind::Const(ty) => { never!("Tuple with const parameter"); @@ -753,7 +753,7 @@ impl<'a> InferenceTable<'a> { { fold_tys_and_consts( ty, - |x, _| match x { + |it, _| match it { Either::Left(ty) => Either::Left(self.insert_type_vars_shallow(ty)), Either::Right(c) => Either::Right(self.insert_const_vars_shallow(c)), }, @@ -798,7 +798,7 @@ impl<'a> InferenceTable<'a> { } } -impl<'a> fmt::Debug for InferenceTable<'a> { +impl fmt::Debug for InferenceTable<'_> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("InferenceTable").field("num_vars", &self.type_variable_table.len()).finish() } @@ -826,7 +826,7 @@ mod resolve { pub(super) var_stack: &'a mut Vec<InferenceVar>, pub(super) fallback: F, } - impl<'a, 'b, F> TypeFolder<Interner> for Resolver<'a, 'b, F> + impl<F> TypeFolder<Interner> for Resolver<'_, '_, F> where F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg, { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs index 35d3407c16c..72e2bcc5559 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs @@ -7,7 +7,7 @@ use hir_def::{ Abi, FieldsShape, Integer, LayoutCalculator, LayoutS, Primitive, ReprOptions, Scalar, Size, StructKind, TargetDataLayout, WrappingRange, }, - LocalEnumVariantId, LocalFieldId, + LocalEnumVariantId, LocalFieldId, StructId, }; use la_arena::{Idx, RawIdx}; use stdx::never; @@ -24,8 +24,8 @@ pub use self::{ }; macro_rules! user_error { - ($x: expr) => { - return Err(LayoutError::UserError(format!($x))) + ($it: expr) => { + return Err(LayoutError::UserError(format!($it))) }; } @@ -77,18 +77,101 @@ impl<'a> LayoutCalculator for LayoutCx<'a> { } } +// FIXME: move this to the `rustc_abi`. 
+fn layout_of_simd_ty( + db: &dyn HirDatabase, + id: StructId, + subst: &Substitution, + krate: CrateId, + dl: &TargetDataLayout, +) -> Result<Arc<Layout>, LayoutError> { + let fields = db.field_types(id.into()); + + // Supported SIMD vectors are homogeneous ADTs with at least one field: + // + // * #[repr(simd)] struct S(T, T, T, T); + // * #[repr(simd)] struct S { it: T, y: T, z: T, w: T } + // * #[repr(simd)] struct S([T; 4]) + // + // where T is a primitive scalar (integer/float/pointer). + + let f0_ty = match fields.iter().next() { + Some(it) => it.1.clone().substitute(Interner, subst), + None => { + user_error!("simd type with zero fields"); + } + }; + + // The element type and number of elements of the SIMD vector + // are obtained from: + // + // * the element type and length of the single array field, if + // the first field is of array type, or + // + // * the homogeneous field type and the number of fields. + let (e_ty, e_len, is_array) = if let TyKind::Array(e_ty, _) = f0_ty.kind(Interner) { + // Extract the number of elements from the layout of the array field: + let FieldsShape::Array { count, .. } = db.layout_of_ty(f0_ty.clone(), krate)?.fields else { + user_error!("Array with non array layout"); + }; + + (e_ty.clone(), count, true) + } else { + // First ADT field is not an array: + (f0_ty, fields.iter().count() as u64, false) + }; + + // Compute the ABI of the element type: + let e_ly = db.layout_of_ty(e_ty, krate)?; + let Abi::Scalar(e_abi) = e_ly.abi else { + user_error!("simd type with inner non scalar type"); + }; + + // Compute the size and alignment of the vector: + let size = e_ly.size.checked_mul(e_len, dl).ok_or(LayoutError::SizeOverflow)?; + let align = dl.vector_align(size); + let size = size.align_to(align.abi); + + // Compute the placement of the vector fields: + let fields = if is_array { + FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() } + } else { + FieldsShape::Array { stride: e_ly.size, count: e_len } + }; + + Ok(Arc::new(Layout { + variants: Variants::Single { index: struct_variant_idx() }, + fields, + abi: Abi::Vector { element: e_abi, count: e_len }, + largest_niche: e_ly.largest_niche, + size, + align, + })) +} + pub fn layout_of_ty_query( db: &dyn HirDatabase, ty: Ty, krate: CrateId, ) -> Result<Arc<Layout>, LayoutError> { - let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) }; + let Some(target) = db.target_data_layout(krate) else { + return Err(LayoutError::TargetLayoutNotAvailable); + }; let cx = LayoutCx { krate, target: &target }; let dl = &*cx.current_data_layout(); let trait_env = Arc::new(TraitEnvironment::empty(krate)); let ty = normalize(db, trait_env, ty.clone()); let result = match ty.kind(Interner) { - TyKind::Adt(AdtId(def), subst) => return db.layout_of_adt(*def, subst.clone(), krate), + TyKind::Adt(AdtId(def), subst) => { + if let hir_def::AdtId::StructId(s) = def { + let data = db.struct_data(*s); + let repr = data.repr.unwrap_or_default(); + if repr.simd() { + return layout_of_simd_ty(db, *s, subst, krate, &target); + } + }; + return db.layout_of_adt(*def, subst.clone(), krate); + } TyKind::Scalar(s) => match s { chalk_ir::Scalar::Bool => Layout::scalar( dl, @@ -147,7 +230,7 @@ pub fn layout_of_ty_query( .iter(Interner) .map(|k| db.layout_of_ty(k.assert_ty_ref(Interner).clone(), krate)) .collect::<Result<Vec<_>, _>>()?; - let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>(); + let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>(); 
let fields = fields.iter().collect::<Vec<_>>(); cx.univariant(dl, &fields, &ReprOptions::default(), kind).ok_or(LayoutError::Unknown)? } @@ -265,14 +348,14 @@ pub fn layout_of_ty_query( let (captures, _) = infer.closure_info(c); let fields = captures .iter() - .map(|x| { + .map(|it| { db.layout_of_ty( - x.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()), + it.ty.clone().substitute(Interner, ClosureSubst(subst).parent_subst()), krate, ) }) .collect::<Result<Vec<_>, _>>()?; - let fields = fields.iter().map(|x| &**x).collect::<Vec<_>>(); + let fields = fields.iter().map(|it| &**it).collect::<Vec<_>>(); let fields = fields.iter().collect::<Vec<_>>(); cx.univariant(dl, &fields, &ReprOptions::default(), StructKind::AlwaysSized) .ok_or(LayoutError::Unknown)? @@ -315,7 +398,10 @@ fn struct_tail_erasing_lifetimes(db: &dyn HirDatabase, pointee: Ty) -> Ty { let data = db.struct_data(*i); let mut it = data.variant_data.fields().iter().rev(); match it.next() { - Some((f, _)) => field_ty(db, (*i).into(), f, subst), + Some((f, _)) => { + let last_field_ty = field_ty(db, (*i).into(), f, subst); + struct_tail_erasing_lifetimes(db, last_field_ty) + } None => pointee, } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs index bd2752a7119..19d5e98e738 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/adt.rs @@ -31,7 +31,9 @@ pub fn layout_of_adt_query( subst: Substitution, krate: CrateId, ) -> Result<Arc<Layout>, LayoutError> { - let Some(target) = db.target_data_layout(krate) else { return Err(LayoutError::TargetLayoutNotAvailable) }; + let Some(target) = db.target_data_layout(krate) else { + return Err(LayoutError::TargetLayoutNotAvailable); + }; let cx = LayoutCx { krate, target: &target }; let dl = cx.current_data_layout(); let handle_variant = |def: VariantId, var: &VariantData| { @@ -70,9 +72,9 @@ pub fn layout_of_adt_query( }; let variants = variants .iter() - .map(|x| x.iter().map(|x| &**x).collect::<Vec<_>>()) + .map(|it| it.iter().map(|it| &**it).collect::<Vec<_>>()) .collect::<SmallVec<[_; 1]>>(); - let variants = variants.iter().map(|x| x.iter().collect()).collect(); + let variants = variants.iter().map(|it| it.iter().collect()).collect(); let result = if matches!(def, AdtId::UnionId(..)) { cx.layout_of_union(&repr, &variants).ok_or(LayoutError::Unknown)? } else { @@ -103,7 +105,7 @@ pub fn layout_of_adt_query( && variants .iter() .next() - .and_then(|x| x.last().map(|x| x.is_unsized())) + .and_then(|it| it.last().map(|it| !it.is_unsized())) .unwrap_or(true), ) .ok_or(LayoutError::SizeOverflow)? 
@@ -116,9 +118,9 @@ fn layout_scalar_valid_range(db: &dyn HirDatabase, def: AdtId) -> (Bound<u128>, let get = |name| { let attr = attrs.by_key(name).tt_values(); for tree in attr { - if let Some(x) = tree.token_trees.first() { - if let Ok(x) = x.to_string().parse() { - return Bound::Included(x); + if let Some(it) = tree.token_trees.first() { + if let Ok(it) = it.to_string().parse() { + return Bound::Included(it); } } } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs index 0ff8c532d47..a3ced2bac0a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout/tests.rs @@ -271,6 +271,20 @@ struct Goal(Foo<S>); } #[test] +fn simd_types() { + check_size_and_align( + r#" + #[repr(simd)] + struct SimdType(i64, i64); + struct Goal(SimdType); + "#, + "", + 16, + 16, + ); +} + +#[test] fn return_position_impl_trait() { size_and_align_expr! { trait T {} @@ -344,6 +358,24 @@ fn return_position_impl_trait() { } #[test] +fn unsized_ref() { + size_and_align! { + struct S1([u8]); + struct S2(S1); + struct S3(i32, str); + struct S4(u64, S3); + #[allow(dead_code)] + struct S5 { + field1: u8, + field2: i16, + field_last: S4, + } + + struct Goal(&'static S1, &'static S2, &'static S3, &'static S4, &'static S5); + } +} + +#[test] fn enums() { size_and_align! { enum Goal { @@ -369,11 +401,11 @@ fn tuple() { } #[test] -fn non_zero() { +fn non_zero_and_non_null() { size_and_align! { - minicore: non_zero, option; - use core::num::NonZeroU8; - struct Goal(Option<NonZeroU8>); + minicore: non_zero, non_null, option; + use core::{num::NonZeroU8, ptr::NonNull}; + struct Goal(Option<NonZeroU8>, Option<NonNull<i32>>); } } @@ -432,3 +464,41 @@ fn enums_with_discriminants() { } } } + +#[test] +fn core_mem_discriminant() { + size_and_align! { + minicore: discriminant; + struct S(i32, u64); + struct Goal(core::mem::Discriminant<S>); + } + size_and_align! { + minicore: discriminant; + #[repr(u32)] + enum S { + A, + B, + C, + } + struct Goal(core::mem::Discriminant<S>); + } + size_and_align! { + minicore: discriminant; + enum S { + A(i32), + B(i64), + C(u8), + } + struct Goal(core::mem::Discriminant<S>); + } + size_and_align! { + minicore: discriminant; + #[repr(C, u16)] + enum S { + A(i32), + B(i64) = 200, + C = 1000, + } + struct Goal(core::mem::Discriminant<S>); + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs index 1a4d003bf5e..b3ca2a22258 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lib.rs @@ -180,9 +180,16 @@ impl MemoryMap { /// allocator function as `f` and it will return a mapping of old addresses to new addresses. 
fn transform_addresses( &self, - mut f: impl FnMut(&[u8]) -> Result<usize, MirEvalError>, + mut f: impl FnMut(&[u8], usize) -> Result<usize, MirEvalError>, ) -> Result<HashMap<usize, usize>, MirEvalError> { - self.memory.iter().map(|x| Ok((*x.0, f(x.1)?))).collect() + self.memory + .iter() + .map(|x| { + let addr = *x.0; + let align = if addr == 0 { 64 } else { (addr - (addr & (addr - 1))).min(64) }; + Ok((addr, f(x.1, align)?)) + }) + .collect() } fn get<'a>(&'a self, addr: usize, size: usize) -> Option<&'a [u8]> { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs index 9951a1c750b..2837f400bce 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/lower.rs @@ -23,7 +23,7 @@ use hir_def::{ generics::{ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget, }, - lang_item::{lang_attr, LangItem}, + lang_item::LangItem, nameres::MacroSubNs, path::{GenericArg, GenericArgs, ModPath, Path, PathKind, PathSegment, PathSegments}, resolver::{HasResolver, Resolver, TypeNs}, @@ -959,10 +959,10 @@ impl<'a> TyLoweringContext<'a> { } pub(crate) fn lower_where_predicate( - &'a self, - where_predicate: &'a WherePredicate, + &self, + where_predicate: &WherePredicate, ignore_bindings: bool, - ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a { + ) -> impl Iterator<Item = QuantifiedWhereClause> { match where_predicate { WherePredicate::ForLifetime { target, bound, .. } | WherePredicate::TypeBound { target, bound } => { @@ -1012,7 +1012,7 @@ impl<'a> TyLoweringContext<'a> { // (So ideally, we'd only ignore `~const Drop` here) // - `Destruct` impls are built-in in 1.62 (current nightly as of 08-04-2022), so until // the builtin impls are supported by Chalk, we ignore them here. 
- if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) { + if let Some(lang) = self.db.lang_attr(tr.hir_trait_id().into()) { if matches!(lang, LangItem::Drop | LangItem::Destruct) { return false; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs index ab6430e8f19..5e1040bc6aa 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/method_resolution.rs @@ -534,7 +534,7 @@ impl ReceiverAdjustments { let mut ty = table.resolve_ty_shallow(&ty); let mut adjust = Vec::new(); for _ in 0..self.autoderefs { - match autoderef::autoderef_step(table, ty.clone()) { + match autoderef::autoderef_step(table, ty.clone(), true) { None => { never!("autoderef not possible for {:?}", ty); ty = TyKind::Error.intern(Interner); @@ -559,10 +559,10 @@ impl ReceiverAdjustments { adjust.push(a); } if self.unsize_array { - ty = 'x: { + ty = 'it: { if let TyKind::Ref(m, l, inner) = ty.kind(Interner) { if let TyKind::Array(inner, _) = inner.kind(Interner) { - break 'x TyKind::Ref( + break 'it TyKind::Ref( m.clone(), l.clone(), TyKind::Slice(inner.clone()).intern(Interner), @@ -666,7 +666,7 @@ pub fn is_dyn_method( let self_ty = trait_ref.self_type_parameter(Interner); if let TyKind::Dyn(d) = self_ty.kind(Interner) { let is_my_trait_in_bounds = - d.bounds.skip_binders().as_slice(Interner).iter().any(|x| match x.skip_binders() { + d.bounds.skip_binders().as_slice(Interner).iter().any(|it| match it.skip_binders() { // rustc doesn't accept `impl Foo<2> for dyn Foo<5>`, so if the trait id is equal, no matter // what the generics are, we are sure that the method is come from the vtable. WhereClause::Implemented(tr) => tr.trait_id == trait_ref.trait_id, @@ -682,14 +682,14 @@ pub fn is_dyn_method( /// Looks up the impl method that actually runs for the trait method `func`. /// /// Returns `func` if it's not a method defined in a trait or the lookup failed. -pub fn lookup_impl_method( +pub(crate) fn lookup_impl_method_query( db: &dyn HirDatabase, env: Arc<TraitEnvironment>, func: FunctionId, fn_subst: Substitution, ) -> (FunctionId, Substitution) { let ItemContainerId::TraitId(trait_id) = func.lookup(db.upcast()).container else { - return (func, fn_subst) + return (func, fn_subst); }; let trait_params = db.generic_params(trait_id.into()).type_or_consts.len(); let fn_params = fn_subst.len(Interner) - trait_params; @@ -699,8 +699,8 @@ pub fn lookup_impl_method( }; let name = &db.function_data(func).name; - let Some((impl_fn, impl_subst)) = lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name) - .and_then(|assoc| { + let Some((impl_fn, impl_subst)) = + lookup_impl_assoc_item_for_trait_ref(trait_ref, db, env, name).and_then(|assoc| { if let (AssocItemId::FunctionId(id), subst) = assoc { Some((id, subst)) } else { @@ -731,7 +731,7 @@ fn lookup_impl_assoc_item_for_trait_ref( let impls = db.trait_impls_in_deps(env.krate); let self_impls = match self_ty.kind(Interner) { TyKind::Adt(id, _) => { - id.0.module(db.upcast()).containing_block().map(|x| db.trait_impls_in_block(x)) + id.0.module(db.upcast()).containing_block().map(|it| db.trait_impls_in_block(it)) } _ => None, }; @@ -895,8 +895,8 @@ pub fn iterate_method_candidates_dyn( // (just as rustc does an autoderef and then autoref again). // We have to be careful about the order we're looking at candidates - // in here. Consider the case where we're resolving `x.clone()` - // where `x: &Vec<_>`. 
This resolves to the clone method with self + // in here. Consider the case where we're resolving `it.clone()` + // where `it: &Vec<_>`. This resolves to the clone method with self // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where // the receiver type exactly matches before cases where we have to // do autoref. But in the autoderef steps, the `&_` self type comes @@ -1012,8 +1012,8 @@ fn iterate_method_candidates_by_receiver( let snapshot = table.snapshot(); // We're looking for methods with *receiver* type receiver_ty. These could // be found in any of the derefs of receiver_ty, so we have to go through - // that. - let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone()); + // that, including raw derefs. + let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true); while let Some((self_ty, _)) = autoderef.next() { iterate_inherent_methods( &self_ty, @@ -1028,7 +1028,7 @@ fn iterate_method_candidates_by_receiver( table.rollback_to(snapshot); - let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone()); + let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone(), true); while let Some((self_ty, _)) = autoderef.next() { iterate_trait_method_candidates( &self_ty, @@ -1480,8 +1480,8 @@ fn generic_implements_goal( .push(self_ty.value.clone()) .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len()) .build(); - kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| { - let vk = match x.data(Interner) { + kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|it| { + let vk = match it.data(Interner) { chalk_ir::GenericArgData::Ty(_) => { chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General) } @@ -1504,7 +1504,7 @@ fn autoderef_method_receiver( ty: Ty, ) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> { let mut deref_chain: Vec<_> = Vec::new(); - let mut autoderef = autoderef::Autoderef::new(table, ty); + let mut autoderef = autoderef::Autoderef::new(table, ty, true); while let Some((ty, derefs)) = autoderef.next() { deref_chain.push(( autoderef.table.canonicalize(ty).value, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs index 2345bab0bb4..da5b496e141 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir.rs @@ -3,9 +3,14 @@ use std::{fmt::Display, iter}; use crate::{ - consteval::usize_const, db::HirDatabase, display::HirDisplay, infer::PointerCast, - lang_items::is_box, mapping::ToChalk, CallableDefId, ClosureId, Const, ConstScalar, - InferenceResult, Interner, MemoryMap, Substitution, Ty, TyKind, + consteval::usize_const, + db::HirDatabase, + display::HirDisplay, + infer::{normalize, PointerCast}, + lang_items::is_box, + mapping::ToChalk, + CallableDefId, ClosureId, Const, ConstScalar, InferenceResult, Interner, MemoryMap, + Substitution, TraitEnvironment, Ty, TyKind, }; use base_db::CrateId; use chalk_ir::Mutability; @@ -22,7 +27,9 @@ mod pretty; mod monomorphization; pub use borrowck::{borrowck_query, BorrowckResult, MutabilityReason}; -pub use eval::{interpret_mir, pad16, Evaluator, MirEvalError, VTableMap}; +pub use eval::{ + interpret_mir, pad16, render_const_using_debug_impl, Evaluator, MirEvalError, VTableMap, +}; pub use lower::{ lower_to_mir, mir_body_for_closure_query, mir_body_query, mir_body_recover, MirLowerError, }; @@ -32,6 +39,7 @@ pub use monomorphization::{ }; use smallvec::{smallvec, SmallVec}; use stdx::{impl_from, 
never}; +use triomphe::Arc; use super::consteval::{intern_const_scalar, try_const_usize}; @@ -129,11 +137,19 @@ pub enum ProjectionElem<V, T> { impl<V, T> ProjectionElem<V, T> { pub fn projected_ty( &self, - base: Ty, + mut base: Ty, db: &dyn HirDatabase, closure_field: impl FnOnce(ClosureId, &Substitution, usize) -> Ty, krate: CrateId, ) -> Ty { + if matches!(base.data(Interner).kind, TyKind::Alias(_) | TyKind::AssociatedType(..)) { + base = normalize( + db, + // FIXME: we should get this from caller + Arc::new(TraitEnvironment::empty(krate)), + base, + ); + } match self { ProjectionElem::Deref => match &base.data(Interner).kind { TyKind::Raw(_, inner) | TyKind::Ref(_, _, inner) => inner.clone(), @@ -321,8 +337,8 @@ impl SwitchTargets { #[derive(Debug, PartialEq, Eq, Clone)] pub struct Terminator { - span: MirSpan, - kind: TerminatorKind, + pub span: MirSpan, + pub kind: TerminatorKind, } #[derive(Debug, PartialEq, Eq, Clone)] diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs index a5dd0182eb6..ad98e8fa181 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/borrowck.rs @@ -52,7 +52,7 @@ fn all_mir_bodies( let closures = body.closures.clone(); Box::new( iter::once(Ok(body)) - .chain(closures.into_iter().flat_map(|x| for_closure(db, x))), + .chain(closures.into_iter().flat_map(|it| for_closure(db, it))), ) } Err(e) => Box::new(iter::once(Err(e))), @@ -62,7 +62,7 @@ fn all_mir_bodies( Ok(body) => { let closures = body.closures.clone(); Box::new( - iter::once(Ok(body)).chain(closures.into_iter().flat_map(|x| for_closure(db, x))), + iter::once(Ok(body)).chain(closures.into_iter().flat_map(|it| for_closure(db, it))), ) } Err(e) => Box::new(iter::once(Err(e))), @@ -171,7 +171,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec<MovedOutOfRef> } TerminatorKind::Call { func, args, .. } => { for_operand(func, terminator.span); - args.iter().for_each(|x| for_operand(x, terminator.span)); + args.iter().for_each(|it| for_operand(it, terminator.span)); } TerminatorKind::Assert { cond, .. 
} => { for_operand(cond, terminator.span); @@ -245,7 +245,7 @@ fn ever_initialized_map( body: &MirBody, ) -> ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> { let mut result: ArenaMap<BasicBlockId, ArenaMap<LocalId, bool>> = - body.basic_blocks.iter().map(|x| (x.0, ArenaMap::default())).collect(); + body.basic_blocks.iter().map(|it| (it.0, ArenaMap::default())).collect(); fn dfs( db: &dyn HirDatabase, body: &MirBody, @@ -271,7 +271,10 @@ fn ever_initialized_map( } } let Some(terminator) = &block.terminator else { - never!("Terminator should be none only in construction.\nThe body:\n{}", body.pretty_print(db)); + never!( + "Terminator should be none only in construction.\nThe body:\n{}", + body.pretty_print(db) + ); return; }; let targets = match &terminator.kind { @@ -311,7 +314,7 @@ fn ever_initialized_map( result[body.start_block].insert(l, true); dfs(db, body, body.start_block, l, &mut result); } - for l in body.locals.iter().map(|x| x.0) { + for l in body.locals.iter().map(|it| it.0) { if !result[body.start_block].contains_idx(l) { result[body.start_block].insert(l, false); dfs(db, body, body.start_block, l, &mut result); @@ -325,10 +328,10 @@ fn mutability_of_locals( body: &MirBody, ) -> ArenaMap<LocalId, MutabilityReason> { let mut result: ArenaMap<LocalId, MutabilityReason> = - body.locals.iter().map(|x| (x.0, MutabilityReason::Not)).collect(); + body.locals.iter().map(|it| (it.0, MutabilityReason::Not)).collect(); let mut push_mut_span = |local, span| match &mut result[local] { MutabilityReason::Mut { spans } => spans.push(span), - x @ MutabilityReason::Not => *x = MutabilityReason::Mut { spans: vec![span] }, + it @ MutabilityReason::Not => *it = MutabilityReason::Mut { spans: vec![span] }, }; let ever_init_maps = ever_initialized_map(db, body); for (block_id, mut ever_init_map) in ever_init_maps.into_iter() { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs index 9acf9d39e56..d7820de629a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval.rs @@ -1,6 +1,6 @@ //! This module provides a MIR interpreter, which is used in const eval. 
-use std::{borrow::Cow, collections::HashMap, fmt::Write, iter, ops::Range}; +use std::{borrow::Cow, cell::RefCell, collections::HashMap, fmt::Write, iter, mem, ops::Range}; use base_db::{CrateId, FileId}; use chalk_ir::Mutability; @@ -8,12 +8,13 @@ use either::Either; use hir_def::{ builtin_type::BuiltinType, data::adt::{StructFlags, VariantData}, - lang_item::{lang_attr, LangItem}, + lang_item::LangItem, layout::{TagEncoding, Variants}, - AdtId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, StaticId, - VariantId, + resolver::{HasResolver, TypeNs, ValueNs}, + AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, + StaticId, VariantId, }; -use hir_expand::InFile; +use hir_expand::{mod_path::ModPath, InFile}; use intern::Interned; use la_arena::ArenaMap; use rustc_hash::{FxHashMap, FxHashSet}; @@ -28,7 +29,7 @@ use crate::{ infer::PointerCast, layout::{Layout, LayoutError, RustcEnumVariantIdx}, mapping::from_chalk, - method_resolution::{is_dyn_method, lookup_impl_method}, + method_resolution::{is_dyn_method, lookup_impl_const}, name, static_lifetime, traits::FnTrait, utils::{detect_variant_from_bytes, ClosureSubst}, @@ -37,8 +38,8 @@ use crate::{ }; use super::{ - return_slot, AggregateKind, BinOp, CastKind, LocalId, MirBody, MirLowerError, MirSpan, Operand, - Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp, + return_slot, AggregateKind, BasicBlockId, BinOp, CastKind, LocalId, MirBody, MirLowerError, + MirSpan, Operand, Place, ProjectionElem, Rvalue, StatementKind, TerminatorKind, UnOp, }; mod shim; @@ -48,15 +49,15 @@ mod tests; macro_rules! from_bytes { ($ty:tt, $value:expr) => { ($ty::from_le_bytes(match ($value).try_into() { - Ok(x) => x, + Ok(it) => it, Err(_) => return Err(MirEvalError::TypeError(stringify!(mismatched size in constructing $ty))), })) }; } macro_rules! not_supported { - ($x: expr) => { - return Err(MirEvalError::NotSupported(format!($x))) + ($it: expr) => { + return Err(MirEvalError::NotSupported(format!($it))) }; } @@ -68,8 +69,8 @@ pub struct VTableMap { impl VTableMap { fn id(&mut self, ty: Ty) -> usize { - if let Some(x) = self.ty_to_id.get(&ty) { - return *x; + if let Some(it) = self.ty_to_id.get(&ty) { + return *it; } let id = self.id_to_ty.len(); self.id_to_ty.push(ty.clone()); @@ -114,11 +115,20 @@ impl TlsData { } } +struct StackFrame { + body: Arc<MirBody>, + locals: Locals, + destination: Option<BasicBlockId>, + prev_stack_ptr: usize, + span: (MirSpan, DefWithBodyId), +} + pub struct Evaluator<'a> { db: &'a dyn HirDatabase, trait_env: Arc<TraitEnvironment>, stack: Vec<u8>, heap: Vec<u8>, + code_stack: Vec<StackFrame>, /// Stores the global location of the statics. We const evaluate every static first time we need it /// and see it's missing, then we add it to this to reuse. static_locations: FxHashMap<StaticId, Address>, @@ -127,8 +137,10 @@ pub struct Evaluator<'a> { /// time of use. 
vtable_map: VTableMap, thread_local_storage: TlsData, + random_state: oorandom::Rand64, stdout: Vec<u8>, stderr: Vec<u8>, + layout_cache: RefCell<FxHashMap<Ty, Arc<Layout>>>, crate_id: CrateId, // FIXME: This is a workaround, see the comment on `interpret_mir` assert_placeholder_ty_is_unused: bool, @@ -136,6 +148,8 @@ pub struct Evaluator<'a> { execution_limit: usize, /// An additional limit on stack depth, to prevent stack overflow stack_depth_limit: usize, + /// Maximum count of bytes that heap and stack can grow + memory_limit: usize, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -192,7 +206,7 @@ impl IntervalAndTy { addr: Address, ty: Ty, evaluator: &Evaluator<'_>, - locals: &Locals<'_>, + locals: &Locals, ) -> Result<IntervalAndTy> { let size = evaluator.size_of_sized(&ty, locals, "type of interval")?; Ok(IntervalAndTy { interval: Interval { addr, size }, ty }) @@ -226,18 +240,28 @@ impl IntervalOrOwned { } } +#[cfg(target_pointer_width = "64")] +const STACK_OFFSET: usize = 1 << 60; +#[cfg(target_pointer_width = "64")] +const HEAP_OFFSET: usize = 1 << 59; + +#[cfg(target_pointer_width = "32")] +const STACK_OFFSET: usize = 1 << 30; +#[cfg(target_pointer_width = "32")] +const HEAP_OFFSET: usize = 1 << 29; + impl Address { - fn from_bytes(x: &[u8]) -> Result<Self> { - Ok(Address::from_usize(from_bytes!(usize, x))) + fn from_bytes(it: &[u8]) -> Result<Self> { + Ok(Address::from_usize(from_bytes!(usize, it))) } - fn from_usize(x: usize) -> Self { - if x > usize::MAX / 2 { - Stack(x - usize::MAX / 2) - } else if x > usize::MAX / 4 { - Heap(x - usize::MAX / 4) + fn from_usize(it: usize) -> Self { + if it > STACK_OFFSET { + Stack(it - STACK_OFFSET) + } else if it > HEAP_OFFSET { + Heap(it - HEAP_OFFSET) } else { - Invalid(x) + Invalid(it) } } @@ -247,23 +271,23 @@ impl Address { fn to_usize(&self) -> usize { let as_num = match self { - Stack(x) => *x + usize::MAX / 2, - Heap(x) => *x + usize::MAX / 4, - Invalid(x) => *x, + Stack(it) => *it + STACK_OFFSET, + Heap(it) => *it + HEAP_OFFSET, + Invalid(it) => *it, }; as_num } fn map(&self, f: impl FnOnce(usize) -> usize) -> Address { match self { - Stack(x) => Stack(f(*x)), - Heap(x) => Heap(f(*x)), - Invalid(x) => Invalid(f(*x)), + Stack(it) => Stack(f(*it)), + Heap(it) => Heap(f(*it)), + Invalid(it) => Invalid(f(*it)), } } fn offset(&self, offset: usize) -> Address { - self.map(|x| x + offset) + self.map(|it| it + offset) } } @@ -282,7 +306,7 @@ pub enum MirEvalError { TypeIsUnsized(Ty, &'static str), NotSupported(String), InvalidConst(Const), - InFunction(Either<FunctionId, ClosureId>, Box<MirEvalError>, MirSpan, DefWithBodyId), + InFunction(Box<MirEvalError>, Vec<(Either<FunctionId, ClosureId>, MirSpan, DefWithBodyId)>), ExecutionLimitExceeded, StackOverflow, TargetDataLayoutNotAvailable, @@ -300,40 +324,42 @@ impl MirEvalError { ) -> std::result::Result<(), std::fmt::Error> { writeln!(f, "Mir eval error:")?; let mut err = self; - while let MirEvalError::InFunction(func, e, span, def) = err { + while let MirEvalError::InFunction(e, stack) = err { err = e; - match func { - Either::Left(func) => { - let function_name = db.function_data(*func); - writeln!( - f, - "In function {} ({:?})", - function_name.name.display(db.upcast()), - func - )?; - } - Either::Right(clos) => { - writeln!(f, "In {:?}", clos)?; + for (func, span, def) in stack.iter().take(30).rev() { + match func { + Either::Left(func) => { + let function_name = db.function_data(*func); + writeln!( + f, + "In function {} ({:?})", + function_name.name.display(db.upcast()), + 
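The switch from the old usize::MAX/2 and usize::MAX/4 split to explicit STACK_OFFSET and HEAP_OFFSET constants keeps stack and heap indices in disjoint ranges that round-trip through a plain usize, with a per-pointer-width cfg. A minimal standalone sketch of that tag-and-untag scheme, assuming a 64-bit host; the enum, main, and test values here are mine, not rust-analyzer's API:

// Sketch: stack and heap indices are offset into disjoint usize ranges so a
// raw pointer value can be decoded back into (region, index).
const STACK_OFFSET: usize = 1 << 60;
const HEAP_OFFSET: usize = 1 << 59;

#[derive(Debug, PartialEq)]
enum Address {
    Stack(usize),
    Heap(usize),
    Invalid(usize),
}

impl Address {
    fn from_usize(it: usize) -> Self {
        if it > STACK_OFFSET {
            Address::Stack(it - STACK_OFFSET)
        } else if it > HEAP_OFFSET {
            Address::Heap(it - HEAP_OFFSET)
        } else {
            Address::Invalid(it)
        }
    }

    fn to_usize(&self) -> usize {
        match self {
            Address::Stack(it) => *it + STACK_OFFSET,
            Address::Heap(it) => *it + HEAP_OFFSET,
            Address::Invalid(it) => *it,
        }
    }
}

fn main() {
    // Stack and heap indices survive the round trip through a raw usize.
    assert_eq!(Address::from_usize(Address::Stack(1).to_usize()), Address::Stack(1));
    assert_eq!(Address::from_usize(Address::Heap(42).to_usize()), Address::Heap(42));
    // Small integers that never came from to_usize decode as invalid.
    assert_eq!(Address::from_usize(7), Address::Invalid(7));
}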
func + )?; + } + Either::Right(clos) => { + writeln!(f, "In {:?}", clos)?; + } } + let source_map = db.body_with_source_map(*def).1; + let span: InFile<SyntaxNodePtr> = match span { + MirSpan::ExprId(e) => match source_map.expr_syntax(*e) { + Ok(s) => s.map(|it| it.into()), + Err(_) => continue, + }, + MirSpan::PatId(p) => match source_map.pat_syntax(*p) { + Ok(s) => s.map(|it| match it { + Either::Left(e) => e.into(), + Either::Right(e) => e.into(), + }), + Err(_) => continue, + }, + MirSpan::Unknown => continue, + }; + let file_id = span.file_id.original_file(db.upcast()); + let text_range = span.value.text_range(); + writeln!(f, "{}", span_formatter(file_id, text_range))?; } - let source_map = db.body_with_source_map(*def).1; - let span: InFile<SyntaxNodePtr> = match span { - MirSpan::ExprId(e) => match source_map.expr_syntax(*e) { - Ok(s) => s.map(|x| x.into()), - Err(_) => continue, - }, - MirSpan::PatId(p) => match source_map.pat_syntax(*p) { - Ok(s) => s.map(|x| match x { - Either::Left(e) => e.into(), - Either::Right(e) => e.into(), - }), - Err(_) => continue, - }, - MirSpan::Unknown => continue, - }; - let file_id = span.file_id.original_file(db.upcast()); - let text_range = span.value.text_range(); - writeln!(f, "{}", span_formatter(file_id, text_range))?; } match err { MirEvalError::InFunction(..) => unreachable!(), @@ -413,13 +439,7 @@ impl std::fmt::Debug for MirEvalError { let data = &arg0.data(Interner); f.debug_struct("InvalidConst").field("ty", &data.ty).field("value", &arg0).finish() } - Self::InFunction(func, e, span, _) => { - let mut e = &**e; - let mut stack = vec![(*func, *span)]; - while let Self::InFunction(f, next_e, span, _) = e { - e = &next_e; - stack.push((*f, *span)); - } + Self::InFunction(e, stack) => { f.debug_struct("WithStack").field("error", e).field("stack", &stack).finish() } } @@ -435,10 +455,10 @@ struct DropFlags { impl DropFlags { fn add_place(&mut self, p: Place) { - if p.iterate_over_parents().any(|x| self.need_drop.contains(&x)) { + if p.iterate_over_parents().any(|it| self.need_drop.contains(&it)) { return; } - self.need_drop.retain(|x| !p.is_parent(x)); + self.need_drop.retain(|it| !p.is_parent(it)); self.need_drop.insert(p); } @@ -449,15 +469,15 @@ impl DropFlags { } #[derive(Debug)] -struct Locals<'a> { - ptr: &'a ArenaMap<LocalId, Interval>, - body: &'a MirBody, +struct Locals { + ptr: ArenaMap<LocalId, Interval>, + body: Arc<MirBody>, drop_flags: DropFlags, } pub fn interpret_mir( db: &dyn HirDatabase, - body: &MirBody, + body: Arc<MirBody>, // FIXME: This is workaround. Ideally, const generics should have a separate body (issue #7434), but now // they share their body with their parent, so in MIR lowering we have locals of the parent body, which // might have placeholders. 
With this argument, we (wrongly) assume that every placeholder type has @@ -466,19 +486,22 @@ pub fn interpret_mir( assert_placeholder_ty_is_unused: bool, ) -> (Result<Const>, String, String) { let ty = body.locals[return_slot()].ty.clone(); - let mut evaluator = Evaluator::new(db, body, assert_placeholder_ty_is_unused); - let x: Result<Const> = (|| { - let bytes = evaluator.interpret_mir(&body, None.into_iter())?; + let mut evaluator = Evaluator::new(db, body.owner, assert_placeholder_ty_is_unused); + let it: Result<Const> = (|| { + if evaluator.ptr_size() != std::mem::size_of::<usize>() { + not_supported!("targets with different pointer size from host"); + } + let bytes = evaluator.interpret_mir(body.clone(), None.into_iter())?; let mut memory_map = evaluator.create_memory_map( &bytes, &ty, - &Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() }, + &Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() }, )?; memory_map.vtable = evaluator.vtable_map.clone(); return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty)); })(); ( - x, + it, String::from_utf8_lossy(&evaluator.stdout).into_owned(), String::from_utf8_lossy(&evaluator.stderr).into_owned(), ) @@ -487,18 +510,20 @@ pub fn interpret_mir( impl Evaluator<'_> { pub fn new<'a>( db: &'a dyn HirDatabase, - body: &MirBody, + owner: DefWithBodyId, assert_placeholder_ty_is_unused: bool, ) -> Evaluator<'a> { - let crate_id = body.owner.module(db.upcast()).krate(); - let trait_env = db.trait_environment_for_body(body.owner); + let crate_id = owner.module(db.upcast()).krate(); + let trait_env = db.trait_environment_for_body(owner); Evaluator { stack: vec![0], heap: vec![0], + code_stack: vec![], vtable_map: VTableMap::default(), thread_local_storage: TlsData::default(), static_locations: HashMap::default(), db, + random_state: oorandom::Rand64::new(0), trait_env, crate_id, stdout: vec![], @@ -506,14 +531,16 @@ impl Evaluator<'_> { assert_placeholder_ty_is_unused, stack_depth_limit: 100, execution_limit: 1000_000, + memory_limit: 1000_000_000, // 2GB, 1GB for stack and 1GB for heap + layout_cache: RefCell::new(HashMap::default()), } } - fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result<Address> { + fn place_addr(&self, p: &Place, locals: &Locals) -> Result<Address> { Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0) } - fn place_interval(&self, p: &Place, locals: &Locals<'_>) -> Result<Interval> { + fn place_interval(&self, p: &Place, locals: &Locals) -> Result<Interval> { let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?; Ok(Interval { addr: place_addr_and_ty.0, @@ -527,7 +554,7 @@ impl Evaluator<'_> { fn ptr_size(&self) -> usize { match self.db.target_data_layout(self.crate_id) { - Some(x) => x.pointer_size.bytes_usize(), + Some(it) => it.pointer_size.bytes_usize(), None => 8, } } @@ -535,7 +562,7 @@ impl Evaluator<'_> { fn place_addr_and_ty_and_metadata<'a>( &'a self, p: &Place, - locals: &'a Locals<'a>, + locals: &'a Locals, ) -> Result<(Address, Ty, Option<IntervalOrOwned>)> { let mut addr = locals.ptr[p.local].addr; let mut ty: Ty = locals.body.locals[p.local].ty.clone(); @@ -569,8 +596,8 @@ impl Evaluator<'_> { } else { None }; - let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?); - addr = Address::from_usize(x); + let it = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?); + addr = Address::from_usize(it); } ProjectionElem::Index(op) => { let offset = from_bytes!( @@ -586,13 +613,13 @@ impl Evaluator<'_> { let 
offset = if from_end { let len = match prev_ty.kind(Interner) { TyKind::Array(_, c) => match try_const_usize(self.db, c) { - Some(x) => x as u64, + Some(it) => it as u64, None => { not_supported!("indexing array with unknown const from end") } }, TyKind::Slice(_) => match metadata { - Some(x) => from_bytes!(u64, x.get(self)?), + Some(it) => from_bytes!(u64, it.get(self)?), None => not_supported!("slice place without metadata"), }, _ => not_supported!("bad type for const index"), @@ -612,8 +639,8 @@ impl Evaluator<'_> { _ => TyKind::Error.intern(Interner), }; metadata = match metadata { - Some(x) => { - let prev_len = from_bytes!(u64, x.get(self)?); + Some(it) => { + let prev_len = from_bytes!(u64, it.get(self)?); Some(IntervalOrOwned::Owned( (prev_len - from - to).to_le_bytes().to_vec(), )) @@ -636,8 +663,8 @@ impl Evaluator<'_> { Variants::Single { .. } => &layout, Variants::Multiple { variants, .. } => { &variants[match f.parent { - hir_def::VariantId::EnumVariantId(x) => { - RustcEnumVariantIdx(x.local_id) + hir_def::VariantId::EnumVariantId(it) => { + RustcEnumVariantIdx(it.local_id) } _ => { return Err(MirEvalError::TypeError( @@ -662,9 +689,15 @@ impl Evaluator<'_> { } fn layout(&self, ty: &Ty) -> Result<Arc<Layout>> { - self.db + if let Some(x) = self.layout_cache.borrow().get(ty) { + return Ok(x.clone()); + } + let r = self + .db .layout_of_ty(ty.clone(), self.crate_id) - .map_err(|e| MirEvalError::LayoutError(e, ty.clone())) + .map_err(|e| MirEvalError::LayoutError(e, ty.clone()))?; + self.layout_cache.borrow_mut().insert(ty.clone(), r.clone()); + Ok(r) } fn layout_adt(&self, adt: AdtId, subst: Substitution) -> Result<Arc<Layout>> { @@ -673,11 +706,11 @@ impl Evaluator<'_> { }) } - fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<Ty> { + fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals) -> Result<Ty> { Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1) } - fn operand_ty(&self, o: &Operand, locals: &Locals<'_>) -> Result<Ty> { + fn operand_ty(&self, o: &Operand, locals: &Locals) -> Result<Ty> { Ok(match o { Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?, Operand::Constant(c) => c.data(Interner).ty.clone(), @@ -688,11 +721,7 @@ impl Evaluator<'_> { }) } - fn operand_ty_and_eval( - &mut self, - o: &Operand, - locals: &mut Locals<'_>, - ) -> Result<IntervalAndTy> { + fn operand_ty_and_eval(&mut self, o: &Operand, locals: &mut Locals) -> Result<IntervalAndTy> { Ok(IntervalAndTy { interval: self.eval_operand(o, locals)?, ty: self.operand_ty(o, locals)?, @@ -701,25 +730,209 @@ impl Evaluator<'_> { fn interpret_mir( &mut self, - body: &MirBody, - args: impl Iterator<Item = Vec<u8>>, + body: Arc<MirBody>, + args: impl Iterator<Item = IntervalOrOwned>, ) -> Result<Vec<u8>> { - if let Some(x) = self.stack_depth_limit.checked_sub(1) { - self.stack_depth_limit = x; + if let Some(it) = self.stack_depth_limit.checked_sub(1) { + self.stack_depth_limit = it; } else { return Err(MirEvalError::StackOverflow); } let mut current_block_idx = body.start_block; + let (mut locals, prev_stack_ptr) = self.create_locals_for_body(body.clone(), None)?; + self.fill_locals_for_body(&body, &mut locals, args)?; + let prev_code_stack = mem::take(&mut self.code_stack); + let span = (MirSpan::Unknown, body.owner); + self.code_stack.push(StackFrame { body, locals, destination: None, prev_stack_ptr, span }); + 'stack: loop { + let Some(mut my_stack_frame) = self.code_stack.pop() else { + not_supported!("missing stack frame"); + }; + let e = (|| { + let mut locals = &mut 
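The new layout_cache field memoizes layout queries behind a RefCell, so the &self layout method can consult and then fill the cache through interior mutability instead of hitting the database every time. A minimal sketch of that pattern under assumed stand-ins (std HashMap and Rc in place of FxHashMap and Arc, a string key in place of Ty):

use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

// Sketch of the interior-mutability memoization used for the layout cache:
// the map lives behind a RefCell, so a `&self` method can read and insert.
struct Layouts {
    cache: RefCell<HashMap<String, Rc<Vec<u8>>>>,
}

impl Layouts {
    fn layout_of(&self, ty: &str) -> Rc<Vec<u8>> {
        if let Some(hit) = self.cache.borrow().get(ty) {
            return hit.clone();
        }
        // The shared borrow above has ended, so taking a mutable borrow is fine.
        let computed = Rc::new(vec![0u8; ty.len()]); // stand-in for the real layout query
        self.cache.borrow_mut().insert(ty.to_owned(), computed.clone());
        computed
    }
}

fn main() {
    let layouts = Layouts { cache: RefCell::new(HashMap::new()) };
    let first = layouts.layout_of("u32");
    let second = layouts.layout_of("u32");
    assert!(Rc::ptr_eq(&first, &second)); // the second call is served from the cache
}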
my_stack_frame.locals; + let body = &*my_stack_frame.body; + loop { + let current_block = &body.basic_blocks[current_block_idx]; + if let Some(it) = self.execution_limit.checked_sub(1) { + self.execution_limit = it; + } else { + return Err(MirEvalError::ExecutionLimitExceeded); + } + for statement in ¤t_block.statements { + match &statement.kind { + StatementKind::Assign(l, r) => { + let addr = self.place_addr(l, &locals)?; + let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?; + self.write_memory(addr, &result)?; + locals.drop_flags.add_place(l.clone()); + } + StatementKind::Deinit(_) => not_supported!("de-init statement"), + StatementKind::StorageLive(_) + | StatementKind::StorageDead(_) + | StatementKind::Nop => (), + } + } + let Some(terminator) = current_block.terminator.as_ref() else { + not_supported!("block without terminator"); + }; + match &terminator.kind { + TerminatorKind::Goto { target } => { + current_block_idx = *target; + } + TerminatorKind::Call { + func, + args, + destination, + target, + cleanup: _, + from_hir_call: _, + } => { + let destination_interval = self.place_interval(destination, &locals)?; + let fn_ty = self.operand_ty(func, &locals)?; + let args = args + .iter() + .map(|it| self.operand_ty_and_eval(it, &mut locals)) + .collect::<Result<Vec<_>>>()?; + let stack_frame = match &fn_ty.data(Interner).kind { + TyKind::Function(_) => { + let bytes = self.eval_operand(func, &mut locals)?; + self.exec_fn_pointer( + bytes, + destination_interval, + &args, + &locals, + *target, + terminator.span, + )? + } + TyKind::FnDef(def, generic_args) => self.exec_fn_def( + *def, + generic_args, + destination_interval, + &args, + &locals, + *target, + terminator.span, + )?, + it => not_supported!("unknown function type {it:?}"), + }; + locals.drop_flags.add_place(destination.clone()); + if let Some(stack_frame) = stack_frame { + self.code_stack.push(my_stack_frame); + current_block_idx = stack_frame.body.start_block; + self.code_stack.push(stack_frame); + return Ok(None); + } else { + current_block_idx = + target.ok_or(MirEvalError::UndefinedBehavior( + "Diverging function returned".to_owned(), + ))?; + } + } + TerminatorKind::SwitchInt { discr, targets } => { + let val = u128::from_le_bytes(pad16( + self.eval_operand(discr, &mut locals)?.get(&self)?, + false, + )); + current_block_idx = targets.target_for_value(val); + } + TerminatorKind::Return => { + break; + } + TerminatorKind::Unreachable => { + return Err(MirEvalError::UndefinedBehavior( + "unreachable executed".to_owned(), + )); + } + TerminatorKind::Drop { place, target, unwind: _ } => { + self.drop_place(place, &mut locals, terminator.span)?; + current_block_idx = *target; + } + _ => not_supported!("unknown terminator"), + } + } + Ok(Some(my_stack_frame)) + })(); + let my_stack_frame = match e { + Ok(None) => continue 'stack, + Ok(Some(x)) => x, + Err(e) => { + let my_code_stack = mem::replace(&mut self.code_stack, prev_code_stack); + let mut error_stack = vec![]; + for frame in my_code_stack.into_iter().rev() { + if let DefWithBodyId::FunctionId(f) = frame.body.owner { + error_stack.push((Either::Left(f), frame.span.0, frame.span.1)); + } + } + return Err(MirEvalError::InFunction(Box::new(e), error_stack)); + } + }; + match my_stack_frame.destination { + None => { + self.code_stack = prev_code_stack; + self.stack_depth_limit += 1; + return Ok(my_stack_frame.locals.ptr[return_slot()].get(self)?.to_vec()); + } + Some(bb) => { + // We don't support const promotion, so we can't truncate the stack yet. 
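The rewritten interpret_mir no longer recurses on the host stack for calls: a Call terminator that produces a StackFrame suspends the caller, pushes the callee onto code_stack, and the outer 'stack loop resumes whichever frame is on top, so guest call depth is bounded by memory rather than by the host stack. A toy sketch of that control-flow shape, with made-up instruction and frame types (not rust-analyzer's MIR):

// Toy explicit-stack interpreter: Call pushes a frame instead of recursing.
#[derive(Clone)]
enum Op {
    Push(i64),
    Add,
    Call(usize), // index of another body to execute
    Return,
}

struct Frame {
    body: usize, // which body this frame executes
    pc: usize,   // next instruction in that body
    values: Vec<i64>,
}

fn run(bodies: &[Vec<Op>], entry: usize) -> i64 {
    let mut stack = vec![Frame { body: entry, pc: 0, values: vec![] }];
    let mut last_return = 0;
    while let Some(mut frame) = stack.pop() {
        loop {
            match bodies[frame.body][frame.pc].clone() {
                Op::Push(v) => frame.values.push(v),
                Op::Add => {
                    let (b, a) = (frame.values.pop().unwrap(), frame.values.pop().unwrap());
                    frame.values.push(a + b);
                }
                Op::Call(callee) => {
                    // Suspend the caller past the call site and start the callee.
                    frame.pc += 1;
                    stack.push(frame);
                    stack.push(Frame { body: callee, pc: 0, values: vec![] });
                    break;
                }
                Op::Return => {
                    last_return = frame.values.pop().unwrap_or(last_return);
                    // Hand the result to the suspended caller, if there is one.
                    if let Some(caller) = stack.last_mut() {
                        caller.values.push(last_return);
                    }
                    break;
                }
            }
            frame.pc += 1;
        }
    }
    last_return
}

fn main() {
    // body 1 computes 2 + 3; body 0 calls it and adds 10.
    let bodies = vec![
        vec![Op::Call(1), Op::Push(10), Op::Add, Op::Return],
        vec![Op::Push(2), Op::Push(3), Op::Add, Op::Return],
    ];
    assert_eq!(run(&bodies, 0), 15);
}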
+ let _ = my_stack_frame.prev_stack_ptr; + // self.stack.truncate(my_stack_frame.prev_stack_ptr); + current_block_idx = bb; + } + } + } + } + + fn fill_locals_for_body( + &mut self, + body: &MirBody, + locals: &mut Locals, + args: impl Iterator<Item = IntervalOrOwned>, + ) -> Result<()> { + let mut remain_args = body.param_locals.len(); + for ((l, interval), value) in locals.ptr.iter().skip(1).zip(args) { + locals.drop_flags.add_place(l.into()); + match value { + IntervalOrOwned::Owned(value) => interval.write_from_bytes(self, &value)?, + IntervalOrOwned::Borrowed(value) => interval.write_from_interval(self, value)?, + } + if remain_args == 0 { + return Err(MirEvalError::TypeError("more arguments provided")); + } + remain_args -= 1; + } + if remain_args > 0 { + return Err(MirEvalError::TypeError("not enough arguments provided")); + } + Ok(()) + } + + fn create_locals_for_body( + &mut self, + body: Arc<MirBody>, + destination: Option<Interval>, + ) -> Result<(Locals, usize)> { let mut locals = - Locals { ptr: &ArenaMap::new(), body: &body, drop_flags: DropFlags::default() }; + Locals { ptr: ArenaMap::new(), body: body.clone(), drop_flags: DropFlags::default() }; let (locals_ptr, stack_size) = { let mut stack_ptr = self.stack.len(); let addr = body .locals .iter() - .map(|(id, x)| { - let size = - self.size_of_sized(&x.ty, &locals, "no unsized local in extending stack")?; + .map(|(id, it)| { + if id == return_slot() { + if let Some(destination) = destination { + return Ok((id, destination)); + } + } + let (size, align) = self.size_align_of_sized( + &it.ty, + &locals, + "no unsized local in extending stack", + )?; + while stack_ptr % align != 0 { + stack_ptr += 1; + } let my_ptr = stack_ptr; stack_ptr += size; Ok((id, Interval { addr: Stack(my_ptr), size })) @@ -728,115 +941,21 @@ impl Evaluator<'_> { let stack_size = stack_ptr - self.stack.len(); (addr, stack_size) }; - locals.ptr = &locals_ptr; - self.stack.extend(iter::repeat(0).take(stack_size)); - let mut remain_args = body.param_locals.len(); - for ((l, interval), value) in locals_ptr.iter().skip(1).zip(args) { - locals.drop_flags.add_place(l.into()); - interval.write_from_bytes(self, &value)?; - if remain_args == 0 { - return Err(MirEvalError::TypeError("more arguments provided")); - } - remain_args -= 1; - } - if remain_args > 0 { - return Err(MirEvalError::TypeError("not enough arguments provided")); - } - loop { - let current_block = &body.basic_blocks[current_block_idx]; - if let Some(x) = self.execution_limit.checked_sub(1) { - self.execution_limit = x; - } else { - return Err(MirEvalError::ExecutionLimitExceeded); - } - for statement in ¤t_block.statements { - match &statement.kind { - StatementKind::Assign(l, r) => { - let addr = self.place_addr(l, &locals)?; - let result = self.eval_rvalue(r, &mut locals)?.to_vec(&self)?; - self.write_memory(addr, &result)?; - locals.drop_flags.add_place(l.clone()); - } - StatementKind::Deinit(_) => not_supported!("de-init statement"), - StatementKind::StorageLive(_) - | StatementKind::StorageDead(_) - | StatementKind::Nop => (), - } - } - let Some(terminator) = current_block.terminator.as_ref() else { - not_supported!("block without terminator"); - }; - match &terminator.kind { - TerminatorKind::Goto { target } => { - current_block_idx = *target; - } - TerminatorKind::Call { - func, - args, - destination, - target, - cleanup: _, - from_hir_call: _, - } => { - let destination_interval = self.place_interval(destination, &locals)?; - let fn_ty = self.operand_ty(func, &locals)?; - let args = args 
- .iter() - .map(|x| self.operand_ty_and_eval(x, &mut locals)) - .collect::<Result<Vec<_>>>()?; - match &fn_ty.data(Interner).kind { - TyKind::Function(_) => { - let bytes = self.eval_operand(func, &mut locals)?; - self.exec_fn_pointer( - bytes, - destination_interval, - &args, - &locals, - terminator.span, - )?; - } - TyKind::FnDef(def, generic_args) => { - self.exec_fn_def( - *def, - generic_args, - destination_interval, - &args, - &locals, - terminator.span, - )?; - } - x => not_supported!("unknown function type {x:?}"), - } - locals.drop_flags.add_place(destination.clone()); - current_block_idx = target.expect("broken mir, function without target"); - } - TerminatorKind::SwitchInt { discr, targets } => { - let val = u128::from_le_bytes(pad16( - self.eval_operand(discr, &mut locals)?.get(&self)?, - false, - )); - current_block_idx = targets.target_for_value(val); - } - TerminatorKind::Return => { - self.stack_depth_limit += 1; - return Ok(locals.ptr[return_slot()].get(self)?.to_vec()); - } - TerminatorKind::Unreachable => { - return Err(MirEvalError::UndefinedBehavior("unreachable executed".to_owned())); - } - TerminatorKind::Drop { place, target, unwind: _ } => { - self.drop_place(place, &mut locals, terminator.span)?; - current_block_idx = *target; - } - _ => not_supported!("unknown terminator"), - } + locals.ptr = locals_ptr; + let prev_stack_pointer = self.stack.len(); + if stack_size > self.memory_limit { + return Err(MirEvalError::Panic(format!( + "Stack overflow. Tried to grow stack to {stack_size} bytes" + ))); } + self.stack.extend(iter::repeat(0).take(stack_size)); + Ok((locals, prev_stack_pointer)) } - fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals<'_>) -> Result<IntervalOrOwned> { + fn eval_rvalue(&mut self, r: &Rvalue, locals: &mut Locals) -> Result<IntervalOrOwned> { use IntervalOrOwned::*; Ok(match r { - Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?), + Rvalue::Use(it) => Borrowed(self.eval_operand(it, locals)?), Rvalue::Ref(_, p) => { let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?; let mut r = addr.to_bytes(); @@ -881,9 +1000,9 @@ impl Evaluator<'_> { c[0] = 1 - c[0]; } else { match op { - UnOp::Not => c.iter_mut().for_each(|x| *x = !*x), + UnOp::Not => c.iter_mut().for_each(|it| *it = !*it), UnOp::Neg => { - c.iter_mut().for_each(|x| *x = !*x); + c.iter_mut().for_each(|it| *it = !*it); for k in c.iter_mut() { let o; (*k, o) = k.overflowing_add(1); @@ -948,8 +1067,8 @@ impl Evaluator<'_> { }; Owned(r.to_le_bytes().into()) } - x => not_supported!( - "invalid binop {x:?} on floating point operators" + it => not_supported!( + "invalid binop {it:?} on floating point operators" ), } } @@ -976,8 +1095,8 @@ impl Evaluator<'_> { }; Owned(r.to_le_bytes().into()) } - x => not_supported!( - "invalid binop {x:?} on floating point operators" + it => not_supported!( + "invalid binop {it:?} on floating point operators" ), } } @@ -1034,13 +1153,18 @@ impl Evaluator<'_> { BinOp::Shr => l128.checked_shr(shift_amount), _ => unreachable!(), }; + if shift_amount as usize >= lc.len() * 8 { + return Err(MirEvalError::Panic(format!( + "Overflow in {op:?}" + ))); + } if let Some(r) = r { break 'b r; } }; return Err(MirEvalError::Panic(format!("Overflow in {op:?}"))); }; - check_overflow(r)? 
+ Owned(r.to_le_bytes()[..lc.len()].to_vec()) } BinOp::Offset => not_supported!("offset binop"), } @@ -1049,64 +1173,15 @@ impl Evaluator<'_> { Rvalue::Discriminant(p) => { let ty = self.place_ty(p, locals)?; let bytes = self.eval_place(p, locals)?.get(&self)?; - let layout = self.layout(&ty)?; - let enum_id = 'b: { - match ty.kind(Interner) { - TyKind::Adt(e, _) => match e.0 { - AdtId::EnumId(e) => break 'b e, - _ => (), - }, - _ => (), - } - return Ok(Owned(0u128.to_le_bytes().to_vec())); - }; - match &layout.variants { - Variants::Single { index } => { - let r = self.const_eval_discriminant(EnumVariantId { - parent: enum_id, - local_id: index.0, - })?; - Owned(r.to_le_bytes().to_vec()) - } - Variants::Multiple { tag, tag_encoding, variants, .. } => { - let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else { - not_supported!("missing target data layout"); - }; - let size = tag.size(&*target_data_layout).bytes_usize(); - let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field - match tag_encoding { - TagEncoding::Direct => { - let tag = &bytes[offset..offset + size]; - Owned(pad16(tag, false).to_vec()) - } - TagEncoding::Niche { untagged_variant, niche_start, .. } => { - let tag = &bytes[offset..offset + size]; - let candidate_tag = i128::from_le_bytes(pad16(tag, false)) - .wrapping_sub(*niche_start as i128) - as usize; - let variant = variants - .iter_enumerated() - .map(|(x, _)| x) - .filter(|x| x != untagged_variant) - .nth(candidate_tag) - .unwrap_or(*untagged_variant) - .0; - let result = self.const_eval_discriminant(EnumVariantId { - parent: enum_id, - local_id: variant, - })?; - Owned(result.to_le_bytes().to_vec()) - } - } - } - } + let result = self.compute_discriminant(ty, bytes)?; + Owned(result.to_le_bytes().to_vec()) } - Rvalue::Repeat(x, len) => { + Rvalue::Repeat(it, len) => { let len = match try_const_usize(self.db, &len) { - Some(x) => x as usize, + Some(it) => it as usize, None => not_supported!("non evaluatable array len in repeat Rvalue"), }; - let val = self.eval_operand(x, locals)?.get(self)?; + let val = self.eval_operand(it, locals)?.get(self)?; let size = len * val.len(); Owned(val.iter().copied().cycle().take(size).collect()) } @@ -1115,20 +1190,20 @@ impl Evaluator<'_> { let Some((size, align)) = self.size_align_of(ty, locals)? else { not_supported!("unsized box initialization"); }; - let addr = self.heap_allocate(size, align); + let addr = self.heap_allocate(size, align)?; Owned(addr.to_bytes()) } Rvalue::CopyForDeref(_) => not_supported!("copy for deref"), Rvalue::Aggregate(kind, values) => { let values = values .iter() - .map(|x| self.eval_operand(x, locals)) + .map(|it| self.eval_operand(it, locals)) .collect::<Result<Vec<_>>>()?; match kind { AggregateKind::Array(_) => { let mut r = vec![]; - for x in values { - let value = x.get(&self)?; + for it in values { + let value = it.get(&self)?; r.extend(value); } Owned(r) @@ -1139,11 +1214,12 @@ impl Evaluator<'_> { layout.size.bytes_usize(), &layout, None, - values.iter().map(|&x| x.into()), + values.iter().map(|&it| it.into()), )?) 
} - AggregateKind::Union(x, f) => { - let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?; + AggregateKind::Union(it, f) => { + let layout = + self.layout_adt((*it).into(), Substitution::empty(Interner))?; let offset = layout .fields .offset(u32::from(f.local_id.into_raw()) as usize) @@ -1153,14 +1229,14 @@ impl Evaluator<'_> { result[offset..offset + op.len()].copy_from_slice(op); Owned(result) } - AggregateKind::Adt(x, subst) => { + AggregateKind::Adt(it, subst) => { let (size, variant_layout, tag) = - self.layout_of_variant(*x, subst.clone(), locals)?; + self.layout_of_variant(*it, subst.clone(), locals)?; Owned(self.make_by_layout( size, &variant_layout, tag, - values.iter().map(|&x| x.into()), + values.iter().map(|&it| it.into()), )?) } AggregateKind::Closure(ty) => { @@ -1169,7 +1245,7 @@ impl Evaluator<'_> { layout.size.bytes_usize(), &layout, None, - values.iter().map(|&x| x.into()), + values.iter().map(|&it| it.into()), )?) } } @@ -1229,21 +1305,75 @@ impl Evaluator<'_> { }) } + fn compute_discriminant(&self, ty: Ty, bytes: &[u8]) -> Result<i128> { + let layout = self.layout(&ty)?; + let enum_id = 'b: { + match ty.kind(Interner) { + TyKind::Adt(e, _) => match e.0 { + AdtId::EnumId(e) => break 'b e, + _ => (), + }, + _ => (), + } + return Ok(0); + }; + match &layout.variants { + Variants::Single { index } => { + let r = self.const_eval_discriminant(EnumVariantId { + parent: enum_id, + local_id: index.0, + })?; + Ok(r) + } + Variants::Multiple { tag, tag_encoding, variants, .. } => { + let Some(target_data_layout) = self.db.target_data_layout(self.crate_id) else { + not_supported!("missing target data layout"); + }; + let size = tag.size(&*target_data_layout).bytes_usize(); + let offset = layout.fields.offset(0).bytes_usize(); // The only field on enum variants is the tag field + match tag_encoding { + TagEncoding::Direct => { + let tag = &bytes[offset..offset + size]; + Ok(i128::from_le_bytes(pad16(tag, false))) + } + TagEncoding::Niche { untagged_variant, niche_start, .. } => { + let tag = &bytes[offset..offset + size]; + let candidate_tag = i128::from_le_bytes(pad16(tag, false)) + .wrapping_sub(*niche_start as i128) + as usize; + let variant = variants + .iter_enumerated() + .map(|(it, _)| it) + .filter(|it| it != untagged_variant) + .nth(candidate_tag) + .unwrap_or(*untagged_variant) + .0; + let result = self.const_eval_discriminant(EnumVariantId { + parent: enum_id, + local_id: variant, + })?; + Ok(result) + } + } + } + } + } + fn coerce_unsized_look_through_fields<T>( &self, ty: &Ty, goal: impl Fn(&TyKind) -> Option<T>, ) -> Result<T> { let kind = ty.kind(Interner); - if let Some(x) = goal(kind) { - return Ok(x); + if let Some(it) = goal(kind) { + return Ok(it); } if let TyKind::Adt(id, subst) = kind { if let AdtId::StructId(struct_id) = id.0 { let field_types = self.db.field_types(struct_id.into()); let mut field_types = field_types.iter(); if let Some(ty) = - field_types.next().map(|x| x.1.clone().substitute(Interner, subst)) + field_types.next().map(|it| it.1.clone().substitute(Interner, subst)) { return self.coerce_unsized_look_through_fields(&ty, goal); } @@ -1258,66 +1388,99 @@ impl Evaluator<'_> { current_ty: &Ty, target_ty: &Ty, ) -> Result<IntervalOrOwned> { - use IntervalOrOwned::*; - fn for_ptr(x: &TyKind) -> Option<Ty> { - match x { + fn for_ptr(it: &TyKind) -> Option<Ty> { + match it { TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => Some(ty.clone()), _ => None, } } - Ok(match self.coerce_unsized_look_through_fields(target_ty, for_ptr)? 
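The niche arm of the newly extracted compute_discriminant subtracts niche_start from the stored tag and uses the difference to index the tagged variants, skipping the untagged one; anything out of range falls back to the untagged variant. A tiny self-contained illustration of that arithmetic, with invented layout values:

// Sketch of niche-encoded discriminant decoding: (tag - niche_start) indexes
// the variants other than the untagged one; out of range means "untagged".
fn decode_niche(
    tag: i128,
    niche_start: i128,
    untagged_variant: usize,
    variant_count: usize,
) -> usize {
    let candidate = tag.wrapping_sub(niche_start) as usize;
    (0..variant_count)
        .filter(|&v| v != untagged_variant)
        .nth(candidate)
        .unwrap_or(untagged_variant)
}

fn main() {
    // Pretend Option<&T>-like layout: variant 1 (Some) is untagged, variant 0
    // (None) lives in the niche, and the niche starts at 0.
    assert_eq!(decode_niche(0, 0, 1, 2), 0); // tag 0 decodes to None
    assert_eq!(decode_niche(0x1234, 0, 1, 2), 1); // any other value is Some
}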
{ - ty => match &ty.data(Interner).kind { - TyKind::Slice(_) => { - match self.coerce_unsized_look_through_fields(current_ty, for_ptr)? { - ty => match &ty.data(Interner).kind { - TyKind::Array(_, size) => { - let len = match try_const_usize(self.db, size) { - None => not_supported!( - "unevaluatble len of array in coerce unsized" - ), - Some(x) => x as usize, - }; - let mut r = Vec::with_capacity(16); - let addr = addr.get(self)?; - r.extend(addr.iter().copied()); - r.extend(len.to_le_bytes().into_iter()); - Owned(r) - } - t => { - not_supported!("slice unsizing from non array type {t:?}") - } - }, - } + let target_ty = self.coerce_unsized_look_through_fields(target_ty, for_ptr)?; + let current_ty = self.coerce_unsized_look_through_fields(current_ty, for_ptr)?; + + self.unsizing_ptr_from_addr(target_ty, current_ty, addr) + } + + /// Adds metadata to the address and create the fat pointer result of the unsizing operation. + fn unsizing_ptr_from_addr( + &mut self, + target_ty: Ty, + current_ty: Ty, + addr: Interval, + ) -> Result<IntervalOrOwned> { + use IntervalOrOwned::*; + Ok(match &target_ty.data(Interner).kind { + TyKind::Slice(_) => match ¤t_ty.data(Interner).kind { + TyKind::Array(_, size) => { + let len = match try_const_usize(self.db, size) { + None => { + not_supported!("unevaluatble len of array in coerce unsized") + } + Some(it) => it as usize, + }; + let mut r = Vec::with_capacity(16); + let addr = addr.get(self)?; + r.extend(addr.iter().copied()); + r.extend(len.to_le_bytes().into_iter()); + Owned(r) } - TyKind::Dyn(_) => match ¤t_ty.data(Interner).kind { - TyKind::Raw(_, ty) | TyKind::Ref(_, _, ty) => { - let vtable = self.vtable_map.id(ty.clone()); - let mut r = Vec::with_capacity(16); - let addr = addr.get(self)?; - r.extend(addr.iter().copied()); - r.extend(vtable.to_le_bytes().into_iter()); - Owned(r) + t => { + not_supported!("slice unsizing from non array type {t:?}") + } + }, + TyKind::Dyn(_) => { + let vtable = self.vtable_map.id(current_ty.clone()); + let mut r = Vec::with_capacity(16); + let addr = addr.get(self)?; + r.extend(addr.iter().copied()); + r.extend(vtable.to_le_bytes().into_iter()); + Owned(r) + } + TyKind::Adt(id, target_subst) => match ¤t_ty.data(Interner).kind { + TyKind::Adt(current_id, current_subst) => { + if id != current_id { + not_supported!("unsizing struct with different type"); } - _ => not_supported!("dyn unsizing from non pointers"), - }, - _ => not_supported!("unknown unsized cast"), + let id = match id.0 { + AdtId::StructId(s) => s, + AdtId::UnionId(_) => not_supported!("unsizing unions"), + AdtId::EnumId(_) => not_supported!("unsizing enums"), + }; + let Some((last_field, _)) = + self.db.struct_data(id).variant_data.fields().iter().rev().next() + else { + not_supported!("unsizing struct without field"); + }; + let target_last_field = self.db.field_types(id.into())[last_field] + .clone() + .substitute(Interner, target_subst); + let current_last_field = self.db.field_types(id.into())[last_field] + .clone() + .substitute(Interner, current_subst); + return self.unsizing_ptr_from_addr( + target_last_field, + current_last_field, + addr, + ); + } + _ => not_supported!("unsizing struct with non adt type"), }, + _ => not_supported!("unknown unsized cast"), }) } fn layout_of_variant( &mut self, - x: VariantId, + it: VariantId, subst: Substitution, - locals: &Locals<'_>, + locals: &Locals, ) -> Result<(usize, Arc<Layout>, Option<(usize, usize, i128)>)> { - let adt = x.adt_id(); + let adt = it.adt_id(); if let DefWithBodyId::VariantId(f) = 
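The struct case added to unsizing_ptr_from_addr recurses into the last field, but the terminal cases still build the fat pointer the same way: the thin pointer bytes followed by metadata bytes, an element count for array-to-slice and a vtable-map id for dyn. A byte-level sketch assuming 8-byte little-endian pointers (the real code uses the target's pointer size and its VTableMap):

// Sketch: a fat pointer is thin-pointer bytes followed by metadata bytes.
fn fat_ptr(thin_ptr: u64, metadata: u64) -> Vec<u8> {
    let mut r = Vec::with_capacity(16);
    r.extend(thin_ptr.to_le_bytes());
    r.extend(metadata.to_le_bytes());
    r
}

fn main() {
    // &[u8; 3] at address 0x1000 coerced to &[u8]: the metadata is the length 3.
    let slice_ptr = fat_ptr(0x1000, 3);
    assert_eq!(slice_ptr.len(), 16);
    assert_eq!(u64::from_le_bytes(slice_ptr[8..16].try_into().unwrap()), 3);

    // &T coerced to &dyn Trait: the metadata is an id handed out by the vtable map.
    let dyn_ptr = fat_ptr(0x2000, 7);
    assert_eq!(u64::from_le_bytes(dyn_ptr[8..16].try_into().unwrap()), 7);
}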
locals.body.owner { - if let VariantId::EnumVariantId(x) = x { + if let VariantId::EnumVariantId(it) = it { if AdtId::from(f.parent) == adt { // Computing the exact size of enums require resolving the enum discriminants. In order to prevent loops (and // infinite sized type errors) we use a dummy layout - let i = self.const_eval_discriminant(x)?; + let i = self.const_eval_discriminant(it)?; return Ok((16, self.layout(&TyBuilder::unit())?, Some((0, 16, i)))); } } @@ -1330,8 +1493,8 @@ impl Evaluator<'_> { .db .target_data_layout(self.crate_id) .ok_or(MirEvalError::TargetDataLayoutNotAvailable)?; - let enum_variant_id = match x { - VariantId::EnumVariantId(x) => x, + let enum_variant_id = match it { + VariantId::EnumVariantId(it) => it, _ => not_supported!("multi variant layout for non-enums"), }; let rustc_enum_variant_idx = RustcEnumVariantIdx(enum_variant_id.local_id); @@ -1345,8 +1508,8 @@ impl Evaluator<'_> { } else { discriminant = (variants .iter_enumerated() - .filter(|(x, _)| x != untagged_variant) - .position(|(x, _)| x == rustc_enum_variant_idx) + .filter(|(it, _)| it != untagged_variant) + .position(|(it, _)| it == rustc_enum_variant_idx) .unwrap() as i128) .wrapping_add(*niche_start as i128); true @@ -1389,8 +1552,8 @@ impl Evaluator<'_> { Ok(result) } - fn eval_operand(&mut self, x: &Operand, locals: &mut Locals<'_>) -> Result<Interval> { - Ok(match x { + fn eval_operand(&mut self, it: &Operand, locals: &mut Locals) -> Result<Interval> { + Ok(match it { Operand::Copy(p) | Operand::Move(p) => { locals.drop_flags.remove_place(p); self.eval_place(p, locals)? @@ -1399,61 +1562,63 @@ impl Evaluator<'_> { let addr = self.eval_static(*st, locals)?; Interval::new(addr, self.ptr_size()) } - Operand::Constant(konst) => { - let data = &konst.data(Interner); - match &data.value { - chalk_ir::ConstValue::BoundVar(_) => not_supported!("bound var constant"), - chalk_ir::ConstValue::InferenceVar(_) => { - not_supported!("inference var constant") - } - chalk_ir::ConstValue::Placeholder(_) => not_supported!("placeholder constant"), - chalk_ir::ConstValue::Concrete(c) => { - self.allocate_const_in_heap(c, &data.ty, locals, konst)? 
- } - } - } + Operand::Constant(konst) => self.allocate_const_in_heap(locals, konst)?, }) } - fn allocate_const_in_heap( - &mut self, - c: &chalk_ir::ConcreteConst<Interner>, - ty: &Ty, - locals: &Locals<'_>, - konst: &chalk_ir::Const<Interner>, - ) -> Result<Interval> { - Ok(match &c.interned { - ConstScalar::Bytes(v, memory_map) => { - let mut v: Cow<'_, [u8]> = Cow::Borrowed(v); - let patch_map = memory_map.transform_addresses(|b| { - let addr = self.heap_allocate(b.len(), 1); // FIXME: align is wrong - self.write_memory(addr, b)?; - Ok(addr.to_usize()) + fn allocate_const_in_heap(&mut self, locals: &Locals, konst: &Const) -> Result<Interval> { + let ty = &konst.data(Interner).ty; + let chalk_ir::ConstValue::Concrete(c) = &konst.data(Interner).value else { + not_supported!("evaluating non concrete constant"); + }; + let result_owner; + let (v, memory_map) = match &c.interned { + ConstScalar::Bytes(v, mm) => (v, mm), + ConstScalar::UnevaluatedConst(const_id, subst) => 'b: { + let mut const_id = *const_id; + let mut subst = subst.clone(); + if let hir_def::GeneralConstId::ConstId(c) = const_id { + let (c, s) = lookup_impl_const(self.db, self.trait_env.clone(), c, subst); + const_id = hir_def::GeneralConstId::ConstId(c); + subst = s; + } + result_owner = self.db.const_eval(const_id.into(), subst).map_err(|e| { + let name = const_id.name(self.db.upcast()); + MirEvalError::ConstEvalError(name, Box::new(e)) })?; - let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1)); - if size != v.len() { - // Handle self enum - if size == 16 && v.len() < 16 { - v = Cow::Owned(pad16(&v, false).to_vec()); - } else if size < 16 && v.len() == 16 { - v = Cow::Owned(v[0..size].to_vec()); - } else { - return Err(MirEvalError::InvalidConst(konst.clone())); + if let chalk_ir::ConstValue::Concrete(c) = &result_owner.data(Interner).value { + if let ConstScalar::Bytes(v, mm) = &c.interned { + break 'b (v, mm); } } - let addr = self.heap_allocate(size, align); - self.write_memory(addr, &v)?; - self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?; - Interval::new(addr, size) - } - ConstScalar::UnevaluatedConst(..) 
=> { - not_supported!("unevaluated const present in monomorphized mir"); + not_supported!("unevaluatable constant"); } ConstScalar::Unknown => not_supported!("evaluating unknown const"), - }) + }; + let mut v: Cow<'_, [u8]> = Cow::Borrowed(v); + let patch_map = memory_map.transform_addresses(|b, align| { + let addr = self.heap_allocate(b.len(), align)?; + self.write_memory(addr, b)?; + Ok(addr.to_usize()) + })?; + let (size, align) = self.size_align_of(ty, locals)?.unwrap_or((v.len(), 1)); + if size != v.len() { + // Handle self enum + if size == 16 && v.len() < 16 { + v = Cow::Owned(pad16(&v, false).to_vec()); + } else if size < 16 && v.len() == 16 { + v = Cow::Owned(v[0..size].to_vec()); + } else { + return Err(MirEvalError::InvalidConst(konst.clone())); + } + } + let addr = self.heap_allocate(size, align)?; + self.write_memory(addr, &v)?; + self.patch_addresses(&patch_map, &memory_map.vtable, addr, ty, locals)?; + Ok(Interval::new(addr, size)) } - fn eval_place(&mut self, p: &Place, locals: &Locals<'_>) -> Result<Interval> { + fn eval_place(&mut self, p: &Place, locals: &Locals) -> Result<Interval> { let addr = self.place_addr(p, locals)?; Ok(Interval::new( addr, @@ -1466,11 +1631,11 @@ impl Evaluator<'_> { return Ok(&[]); } let (mem, pos) = match addr { - Stack(x) => (&self.stack, x), - Heap(x) => (&self.heap, x), - Invalid(x) => { + Stack(it) => (&self.stack, it), + Heap(it) => (&self.heap, it), + Invalid(it) => { return Err(MirEvalError::UndefinedBehavior(format!( - "read invalid memory address {x} with size {size}" + "read invalid memory address {it} with size {size}" ))); } }; @@ -1478,28 +1643,30 @@ impl Evaluator<'_> { .ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_string())) } - fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> { - if r.is_empty() { - return Ok(()); - } + fn write_memory_using_ref(&mut self, addr: Address, size: usize) -> Result<&mut [u8]> { let (mem, pos) = match addr { - Stack(x) => (&mut self.stack, x), - Heap(x) => (&mut self.heap, x), - Invalid(x) => { + Stack(it) => (&mut self.stack, it), + Heap(it) => (&mut self.heap, it), + Invalid(it) => { return Err(MirEvalError::UndefinedBehavior(format!( - "write invalid memory address {x} with content {r:?}" + "write invalid memory address {it} with size {size}" ))); } }; - mem.get_mut(pos..pos + r.len()) - .ok_or_else(|| { - MirEvalError::UndefinedBehavior("out of bound memory write".to_string()) - })? - .copy_from_slice(r); + Ok(mem.get_mut(pos..pos + size).ok_or_else(|| { + MirEvalError::UndefinedBehavior("out of bound memory write".to_string()) + })?) + } + + fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> { + if r.is_empty() { + return Ok(()); + } + self.write_memory_using_ref(addr, r.len())?.copy_from_slice(r); Ok(()) } - fn size_align_of(&self, ty: &Ty, locals: &Locals<'_>) -> Result<Option<(usize, usize)>> { + fn size_align_of(&self, ty: &Ty, locals: &Locals) -> Result<Option<(usize, usize)>> { if let DefWithBodyId::VariantId(f) = locals.body.owner { if let Some((adt, _)) = ty.as_adt() { if AdtId::from(f.parent) == adt { @@ -1523,17 +1690,40 @@ impl Evaluator<'_> { /// A version of `self.size_of` which returns error if the type is unsized. `what` argument should /// be something that complete this: `error: type {ty} was unsized. 
{what} should be sized` - fn size_of_sized(&self, ty: &Ty, locals: &Locals<'_>, what: &'static str) -> Result<usize> { + fn size_of_sized(&self, ty: &Ty, locals: &Locals, what: &'static str) -> Result<usize> { match self.size_align_of(ty, locals)? { - Some(x) => Ok(x.0), + Some(it) => Ok(it.0), None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)), } } - fn heap_allocate(&mut self, size: usize, _align: usize) -> Address { + /// A version of `self.size_align_of` which returns error if the type is unsized. `what` argument should + /// be something that complete this: `error: type {ty} was unsized. {what} should be sized` + fn size_align_of_sized( + &self, + ty: &Ty, + locals: &Locals, + what: &'static str, + ) -> Result<(usize, usize)> { + match self.size_align_of(ty, locals)? { + Some(it) => Ok(it), + None => Err(MirEvalError::TypeIsUnsized(ty.clone(), what)), + } + } + + fn heap_allocate(&mut self, size: usize, align: usize) -> Result<Address> { + if !align.is_power_of_two() || align > 10000 { + return Err(MirEvalError::UndefinedBehavior(format!("Alignment {align} is invalid"))); + } + while self.heap.len() % align != 0 { + self.heap.push(0); + } + if size.checked_add(self.heap.len()).map_or(true, |x| x > self.memory_limit) { + return Err(MirEvalError::Panic(format!("Memory allocation of {size} bytes failed"))); + } let pos = self.heap.len(); self.heap.extend(iter::repeat(0).take(size)); - Address::Heap(pos) + Ok(Address::Heap(pos)) } fn detect_fn_trait(&self, def: FunctionId) -> Option<FnTrait> { @@ -1541,7 +1731,7 @@ impl Evaluator<'_> { let ItemContainerId::TraitId(parent) = self.db.lookup_intern_function(def).container else { return None; }; - let l = lang_attr(self.db.upcast(), parent)?; + let l = self.db.lang_attr(parent.into())?; match l { FnOnce => Some(FnTrait::FnOnce), FnMut => Some(FnTrait::FnMut), @@ -1550,12 +1740,12 @@ impl Evaluator<'_> { } } - fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals<'_>) -> Result<MemoryMap> { + fn create_memory_map(&self, bytes: &[u8], ty: &Ty, locals: &Locals) -> Result<MemoryMap> { fn rec( this: &Evaluator<'_>, bytes: &[u8], ty: &Ty, - locals: &Locals<'_>, + locals: &Locals, mm: &mut MemoryMap, ) -> Result<()> { match ty.kind(Interner) { @@ -1661,7 +1851,7 @@ impl Evaluator<'_> { old_vtable: &VTableMap, addr: Address, ty: &Ty, - locals: &Locals<'_>, + locals: &Locals, ) -> Result<()> { // FIXME: support indirect references let layout = self.layout(ty)?; @@ -1672,14 +1862,14 @@ impl Evaluator<'_> { match size { Some(_) => { let current = from_bytes!(usize, self.read_memory(addr, my_size)?); - if let Some(x) = patch_map.get(¤t) { - self.write_memory(addr, &x.to_le_bytes())?; + if let Some(it) = patch_map.get(¤t) { + self.write_memory(addr, &it.to_le_bytes())?; } } None => { let current = from_bytes!(usize, self.read_memory(addr, my_size / 2)?); - if let Some(x) = patch_map.get(¤t) { - self.write_memory(addr, &x.to_le_bytes())?; + if let Some(it) = patch_map.get(¤t) { + self.write_memory(addr, &it.to_le_bytes())?; } } } @@ -1735,21 +1925,21 @@ impl Evaluator<'_> { bytes: Interval, destination: Interval, args: &[IntervalAndTy], - locals: &Locals<'_>, + locals: &Locals, + target_bb: Option<BasicBlockId>, span: MirSpan, - ) -> Result<()> { + ) -> Result<Option<StackFrame>> { let id = from_bytes!(usize, bytes.get(self)?); let next_ty = self.vtable_map.ty(id)?.clone(); match &next_ty.data(Interner).kind { TyKind::FnDef(def, generic_args) => { - self.exec_fn_def(*def, generic_args, destination, args, &locals, span)?; + 
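heap_allocate is now fallible: it rejects alignments that are not powers of two or are absurdly large, pads the heap so the allocation starts on an aligned offset, and checks memory_limit before bump-allocating. A minimal sketch of that allocator in isolation (the struct and the string error type are mine):

use std::iter;

// Sketch of the aligned, limit-checked bump allocator behind heap_allocate.
struct Heap {
    bytes: Vec<u8>,
    memory_limit: usize,
}

impl Heap {
    fn allocate(&mut self, size: usize, align: usize) -> Result<usize, String> {
        if !align.is_power_of_two() || align > 10_000 {
            return Err(format!("Alignment {align} is invalid"));
        }
        // Pad until the next allocation would start at an aligned offset.
        while self.bytes.len() % align != 0 {
            self.bytes.push(0);
        }
        if size.checked_add(self.bytes.len()).map_or(true, |it| it > self.memory_limit) {
            return Err(format!("Memory allocation of {size} bytes failed"));
        }
        let pos = self.bytes.len();
        self.bytes.extend(iter::repeat(0).take(size));
        Ok(pos)
    }
}

fn main() {
    let mut heap = Heap { bytes: vec![0], memory_limit: 1024 };
    let a = heap.allocate(3, 1).unwrap();
    let b = heap.allocate(8, 8).unwrap();
    assert_eq!(b % 8, 0); // the second allocation is 8-byte aligned
    assert!(a < b);
    assert!(heap.allocate(10_000, 8).is_err()); // exceeds the memory limit
}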
self.exec_fn_def(*def, generic_args, destination, args, &locals, target_bb, span) } TyKind::Closure(id, subst) => { - self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span)?; + self.exec_closure(*id, bytes.slice(0..0), subst, destination, args, locals, span) } - _ => return Err(MirEvalError::TypeError("function pointer to non function")), + _ => Err(MirEvalError::TypeError("function pointer to non function")), } - Ok(()) } fn exec_closure( @@ -1759,9 +1949,9 @@ impl Evaluator<'_> { generic_args: &Substitution, destination: Interval, args: &[IntervalAndTy], - locals: &Locals<'_>, + locals: &Locals, span: MirSpan, - ) -> Result<()> { + ) -> Result<Option<StackFrame>> { let mir_body = self .db .monomorphized_mir_body_for_closure( @@ -1769,7 +1959,7 @@ impl Evaluator<'_> { generic_args.clone(), self.trait_env.clone(), ) - .map_err(|x| MirEvalError::MirLowerErrorForClosure(closure, x))?; + .map_err(|it| MirEvalError::MirLowerErrorForClosure(closure, it))?; let closure_data = if mir_body.locals[mir_body.param_locals[0]].ty.as_reference().is_some() { closure_data.addr.to_bytes() @@ -1777,12 +1967,18 @@ impl Evaluator<'_> { closure_data.get(self)?.to_owned() }; let arg_bytes = iter::once(Ok(closure_data)) - .chain(args.iter().map(|x| Ok(x.get(&self)?.to_owned()))) + .chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned()))) .collect::<Result<Vec<_>>>()?; - let bytes = self.interpret_mir(&mir_body, arg_bytes.into_iter()).map_err(|e| { - MirEvalError::InFunction(Either::Right(closure), Box::new(e), span, locals.body.owner) - })?; - destination.write_from_bytes(self, &bytes) + let bytes = self + .interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned)) + .map_err(|e| { + MirEvalError::InFunction( + Box::new(e), + vec![(Either::Right(closure), span, locals.body.owner)], + ) + })?; + destination.write_from_bytes(self, &bytes)?; + Ok(None) } fn exec_fn_def( @@ -1791,18 +1987,34 @@ impl Evaluator<'_> { generic_args: &Substitution, destination: Interval, args: &[IntervalAndTy], - locals: &Locals<'_>, + locals: &Locals, + target_bb: Option<BasicBlockId>, span: MirSpan, - ) -> Result<()> { + ) -> Result<Option<StackFrame>> { let def: CallableDefId = from_chalk(self.db, def); let generic_args = generic_args.clone(); match def { CallableDefId::FunctionId(def) => { if let Some(_) = self.detect_fn_trait(def) { - self.exec_fn_trait(&args, destination, locals, span)?; - return Ok(()); + return self.exec_fn_trait( + def, + args, + generic_args, + locals, + destination, + target_bb, + span, + ); } - self.exec_fn_with_args(def, args, generic_args, locals, destination, span)?; + self.exec_fn_with_args( + def, + args, + generic_args, + locals, + destination, + target_bb, + span, + ) } CallableDefId::StructId(id) => { let (size, variant_layout, tag) = @@ -1811,9 +2023,10 @@ impl Evaluator<'_> { size, &variant_layout, tag, - args.iter().map(|x| x.interval.into()), + args.iter().map(|it| it.interval.into()), )?; destination.write_from_bytes(self, &result)?; + Ok(None) } CallableDefId::EnumVariantId(id) => { let (size, variant_layout, tag) = @@ -1822,12 +2035,12 @@ impl Evaluator<'_> { size, &variant_layout, tag, - args.iter().map(|x| x.interval.into()), + args.iter().map(|it| it.interval.into()), )?; destination.write_from_bytes(self, &result)?; + Ok(None) } } - Ok(()) } fn exec_fn_with_args( @@ -1835,10 +2048,11 @@ impl Evaluator<'_> { def: FunctionId, args: &[IntervalAndTy], generic_args: Substitution, - locals: &Locals<'_>, + locals: &Locals, destination: Interval, + 
target_bb: Option<BasicBlockId>, span: MirSpan, - ) -> Result<()> { + ) -> Result<Option<StackFrame>> { if self.detect_and_exec_special_function( def, args, @@ -1847,10 +2061,9 @@ impl Evaluator<'_> { destination, span, )? { - return Ok(()); + return Ok(None); } - let arg_bytes = - args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?; + let arg_bytes = args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval)); if let Some(self_ty_idx) = is_dyn_method(self.db, self.trait_env.clone(), def, generic_args.clone()) { @@ -1858,74 +2071,103 @@ impl Evaluator<'_> { // `&T`, `&mut T`, `Box<T>`, `Rc<T>`, `Arc<T>`, and `Pin<P>` where `P` is one of possible recievers, // the vtable is exactly in the `[ptr_size..2*ptr_size]` bytes. So we can use it without branching on // the type. + let first_arg = arg_bytes.clone().next().unwrap(); + let first_arg = first_arg.get(self)?; let ty = - self.vtable_map.ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?; + self.vtable_map.ty_of_bytes(&first_arg[self.ptr_size()..self.ptr_size() * 2])?; let mut args_for_target = args.to_vec(); args_for_target[0] = IntervalAndTy { interval: args_for_target[0].interval.slice(0..self.ptr_size()), ty: ty.clone(), }; let ty = GenericArgData::Ty(ty.clone()).intern(Interner); - let generics_for_target = - Substitution::from_iter( - Interner, - generic_args.iter(Interner).enumerate().map(|(i, x)| { - if i == self_ty_idx { - &ty - } else { - x - } - }), - ); + let generics_for_target = Substitution::from_iter( + Interner, + generic_args.iter(Interner).enumerate().map(|(i, it)| { + if i == self_ty_idx { + &ty + } else { + it + } + }), + ); return self.exec_fn_with_args( def, &args_for_target, generics_for_target, locals, destination, + target_bb, span, ); } let (imp, generic_args) = - lookup_impl_method(self.db, self.trait_env.clone(), def, generic_args); - self.exec_looked_up_function(generic_args, locals, imp, arg_bytes, span, destination) + self.db.lookup_impl_method(self.trait_env.clone(), def, generic_args); + self.exec_looked_up_function( + generic_args, + locals, + imp, + arg_bytes, + span, + destination, + target_bb, + ) } fn exec_looked_up_function( &mut self, generic_args: Substitution, - locals: &Locals<'_>, + locals: &Locals, imp: FunctionId, - arg_bytes: Vec<Vec<u8>>, + arg_bytes: impl Iterator<Item = IntervalOrOwned>, span: MirSpan, destination: Interval, - ) -> Result<()> { + target_bb: Option<BasicBlockId>, + ) -> Result<Option<StackFrame>> { let def = imp.into(); let mir_body = self .db .monomorphized_mir_body(def, generic_args, self.trait_env.clone()) .map_err(|e| { MirEvalError::InFunction( - Either::Left(imp), Box::new(MirEvalError::MirLowerError(imp, e)), - span, - locals.body.owner, + vec![(Either::Left(imp), span, locals.body.owner)], ) })?; - let result = self.interpret_mir(&mir_body, arg_bytes.iter().cloned()).map_err(|e| { - MirEvalError::InFunction(Either::Left(imp), Box::new(e), span, locals.body.owner) - })?; - destination.write_from_bytes(self, &result)?; - Ok(()) + Ok(if let Some(target_bb) = target_bb { + let (mut locals, prev_stack_ptr) = + self.create_locals_for_body(mir_body.clone(), Some(destination))?; + self.fill_locals_for_body(&mir_body, &mut locals, arg_bytes.into_iter())?; + let span = (span, locals.body.owner); + Some(StackFrame { + body: mir_body, + locals, + destination: Some(target_bb), + prev_stack_ptr, + span, + }) + } else { + let result = self.interpret_mir(mir_body, arg_bytes).map_err(|e| { + MirEvalError::InFunction( + Box::new(e), + 
vec![(Either::Left(imp), span, locals.body.owner)], + ) + })?; + destination.write_from_bytes(self, &result)?; + None + }) } fn exec_fn_trait( &mut self, + def: FunctionId, args: &[IntervalAndTy], + generic_args: Substitution, + locals: &Locals, destination: Interval, - locals: &Locals<'_>, + target_bb: Option<BasicBlockId>, span: MirSpan, - ) -> Result<()> { + ) -> Result<Option<StackFrame>> { let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?; let mut func_ty = func.ty.clone(); let mut func_data = func.interval; @@ -1942,13 +2184,28 @@ impl Evaluator<'_> { } match &func_ty.data(Interner).kind { TyKind::FnDef(def, subst) => { - self.exec_fn_def(*def, subst, destination, &args[1..], locals, span)?; + return self.exec_fn_def( + *def, + subst, + destination, + &args[1..], + locals, + target_bb, + span, + ); } TyKind::Function(_) => { - self.exec_fn_pointer(func_data, destination, &args[1..], locals, span)?; + return self.exec_fn_pointer( + func_data, + destination, + &args[1..], + locals, + target_bb, + span, + ); } TyKind::Closure(closure, subst) => { - self.exec_closure( + return self.exec_closure( *closure, func_data, &Substitution::from_iter(Interner, ClosureSubst(subst).parent_subst()), @@ -1956,14 +2213,45 @@ impl Evaluator<'_> { &args[1..], locals, span, - )?; + ); + } + _ => { + // try to execute the manual impl of `FnTrait` for structs (nightly feature used in std) + let arg0 = func; + let args = &args[1..]; + let arg1 = { + let ty = TyKind::Tuple( + args.len(), + Substitution::from_iter(Interner, args.iter().map(|it| it.ty.clone())), + ) + .intern(Interner); + let layout = self.layout(&ty)?; + let result = self.make_by_layout( + layout.size.bytes_usize(), + &layout, + None, + args.iter().map(|it| IntervalOrOwned::Borrowed(it.interval)), + )?; + // FIXME: there is some leak here + let size = layout.size.bytes_usize(); + let addr = self.heap_allocate(size, layout.align.abi.bytes() as usize)?; + self.write_memory(addr, &result)?; + IntervalAndTy { interval: Interval { addr, size }, ty } + }; + return self.exec_fn_with_args( + def, + &[arg0.clone(), arg1], + generic_args, + locals, + destination, + target_bb, + span, + ); } - x => not_supported!("Call FnTrait methods with type {x:?}"), } - Ok(()) } - fn eval_static(&mut self, st: StaticId, locals: &Locals<'_>) -> Result<Address> { + fn eval_static(&mut self, st: StaticId, locals: &Locals) -> Result<Address> { if let Some(o) = self.static_locations.get(&st) { return Ok(*o); }; @@ -1975,21 +2263,16 @@ impl Evaluator<'_> { Box::new(e), ) })?; - let data = &konst.data(Interner); - if let chalk_ir::ConstValue::Concrete(c) = &data.value { - self.allocate_const_in_heap(&c, &data.ty, locals, &konst)? - } else { - not_supported!("unevaluatable static"); - } + self.allocate_const_in_heap(locals, &konst)? } else { let ty = &self.db.infer(st.into())[self.db.body(st.into()).body_expr]; let Some((size, align)) = self.size_align_of(&ty, locals)? 
else { not_supported!("unsized extern static"); }; - let addr = self.heap_allocate(size, align); + let addr = self.heap_allocate(size, align)?; Interval::new(addr, size) }; - let addr = self.heap_allocate(self.ptr_size(), self.ptr_size()); + let addr = self.heap_allocate(self.ptr_size(), self.ptr_size())?; self.write_memory(addr, &result.addr.to_bytes())?; self.static_locations.insert(st, addr); Ok(addr) @@ -2011,13 +2294,13 @@ impl Evaluator<'_> { } } - fn drop_place(&mut self, place: &Place, locals: &mut Locals<'_>, span: MirSpan) -> Result<()> { + fn drop_place(&mut self, place: &Place, locals: &mut Locals, span: MirSpan) -> Result<()> { let (addr, ty, metadata) = self.place_addr_and_ty_and_metadata(place, locals)?; if !locals.drop_flags.remove_place(place) { return Ok(()); } let metadata = match metadata { - Some(x) => x.get(self)?.to_vec(), + Some(it) => it.get(self)?.to_vec(), None => vec![], }; self.run_drop_glue_deep(ty, locals, addr, &metadata, span) @@ -2026,7 +2309,7 @@ impl Evaluator<'_> { fn run_drop_glue_deep( &mut self, ty: Ty, - locals: &Locals<'_>, + locals: &Locals, addr: Address, _metadata: &[u8], span: MirSpan, @@ -2039,8 +2322,7 @@ impl Evaluator<'_> { // we can ignore drop in them. return Ok(()); }; - let (impl_drop_candidate, subst) = lookup_impl_method( - self.db, + let (impl_drop_candidate, subst) = self.db.lookup_impl_method( self.trait_env.clone(), drop_fn, Substitution::from1(Interner, ty.clone()), @@ -2050,9 +2332,10 @@ impl Evaluator<'_> { subst, locals, impl_drop_candidate, - vec![addr.to_bytes()], + [IntervalOrOwned::Owned(addr.to_bytes())].into_iter(), span, Interval { addr: Address::Invalid(0), size: 0 }, + None, )?; } match ty.kind(Interner) { @@ -2121,10 +2404,77 @@ impl Evaluator<'_> { } } -pub fn pad16(x: &[u8], is_signed: bool) -> [u8; 16] { - let is_negative = is_signed && x.last().unwrap_or(&0) > &128; +pub fn render_const_using_debug_impl( + db: &dyn HirDatabase, + owner: ConstId, + c: &Const, +) -> Result<String> { + let mut evaluator = Evaluator::new(db, owner.into(), false); + let locals = &Locals { + ptr: ArenaMap::new(), + body: db + .mir_body(owner.into()) + .map_err(|_| MirEvalError::NotSupported("unreachable".to_string()))?, + drop_flags: DropFlags::default(), + }; + let data = evaluator.allocate_const_in_heap(locals, c)?; + let resolver = owner.resolver(db.upcast()); + let Some(TypeNs::TraitId(debug_trait)) = resolver.resolve_path_in_type_ns_fully( + db.upcast(), + &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments( + hir_expand::mod_path::PathKind::Abs, + [name![core], name![fmt], name![Debug]].into_iter(), + )), + ) else { + not_supported!("core::fmt::Debug not found"); + }; + let Some(debug_fmt_fn) = db.trait_data(debug_trait).method_by_name(&name![fmt]) else { + not_supported!("core::fmt::Debug::fmt not found"); + }; + // a1 = &[""] + let a1 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?; + // a2 = &[::core::fmt::ArgumentV1::new(&(THE_CONST), ::core::fmt::Debug::fmt)] + // FIXME: we should call the said function, but since its name is going to break in the next rustc version + // and its ABI doesn't break yet, we put it in memory manually. 
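+ // In source terms the goal is roughly `std::fmt::format(format_args!("{:?}", THE_CONST))`
+ // (a sketch, not what actually runs): the pieces slice, the argument slice and the
+ // `Arguments` value are built by hand below, relying on the current core::fmt layout
+ // as noted in the FIXME above.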
+ let a2 = evaluator.heap_allocate(evaluator.ptr_size() * 2, evaluator.ptr_size())?; + evaluator.write_memory(a2, &data.addr.to_bytes())?; + let debug_fmt_fn_ptr = evaluator.vtable_map.id(TyKind::FnDef( + db.intern_callable_def(debug_fmt_fn.into()).into(), + Substitution::from1(Interner, c.data(Interner).ty.clone()), + ) + .intern(Interner)); + evaluator.write_memory(a2.offset(evaluator.ptr_size()), &debug_fmt_fn_ptr.to_le_bytes())?; + // a3 = ::core::fmt::Arguments::new_v1(a1, a2) + // FIXME: similarly, we should call function here, not directly working with memory. + let a3 = evaluator.heap_allocate(evaluator.ptr_size() * 6, evaluator.ptr_size())?; + evaluator.write_memory(a3.offset(2 * evaluator.ptr_size()), &a1.to_bytes())?; + evaluator.write_memory(a3.offset(3 * evaluator.ptr_size()), &[1])?; + evaluator.write_memory(a3.offset(4 * evaluator.ptr_size()), &a2.to_bytes())?; + evaluator.write_memory(a3.offset(5 * evaluator.ptr_size()), &[1])?; + let Some(ValueNs::FunctionId(format_fn)) = resolver.resolve_path_in_value_ns_fully( + db.upcast(), + &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments( + hir_expand::mod_path::PathKind::Abs, + [name![std], name![fmt], name![format]].into_iter(), + )), + ) else { + not_supported!("std::fmt::format not found"); + }; + let message_string = evaluator.interpret_mir( + db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?, + [IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })] + .into_iter(), + )?; + let addr = + Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?; + let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]); + Ok(std::string::String::from_utf8_lossy(evaluator.read_memory(addr, size)?).into_owned()) +} + +pub fn pad16(it: &[u8], is_signed: bool) -> [u8; 16] { + let is_negative = is_signed && it.last().unwrap_or(&0) > &127; let fill_with = if is_negative { 255 } else { 0 }; - x.iter() + it.iter() .copied() .chain(iter::repeat(fill_with)) .take(16) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs index 3b9ef03c369..9ad6087cad9 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim.rs @@ -3,20 +3,26 @@ use std::cmp; +use chalk_ir::TyKind; +use hir_def::resolver::HasResolver; +use hir_expand::mod_path::ModPath; + use super::*; +mod simd; + macro_rules! from_bytes { ($ty:tt, $value:expr) => { ($ty::from_le_bytes(match ($value).try_into() { - Ok(x) => x, + Ok(it) => it, Err(_) => return Err(MirEvalError::TypeError("mismatched size")), })) }; } macro_rules! 
not_supported { - ($x: expr) => { - return Err(MirEvalError::NotSupported(format!($x))) + ($it: expr) => { + return Err(MirEvalError::NotSupported(format!($it))) }; } @@ -26,7 +32,7 @@ impl Evaluator<'_> { def: FunctionId, args: &[IntervalAndTy], generic_args: &Substitution, - locals: &Locals<'_>, + locals: &Locals, destination: Interval, span: MirSpan, ) -> Result<bool> { @@ -53,6 +59,28 @@ impl Evaluator<'_> { )?; return Ok(true); } + let is_platform_intrinsic = match &function_data.abi { + Some(abi) => *abi == Interned::new_str("platform-intrinsic"), + None => match def.lookup(self.db.upcast()).container { + hir_def::ItemContainerId::ExternBlockId(block) => { + let id = block.lookup(self.db.upcast()).id; + id.item_tree(self.db.upcast())[id.value].abi.as_deref() + == Some("platform-intrinsic") + } + _ => false, + }, + }; + if is_platform_intrinsic { + self.exec_platform_intrinsic( + function_data.name.as_text().unwrap_or_default().as_str(), + args, + generic_args, + destination, + &locals, + span, + )?; + return Ok(true); + } let is_extern_c = match def.lookup(self.db.upcast()).container { hir_def::ItemContainerId::ExternBlockId(block) => { let id = block.lookup(self.db.upcast()).id; @@ -74,25 +102,25 @@ impl Evaluator<'_> { let alloc_fn = function_data .attrs .iter() - .filter_map(|x| x.path().as_ident()) - .filter_map(|x| x.as_str()) - .find(|x| { + .filter_map(|it| it.path().as_ident()) + .filter_map(|it| it.as_str()) + .find(|it| { [ "rustc_allocator", "rustc_deallocator", "rustc_reallocator", "rustc_allocator_zeroed", ] - .contains(x) + .contains(it) }); if let Some(alloc_fn) = alloc_fn { self.exec_alloc_fn(alloc_fn, args, destination)?; return Ok(true); } - if let Some(x) = self.detect_lang_function(def) { + if let Some(it) = self.detect_lang_function(def) { let arg_bytes = - args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?; - let result = self.exec_lang_item(x, generic_args, &arg_bytes, locals, span)?; + args.iter().map(|it| Ok(it.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?; + let result = self.exec_lang_item(it, generic_args, &arg_bytes, locals, span)?; destination.write_from_bytes(self, &result)?; return Ok(true); } @@ -112,7 +140,7 @@ impl Evaluator<'_> { }; let size = from_bytes!(usize, size.get(self)?); let align = from_bytes!(usize, align.get(self)?); - let result = self.heap_allocate(size, align); + let result = self.heap_allocate(size, align)?; destination.write_from_bytes(self, &result.to_bytes())?; } "rustc_deallocator" => { /* no-op for now */ } @@ -120,14 +148,18 @@ impl Evaluator<'_> { let [ptr, old_size, align, new_size] = args else { return Err(MirEvalError::TypeError("rustc_allocator args are not provided")); }; - let ptr = Address::from_bytes(ptr.get(self)?)?; let old_size = from_bytes!(usize, old_size.get(self)?); let new_size = from_bytes!(usize, new_size.get(self)?); - let align = from_bytes!(usize, align.get(self)?); - let result = self.heap_allocate(new_size, align); - Interval { addr: result, size: old_size } - .write_from_interval(self, Interval { addr: ptr, size: old_size })?; - destination.write_from_bytes(self, &result.to_bytes())?; + if old_size >= new_size { + destination.write_from_interval(self, ptr.interval)?; + } else { + let ptr = Address::from_bytes(ptr.get(self)?)?; + let align = from_bytes!(usize, align.get(self)?); + let result = self.heap_allocate(new_size, align)?; + Interval { addr: result, size: old_size } + .write_from_interval(self, Interval { addr: ptr, size: old_size })?; + 
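// hand the new pointer back to the caller; only the first `old_size` bytes were copied, and the old block is not freed (deallocation is a no-op in this evaluator, see "rustc_deallocator" above) +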
destination.write_from_bytes(self, &result.to_bytes())?; + } } _ => not_supported!("unknown alloc function"), } @@ -136,7 +168,7 @@ impl Evaluator<'_> { fn detect_lang_function(&self, def: FunctionId) -> Option<LangItem> { use LangItem::*; - let candidate = lang_attr(self.db.upcast(), def)?; + let candidate = self.db.lang_attr(def.into())?; // We want to execute these functions with special logic if [PanicFmt, BeginPanic, SliceLen, DropInPlace].contains(&candidate) { return Some(candidate); @@ -146,56 +178,35 @@ impl Evaluator<'_> { fn exec_lang_item( &mut self, - x: LangItem, + it: LangItem, generic_args: &Substitution, args: &[Vec<u8>], - locals: &Locals<'_>, + locals: &Locals, span: MirSpan, ) -> Result<Vec<u8>> { use LangItem::*; let mut args = args.iter(); - match x { + match it { BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())), PanicFmt => { let message = (|| { - let arguments_struct = - self.db.lang_item(self.crate_id, LangItem::FormatArguments)?.as_struct()?; - let arguments_layout = self - .layout_adt(arguments_struct.into(), Substitution::empty(Interner)) - .ok()?; - let arguments_field_pieces = - self.db.struct_data(arguments_struct).variant_data.field(&name![pieces])?; - let pieces_offset = arguments_layout - .fields - .offset(u32::from(arguments_field_pieces.into_raw()) as usize) - .bytes_usize(); - let ptr_size = self.ptr_size(); - let arg = args.next()?; - let pieces_array_addr = - Address::from_bytes(&arg[pieces_offset..pieces_offset + ptr_size]).ok()?; - let pieces_array_len = usize::from_le_bytes( - (&arg[pieces_offset + ptr_size..pieces_offset + 2 * ptr_size]) - .try_into() - .ok()?, - ); - let mut message = "".to_string(); - for i in 0..pieces_array_len { - let piece_ptr_addr = pieces_array_addr.offset(2 * i * ptr_size); - let piece_addr = - Address::from_bytes(self.read_memory(piece_ptr_addr, ptr_size).ok()?) - .ok()?; - let piece_len = usize::from_le_bytes( - self.read_memory(piece_ptr_addr.offset(ptr_size), ptr_size) - .ok()? 
- .try_into() - .ok()?, - ); - let piece_data = self.read_memory(piece_addr, piece_len).ok()?; - message += &std::string::String::from_utf8_lossy(piece_data); - } - Some(message) + let resolver = self.db.crate_def_map(self.crate_id).crate_root().resolver(self.db.upcast()); + let Some(format_fn) = resolver.resolve_path_in_value_ns_fully( + self.db.upcast(), + &hir_def::path::Path::from_known_path_with_no_generic(ModPath::from_segments( + hir_expand::mod_path::PathKind::Abs, + [name![std], name![fmt], name![format]].into_iter(), + )), + ) else { + not_supported!("std::fmt::format not found"); + }; + let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else { not_supported!("std::fmt::format is not a function") }; + let message_string = self.interpret_mir(self.db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?, args.map(|x| IntervalOrOwned::Owned(x.clone())))?; + let addr = Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?; + let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]); + Ok(std::string::String::from_utf8_lossy(self.read_memory(addr, size)?).into_owned()) })() - .unwrap_or_else(|| "<format-args-evaluation-failed>".to_string()); + .unwrap_or_else(|e| format!("Failed to render panic format args: {e:?}")); Err(MirEvalError::Panic(message)) } SliceLen => { @@ -207,7 +218,7 @@ impl Evaluator<'_> { } DropInPlace => { let ty = - generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)).ok_or( + generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)).ok_or( MirEvalError::TypeError( "generic argument of drop_in_place is not provided", ), @@ -224,7 +235,35 @@ impl Evaluator<'_> { )?; Ok(vec![]) } - x => not_supported!("Executing lang item {x:?}"), + it => not_supported!("Executing lang item {it:?}"), + } + } + + fn exec_syscall( + &mut self, + id: i64, + args: &[IntervalAndTy], + destination: Interval, + _locals: &Locals, + _span: MirSpan, + ) -> Result<()> { + match id { + 318 => { + // SYS_getrandom + let [buf, len, _flags] = args else { + return Err(MirEvalError::TypeError("SYS_getrandom args are not provided")); + }; + let addr = Address::from_bytes(buf.get(self)?)?; + let size = from_bytes!(usize, len.get(self)?); + for i in 0..size { + let rand_byte = self.random_state.rand_u64() as u8; + self.write_memory(addr.offset(i), &[rand_byte])?; + } + destination.write_from_interval(self, len.interval) + } + _ => { + not_supported!("Unknown syscall id {id:?}") + } } } @@ -234,8 +273,8 @@ impl Evaluator<'_> { args: &[IntervalAndTy], _generic_args: &Substitution, destination: Interval, - locals: &Locals<'_>, - _span: MirSpan, + locals: &Locals, + span: MirSpan, ) -> Result<()> { match as_str { "memcmp" => { @@ -299,7 +338,9 @@ impl Evaluator<'_> { } "pthread_getspecific" => { let Some(arg0) = args.get(0) else { - return Err(MirEvalError::TypeError("pthread_getspecific arg0 is not provided")); + return Err(MirEvalError::TypeError( + "pthread_getspecific arg0 is not provided", + )); }; let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]); let value = self.thread_local_storage.get_key(key)?; @@ -308,11 +349,15 @@ impl Evaluator<'_> { } "pthread_setspecific" => { let Some(arg0) = args.get(0) else { - return Err(MirEvalError::TypeError("pthread_setspecific arg0 is not provided")); + return Err(MirEvalError::TypeError( + "pthread_setspecific arg0 is not provided", + )); }; let key = from_bytes!(usize, &pad16(arg0.get(self)?, false)[0..8]); let Some(arg1) = args.get(1) else { - 
return Err(MirEvalError::TypeError("pthread_setspecific arg1 is not provided")); + return Err(MirEvalError::TypeError( + "pthread_setspecific arg1 is not provided", + )); }; let value = from_bytes!(u128, pad16(arg1.get(self)?, false)); self.thread_local_storage.set_key(key, value)?; @@ -326,17 +371,52 @@ impl Evaluator<'_> { destination.write_from_bytes(self, &0u64.to_le_bytes()[0..destination.size])?; Ok(()) } + "syscall" => { + let Some((id, rest)) = args.split_first() else { + return Err(MirEvalError::TypeError( + "syscall arg1 is not provided", + )); + }; + let id = from_bytes!(i64, id.get(self)?); + self.exec_syscall(id, rest, destination, locals, span) + } + "sched_getaffinity" => { + let [_pid, _set_size, set] = args else { + return Err(MirEvalError::TypeError("libc::write args are not provided")); + }; + let set = Address::from_bytes(set.get(self)?)?; + // Only enable core 0 (we are single threaded anyway), which is bitset 0x0000001 + self.write_memory(set, &[1])?; + // return 0 as success + self.write_memory_using_ref(destination.addr, destination.size)?.fill(0); + Ok(()) + } _ => not_supported!("unknown external function {as_str}"), } } + fn exec_platform_intrinsic( + &mut self, + name: &str, + args: &[IntervalAndTy], + generic_args: &Substitution, + destination: Interval, + locals: &Locals, + span: MirSpan, + ) -> Result<()> { + if let Some(name) = name.strip_prefix("simd_") { + return self.exec_simd_intrinsic(name, args, generic_args, destination, locals, span); + } + not_supported!("unknown platform intrinsic {name}"); + } + fn exec_intrinsic( &mut self, name: &str, args: &[IntervalAndTy], generic_args: &Substitution, destination: Interval, - locals: &Locals<'_>, + locals: &Locals, span: MirSpan, ) -> Result<()> { if let Some(name) = name.strip_prefix("atomic_") { @@ -347,7 +427,9 @@ impl Evaluator<'_> { "sqrt" | "sin" | "cos" | "exp" | "exp2" | "log" | "log10" | "log2" | "fabs" | "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => { let [arg] = args else { - return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64) -> f64")); + return Err(MirEvalError::TypeError( + "f64 intrinsic signature doesn't match fn (f64) -> f64", + )); }; let arg = from_bytes!(f64, arg.get(self)?); match name { @@ -373,7 +455,9 @@ impl Evaluator<'_> { } "pow" | "minnum" | "maxnum" | "copysign" => { let [arg1, arg2] = args else { - return Err(MirEvalError::TypeError("f64 intrinsic signature doesn't match fn (f64, f64) -> f64")); + return Err(MirEvalError::TypeError( + "f64 intrinsic signature doesn't match fn (f64, f64) -> f64", + )); }; let arg1 = from_bytes!(f64, arg1.get(self)?); let arg2 = from_bytes!(f64, arg2.get(self)?); @@ -387,7 +471,9 @@ impl Evaluator<'_> { } "powi" => { let [arg1, arg2] = args else { - return Err(MirEvalError::TypeError("powif64 signature doesn't match fn (f64, i32) -> f64")); + return Err(MirEvalError::TypeError( + "powif64 signature doesn't match fn (f64, i32) -> f64", + )); }; let arg1 = from_bytes!(f64, arg1.get(self)?); let arg2 = from_bytes!(i32, arg2.get(self)?); @@ -395,7 +481,9 @@ impl Evaluator<'_> { } "fma" => { let [arg1, arg2, arg3] = args else { - return Err(MirEvalError::TypeError("fmaf64 signature doesn't match fn (f64, f64, f64) -> f64")); + return Err(MirEvalError::TypeError( + "fmaf64 signature doesn't match fn (f64, f64, f64) -> f64", + )); }; let arg1 = from_bytes!(f64, arg1.get(self)?); let arg2 = from_bytes!(f64, arg2.get(self)?); @@ -411,7 +499,9 @@ impl Evaluator<'_> { "sqrt" | "sin" | "cos" | "exp" 
| "exp2" | "log" | "log10" | "log2" | "fabs" | "floor" | "ceil" | "trunc" | "rint" | "nearbyint" | "round" | "roundeven" => { let [arg] = args else { - return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32) -> f32")); + return Err(MirEvalError::TypeError( + "f32 intrinsic signature doesn't match fn (f32) -> f32", + )); }; let arg = from_bytes!(f32, arg.get(self)?); match name { @@ -437,7 +527,9 @@ impl Evaluator<'_> { } "pow" | "minnum" | "maxnum" | "copysign" => { let [arg1, arg2] = args else { - return Err(MirEvalError::TypeError("f32 intrinsic signature doesn't match fn (f32, f32) -> f32")); + return Err(MirEvalError::TypeError( + "f32 intrinsic signature doesn't match fn (f32, f32) -> f32", + )); }; let arg1 = from_bytes!(f32, arg1.get(self)?); let arg2 = from_bytes!(f32, arg2.get(self)?); @@ -451,7 +543,9 @@ impl Evaluator<'_> { } "powi" => { let [arg1, arg2] = args else { - return Err(MirEvalError::TypeError("powif32 signature doesn't match fn (f32, i32) -> f32")); + return Err(MirEvalError::TypeError( + "powif32 signature doesn't match fn (f32, i32) -> f32", + )); }; let arg1 = from_bytes!(f32, arg1.get(self)?); let arg2 = from_bytes!(i32, arg2.get(self)?); @@ -459,7 +553,9 @@ impl Evaluator<'_> { } "fma" => { let [arg1, arg2, arg3] = args else { - return Err(MirEvalError::TypeError("fmaf32 signature doesn't match fn (f32, f32, f32) -> f32")); + return Err(MirEvalError::TypeError( + "fmaf32 signature doesn't match fn (f32, f32, f32) -> f32", + )); }; let arg1 = from_bytes!(f32, arg1.get(self)?); let arg2 = from_bytes!(f32, arg2.get(self)?); @@ -472,21 +568,74 @@ impl Evaluator<'_> { } match name { "size_of" => { - let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) + else { return Err(MirEvalError::TypeError("size_of generic arg is not provided")); }; let size = self.size_of_sized(ty, locals, "size_of arg")?; destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size]) } "min_align_of" | "pref_align_of" => { - let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else { return Err(MirEvalError::TypeError("align_of generic arg is not provided")); }; let align = self.layout(ty)?.align.abi.bytes(); destination.write_from_bytes(self, &align.to_le_bytes()[0..destination.size]) } + "size_of_val" => { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) + else { + return Err(MirEvalError::TypeError("size_of_val generic arg is not provided")); + }; + let [arg] = args else { + return Err(MirEvalError::TypeError("size_of_val args are not provided")); + }; + if let Some((size, _)) = self.size_align_of(ty, locals)? { + destination.write_from_bytes(self, &size.to_le_bytes()) + } else { + let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2); + let (size, _) = self.size_align_of_unsized(ty, metadata, locals)?; + destination.write_from_bytes(self, &size.to_le_bytes()) + } + } + "min_align_of_val" => { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else { + return Err(MirEvalError::TypeError("min_align_of_val generic arg is not provided")); + }; + let [arg] = args else { + return Err(MirEvalError::TypeError("min_align_of_val args are not provided")); + }; + if let Some((_, align)) = self.size_align_of(ty, locals)? 
{ + destination.write_from_bytes(self, &align.to_le_bytes()) + } else { + let metadata = arg.interval.slice(self.ptr_size()..self.ptr_size() * 2); + let (_, align) = self.size_align_of_unsized(ty, metadata, locals)?; + destination.write_from_bytes(self, &align.to_le_bytes()) + } + } + "type_name" => { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) + else { + return Err(MirEvalError::TypeError("type_name generic arg is not provided")); + }; + let Ok(ty_name) = ty.display_source_code( + self.db, + locals.body.owner.module(self.db.upcast()), + true, + ) else { + not_supported!("fail in generating type_name using source code display"); + }; + let len = ty_name.len(); + let addr = self.heap_allocate(len, 1)?; + self.write_memory(addr, ty_name.as_bytes())?; + destination.slice(0..self.ptr_size()).write_from_bytes(self, &addr.to_bytes())?; + destination + .slice(self.ptr_size()..2 * self.ptr_size()) + .write_from_bytes(self, &len.to_le_bytes()) + } "needs_drop" => { - let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) + else { return Err(MirEvalError::TypeError("size_of generic arg is not provided")); }; let result = !ty.clone().is_copy(self.db, locals.body.owner); @@ -501,13 +650,17 @@ impl Evaluator<'_> { let ans = lhs.get(self)? == rhs.get(self)?; destination.write_from_bytes(self, &[u8::from(ans)]) } - "saturating_add" => { + "saturating_add" | "saturating_sub" => { let [lhs, rhs] = args else { return Err(MirEvalError::TypeError("saturating_add args are not provided")); }; let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); - let ans = lhs.saturating_add(rhs); + let ans = match name { + "saturating_add" => lhs.saturating_add(rhs), + "saturating_sub" => lhs.saturating_sub(rhs), + _ => unreachable!(), + }; let bits = destination.size * 8; // FIXME: signed let is_signed = false; @@ -544,6 +697,26 @@ impl Evaluator<'_> { let ans = lhs.wrapping_mul(rhs); destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) } + "wrapping_shl" | "unchecked_shl" => { + // FIXME: signed + let [lhs, rhs] = args else { + return Err(MirEvalError::TypeError("unchecked_shl args are not provided")); + }; + let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); + let ans = lhs.wrapping_shl(rhs as u32); + destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) + } + "wrapping_shr" | "unchecked_shr" => { + // FIXME: signed + let [lhs, rhs] = args else { + return Err(MirEvalError::TypeError("unchecked_shr args are not provided")); + }; + let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false)); + let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false)); + let ans = lhs.wrapping_shr(rhs as u32); + destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size]) + } "unchecked_rem" => { // FIXME: signed let [lhs, rhs] = args else { @@ -588,7 +761,7 @@ impl Evaluator<'_> { _ => unreachable!(), }; let is_overflow = u128overflow - || ans.to_le_bytes()[op_size..].iter().any(|&x| x != 0 && x != 255); + || ans.to_le_bytes()[op_size..].iter().any(|&it| it != 0 && it != 255); let is_overflow = vec![u8::from(is_overflow)]; let layout = self.layout(&result_ty)?; let result = self.make_by_layout( @@ -603,10 +776,15 @@ impl Evaluator<'_> { } "copy" | "copy_nonoverlapping" => { let 
[src, dst, offset] = args else { - return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided")); + return Err(MirEvalError::TypeError( + "copy_nonoverlapping args are not provided", + )); }; - let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { - return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided")); + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) + else { + return Err(MirEvalError::TypeError( + "copy_nonoverlapping generic arg is not provided", + )); }; let src = Address::from_bytes(src.get(self)?)?; let dst = Address::from_bytes(dst.get(self)?)?; @@ -621,7 +799,8 @@ impl Evaluator<'_> { let [ptr, offset] = args else { return Err(MirEvalError::TypeError("offset args are not provided")); }; - let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) + else { return Err(MirEvalError::TypeError("offset generic arg is not provided")); }; let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false)); @@ -652,20 +831,106 @@ impl Evaluator<'_> { } "ctpop" => { let [arg] = args else { - return Err(MirEvalError::TypeError("likely arg is not provided")); + return Err(MirEvalError::TypeError("ctpop arg is not provided")); }; let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).count_ones(); destination .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size]) } + "ctlz" | "ctlz_nonzero" => { + let [arg] = args else { + return Err(MirEvalError::TypeError("cttz arg is not provided")); + }; + let result = + u128::from_le_bytes(pad16(arg.get(self)?, false)).leading_zeros() as usize; + let result = result - (128 - arg.interval.size * 8); + destination + .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size]) + } "cttz" | "cttz_nonzero" => { let [arg] = args else { - return Err(MirEvalError::TypeError("likely arg is not provided")); + return Err(MirEvalError::TypeError("cttz arg is not provided")); }; let result = u128::from_le_bytes(pad16(arg.get(self)?, false)).trailing_zeros(); destination .write_from_bytes(self, &(result as u128).to_le_bytes()[0..destination.size]) } + "rotate_left" => { + let [lhs, rhs] = args else { + return Err(MirEvalError::TypeError("rotate_left args are not provided")); + }; + let lhs = &lhs.get(self)?[0..destination.size]; + let rhs = rhs.get(self)?[0] as u32; + match destination.size { + 1 => { + let r = from_bytes!(u8, lhs).rotate_left(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + 2 => { + let r = from_bytes!(u16, lhs).rotate_left(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + 4 => { + let r = from_bytes!(u32, lhs).rotate_left(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + 8 => { + let r = from_bytes!(u64, lhs).rotate_left(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + 16 => { + let r = from_bytes!(u128, lhs).rotate_left(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + s => not_supported!("destination with size {s} for rotate_left"), + } + } + "rotate_right" => { + let [lhs, rhs] = args else { + return Err(MirEvalError::TypeError("rotate_right args are not provided")); + }; + let lhs = &lhs.get(self)?[0..destination.size]; + let rhs = rhs.get(self)?[0] as u32; + match destination.size { + 1 => { + let r = from_bytes!(u8, lhs).rotate_right(rhs); + destination.write_from_bytes(self, 
&r.to_le_bytes()) + } + 2 => { + let r = from_bytes!(u16, lhs).rotate_right(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + 4 => { + let r = from_bytes!(u32, lhs).rotate_right(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + 8 => { + let r = from_bytes!(u64, lhs).rotate_right(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + 16 => { + let r = from_bytes!(u128, lhs).rotate_right(rhs); + destination.write_from_bytes(self, &r.to_le_bytes()) + } + s => not_supported!("destination with size {s} for rotate_right"), + } + } + "discriminant_value" => { + let [arg] = args else { + return Err(MirEvalError::TypeError("discriminant_value arg is not provided")); + }; + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) + else { + return Err(MirEvalError::TypeError( + "discriminant_value generic arg is not provided", + )); + }; + let addr = Address::from_bytes(arg.get(self)?)?; + let size = self.size_of_sized(ty, locals, "discriminant_value ptr type")?; + let interval = Interval { addr, size }; + let r = self.compute_discriminant(ty.clone(), interval.get(self)?)?; + destination.write_from_bytes(self, &r.to_le_bytes()[0..destination.size]) + } "const_eval_select" => { let [tuple, const_fn, _] = args else { return Err(MirEvalError::TypeError("const_eval_select args are not provided")); @@ -681,24 +946,119 @@ impl Evaluator<'_> { let addr = tuple.interval.addr.offset(offset); args.push(IntervalAndTy::new(addr, field, self, locals)?); } - self.exec_fn_trait(&args, destination, locals, span) + if let Some(target) = self.db.lang_item(self.crate_id, LangItem::FnOnce) { + if let Some(def) = target + .as_trait() + .and_then(|it| self.db.trait_data(it).method_by_name(&name![call_once])) + { + self.exec_fn_trait( + def, + &args, + // FIXME: wrong for manual impls of `FnOnce` + Substitution::empty(Interner), + locals, + destination, + None, + span, + )?; + return Ok(()); + } + } + not_supported!("FnOnce was not available for executing const_eval_select"); + } + "read_via_copy" | "volatile_load" => { + let [arg] = args else { + return Err(MirEvalError::TypeError("read_via_copy args are not provided")); + }; + let addr = Address::from_bytes(arg.interval.get(self)?)?; + destination.write_from_interval(self, Interval { addr, size: destination.size }) + } + "write_bytes" => { + let [dst, val, count] = args else { + return Err(MirEvalError::TypeError("write_bytes args are not provided")); + }; + let count = from_bytes!(usize, count.get(self)?); + let val = from_bytes!(u8, val.get(self)?); + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) + else { + return Err(MirEvalError::TypeError( + "write_bytes generic arg is not provided", + )); + }; + let dst = Address::from_bytes(dst.get(self)?)?; + let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?; + let size = count * size; + self.write_memory_using_ref(dst, size)?.fill(val); + Ok(()) } _ => not_supported!("unknown intrinsic {name}"), } } + fn size_align_of_unsized( + &mut self, + ty: &Ty, + metadata: Interval, + locals: &Locals, + ) -> Result<(usize, usize)> { + Ok(match ty.kind(Interner) { + TyKind::Str => (from_bytes!(usize, metadata.get(self)?), 1), + TyKind::Slice(inner) => { + let len = from_bytes!(usize, metadata.get(self)?); + let (size, align) = self.size_align_of_sized(inner, locals, "slice inner type")?; + (size * len, align) + } + TyKind::Dyn(_) => self.size_align_of_sized( + 
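+ // for `dyn Trait`, the metadata bytes hold the vtable pointer, which `vtable_map` resolves back to the concrete type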
self.vtable_map.ty_of_bytes(metadata.get(self)?)?, + locals, + "dyn concrete type", + )?, + TyKind::Adt(id, subst) => { + let id = id.0; + let layout = self.layout_adt(id, subst.clone())?; + let id = match id { + AdtId::StructId(s) => s, + _ => not_supported!("unsized enum or union"), + }; + let field_types = &self.db.field_types(id.into()); + let last_field_ty = + field_types.iter().rev().next().unwrap().1.clone().substitute(Interner, subst); + let sized_part_size = + layout.fields.offset(field_types.iter().count() - 1).bytes_usize(); + let sized_part_align = layout.align.abi.bytes() as usize; + let (unsized_part_size, unsized_part_align) = + self.size_align_of_unsized(&last_field_ty, metadata, locals)?; + let align = sized_part_align.max(unsized_part_align) as isize; + let size = (sized_part_size + unsized_part_size) as isize; + // Must add any necessary padding to `size` + // (to make it a multiple of `align`) before returning it. + // + // Namely, the returned size should be, in C notation: + // + // `size + ((size & (align-1)) ? align : 0)` + // + // emulated via the semi-standard fast bit trick: + // + // `(size + (align-1)) & -align` + let size = (size + (align - 1)) & (-align); + (size as usize, align as usize) + } + _ => not_supported!("unsized type other than str, slice, struct and dyn"), + }) + } + fn exec_atomic_intrinsic( &mut self, name: &str, args: &[IntervalAndTy], generic_args: &Substitution, destination: Interval, - locals: &Locals<'_>, + locals: &Locals, _span: MirSpan, ) -> Result<()> { // We are a single threaded runtime with no UB checking and no optimization, so // we can implement these as normal functions. - let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { + let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else { return Err(MirEvalError::TypeError("atomic intrinsic generic arg is not provided")); }; let Some(arg0) = args.get(0) else { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs new file mode 100644 index 00000000000..ec746310487 --- /dev/null +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -0,0 +1,177 @@ +//! Shim implementation for simd intrinsics + +use std::cmp::Ordering; + +use crate::TyKind; + +use super::*; + +macro_rules! from_bytes { + ($ty:tt, $value:expr) => { + ($ty::from_le_bytes(match ($value).try_into() { + Ok(it) => it, + Err(_) => return Err(MirEvalError::TypeError("mismatched size")), + })) + }; +} + +macro_rules! 
not_supported { + ($it: expr) => { + return Err(MirEvalError::NotSupported(format!($it))) + }; +} + +impl Evaluator<'_> { + fn detect_simd_ty(&self, ty: &Ty) -> Result<(usize, Ty)> { + match ty.kind(Interner) { + TyKind::Adt(id, subst) => { + let len = match subst.as_slice(Interner).get(1).and_then(|it| it.constant(Interner)) + { + Some(len) => len, + _ => { + if let AdtId::StructId(id) = id.0 { + let struct_data = self.db.struct_data(id); + let fields = struct_data.variant_data.fields(); + let Some((first_field, _)) = fields.iter().next() else { + not_supported!("simd type with no field"); + }; + let field_ty = self.db.field_types(id.into())[first_field] + .clone() + .substitute(Interner, subst); + return Ok((fields.len(), field_ty)); + } + return Err(MirEvalError::TypeError("simd type with no len param")); + } + }; + match try_const_usize(self.db, len) { + Some(len) => { + let Some(ty) = subst.as_slice(Interner).get(0).and_then(|it| it.ty(Interner)) else { + return Err(MirEvalError::TypeError("simd type with no ty param")); + }; + Ok((len as usize, ty.clone())) + } + None => Err(MirEvalError::TypeError("simd type with unevaluatable len param")), + } + } + _ => Err(MirEvalError::TypeError("simd type which is not a struct")), + } + } + + pub(super) fn exec_simd_intrinsic( + &mut self, + name: &str, + args: &[IntervalAndTy], + _generic_args: &Substitution, + destination: Interval, + _locals: &Locals, + _span: MirSpan, + ) -> Result<()> { + match name { + "and" | "or" | "xor" => { + let [left, right] = args else { + return Err(MirEvalError::TypeError("simd bit op args are not provided")); + }; + let result = left + .get(self)? + .iter() + .zip(right.get(self)?) + .map(|(&it, &y)| match name { + "and" => it & y, + "or" => it | y, + "xor" => it ^ y, + _ => unreachable!(), + }) + .collect::<Vec<_>>(); + destination.write_from_bytes(self, &result) + } + "eq" | "ne" | "lt" | "le" | "gt" | "ge" => { + let [left, right] = args else { + return Err(MirEvalError::TypeError("simd args are not provided")); + }; + let (len, ty) = self.detect_simd_ty(&left.ty)?; + let is_signed = matches!(ty.as_builtin(), Some(BuiltinType::Int(_))); + let size = left.interval.size / len; + let dest_size = destination.size / len; + let mut destination_bytes = vec![]; + let vector = left.get(self)?.chunks(size).zip(right.get(self)?.chunks(size)); + for (l, r) in vector { + let mut result = Ordering::Equal; + for (l, r) in l.iter().zip(r).rev() { + let it = l.cmp(r); + if it != Ordering::Equal { + result = it; + break; + } + } + if is_signed { + if let Some((&l, &r)) = l.iter().zip(r).rev().next() { + if l != r { + result = (l as i8).cmp(&(r as i8)); + } + } + } + let result = match result { + Ordering::Less => ["lt", "le", "ne"].contains(&name), + Ordering::Equal => ["ge", "le", "eq"].contains(&name), + Ordering::Greater => ["ge", "gt", "ne"].contains(&name), + }; + let result = if result { 255 } else { 0 }; + destination_bytes.extend(std::iter::repeat(result).take(dest_size)); + } + + destination.write_from_bytes(self, &destination_bytes) + } + "bitmask" => { + let [op] = args else { + return Err(MirEvalError::TypeError("simd_bitmask args are not provided")); + }; + let (op_len, _) = self.detect_simd_ty(&op.ty)?; + let op_count = op.interval.size / op_len; + let mut result: u64 = 0; + for (i, val) in op.get(self)?.chunks(op_count).enumerate() { + if !val.iter().all(|&it| it == 0) { + result |= 1 << i; + } + } + destination.write_from_bytes(self, &result.to_le_bytes()[0..destination.size]) + } + "shuffle" => { + let [left, 
right, index] = args else { + return Err(MirEvalError::TypeError("simd_shuffle args are not provided")); + }; + let TyKind::Array(_, index_len) = index.ty.kind(Interner) else { + return Err(MirEvalError::TypeError( + "simd_shuffle index argument has non-array type", + )); + }; + let index_len = match try_const_usize(self.db, index_len) { + Some(it) => it as usize, + None => { + return Err(MirEvalError::TypeError( + "simd type with unevaluatable len param", + )) + } + }; + let (left_len, _) = self.detect_simd_ty(&left.ty)?; + let left_size = left.interval.size / left_len; + let vector = + left.get(self)?.chunks(left_size).chain(right.get(self)?.chunks(left_size)); + let mut result = vec![]; + for index in index.get(self)?.chunks(index.interval.size / index_len) { + let index = from_bytes!(u32, index) as usize; + let val = match vector.clone().nth(index) { + Some(it) => it, + None => { + return Err(MirEvalError::TypeError( + "out of bound access in simd shuffle", + )) + } + }; + result.extend(val); + } + destination.write_from_bytes(self, &result) + } + _ => not_supported!("unknown simd intrinsic {name}"), + } + } +} diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs index ca4268b8fb0..03c083bac42 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/eval/tests.rs @@ -30,7 +30,7 @@ fn eval_main(db: &TestDB, file_id: FileId) -> Result<(String, String), MirEvalEr db.trait_environment(func_id.into()), ) .map_err(|e| MirEvalError::MirLowerError(func_id.into(), e))?; - let (result, stdout, stderr) = interpret_mir(db, &body, false); + let (result, stdout, stderr) = interpret_mir(db, body, false); result?; Ok((stdout, stderr)) } @@ -614,6 +614,34 @@ fn main() { } #[test] +fn syscalls() { + check_pass( + r#" +//- minicore: option + +extern "C" { + pub unsafe extern "C" fn syscall(num: i64, ...) -> i64; +} + +const SYS_getrandom: i64 = 318; + +fn should_not_reach() { + _ // FIXME: replace this function with panic when that works +} + +fn main() { + let mut x: i32 = 0; + let r = syscall(SYS_getrandom, &mut x, 4usize, 0); + if r != 4 { + should_not_reach(); + } +} + +"#, + ) +} + +#[test] fn posix_tls() { check_pass( r#" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs index 2cb29b4ab91..36108587904 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower.rs @@ -146,12 +146,12 @@ impl MirLowerError { ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?, } } - MirLowerError::MissingFunctionDefinition(owner, x) => { + MirLowerError::MissingFunctionDefinition(owner, it) => { let body = db.body(*owner); writeln!( f, "Missing function definition for {}", - body.pretty_print_expr(db.upcast(), *owner, *x) + body.pretty_print_expr(db.upcast(), *owner, *it) )?; } MirLowerError::TypeMismatch(e) => { @@ -202,15 +202,15 @@ impl MirLowerError { } macro_rules! not_supported { - ($x: expr) => { - return Err(MirLowerError::NotSupported(format!($x))) + ($it: expr) => { + return Err(MirLowerError::NotSupported(format!($it))) }; } macro_rules! 
implementation_error { - ($x: expr) => {{ - ::stdx::never!("MIR lower implementation bug: {}", format!($x)); - return Err(MirLowerError::ImplementationError(format!($x))); + ($it: expr) => {{ + ::stdx::never!("MIR lower implementation bug: {}", format!($it)); + return Err(MirLowerError::ImplementationError(format!($it))); }}; } @@ -310,24 +310,30 @@ impl<'ctx> MirLowerCtx<'ctx> { self.lower_expr_to_place_with_adjust(expr_id, temp.into(), current, rest) } Adjust::Deref(_) => { - let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)? else { - return Ok(None); - }; + let Some((p, current)) = + self.lower_expr_as_place_with_adjust(current, expr_id, true, adjustments)? + else { + return Ok(None); + }; self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into()); Ok(Some(current)) } Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => { - let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else { - return Ok(None); - }; + let Some((p, current)) = + self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? + else { + return Ok(None); + }; let bk = BorrowKind::from_chalk(*m); self.push_assignment(current, place, Rvalue::Ref(bk, p), expr_id.into()); Ok(Some(current)) } Adjust::Pointer(cast) => { - let Some((p, current)) = self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? else { - return Ok(None); - }; + let Some((p, current)) = + self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)? + else { + return Ok(None); + }; self.push_assignment( current, place, @@ -373,45 +379,49 @@ impl<'ctx> MirLowerCtx<'ctx> { } } Err(MirLowerError::IncompleteExpr) - }, + } Expr::Path(p) => { - let pr = if let Some((assoc, subst)) = self - .infer - .assoc_resolutions_for_expr(expr_id) - { - match assoc { - hir_def::AssocItemId::ConstId(c) => { - self.lower_const(c.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?; - return Ok(Some(current)) - }, - hir_def::AssocItemId::FunctionId(_) => { - // FnDefs are zero sized, no action is needed. - return Ok(Some(current)) + let pr = + if let Some((assoc, subst)) = self.infer.assoc_resolutions_for_expr(expr_id) { + match assoc { + hir_def::AssocItemId::ConstId(c) => { + self.lower_const( + c.into(), + current, + place, + subst, + expr_id.into(), + self.expr_ty_without_adjust(expr_id), + )?; + return Ok(Some(current)); + } + hir_def::AssocItemId::FunctionId(_) => { + // FnDefs are zero sized, no action is needed. + return Ok(Some(current)); + } + hir_def::AssocItemId::TypeAliasId(_) => { + // FIXME: If it is unreachable, use proper error instead of `not_supported`. + not_supported!("associated functions and types") + } } - hir_def::AssocItemId::TypeAliasId(_) => { - // FIXME: If it is unreachable, use proper error instead of `not_supported`. - not_supported!("associated functions and types") - }, - } - } else if let Some(variant) = self - .infer - .variant_resolution_for_expr(expr_id) - { - match variant { - VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e), - VariantId::StructId(s) => ValueNs::StructId(s), - VariantId::UnionId(_) => implementation_error!("Union variant as path"), - } - } else { - let unresolved_name = || MirLowerError::unresolved_path(self.db, p); - let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); - resolver - .resolve_path_in_value_ns_fully(self.db.upcast(), p) - .ok_or_else(unresolved_name)? 
- }; + } else if let Some(variant) = self.infer.variant_resolution_for_expr(expr_id) { + match variant { + VariantId::EnumVariantId(e) => ValueNs::EnumVariantId(e), + VariantId::StructId(s) => ValueNs::StructId(s), + VariantId::UnionId(_) => implementation_error!("Union variant as path"), + } + } else { + let unresolved_name = || MirLowerError::unresolved_path(self.db, p); + let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id); + resolver + .resolve_path_in_value_ns_fully(self.db.upcast(), p) + .ok_or_else(unresolved_name)? + }; match pr { ValueNs::LocalBinding(_) | ValueNs::StaticId(_) => { - let Some((temp, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, false)? else { + let Some((temp, current)) = + self.lower_expr_as_place_without_adjust(current, expr_id, false)? + else { return Ok(None); }; self.push_assignment( @@ -423,11 +433,19 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(Some(current)) } ValueNs::ConstId(const_id) => { - self.lower_const(const_id.into(), current, place, Substitution::empty(Interner), expr_id.into(), self.expr_ty_without_adjust(expr_id))?; + self.lower_const( + const_id.into(), + current, + place, + Substitution::empty(Interner), + expr_id.into(), + self.expr_ty_without_adjust(expr_id), + )?; Ok(Some(current)) } ValueNs::EnumVariantId(variant_id) => { - let variant_data = &self.db.enum_data(variant_id.parent).variants[variant_id.local_id]; + let variant_data = + &self.db.enum_data(variant_id.parent).variants[variant_id.local_id]; if variant_data.variant_data.kind() == StructKind::Unit { let ty = self.infer.type_of_expr[expr_id].clone(); current = self.lower_enum_variant( @@ -472,13 +490,15 @@ impl<'ctx> MirLowerCtx<'ctx> { // It's probably a unit struct or a zero sized function, so no action is needed. Ok(Some(current)) } - x => { - not_supported!("unknown name {x:?} in value name space"); + it => { + not_supported!("unknown name {it:?} in value name space"); } } } Expr::If { condition, then_branch, else_branch } => { - let Some((discr, current)) = self.lower_expr_to_some_operand(*condition, current)? else { + let Some((discr, current)) = + self.lower_expr_to_some_operand(*condition, current)? + else { return Ok(None); }; let start_of_then = self.new_basic_block(); @@ -501,15 +521,12 @@ impl<'ctx> MirLowerCtx<'ctx> { Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into())) } Expr::Let { pat, expr } => { - let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? else { + let Some((cond_place, current)) = self.lower_expr_as_place(current, *expr, true)? + else { return Ok(None); }; - let (then_target, else_target) = self.pattern_match( - current, - None, - cond_place, - *pat, - )?; + let (then_target, else_target) = + self.pattern_match(current, None, cond_place, *pat)?; self.write_bytes_to_place( then_target, place.clone(), @@ -533,31 +550,47 @@ impl<'ctx> MirLowerCtx<'ctx> { } Expr::Block { id: _, statements, tail, label } => { if let Some(label) = label { - self.lower_loop(current, place.clone(), Some(*label), expr_id.into(), |this, begin| { - if let Some(current) = this.lower_block_to_place(statements, begin, *tail, place, expr_id.into())? { - let end = this.current_loop_end()?; - this.set_goto(current, end, expr_id.into()); - } - Ok(()) - }) + self.lower_loop( + current, + place.clone(), + Some(*label), + expr_id.into(), + |this, begin| { + if let Some(current) = this.lower_block_to_place( + statements, + begin, + *tail, + place, + expr_id.into(), + )? 
{ + let end = this.current_loop_end()?; + this.set_goto(current, end, expr_id.into()); + } + Ok(()) + }, + ) } else { self.lower_block_to_place(statements, current, *tail, place, expr_id.into()) } } - Expr::Loop { body, label } => self.lower_loop(current, place, *label, expr_id.into(), |this, begin| { - let scope = this.push_drop_scope(); - if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? { - current = scope.pop_and_drop(this, current); - this.set_goto(current, begin, expr_id.into()); - } else { - scope.pop_assume_dropped(this); - } - Ok(()) - }), + Expr::Loop { body, label } => { + self.lower_loop(current, place, *label, expr_id.into(), |this, begin| { + let scope = this.push_drop_scope(); + if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? { + current = scope.pop_and_drop(this, current); + this.set_goto(current, begin, expr_id.into()); + } else { + scope.pop_assume_dropped(this); + } + Ok(()) + }) + } Expr::While { condition, body, label } => { - self.lower_loop(current, place, *label, expr_id.into(),|this, begin| { + self.lower_loop(current, place, *label, expr_id.into(), |this, begin| { let scope = this.push_drop_scope(); - let Some((discr, to_switch)) = this.lower_expr_to_some_operand(*condition, begin)? else { + let Some((discr, to_switch)) = + this.lower_expr_to_some_operand(*condition, begin)? + else { return Ok(()); }; let fail_cond = this.new_basic_block(); @@ -583,8 +616,7 @@ impl<'ctx> MirLowerCtx<'ctx> { }) } Expr::Call { callee, args, .. } => { - if let Some((func_id, generic_args)) = - self.infer.method_resolution(expr_id) { + if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) { let ty = chalk_ir::TyKind::FnDef( CallableDefId::FunctionId(func_id).to_chalk(self.db), generic_args, @@ -604,21 +636,43 @@ impl<'ctx> MirLowerCtx<'ctx> { match &callee_ty.data(Interner).kind { chalk_ir::TyKind::FnDef(..) => { let func = Operand::from_bytes(vec![], callee_ty.clone()); - self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into()) + self.lower_call_and_args( + func, + args.iter().copied(), + place, + current, + self.is_uninhabited(expr_id), + expr_id.into(), + ) } chalk_ir::TyKind::Function(_) => { - let Some((func, current)) = self.lower_expr_to_some_operand(*callee, current)? else { + let Some((func, current)) = + self.lower_expr_to_some_operand(*callee, current)? + else { return Ok(None); }; - self.lower_call_and_args(func, args.iter().copied(), place, current, self.is_uninhabited(expr_id), expr_id.into()) + self.lower_call_and_args( + func, + args.iter().copied(), + place, + current, + self.is_uninhabited(expr_id), + expr_id.into(), + ) + } + TyKind::Error => { + return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id)) } - TyKind::Error => return Err(MirLowerError::MissingFunctionDefinition(self.owner, expr_id)), _ => return Err(MirLowerError::TypeError("function call on bad type")), } } Expr::MethodCall { receiver, args, method_name, .. 
} => { let (func_id, generic_args) = - self.infer.method_resolution(expr_id).ok_or_else(|| MirLowerError::UnresolvedMethod(method_name.display(self.db.upcast()).to_string()))?; + self.infer.method_resolution(expr_id).ok_or_else(|| { + MirLowerError::UnresolvedMethod( + method_name.display(self.db.upcast()).to_string(), + ) + })?; let func = Operand::from_fn(self.db, func_id, generic_args); self.lower_call_and_args( func, @@ -630,23 +684,27 @@ impl<'ctx> MirLowerCtx<'ctx> { ) } Expr::Match { expr, arms } => { - let Some((cond_place, mut current)) = self.lower_expr_as_place(current, *expr, true)? + let Some((cond_place, mut current)) = + self.lower_expr_as_place(current, *expr, true)? else { return Ok(None); }; let mut end = None; for MatchArm { pat, guard, expr } in arms.iter() { - let (then, mut otherwise) = self.pattern_match( - current, - None, - cond_place.clone(), - *pat, - )?; + let (then, mut otherwise) = + self.pattern_match(current, None, cond_place.clone(), *pat)?; let then = if let &Some(guard) = guard { let next = self.new_basic_block(); let o = otherwise.get_or_insert_with(|| self.new_basic_block()); if let Some((discr, c)) = self.lower_expr_to_some_operand(guard, then)? { - self.set_terminator(c, TerminatorKind::SwitchInt { discr, targets: SwitchTargets::static_if(1, next, *o) }, expr_id.into()); + self.set_terminator( + c, + TerminatorKind::SwitchInt { + discr, + targets: SwitchTargets::static_if(1, next, *o), + }, + expr_id.into(), + ); } next } else { @@ -672,33 +730,53 @@ impl<'ctx> MirLowerCtx<'ctx> { } Expr::Continue { label } => { let loop_data = match label { - Some(l) => self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)?, - None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::ContinueWithoutLoop)?, + Some(l) => { + self.labeled_loop_blocks.get(l).ok_or(MirLowerError::UnresolvedLabel)? + } + None => self + .current_loop_blocks + .as_ref() + .ok_or(MirLowerError::ContinueWithoutLoop)?, }; let begin = loop_data.begin; current = self.drop_until_scope(loop_data.drop_scope_index, current); self.set_goto(current, begin, expr_id.into()); Ok(None) - }, + } &Expr::Break { expr, label } => { if let Some(expr) = expr { let loop_data = match label { - Some(l) => self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?, - None => self.current_loop_blocks.as_ref().ok_or(MirLowerError::BreakWithoutLoop)?, + Some(l) => self + .labeled_loop_blocks + .get(&l) + .ok_or(MirLowerError::UnresolvedLabel)?, + None => self + .current_loop_blocks + .as_ref() + .ok_or(MirLowerError::BreakWithoutLoop)?, }; - let Some(c) = self.lower_expr_to_place(expr, loop_data.place.clone(), current)? else { + let Some(c) = + self.lower_expr_to_place(expr, loop_data.place.clone(), current)? 
+ else { return Ok(None); }; current = c; } let (end, drop_scope) = match label { Some(l) => { - let loop_blocks = self.labeled_loop_blocks.get(&l).ok_or(MirLowerError::UnresolvedLabel)?; - (loop_blocks.end.expect("We always generate end for labeled loops"), loop_blocks.drop_scope_index) - }, - None => { - (self.current_loop_end()?, self.current_loop_blocks.as_ref().unwrap().drop_scope_index) - }, + let loop_blocks = self + .labeled_loop_blocks + .get(&l) + .ok_or(MirLowerError::UnresolvedLabel)?; + ( + loop_blocks.end.expect("We always generate end for labeled loops"), + loop_blocks.drop_scope_index, + ) + } + None => ( + self.current_loop_end()?, + self.current_loop_blocks.as_ref().unwrap().drop_scope_index, + ), }; current = self.drop_until_scope(drop_scope, current); self.set_goto(current, end, expr_id.into()); @@ -706,7 +784,9 @@ impl<'ctx> MirLowerCtx<'ctx> { } Expr::Return { expr } => { if let Some(expr) = expr { - if let Some(c) = self.lower_expr_to_place(*expr, return_slot().into(), current)? { + if let Some(c) = + self.lower_expr_to_place(*expr, return_slot().into(), current)? + { current = c; } else { return Ok(None); @@ -719,19 +799,17 @@ impl<'ctx> MirLowerCtx<'ctx> { Expr::Yield { .. } => not_supported!("yield"), Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => { let spread_place = match spread { - &Some(x) => { - let Some((p, c)) = self.lower_expr_as_place(current, x, true)? else { + &Some(it) => { + let Some((p, c)) = self.lower_expr_as_place(current, it, true)? else { return Ok(None); }; current = c; Some(p) - }, + } None => None, }; - let variant_id = self - .infer - .variant_resolution_for_expr(expr_id) - .ok_or_else(|| match path { + let variant_id = + self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path { Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()), None => MirLowerError::RecordLiteralWithoutPath, })?; @@ -746,7 +824,8 @@ impl<'ctx> MirLowerCtx<'ctx> { for RecordLitField { name, expr } in fields.iter() { let field_id = variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?; - let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)? else { + let Some((op, c)) = self.lower_expr_to_some_operand(*expr, current)? 
+ else { return Ok(None); }; current = c; @@ -758,18 +837,23 @@ impl<'ctx> MirLowerCtx<'ctx> { Rvalue::Aggregate( AggregateKind::Adt(variant_id, subst), match spread_place { - Some(sp) => operands.into_iter().enumerate().map(|(i, x)| { - match x { - Some(x) => x, + Some(sp) => operands + .into_iter() + .enumerate() + .map(|(i, it)| match it { + Some(it) => it, None => { - let p = sp.project(ProjectionElem::Field(FieldId { - parent: variant_id, - local_id: LocalFieldId::from_raw(RawIdx::from(i as u32)), - })); + let p = + sp.project(ProjectionElem::Field(FieldId { + parent: variant_id, + local_id: LocalFieldId::from_raw( + RawIdx::from(i as u32), + ), + })); Operand::Copy(p) - }, - } - }).collect(), + } + }) + .collect(), None => operands.into_iter().collect::<Option<_>>().ok_or( MirLowerError::TypeError("missing field in record literal"), )?, @@ -785,7 +869,10 @@ impl<'ctx> MirLowerCtx<'ctx> { }; let local_id = variant_data.field(name).ok_or(MirLowerError::UnresolvedField)?; - let place = place.project(PlaceElem::Field(FieldId { parent: union_id.into(), local_id })); + let place = place.project(PlaceElem::Field(FieldId { + parent: union_id.into(), + local_id, + })); self.lower_expr_to_place(*expr, place, current) } } @@ -795,11 +882,18 @@ impl<'ctx> MirLowerCtx<'ctx> { Expr::Async { .. } => not_supported!("async block"), &Expr::Const(id) => { let subst = self.placeholder_subst(); - self.lower_const(id.into(), current, place, subst, expr_id.into(), self.expr_ty_without_adjust(expr_id))?; + self.lower_const( + id.into(), + current, + place, + subst, + expr_id.into(), + self.expr_ty_without_adjust(expr_id), + )?; Ok(Some(current)) - }, + } Expr::Cast { expr, type_ref: _ } => { - let Some((x, current)) = self.lower_expr_to_some_operand(*expr, current)? else { + let Some((it, current)) = self.lower_expr_to_some_operand(*expr, current)? else { return Ok(None); }; let source_ty = self.infer[*expr].clone(); @@ -807,7 +901,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_assignment( current, place, - Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, x, target_ty), + Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, it, target_ty), expr_id.into(), ); Ok(Some(current)) @@ -822,23 +916,37 @@ impl<'ctx> MirLowerCtx<'ctx> { } Expr::Box { expr } => { let ty = self.expr_ty_after_adjustments(*expr); - self.push_assignment(current, place.clone(), Rvalue::ShallowInitBoxWithAlloc(ty), expr_id.into()); - let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else { + self.push_assignment( + current, + place.clone(), + Rvalue::ShallowInitBoxWithAlloc(ty), + expr_id.into(), + ); + let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? + else { return Ok(None); }; let p = place.project(ProjectionElem::Deref); self.push_assignment(current, p, operand.into(), expr_id.into()); Ok(Some(current)) - }, - Expr::Field { .. } | Expr::Index { .. } | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => { - let Some((p, current)) = self.lower_expr_as_place_without_adjust(current, expr_id, true)? else { + } + Expr::Field { .. } + | Expr::Index { .. } + | Expr::UnaryOp { op: hir_def::hir::UnaryOp::Deref, .. } => { + let Some((p, current)) = + self.lower_expr_as_place_without_adjust(current, expr_id, true)? 
+ else { return Ok(None); }; self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into()); Ok(Some(current)) } - Expr::UnaryOp { expr, op: op @ (hir_def::hir::UnaryOp::Not | hir_def::hir::UnaryOp::Neg) } => { - let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? else { + Expr::UnaryOp { + expr, + op: op @ (hir_def::hir::UnaryOp::Not | hir_def::hir::UnaryOp::Neg), + } => { + let Some((operand, current)) = self.lower_expr_to_some_operand(*expr, current)? + else { return Ok(None); }; let operation = match op { @@ -853,7 +961,7 @@ impl<'ctx> MirLowerCtx<'ctx> { expr_id.into(), ); Ok(Some(current)) - }, + } Expr::BinaryOp { lhs, rhs, op } => { let op = op.ok_or(MirLowerError::IncompleteExpr)?; let is_builtin = 'b: { @@ -861,16 +969,19 @@ impl<'ctx> MirLowerCtx<'ctx> { // for binary operator, and use without adjust to simplify our conditions. let lhs_ty = self.expr_ty_without_adjust(*lhs); let rhs_ty = self.expr_ty_without_adjust(*rhs); - if matches!(op ,BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. })) { + if matches!(op, BinaryOp::CmpOp(syntax::ast::CmpOp::Eq { .. })) { if lhs_ty.as_raw_ptr().is_some() && rhs_ty.as_raw_ptr().is_some() { break 'b true; } } let builtin_inequal_impls = matches!( op, - BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) } + BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) + | BinaryOp::Assignment { op: Some(ArithOp::Shl | ArithOp::Shr) } ); - lhs_ty.is_scalar() && rhs_ty.is_scalar() && (lhs_ty == rhs_ty || builtin_inequal_impls) + lhs_ty.is_scalar() + && rhs_ty.is_scalar() + && (lhs_ty == rhs_ty || builtin_inequal_impls) }; if !is_builtin { if let Some((func_id, generic_args)) = self.infer.method_resolution(expr_id) { @@ -892,34 +1003,45 @@ impl<'ctx> MirLowerCtx<'ctx> { .infer .expr_adjustments .get(lhs) - .and_then(|x| x.split_last()) - .map(|x| x.1) - .ok_or(MirLowerError::TypeError("adjustment of binary op was missing"))?; + .and_then(|it| it.split_last()) + .map(|it| it.1) + .ok_or(MirLowerError::TypeError( + "adjustment of binary op was missing", + ))?; let Some((lhs_place, current)) = self.lower_expr_as_place_with_adjust(current, *lhs, false, adjusts)? else { return Ok(None); }; - let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else { + let Some((rhs_op, current)) = + self.lower_expr_to_some_operand(*rhs, current)? + else { return Ok(None); }; - let r_value = Rvalue::CheckedBinaryOp(op.into(), Operand::Copy(lhs_place.clone()), rhs_op); + let r_value = Rvalue::CheckedBinaryOp( + op.into(), + Operand::Copy(lhs_place.clone()), + rhs_op, + ); self.push_assignment(current, lhs_place, r_value, expr_id.into()); return Ok(Some(current)); } else { let Some((lhs_place, current)) = - self.lower_expr_as_place(current, *lhs, false)? + self.lower_expr_as_place(current, *lhs, false)? else { return Ok(None); }; - let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else { + let Some((rhs_op, current)) = + self.lower_expr_to_some_operand(*rhs, current)? + else { return Ok(None); }; self.push_assignment(current, lhs_place, rhs_op.into(), expr_id.into()); return Ok(Some(current)); } } - let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? else { + let Some((lhs_op, current)) = self.lower_expr_to_some_operand(*lhs, current)? 
+ else { return Ok(None); }; if let hir_def::hir::BinaryOp::LogicOp(op) = op { @@ -928,22 +1050,31 @@ impl<'ctx> MirLowerCtx<'ctx> { syntax::ast::LogicOp::Or => 1, }; let start_of_then = self.new_basic_block(); - self.push_assignment(start_of_then, place.clone(), lhs_op.clone().into(), expr_id.into()); + self.push_assignment( + start_of_then, + place.clone(), + lhs_op.clone().into(), + expr_id.into(), + ); let end_of_then = Some(start_of_then); let start_of_else = self.new_basic_block(); - let end_of_else = - self.lower_expr_to_place(*rhs, place, start_of_else)?; + let end_of_else = self.lower_expr_to_place(*rhs, place, start_of_else)?; self.set_terminator( current, TerminatorKind::SwitchInt { discr: lhs_op, - targets: SwitchTargets::static_if(value_to_short, start_of_then, start_of_else), + targets: SwitchTargets::static_if( + value_to_short, + start_of_then, + start_of_else, + ), }, expr_id.into(), ); return Ok(self.merge_blocks(end_of_then, end_of_else, expr_id.into())); } - let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? else { + let Some((rhs_op, current)) = self.lower_expr_to_some_operand(*rhs, current)? + else { return Ok(None); }; self.push_assignment( @@ -976,15 +1107,15 @@ impl<'ctx> MirLowerCtx<'ctx> { }; let mut lp = None; let mut rp = None; - if let Some(x) = lhs { - let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else { + if let Some(it) = lhs { + let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else { return Ok(None); }; lp = Some(o); current = c; } - if let Some(x) = rhs { - let Some((o, c)) = self.lower_expr_to_some_operand(x, current)? else { + if let Some(it) = rhs { + let Some((o, c)) = self.lower_expr_to_some_operand(it, current)? else { return Ok(None); }; rp = Some(o); @@ -995,20 +1126,28 @@ impl<'ctx> MirLowerCtx<'ctx> { place, Rvalue::Aggregate( AggregateKind::Adt(st.into(), subst.clone()), - self.db.struct_data(st).variant_data.fields().iter().map(|x| { - let o = match x.1.name.as_str() { - Some("start") => lp.take(), - Some("end") => rp.take(), - Some("exhausted") => Some(Operand::from_bytes(vec![0], TyBuilder::bool())), - _ => None, - }; - o.ok_or(MirLowerError::UnresolvedField) - }).collect::<Result<_>>()?, + self.db + .struct_data(st) + .variant_data + .fields() + .iter() + .map(|it| { + let o = match it.1.name.as_str() { + Some("start") => lp.take(), + Some("end") => rp.take(), + Some("exhausted") => { + Some(Operand::from_bytes(vec![0], TyBuilder::bool())) + } + _ => None, + }; + o.ok_or(MirLowerError::UnresolvedField) + }) + .collect::<Result<_>>()?, ), expr_id.into(), ); Ok(Some(current)) - }, + } Expr::Closure { .. 
} => { let ty = self.expr_ty_without_adjust(expr_id); let TyKind::Closure(id, _) = ty.kind(Interner) else { @@ -1020,22 +1159,33 @@ impl<'ctx> MirLowerCtx<'ctx> { for capture in captures.iter() { let p = Place { local: self.binding_local(capture.place.local)?, - projection: capture.place.projections.clone().into_iter().map(|x| { - match x { + projection: capture + .place + .projections + .clone() + .into_iter() + .map(|it| match it { ProjectionElem::Deref => ProjectionElem::Deref, - ProjectionElem::Field(x) => ProjectionElem::Field(x), - ProjectionElem::TupleOrClosureField(x) => ProjectionElem::TupleOrClosureField(x), - ProjectionElem::ConstantIndex { offset, from_end } => ProjectionElem::ConstantIndex { offset, from_end }, - ProjectionElem::Subslice { from, to } => ProjectionElem::Subslice { from, to }, - ProjectionElem::OpaqueCast(x) => ProjectionElem::OpaqueCast(x), - ProjectionElem::Index(x) => match x { }, - } - }).collect(), + ProjectionElem::Field(it) => ProjectionElem::Field(it), + ProjectionElem::TupleOrClosureField(it) => { + ProjectionElem::TupleOrClosureField(it) + } + ProjectionElem::ConstantIndex { offset, from_end } => { + ProjectionElem::ConstantIndex { offset, from_end } + } + ProjectionElem::Subslice { from, to } => { + ProjectionElem::Subslice { from, to } + } + ProjectionElem::OpaqueCast(it) => ProjectionElem::OpaqueCast(it), + ProjectionElem::Index(it) => match it {}, + }) + .collect(), }; match &capture.kind { CaptureKind::ByRef(bk) => { let placeholder_subst = self.placeholder_subst(); - let tmp_ty = capture.ty.clone().substitute(Interner, &placeholder_subst); + let tmp_ty = + capture.ty.clone().substitute(Interner, &placeholder_subst); let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into(); self.push_assignment( current, @@ -1044,7 +1194,7 @@ impl<'ctx> MirLowerCtx<'ctx> { capture.span, ); operands.push(Operand::Move(tmp)); - }, + } CaptureKind::ByValue => operands.push(Operand::Move(p)), } } @@ -1055,18 +1205,18 @@ impl<'ctx> MirLowerCtx<'ctx> { expr_id.into(), ); Ok(Some(current)) - }, + } Expr::Tuple { exprs, is_assignee_expr: _ } => { let Some(values) = exprs - .iter() - .map(|x| { - let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else { - return Ok(None); - }; - current = c; - Ok(Some(o)) - }) - .collect::<Result<Option<_>>>()? + .iter() + .map(|it| { + let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else { + return Ok(None); + }; + current = c; + Ok(Some(o)) + }) + .collect::<Result<Option<_>>>()? else { return Ok(None); }; @@ -1088,27 +1238,26 @@ impl<'ctx> MirLowerCtx<'ctx> { } }; let Some(values) = elements - .iter() - .map(|x| { - let Some((o, c)) = self.lower_expr_to_some_operand(*x, current)? else { - return Ok(None); - }; - current = c; - Ok(Some(o)) - }) - .collect::<Result<Option<_>>>()? + .iter() + .map(|it| { + let Some((o, c)) = self.lower_expr_to_some_operand(*it, current)? else { + return Ok(None); + }; + current = c; + Ok(Some(o)) + }) + .collect::<Result<Option<_>>>()? else { return Ok(None); }; - let r = Rvalue::Aggregate( - AggregateKind::Array(elem_ty), - values, - ); + let r = Rvalue::Aggregate(AggregateKind::Array(elem_ty), values); self.push_assignment(current, place, r, expr_id.into()); Ok(Some(current)) } Array::Repeat { initializer, .. } => { - let Some((init, current)) = self.lower_expr_to_some_operand(*initializer, current)? else { + let Some((init, current)) = + self.lower_expr_to_some_operand(*initializer, current)? 
+ else { return Ok(None); }; let len = match &self.expr_ty_without_adjust(expr_id).data(Interner).kind { @@ -1122,7 +1271,7 @@ impl<'ctx> MirLowerCtx<'ctx> { let r = Rvalue::Repeat(init, len); self.push_assignment(current, place, r, expr_id.into()); Ok(Some(current)) - }, + } }, Expr::Literal(l) => { let ty = self.expr_ty_without_adjust(expr_id); @@ -1136,7 +1285,7 @@ impl<'ctx> MirLowerCtx<'ctx> { fn placeholder_subst(&mut self) -> Substitution { let placeholder_subst = match self.owner.as_generic_def_id() { - Some(x) => TyBuilder::placeholder_subst(self.db, x), + Some(it) => TyBuilder::placeholder_subst(self.db, it), None => Substitution::empty(Interner), }; placeholder_subst @@ -1226,8 +1375,8 @@ impl<'ctx> MirLowerCtx<'ctx> { } hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(), hir_def::hir::Literal::Bool(b) => vec![*b as u8], - hir_def::hir::Literal::Int(x, _) => x.to_le_bytes()[0..size].into(), - hir_def::hir::Literal::Uint(x, _) => x.to_le_bytes()[0..size].into(), + hir_def::hir::Literal::Int(it, _) => it.to_le_bytes()[0..size].into(), + hir_def::hir::Literal::Uint(it, _) => it.to_le_bytes()[0..size].into(), hir_def::hir::Literal::Float(f, _) => match size { 8 => f.into_f64().to_le_bytes().into(), 4 => f.into_f32().to_le_bytes().into(), @@ -1377,9 +1526,9 @@ impl<'ctx> MirLowerCtx<'ctx> { fn expr_ty_after_adjustments(&self, e: ExprId) -> Ty { let mut ty = None; - if let Some(x) = self.infer.expr_adjustments.get(&e) { - if let Some(x) = x.last() { - ty = Some(x.target.clone()); + if let Some(it) = self.infer.expr_adjustments.get(&e) { + if let Some(it) = it.last() { + ty = Some(it.target.clone()); } } ty.unwrap_or_else(|| self.expr_ty_without_adjust(e)) @@ -1401,7 +1550,7 @@ impl<'ctx> MirLowerCtx<'ctx> { fn discr_temp_place(&mut self, current: BasicBlockId) -> Place { match &self.discr_temp { - Some(x) => x.clone(), + Some(it) => it.clone(), None => { let tmp: Place = self .temp(TyBuilder::discr_ty(), current, MirSpan::Unknown) @@ -1448,7 +1597,7 @@ impl<'ctx> MirLowerCtx<'ctx> { } fn has_adjustments(&self, expr_id: ExprId) -> bool { - !self.infer.expr_adjustments.get(&expr_id).map(|x| x.is_empty()).unwrap_or(true) + !self.infer.expr_adjustments.get(&expr_id).map(|it| it.is_empty()).unwrap_or(true) } fn merge_blocks( @@ -1478,7 +1627,7 @@ impl<'ctx> MirLowerCtx<'ctx> { ))? .end { - Some(x) => x, + Some(it) => it, None => { let s = self.new_basic_block(); self.current_loop_blocks @@ -1602,10 +1751,10 @@ impl<'ctx> MirLowerCtx<'ctx> { pick_binding: impl Fn(BindingId) -> bool, ) -> Result<BasicBlockId> { let base_param_count = self.result.param_locals.len(); - self.result.param_locals.extend(params.clone().map(|(x, ty)| { + self.result.param_locals.extend(params.clone().map(|(it, ty)| { let local_id = self.result.locals.alloc(Local { ty }); self.drop_scopes.last_mut().unwrap().locals.push(local_id); - if let Pat::Bind { id, subpat: None } = self.body[x] { + if let Pat::Bind { id, subpat: None } = self.body[it] { if matches!( self.body.bindings[id].mode, BindingAnnotation::Unannotated | BindingAnnotation::Mutable @@ -1646,7 +1795,7 @@ impl<'ctx> MirLowerCtx<'ctx> { fn binding_local(&self, b: BindingId) -> Result<LocalId> { match self.result.binding_locals.get(b) { - Some(x) => Ok(*x), + Some(it) => Ok(*it), None => { // FIXME: It should never happens, but currently it will happen in `const_dependent_on_local` test, which // is a hir lowering problem IMO. 
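The literal-lowering match above encodes an integer literal as the low `size` bytes of its little-endian representation (`it.to_le_bytes()[0..size]`), where `size` comes from the layout of the literal's type. A minimal standalone sketch of that truncation; the function name and explicit `size` parameter are illustrative only, not the rust-analyzer API:

    // Illustrative sketch: an integer literal becomes the low `size` bytes of its
    // little-endian encoding, mirroring the `to_le_bytes()[0..size]` pattern above.
    fn encode_int_literal(value: i128, size: usize) -> Vec<u8> {
        // Truncating a little-endian encoding keeps the low-order bytes.
        value.to_le_bytes()[0..size].to_vec()
    }

    fn main() {
        assert_eq!(encode_int_literal(1, 4), vec![1, 0, 0, 0]); // `1i32`
        assert_eq!(encode_int_literal(-1, 2), vec![0xFF, 0xFF]); // `-1i16`
    }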
@@ -1731,6 +1880,7 @@ impl<'ctx> MirLowerCtx<'ctx> { fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> { Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) { + (TyKind::FnDef(..), TyKind::Function(_)) => CastKind::Pointer(PointerCast::ReifyFnPointer), (TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) { (chalk_ir::Scalar::Float(_), chalk_ir::Scalar::Float(_)) => CastKind::FloatToFloat, (chalk_ir::Scalar::Float(_), _) => CastKind::FloatToInt, @@ -1742,17 +1892,17 @@ fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> { (TyKind::Raw(_, a) | TyKind::Ref(_, _, a), TyKind::Raw(_, b) | TyKind::Ref(_, _, b)) => { CastKind::Pointer(if a == b { PointerCast::MutToConstPointer - } else if matches!(a.kind(Interner), TyKind::Slice(_) | TyKind::Str) - && matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Str) + } else if matches!(b.kind(Interner), TyKind::Slice(_)) + && matches!(a.kind(Interner), TyKind::Array(_, _)) + || matches!(b.kind(Interner), TyKind::Dyn(_)) { - // slice to slice cast is no-op (metadata is not touched), so we use this - PointerCast::MutToConstPointer - } else if matches!(b.kind(Interner), TyKind::Slice(_) | TyKind::Dyn(_)) { PointerCast::Unsize } else if matches!(a.kind(Interner), TyKind::Slice(s) if s == b) { PointerCast::ArrayToPointer } else { - // cast between two sized pointer, like *const i32 to *const i8. There is no specific variant + // cast between two sized pointers, like *const i32 to *const i8, or two unsized pointers, like + // slice to slice, slice to str, etc. These are no-ops (even in the unsized case, no metadata + // will be touched), but there is no specific variant + // for it in `PointerCast`, so we use `MutToConstPointer` PointerCast::MutToConstPointer }) @@ -1796,7 +1946,7 @@ pub fn mir_body_for_closure_query( implementation_error!("closure has not callable sig"); }; let current = ctx.lower_params_and_bindings( - args.iter().zip(sig.params().iter()).map(|(x, y)| (*x, y.clone())), + args.iter().zip(sig.params().iter()).map(|(it, y)| (*it, y.clone())), |_| true, )?; if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)?
{ @@ -1815,34 +1965,35 @@ pub fn mir_body_for_closure_query( FnTrait::FnMut | FnTrait::Fn => vec![ProjectionElem::Deref], }; ctx.result.walk_places(|p| { - if let Some(x) = upvar_map.get(&p.local) { - let r = x.iter().find(|x| { - if p.projection.len() < x.0.place.projections.len() { + if let Some(it) = upvar_map.get(&p.local) { + let r = it.iter().find(|it| { + if p.projection.len() < it.0.place.projections.len() { return false; } - for (x, y) in p.projection.iter().zip(x.0.place.projections.iter()) { - match (x, y) { + for (it, y) in p.projection.iter().zip(it.0.place.projections.iter()) { + match (it, y) { (ProjectionElem::Deref, ProjectionElem::Deref) => (), - (ProjectionElem::Field(x), ProjectionElem::Field(y)) if x == y => (), + (ProjectionElem::Field(it), ProjectionElem::Field(y)) if it == y => (), ( - ProjectionElem::TupleOrClosureField(x), + ProjectionElem::TupleOrClosureField(it), ProjectionElem::TupleOrClosureField(y), - ) if x == y => (), + ) if it == y => (), _ => return false, } } true }); match r { - Some(x) => { + Some(it) => { p.local = closure_local; let mut next_projs = closure_projection.clone(); - next_projs.push(PlaceElem::TupleOrClosureField(x.1)); + next_projs.push(PlaceElem::TupleOrClosureField(it.1)); let prev_projs = mem::take(&mut p.projection); - if x.0.kind != CaptureKind::ByValue { + if it.0.kind != CaptureKind::ByValue { next_projs.push(ProjectionElem::Deref); } - next_projs.extend(prev_projs.iter().cloned().skip(x.0.place.projections.len())); + next_projs + .extend(prev_projs.iter().cloned().skip(it.0.place.projections.len())); p.projection = next_projs.into(); } None => err = Some(p.clone()), @@ -1902,8 +2053,8 @@ pub fn lower_to_mir( // need to take this input explicitly. root_expr: ExprId, ) -> Result<MirBody> { - if let Some((_, x)) = infer.type_mismatches().next() { - return Err(MirLowerError::TypeMismatch(x.clone())); + if let Some((_, it)) = infer.type_mismatches().next() { + return Err(MirLowerError::TypeMismatch(it.clone())); } let mut ctx = MirLowerCtx::new(db, owner, body, infer); // 0 is return local @@ -1929,7 +2080,7 @@ pub fn lower_to_mir( body.params .iter() .zip(callable_sig.params().iter()) - .map(|(x, y)| (*x, y.clone())), + .map(|(it, y)| (*it, y.clone())), binding_picker, )?; } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs index d2c8d9a089e..213f151ab67 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/as_place.rs @@ -1,12 +1,12 @@ //! MIR lowering for places use super::*; -use hir_def::{lang_item::lang_attr, FunctionId}; +use hir_def::FunctionId; use hir_expand::name; macro_rules! not_supported { - ($x: expr) => { - return Err(MirLowerError::NotSupported(format!($x))) + ($it: expr) => { + return Err(MirLowerError::NotSupported(format!($it))) }; } @@ -18,7 +18,9 @@ impl MirLowerCtx<'_> { ) -> Result<Option<(Place, BasicBlockId)>> { let ty = self.expr_ty_without_adjust(expr_id); let place = self.temp(ty, prev_block, expr_id.into())?; - let Some(current) = self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? else { + let Some(current) = + self.lower_expr_to_place_without_adjust(expr_id, place.into(), prev_block)? 
+ else { return Ok(None); }; Ok(Some((place.into(), current))) @@ -32,10 +34,12 @@ impl MirLowerCtx<'_> { ) -> Result<Option<(Place, BasicBlockId)>> { let ty = adjustments .last() - .map(|x| x.target.clone()) + .map(|it| it.target.clone()) .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)); let place = self.temp(ty, prev_block, expr_id.into())?; - let Some(current) = self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? else { + let Some(current) = + self.lower_expr_to_place_with_adjust(expr_id, place.into(), prev_block, adjustments)? + else { return Ok(None); }; Ok(Some((place.into(), current))) @@ -57,16 +61,17 @@ impl MirLowerCtx<'_> { if let Some((last, rest)) = adjustments.split_last() { match last.kind { Adjust::Deref(None) => { - let Some(mut x) = self.lower_expr_as_place_with_adjust( + let Some(mut it) = self.lower_expr_as_place_with_adjust( current, expr_id, upgrade_rvalue, rest, - )? else { + )? + else { return Ok(None); }; - x.0 = x.0.project(ProjectionElem::Deref); - Ok(Some(x)) + it.0 = it.0.project(ProjectionElem::Deref); + Ok(Some(it)) } Adjust::Deref(Some(od)) => { let Some((r, current)) = self.lower_expr_as_place_with_adjust( @@ -74,14 +79,15 @@ impl MirLowerCtx<'_> { expr_id, upgrade_rvalue, rest, - )? else { + )? + else { return Ok(None); }; self.lower_overloaded_deref( current, r, rest.last() - .map(|x| x.target.clone()) + .map(|it| it.target.clone()) .unwrap_or_else(|| self.expr_ty_without_adjust(expr_id)), last.target.clone(), expr_id.into(), @@ -156,7 +162,7 @@ impl MirLowerCtx<'_> { let is_builtin = match self.expr_ty_without_adjust(*expr).kind(Interner) { TyKind::Ref(..) | TyKind::Raw(..) => true, TyKind::Adt(id, _) => { - if let Some(lang_item) = lang_attr(self.db.upcast(), id.0) { + if let Some(lang_item) = self.db.lang_attr(id.0.into()) { lang_item == LangItem::OwnedBox } else { false @@ -165,7 +171,8 @@ impl MirLowerCtx<'_> { _ => false, }; if !is_builtin { - let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? else { + let Some((p, current)) = self.lower_expr_as_place(current, *expr, true)? + else { return Ok(None); }; return self.lower_overloaded_deref( @@ -192,7 +199,8 @@ impl MirLowerCtx<'_> { }, ); } - let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? else { + let Some((mut r, current)) = self.lower_expr_as_place(current, *expr, true)? + else { return Ok(None); }; r = r.project(ProjectionElem::Deref); @@ -217,12 +225,18 @@ impl MirLowerCtx<'_> { ) { let Some(index_fn) = self.infer.method_resolution(expr_id) else { - return Err(MirLowerError::UnresolvedMethod("[overloaded index]".to_string())); + return Err(MirLowerError::UnresolvedMethod( + "[overloaded index]".to_string(), + )); }; - let Some((base_place, current)) = self.lower_expr_as_place(current, *base, true)? else { + let Some((base_place, current)) = + self.lower_expr_as_place(current, *base, true)? + else { return Ok(None); }; - let Some((index_operand, current)) = self.lower_expr_to_some_operand(*index, current)? else { + let Some((index_operand, current)) = + self.lower_expr_to_some_operand(*index, current)? + else { return Ok(None); }; return self.lower_overloaded_index( @@ -239,8 +253,8 @@ impl MirLowerCtx<'_> { .infer .expr_adjustments .get(base) - .and_then(|x| x.split_last()) - .map(|x| x.1) + .and_then(|it| it.split_last()) + .map(|it| it.1) .unwrap_or(&[]); let Some((mut p_base, current)) = self.lower_expr_as_place_with_adjust(current, *base, true, adjusts)? 
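The overloaded `Expr::Index` path above resolves the `index` method, lowers the base to a place and the index to an operand, and then (via `lower_overloaded_index`) emits a call whose result place is projected through `Deref`. At the surface level this corresponds roughly to the standard `*Index::index(&base, idx)` desugaring; the type and values in this sketch are made up for illustration:

    use std::ops::Index;

    struct Grid { cells: Vec<u32> }

    impl Index<usize> for Grid {
        type Output = u32;
        fn index(&self, i: usize) -> &u32 { &self.cells[i] }
    }

    fn main() {
        let g = Grid { cells: vec![10, 20, 30] };
        // Overloaded indexing: a call to the resolved `index` fn, then a deref
        // of the returned reference -- roughly what the MIR lowering emits.
        let v = g[1];
        assert_eq!(v, *Index::index(&g, 1));
    }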
@@ -249,7 +263,8 @@ impl MirLowerCtx<'_> { }; let l_index = self.temp(self.expr_ty_after_adjustments(*index), current, expr_id.into())?; - let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? else { + let Some(current) = self.lower_expr_to_place(*index, l_index.into(), current)? + else { return Ok(None); }; p_base = p_base.project(ProjectionElem::Index(l_index)); @@ -282,7 +297,15 @@ impl MirLowerCtx<'_> { ) .intern(Interner), ); - let Some(current) = self.lower_call(index_fn_op, Box::new([Operand::Copy(place), index_operand]), result.clone(), current, false, span)? else { + let Some(current) = self.lower_call( + index_fn_op, + Box::new([Operand::Copy(place), index_operand]), + result.clone(), + current, + false, + span, + )? + else { return Ok(None); }; result = result.project(ProjectionElem::Deref); @@ -329,7 +352,15 @@ impl MirLowerCtx<'_> { .intern(Interner), ); let mut result: Place = self.temp(target_ty_ref, current, span)?.into(); - let Some(current) = self.lower_call(deref_fn_op, Box::new([Operand::Copy(ref_place)]), result.clone(), current, false, span)? else { + let Some(current) = self.lower_call( + deref_fn_op, + Box::new([Operand::Copy(ref_place)]), + result.clone(), + current, + false, + span, + )? + else { return Ok(None); }; result = result.project(ProjectionElem::Deref); diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index ff43c64a9e6..3354cbd76a0 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -307,6 +307,11 @@ impl MirLowerCtx<'_> { mode, )?, None => { + // The path is not a variant, so it is a const. + if mode != MatchingMode::Check { + // A const doesn't bind anything; it only needs to be checked. + return Ok((current, current_else)); + } let unresolved_name = || MirLowerError::unresolved_path(self.db, p); let resolver = self.owner.resolver(self.db.upcast()); let pr = resolver @@ -362,8 +367,8 @@ impl MirLowerCtx<'_> { }, Pat::Lit(l) => match &self.body.exprs[*l] { Expr::Literal(l) => { - let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?; if mode == MatchingMode::Check { + let c = self.lower_literal_to_operand(self.infer[pattern].clone(), l)?; self.pattern_match_const(current_else, current, c, cond_place, pattern)? } else { (current, current_else) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs index ce3f7a8e510..c565228d91e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/monomorphization.rs @@ -13,15 +13,14 @@ use chalk_ir::{ fold::{FallibleTypeFolder, TypeFoldable, TypeSuperFoldable}, ConstData, DebruijnIndex, }; -use hir_def::{DefWithBodyId, GeneralConstId}; +use hir_def::DefWithBodyId; use triomphe::Arc; use crate::{ - consteval::unknown_const, + consteval::{intern_const_scalar, unknown_const}, db::HirDatabase, from_placeholder_idx, infer::normalize, - method_resolution::lookup_impl_const, utils::{generics, Generics}, ClosureId, Const, Interner, ProjectionTy, Substitution, TraitEnvironment, Ty, TyKind, }; @@ -29,8 +28,8 @@ use crate::{ use super::{MirBody, MirLowerError, Operand, Rvalue, StatementKind, TerminatorKind}; macro_rules!
not_supported { - ($x: expr) => { - return Err(MirLowerError::NotSupported(format!($x))) + ($it: expr) => { + return Err(MirLowerError::NotSupported(format!($it))) }; } @@ -97,16 +96,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> { idx: chalk_ir::PlaceholderIndex, _outer_binder: DebruijnIndex, ) -> std::result::Result<chalk_ir::Const<Interner>, Self::Error> { - let x = from_placeholder_idx(self.db, idx); - let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else { + let it = from_placeholder_idx(self.db, idx); + let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else { not_supported!("missing idx in generics"); }; Ok(self .subst .as_slice(Interner) .get(idx) - .and_then(|x| x.constant(Interner)) - .ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))? + .and_then(|it| it.constant(Interner)) + .ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))? .clone()) } @@ -115,16 +114,16 @@ impl FallibleTypeFolder<Interner> for Filler<'_> { idx: chalk_ir::PlaceholderIndex, _outer_binder: DebruijnIndex, ) -> std::result::Result<Ty, Self::Error> { - let x = from_placeholder_idx(self.db, idx); - let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(x)) else { + let it = from_placeholder_idx(self.db, idx); + let Some(idx) = self.generics.as_ref().and_then(|g| g.param_idx(it)) else { not_supported!("missing idx in generics"); }; Ok(self .subst .as_slice(Interner) .get(idx) - .and_then(|x| x.ty(Interner)) - .ok_or_else(|| MirLowerError::GenericArgNotProvided(x, self.subst.clone()))? + .and_then(|it| it.ty(Interner)) + .ok_or_else(|| MirLowerError::GenericArgNotProvided(it, self.subst.clone()))? .clone()) } @@ -180,7 +179,7 @@ impl Filler<'_> { MirLowerError::GenericArgNotProvided( self.generics .as_ref() - .and_then(|x| x.iter().nth(b.index)) + .and_then(|it| it.iter().nth(b.index)) .unwrap() .0, self.subst.clone(), @@ -193,25 +192,12 @@ impl Filler<'_> { | chalk_ir::ConstValue::Placeholder(_) => {} chalk_ir::ConstValue::Concrete(cc) => match &cc.interned { crate::ConstScalar::UnevaluatedConst(const_id, subst) => { - let mut const_id = *const_id; let mut subst = subst.clone(); self.fill_subst(&mut subst)?; - if let GeneralConstId::ConstId(c) = const_id { - let (c, s) = lookup_impl_const( - self.db, - self.db.trait_environment_for_body(self.owner), - c, - subst, - ); - const_id = GeneralConstId::ConstId(c); - subst = s; - } - let result = - self.db.const_eval(const_id.into(), subst).map_err(|e| { - let name = const_id.name(self.db.upcast()); - MirLowerError::ConstEvalError(name, Box::new(e)) - })?; - *c = result; + *c = intern_const_scalar( + crate::ConstScalar::UnevaluatedConst(*const_id, subst), + c.data(Interner).ty.clone(), + ); } crate::ConstScalar::Bytes(_, _) | crate::ConstScalar::Unknown => (), }, diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs index ac23e77bd2b..781ffaecad5 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/pretty.rs @@ -135,7 +135,7 @@ impl<'a> MirPrettyCtx<'a> { fn for_closure(&mut self, closure: ClosureId) { let body = match self.db.mir_body_for_closure(closure) { - Ok(x) => x, + Ok(it) => it, Err(e) => { wln!(self, "// error in {closure:?}: {e:?}"); return; @@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> { let indent = mem::take(&mut self.indent); let mut ctx = MirPrettyCtx { body: &body, - local_to_binding: 
body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(), + local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(), result, indent, ..*self @@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> { } fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self { - let local_to_binding = body.binding_locals.iter().map(|(x, y)| (*y, x)).collect(); + let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(); MirPrettyCtx { body, db, @@ -315,17 +315,17 @@ impl<'a> MirPrettyCtx<'a> { } } } - ProjectionElem::TupleOrClosureField(x) => { + ProjectionElem::TupleOrClosureField(it) => { f(this, local, head); - w!(this, ".{}", x); + w!(this, ".{}", it); } ProjectionElem::Index(l) => { f(this, local, head); w!(this, "[{}]", this.local_name(*l).display(this.db)); } - x => { + it => { f(this, local, head); - w!(this, ".{:?}", x); + w!(this, ".{:?}", it); } } } @@ -356,14 +356,14 @@ impl<'a> MirPrettyCtx<'a> { } self.place(p); } - Rvalue::Aggregate(AggregateKind::Tuple(_), x) => { + Rvalue::Aggregate(AggregateKind::Tuple(_), it) => { w!(self, "("); - self.operand_list(x); + self.operand_list(it); w!(self, ")"); } - Rvalue::Aggregate(AggregateKind::Array(_), x) => { + Rvalue::Aggregate(AggregateKind::Array(_), it) => { w!(self, "["); - self.operand_list(x); + self.operand_list(it); w!(self, "]"); } Rvalue::Repeat(op, len) => { @@ -371,19 +371,19 @@ impl<'a> MirPrettyCtx<'a> { self.operand(op); w!(self, "; {}]", len.display(self.db)); } - Rvalue::Aggregate(AggregateKind::Adt(_, _), x) => { + Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => { w!(self, "Adt("); - self.operand_list(x); + self.operand_list(it); w!(self, ")"); } - Rvalue::Aggregate(AggregateKind::Closure(_), x) => { + Rvalue::Aggregate(AggregateKind::Closure(_), it) => { w!(self, "Closure("); - self.operand_list(x); + self.operand_list(it); w!(self, ")"); } - Rvalue::Aggregate(AggregateKind::Union(_, _), x) => { + Rvalue::Aggregate(AggregateKind::Union(_, _), it) => { w!(self, "Union("); - self.operand_list(x); + self.operand_list(it); w!(self, ")"); } Rvalue::Len(p) => { @@ -428,8 +428,8 @@ impl<'a> MirPrettyCtx<'a> { } } - fn operand_list(&mut self, x: &[Operand]) { - let mut it = x.iter(); + fn operand_list(&mut self, it: &[Operand]) { + let mut it = it.iter(); if let Some(first) = it.next() { self.operand(first); for op in it { diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs index 85714128006..d22d0d85c8e 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests.rs @@ -30,7 +30,7 @@ use syntax::{ ast::{self, AstNode, HasName}, SyntaxNode, }; -use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry}; +use tracing_subscriber::{layer::SubscriberExt, Registry}; use tracing_tree::HierarchicalLayer; use triomphe::Arc; @@ -52,7 +52,8 @@ fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> { return None; } - let filter = EnvFilter::from_env("CHALK_DEBUG"); + let filter: tracing_subscriber::filter::Targets = + env::var("CHALK_DEBUG").ok().and_then(|it| it.parse().ok()).unwrap_or_default(); let layer = HierarchicalLayer::default() .with_indent_lines(true) .with_ansi(false) @@ -205,7 +206,9 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour let Some(node) = (match expr_or_pat { hir_def::hir::ExprOrPatId::ExprId(expr) => expr_node(&body_source_map, expr, &db), hir_def::hir::ExprOrPatId::PatId(pat) => 
pat_node(&body_source_map, pat, &db), - }) else { continue; }; + }) else { + continue; + }; let range = node.as_ref().original_file_range(&db); let actual = format!( "expected {}, got {}", diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs index 111ac0b618e..b71c457f015 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/macros.rs @@ -202,11 +202,11 @@ fn expr_macro_def_expanded_in_various_places() { 100..119 'for _ ...!() {}': IntoIterator::IntoIter<isize> 100..119 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize> 100..119 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item> - 100..119 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>> + 100..119 'for _ ...!() {}': Option<IntoIterator::Item<isize>> 100..119 'for _ ...!() {}': () 100..119 'for _ ...!() {}': () 100..119 'for _ ...!() {}': () - 104..105 '_': Iterator::Item<IntoIterator::IntoIter<isize>> + 104..105 '_': IntoIterator::Item<isize> 117..119 '{}': () 124..134 '|| spam!()': impl Fn() -> isize 140..156 'while ...!() {}': () @@ -293,11 +293,11 @@ fn expr_macro_rules_expanded_in_various_places() { 114..133 'for _ ...!() {}': IntoIterator::IntoIter<isize> 114..133 'for _ ...!() {}': &mut IntoIterator::IntoIter<isize> 114..133 'for _ ...!() {}': fn next<IntoIterator::IntoIter<isize>>(&mut IntoIterator::IntoIter<isize>) -> Option<<IntoIterator::IntoIter<isize> as Iterator>::Item> - 114..133 'for _ ...!() {}': Option<Iterator::Item<IntoIterator::IntoIter<isize>>> + 114..133 'for _ ...!() {}': Option<IntoIterator::Item<isize>> 114..133 'for _ ...!() {}': () 114..133 'for _ ...!() {}': () 114..133 'for _ ...!() {}': () - 118..119 '_': Iterator::Item<IntoIterator::IntoIter<isize>> + 118..119 '_': IntoIterator::Item<isize> 131..133 '{}': () 138..148 '|| spam!()': impl Fn() -> isize 154..170 'while ...!() {}': () diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs index 1e57a4ae296..a8e146b096a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/method_resolution.rs @@ -1216,6 +1216,52 @@ fn main() { } #[test] +fn inherent_method_deref_raw() { + check_types( + r#" +struct Val; + +impl Val { + pub fn method(self: *const Val) -> u32 { + 0 + } +} + +fn main() { + let foo: *const Val; + foo.method(); + // ^^^^^^^^^^^^ u32 +} +"#, + ); +} + +#[test] +fn trait_method_deref_raw() { + check_types( + r#" +trait Trait { + fn method(self: *const Self) -> u32; +} + +struct Val; + +impl Trait for Val { + fn method(self: *const Self) -> u32 { + 0 + } +} + +fn main() { + let foo: *const Val; + foo.method(); + // ^^^^^^^^^^^^ u32 +} +"#, + ); +} + +#[test] fn method_on_dyn_impl() { check_types( r#" diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs index 047900a324e..8b95110233f 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs @@ -1240,11 +1240,11 @@ fn test() { 16..66 'for _ ... }': IntoIterator::IntoIter<()> 16..66 'for _ ... }': &mut IntoIterator::IntoIter<()> 16..66 'for _ ... 
}': fn next<IntoIterator::IntoIter<()>>(&mut IntoIterator::IntoIter<()>) -> Option<<IntoIterator::IntoIter<()> as Iterator>::Item> - 16..66 'for _ ... }': Option<Iterator::Item<IntoIterator::IntoIter<()>>> + 16..66 'for _ ... }': Option<IntoIterator::Item<()>> 16..66 'for _ ... }': () 16..66 'for _ ... }': () 16..66 'for _ ... }': () - 20..21 '_': Iterator::Item<IntoIterator::IntoIter<()>> + 20..21 '_': IntoIterator::Item<()> 25..39 '{ let x = 0; }': () 31..32 'x': i32 35..36 '0': i32 diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs index 97ae732a904..5f5cd794512 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/tests/traits.rs @@ -4149,6 +4149,30 @@ where } #[test] +fn gats_in_bounds_for_assoc() { + check_types( + r#" +trait Trait { + type Assoc: Another<Gat<i32> = usize>; + type Assoc2<T>: Another<Gat<T> = T>; +} +trait Another { + type Gat<T>; + fn foo(&self) -> Self::Gat<i32>; + fn bar<T>(&self) -> Self::Gat<T>; +} + +fn test<T: Trait>(a: T::Assoc, b: T::Assoc2<isize>) { + let v = a.foo(); + //^ usize + let v = b.bar::<isize>(); + //^ isize +} +"#, + ); +} + +#[test] fn bin_op_with_scalar_fallback() { // Extra impls are significant so that chalk doesn't give us definite guidances. check_types( diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs index f40b7db3a55..3c7cfbaed3a 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/traits.rs @@ -170,7 +170,7 @@ fn solve( struct LoggingRustIrDatabaseLoggingOnDrop<'a>(LoggingRustIrDatabase<Interner, ChalkContext<'a>>); -impl<'a> Drop for LoggingRustIrDatabaseLoggingOnDrop<'a> { +impl Drop for LoggingRustIrDatabaseLoggingOnDrop<'_> { fn drop(&mut self) { eprintln!("chalk program:\n{}", self.0); } diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs index 3636580630d..0c38fe5d6ab 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/utils.rs @@ -89,7 +89,7 @@ struct SuperTraits<'a> { seen: FxHashSet<ChalkTraitId>, } -impl<'a> SuperTraits<'a> { +impl SuperTraits<'_> { fn elaborate(&mut self, trait_ref: &TraitRef) { direct_super_trait_refs(self.db, trait_ref, |trait_ref| { if !self.seen.contains(&trait_ref.trait_id) { @@ -99,7 +99,7 @@ impl<'a> SuperTraits<'a> { } } -impl<'a> Iterator for SuperTraits<'a> { +impl Iterator for SuperTraits<'_> { type Item = TraitRef; fn next(&mut self) -> Option<Self::Item> { diff --git a/src/tools/rust-analyzer/crates/hir/Cargo.toml b/src/tools/rust-analyzer/crates/hir/Cargo.toml index a20aff93f19..f860ee94845 100644 --- a/src/tools/rust-analyzer/crates/hir/Cargo.toml +++ b/src/tools/rust-analyzer/crates/hir/Cargo.toml @@ -18,7 +18,7 @@ arrayvec = "0.7.2" itertools = "0.10.5" smallvec.workspace = true triomphe.workspace = true -once_cell = "1.17.0" +once_cell = "1.17.1" # local deps base-db.workspace = true diff --git a/src/tools/rust-analyzer/crates/hir/src/attrs.rs b/src/tools/rust-analyzer/crates/hir/src/attrs.rs index b817937296d..cf8db2a5a24 100644 --- a/src/tools/rust-analyzer/crates/hir/src/attrs.rs +++ b/src/tools/rust-analyzer/crates/hir/src/attrs.rs @@ -141,6 +141,7 @@ fn resolve_doc_path( AttrDefId::ImplId(it) => it.resolver(db.upcast()), AttrDefId::ExternBlockId(it) => it.resolver(db.upcast()), 
AttrDefId::MacroId(it) => it.resolver(db.upcast()), + AttrDefId::ExternCrateId(it) => it.resolver(db.upcast()), AttrDefId::GenericParamId(it) => match it { GenericParamId::TypeParamId(it) => it.parent(), GenericParamId::ConstParamId(it) => it.parent(), diff --git a/src/tools/rust-analyzer/crates/hir/src/db.rs b/src/tools/rust-analyzer/crates/hir/src/db.rs index e0cde689fed..f3a0608944b 100644 --- a/src/tools/rust-analyzer/crates/hir/src/db.rs +++ b/src/tools/rust-analyzer/crates/hir/src/db.rs @@ -5,9 +5,9 @@ //! But we need this for at least LRU caching at the query level. pub use hir_def::db::*; pub use hir_expand::db::{ - AstIdMapQuery, ExpandDatabase, ExpandDatabaseStorage, ExpandProcMacroQuery, HygieneFrameQuery, - InternMacroCallQuery, MacroArgTextQuery, MacroDefQuery, MacroExpandQuery, - ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, + AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage, + ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery, + MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, }; pub use hir_ty::db::*; diff --git a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs index b64d81490bb..80c3bcdca8b 100644 --- a/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/diagnostics.rs @@ -3,7 +3,7 @@ //! //! This probably isn't the best way to do this -- ideally, diagnostics should //! be expressed in terms of hir types themselves. -pub use hir_ty::diagnostics::{IncoherentImpl, IncorrectCase}; +pub use hir_ty::diagnostics::{CaseType, IncoherentImpl, IncorrectCase}; use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; diff --git a/src/tools/rust-analyzer/crates/hir/src/display.rs b/src/tools/rust-analyzer/crates/hir/src/display.rs index 9a2090ab79a..4de9c872ad6 100644 --- a/src/tools/rust-analyzer/crates/hir/src/display.rs +++ b/src/tools/rust-analyzer/crates/hir/src/display.rs @@ -251,8 +251,8 @@ impl HirDisplay for GenericParam { impl HirDisplay for TypeOrConstParam { fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> { match self.split(f.db) { - either::Either::Left(x) => x.hir_fmt(f), - either::Either::Right(x) => x.hir_fmt(f), + either::Either::Left(it) => it.hir_fmt(f), + either::Either::Right(it) => it.hir_fmt(f), } } } @@ -303,11 +303,11 @@ fn write_generic_params( ) -> Result<(), HirDisplayError> { let params = f.db.generic_params(def); if params.lifetimes.is_empty() - && params.type_or_consts.iter().all(|x| x.1.const_param().is_none()) + && params.type_or_consts.iter().all(|it| it.1.const_param().is_none()) && params .type_or_consts .iter() - .filter_map(|x| x.1.type_param()) + .filter_map(|it| it.1.type_param()) .all(|param| !matches!(param.provenance, TypeParamProvenance::TypeParamList)) { return Ok(()); diff --git a/src/tools/rust-analyzer/crates/hir/src/has_source.rs b/src/tools/rust-analyzer/crates/hir/src/has_source.rs index 9f6b5c0a9fc..b46a3856d45 100644 --- a/src/tools/rust-analyzer/crates/hir/src/has_source.rs +++ b/src/tools/rust-analyzer/crates/hir/src/has_source.rs @@ -1,12 +1,13 @@ //! Provides set of implementation for hir's objects that allows get back location in file. 
+use base_db::FileId; use either::Either; use hir_def::{ nameres::{ModuleOrigin, ModuleSource}, src::{HasChildSource, HasSource as _}, Lookup, MacroId, VariantId, }; -use hir_expand::InFile; +use hir_expand::{HirFileId, InFile}; use syntax::ast; use crate::{ @@ -20,6 +21,10 @@ pub trait HasSource { /// Fetches the definition's source node. /// Using [`crate::Semantics::source`] is preferred when working with [`crate::Semantics`], /// as that caches the parsed file in the semantics' cache. + /// + /// Currently, some implementations can return `InFile` instead of `Option<InFile>`. + /// This method still returns an `Option` so that rlib support can be added in the future, + /// see https://github.com/rust-lang/rust-analyzer/issues/6913 fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>>; } @@ -32,6 +37,11 @@ impl Module { def_map[self.id.local_id].definition_source(db.upcast()) } + pub fn definition_source_file_id(self, db: &dyn HirDatabase) -> HirFileId { + let def_map = self.id.def_map(db.upcast()); + def_map[self.id.local_id].definition_source_file_id() + } + pub fn is_mod_rs(self, db: &dyn HirDatabase) -> bool { let def_map = self.id.def_map(db.upcast()); match def_map[self.id.local_id].origin { @@ -40,6 +50,16 @@ impl Module { } } + pub fn as_source_file_id(self, db: &dyn HirDatabase) -> Option<FileId> { + let def_map = self.id.def_map(db.upcast()); + match def_map[self.id.local_id].origin { + ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition, .. } => { + Some(definition) + } + _ => None, + } + } + pub fn is_inline(self, db: &dyn HirDatabase) -> bool { let def_map = self.id.def_map(db.upcast()); def_map[self.id.local_id].origin.is_inline() diff --git a/src/tools/rust-analyzer/crates/hir/src/lib.rs b/src/tools/rust-analyzer/crates/hir/src/lib.rs index 6df625380ff..f8d9398ae2c 100644 --- a/src/tools/rust-analyzer/crates/hir/src/lib.rs +++ b/src/tools/rust-analyzer/crates/hir/src/lib.rs @@ -47,7 +47,7 @@ use hir_def::{ lang_item::LangItemTarget, layout::{self, ReprOptions, TargetDataLayout}, macro_id_to_def_id, - nameres::{self, diagnostics::DefDiagnostic, ModuleOrigin}, + nameres::{self, diagnostics::DefDiagnostic}, per_ns::PerNs, resolver::{HasResolver, Resolver}, src::HasSource as _, @@ -62,7 +62,6 @@ use hir_ty::{ all_super_traits, autoderef, consteval::{try_const_usize, unknown_const_as_generic, ConstEvalError, ConstExt}, diagnostics::BodyValidationDiagnostic, - display::HexifiedConst, layout::{Layout as TyLayout, RustcEnumVariantIdx, TagEncoding}, method_resolution::{self, TyFingerprint}, mir::{self, interpret_mir}, @@ -89,11 +88,11 @@ use crate::db::{DefDatabase, HirDatabase}; pub use crate::{ attrs::{HasAttrs, Namespace}, diagnostics::{ - AnyDiagnostic, BreakOutsideOfLoop, ExpectedFunction, InactiveCode, IncoherentImpl, - IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError, MacroExpansionParseError, - MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms, MissingUnsafe, - MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, PrivateField, - ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel, + AnyDiagnostic, BreakOutsideOfLoop, CaseType, ExpectedFunction, InactiveCode, + IncoherentImpl, IncorrectCase, InvalidDeriveTarget, MacroDefError, MacroError, + MacroExpansionParseError, MalformedDerive, MismatchedArgCount, MissingFields, + MissingMatchArms, MissingUnsafe, MovedOutOfRef, NeedMut, NoSuchField, PrivateAssocItem, + PrivateField, ReplaceFilterMapNextWithFindMap, TypeMismatch, TypedHole, UndeclaredLabel,
UnimplementedBuiltinMacro, UnreachableLabel, UnresolvedExternCrate, UnresolvedField, UnresolvedImport, UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule, UnresolvedProcMacro, UnusedMut, @@ -505,15 +504,10 @@ impl Module { /// Finds nearest non-block ancestor `Module` (`self` included). pub fn nearest_non_block_module(self, db: &dyn HirDatabase) -> Module { let mut id = self.id; - loop { - let def_map = id.def_map(db.upcast()); - let origin = def_map[id.local_id].origin; - if matches!(origin, ModuleOrigin::BlockExpr { .. }) { - id = id.containing_module(db.upcast()).expect("block without parent module") - } else { - return Module { id }; - } + while id.is_block_module() { + id = id.containing_module(db.upcast()).expect("block without parent module"); } + Module { id } } pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> { @@ -619,15 +613,21 @@ impl Module { let inherent_impls = db.inherent_impls_in_crate(self.id.krate()); for impl_def in self.impl_defs(db) { + let loc = impl_def.id.lookup(db.upcast()); + let tree = loc.id.item_tree(db.upcast()); + let node = &tree[loc.id.value]; + let file_id = loc.id.file_id(); + if file_id.is_builtin_derive(db.upcast()) { + // these expansions come from us; diagnosing them is a waste of resources + // FIXME: Once we diagnose the inputs to builtin derives, we should at least extract those diagnostics somehow + continue; + } + for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() { emit_def_diagnostic(db, acc, diag); } if inherent_impls.invalid_impls().contains(&impl_def.id) { - let loc = impl_def.id.lookup(db.upcast()); - let tree = loc.id.item_tree(db.upcast()); - let node = &tree[loc.id.value]; - let file_id = loc.id.file_id(); let ast_id_map = db.ast_id_map(file_id); acc.push(IncoherentImpl { impl_: ast_id_map.get(node.ast_id()), file_id }.into()) @@ -698,16 +698,18 @@ impl Module { fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, m: Macro) { let id = macro_id_to_def_id(db.upcast(), m.id); - if let Err(e) = db.macro_def(id) { - let Some(ast) = id.ast_id().left() else { - never!("MacroDefError for proc-macro: {:?}", e); + if let hir_expand::db::TokenExpander::DeclarativeMacro(expander) = db.macro_expander(id) { + if let Some(e) = expander.mac.err() { + let Some(ast) = id.ast_id().left() else { + never!("declarative expander for non decl-macro: {:?}", e); return; }; - emit_def_diagnostic_( - db, - acc, - &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, - ); + emit_def_diagnostic_( + db, + acc, + &DefDiagnosticKind::MacroDefError { ast, message: e.to_string() }, + ); + } } } @@ -753,7 +755,7 @@ fn emit_def_diagnostic_( let item = ast.to_node(db.upcast()); acc.push( InactiveCode { - node: ast.with_value(AstPtr::new(&item).into()), + node: ast.with_value(SyntaxNodePtr::new(&item).into()), cfg: cfg.clone(), opts: opts.clone(), } @@ -1234,7 +1236,7 @@ impl Adt { pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { let subst = db.generic_defaults(self.into()); subst.iter().any(|ty| match ty.skip_binders().data(Interner) { - GenericArgData::Ty(x) => x.is_unknown(), + GenericArgData::Ty(it) => it.is_unknown(), _ => false, }) } @@ -1635,11 +1637,11 @@ impl DefWithBody { for moof in &borrowck_result.moved_out_of_ref { let span: InFile<SyntaxNodePtr> = match moof.span { mir::MirSpan::ExprId(e) => match source_map.expr_syntax(e) { - Ok(s) => s.map(|x| x.into()), + Ok(s) => s.map(|it| it.into()), Err(_) => continue, }, mir::MirSpan::PatId(p) => match
source_map.pat_syntax(p) { - Ok(s) => s.map(|x| match x { + Ok(s) => s.map(|it| match it { Either::Left(e) => e.into(), Either::Right(e) => e.into(), }), @@ -1661,6 +1663,14 @@ impl DefWithBody { let Some(&local) = mir_body.binding_locals.get(binding_id) else { continue; }; + if body[binding_id] + .definitions + .iter() + .any(|&pat| source_map.pat_syntax(pat).is_err()) + { + // Skip synthetic bindings + continue; + } let need_mut = &mol[local]; let local = Local { parent: self.into(), binding_id }; match (need_mut, local.is_mut(db)) { @@ -1670,11 +1680,11 @@ impl DefWithBody { for span in spans { let span: InFile<SyntaxNodePtr> = match span { mir::MirSpan::ExprId(e) => match source_map.expr_syntax(*e) { - Ok(s) => s.map(|x| x.into()), + Ok(s) => s.map(|it| it.into()), Err(_) => continue, }, mir::MirSpan::PatId(p) => match source_map.pat_syntax(*p) { - Ok(s) => s.map(|x| match x { + Ok(s) => s.map(|it| match it { Either::Left(e) => e.into(), Either::Right(e) => e.into(), }), @@ -1687,7 +1697,7 @@ impl DefWithBody { } (mir::MutabilityReason::Not, true) => { if !infer.mutated_bindings_in_closure.contains(&binding_id) { - let should_ignore = matches!(body[binding_id].name.as_str(), Some(x) if x.starts_with("_")); + let should_ignore = matches!(body[binding_id].name.as_str(), Some(it) if it.starts_with("_")); if !should_ignore { acc.push(UnusedMut { local }.into()) } @@ -1919,6 +1929,21 @@ impl Function { db.function_data(self.id).has_async_kw() } + /// Does this function have `#[test]` attribute? + pub fn is_test(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).attrs.is_test() + } + + /// Does this function have the ignore attribute? + pub fn is_ignore(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).attrs.is_ignore() + } + + /// Does this function have `#[bench]` attribute? 
+ pub fn is_bench(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).attrs.is_bench() + } + pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool { hir_ty::is_fn_unsafe_to_call(db, self.id) } @@ -1962,7 +1987,7 @@ impl Function { return r; } }; - let (result, stdout, stderr) = interpret_mir(db, &body, false); + let (result, stdout, stderr) = interpret_mir(db, body, false); let mut text = match result { Ok(_) => "pass".to_string(), Err(e) => { @@ -2132,7 +2157,27 @@ impl Const { pub fn render_eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> { let c = db.const_eval(self.id.into(), Substitution::empty(Interner))?; - let r = format!("{}", HexifiedConst(c).display(db)); + let data = &c.data(Interner); + if let TyKind::Scalar(s) = data.ty.kind(Interner) { + if matches!(s, Scalar::Int(_) | Scalar::Uint(_)) { + if let hir_ty::ConstValue::Concrete(c) = &data.value { + if let hir_ty::ConstScalar::Bytes(b, _) = &c.interned { + let value = u128::from_le_bytes(mir::pad16(b, false)); + let value_signed = + i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_)))); + if value >= 10 { + return Ok(format!("{} ({:#X})", value_signed, value)); + } else { + return Ok(format!("{}", value_signed)); + } + } + } + } + } + if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) { + return Ok(s); + } + let r = format!("{}", c.display(db)); return Ok(r); } } @@ -2270,7 +2315,7 @@ impl TypeAlias { pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool { let subst = db.generic_defaults(self.id.into()); subst.iter().any(|ty| match ty.skip_binders().data(Interner) { - GenericArgData::Ty(x) => x.is_unknown(), + GenericArgData::Ty(it) => it.is_unknown(), _ => false, }) } @@ -2660,8 +2705,8 @@ impl GenericDef { let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| { let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } }; match toc.split(db) { - Either::Left(x) => GenericParam::ConstParam(x), - Either::Right(x) => GenericParam::TypeParam(x), + Either::Left(it) => GenericParam::ConstParam(it), + Either::Right(it) => GenericParam::TypeParam(it), } }); self.lifetime_params(db) @@ -2709,14 +2754,14 @@ pub struct LocalSource { impl LocalSource { pub fn as_ident_pat(&self) -> Option<&ast::IdentPat> { match &self.source.value { - Either::Left(x) => Some(x), + Either::Left(it) => Some(it), Either::Right(_) => None, } } pub fn into_ident_pat(self) -> Option<ast::IdentPat> { match self.source.value { - Either::Left(x) => Some(x), + Either::Left(it) => Some(it), Either::Right(_) => None, } } @@ -2738,7 +2783,7 @@ impl LocalSource { } pub fn syntax_ptr(self) -> InFile<SyntaxNodePtr> { - self.source.map(|x| SyntaxNodePtr::new(x.syntax())) + self.source.map(|it| SyntaxNodePtr::new(it.syntax())) } } @@ -2797,13 +2842,13 @@ impl Local { Type::new(db, def, ty) } - /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = x;` + /// All definitions for this local. Example: `let (a$0, _) | (_, a$0) = it;` pub fn sources(self, db: &dyn HirDatabase) -> Vec<LocalSource> { let (body, source_map) = db.body_with_source_map(self.parent); self.sources_(db, &body, &source_map).collect() } - /// The leftmost definition for this local. Example: `let (a$0, _) | (_, a) = x;` + /// The leftmost definition for this local. 
Example: `let (a$0, _) | (_, a) = it;` pub fn primary_source(self, db: &dyn HirDatabase) -> LocalSource { let (body, source_map) = db.body_with_source_map(self.parent); let src = self.sources_(db, &body, &source_map).next().unwrap(); @@ -3057,7 +3102,9 @@ impl TypeParam { let subst = TyBuilder::placeholder_subst(db, self.id.parent()); let ty = ty.substitute(Interner, &subst); match ty.data(Interner) { - GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())), + GenericArgData::Ty(it) => { + Some(Type::new_with_resolver_inner(db, &resolver, it.clone())) + } _ => None, } } @@ -3096,7 +3143,7 @@ impl ConstParam { pub fn name(self, db: &dyn HirDatabase) -> Name { let params = db.generic_params(self.id.parent()); match params.type_or_consts[self.id.local_id()].name() { - Some(x) => x.clone(), + Some(it) => it.clone(), None => { never!(); Name::missing() @@ -3153,8 +3200,8 @@ impl TypeOrConstParam { pub fn ty(self, db: &dyn HirDatabase) -> Type { match self.split(db) { - Either::Left(x) => x.ty(db), - Either::Right(x) => x.ty(db), + Either::Left(it) => it.ty(db), + Either::Right(it) => it.ty(db), } } } @@ -3260,9 +3307,9 @@ impl Impl { self.id.lookup(db.upcast()).container.into() } - pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> { + pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> { let src = self.source(db)?; - src.file_id.is_builtin_derive(db.upcast()) + src.file_id.as_builtin_derive_attr_node(db.upcast()) } } @@ -3652,9 +3699,9 @@ impl Type { }; let parent_subst = TyBuilder::subst_for_def(db, trait_id, None) .push(self.ty.clone()) - .fill(|x| { + .fill(|it| { // FIXME: this code is not covered in tests. - match x { + match it { ParamKind::Type => { GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner) } @@ -3821,7 +3868,7 @@ impl Type { pub fn as_array(&self, db: &dyn HirDatabase) -> Option<(Type, usize)> { if let TyKind::Array(ty, len) = &self.ty.kind(Interner) { - try_const_usize(db, len).map(|x| (self.derived(ty.clone()), x as usize)) + try_const_usize(db, len).map(|it| (self.derived(ty.clone()), it as usize)) } else { None } diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics.rs b/src/tools/rust-analyzer/crates/hir/src/semantics.rs index 5a76a9185a2..39a3e1c4489 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics.rs @@ -1494,7 +1494,11 @@ impl<'db> SemanticsImpl<'db> { } fn is_inside_unsafe(&self, expr: &ast::Expr) -> bool { - let Some(enclosing_item) = expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast) else { return false }; + let Some(enclosing_item) = + expr.syntax().ancestors().find_map(Either::<ast::Item, ast::Variant>::cast) + else { + return false; + }; let def = match &enclosing_item { Either::Left(ast::Item::Fn(it)) if it.unsafe_token().is_some() => return true, @@ -1631,7 +1635,7 @@ pub struct SemanticsScope<'a> { resolver: Resolver, } -impl<'a> SemanticsScope<'a> { +impl SemanticsScope<'_> { pub fn module(&self) -> Module { Module { id: self.resolver.module() } } diff --git a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs index c50ffa4f8b7..b971ca62387 100644 --- a/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs +++ b/src/tools/rust-analyzer/crates/hir/src/semantics/source_to_def.rs @@ -298,7 +298,7 @@ impl SourceToDefCtx<'_, '_> { pub(super) fn 
type_param_to_def(&mut self, src: InFile<ast::TypeParam>) -> Option<TypeParamId> { let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into(); let dyn_map = self.cache_for(container, src.file_id); - dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|x| TypeParamId::from_unchecked(x)) + dyn_map[keys::TYPE_PARAM].get(&src.value).copied().map(|it| TypeParamId::from_unchecked(it)) } pub(super) fn lifetime_param_to_def( @@ -316,7 +316,10 @@ impl SourceToDefCtx<'_, '_> { ) -> Option<ConstParamId> { let container: ChildContainer = self.find_generic_param_container(src.syntax())?.into(); let dyn_map = self.cache_for(container, src.file_id); - dyn_map[keys::CONST_PARAM].get(&src.value).copied().map(|x| ConstParamId::from_unchecked(x)) + dyn_map[keys::CONST_PARAM] + .get(&src.value) + .copied() + .map(|it| ConstParamId::from_unchecked(it)) } pub(super) fn generic_param_to_def( diff --git a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs index ecb1b306a66..3499daf1140 100644 --- a/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs +++ b/src/tools/rust-analyzer/crates/hir/src/source_analyzer.rs @@ -832,7 +832,7 @@ impl SourceAnalyzer { None => return func, }; let env = db.trait_environment_for_body(owner); - method_resolution::lookup_impl_method(db, env, func, substs).0 + db.lookup_impl_method(env, func, substs).0 } fn resolve_impl_const_or_trait_def( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs index 7384390f28b..ac0b74ee8e7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/add_missing_match_arms.rs @@ -8,10 +8,7 @@ use itertools::Itertools; use syntax::ast::edit_in_place::Removable; use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat}; -use crate::{ - utils::{self, render_snippet, Cursor}, - AssistContext, AssistId, AssistKind, Assists, -}; +use crate::{utils, AssistContext, AssistId, AssistKind, Assists}; // Assist: add_missing_match_arms // @@ -75,14 +72,18 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) .collect(); let module = ctx.sema.scope(expr.syntax())?.module(); - let (mut missing_pats, is_non_exhaustive): ( + let (mut missing_pats, is_non_exhaustive, has_hidden_variants): ( Peekable<Box<dyn Iterator<Item = (ast::Pat, bool)>>>, bool, + bool, ) = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) { let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate()); let variants = enum_def.variants(ctx.db()); + let has_hidden_variants = + variants.iter().any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); + let missing_pats = variants .into_iter() .filter_map(|variant| { @@ -101,7 +102,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) } else { Box::new(missing_pats) }; - (missing_pats.peekable(), is_non_exhaustive) + (missing_pats.peekable(), is_non_exhaustive, has_hidden_variants) } else if let Some(enum_defs) = resolve_tuple_of_enum_def(&ctx.sema, &expr) { let is_non_exhaustive = enum_defs.iter().any(|enum_def| enum_def.is_non_exhaustive(ctx.db(), module.krate())); @@ -124,6 +125,12 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) if n_arms > 256 { return None; } + + let 
has_hidden_variants = variants_of_enums + .iter() + .flatten() + .any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); + let missing_pats = variants_of_enums .into_iter() .multi_cartesian_product() @@ -139,7 +146,11 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) (ast::Pat::from(make::tuple_pat(patterns)), is_hidden) }) .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); - ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive) + ( + (Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), + is_non_exhaustive, + has_hidden_variants, + ) } else if let Some((enum_def, len)) = resolve_array_of_enum_def(&ctx.sema, &expr) { let is_non_exhaustive = enum_def.is_non_exhaustive(ctx.db(), module.krate()); let variants = enum_def.variants(ctx.db()); @@ -148,6 +159,9 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) return None; } + let has_hidden_variants = + variants.iter().any(|variant| variant.should_be_hidden(ctx.db(), module.krate())); + let variants_of_enums = vec![variants; len]; let missing_pats = variants_of_enums @@ -164,14 +178,20 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) (ast::Pat::from(make::slice_pat(patterns)), is_hidden) }) .filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat)); - ((Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), is_non_exhaustive) + ( + (Box::new(missing_pats) as Box<dyn Iterator<Item = _>>).peekable(), + is_non_exhaustive, + has_hidden_variants, + ) } else { return None; }; let mut needs_catch_all_arm = is_non_exhaustive && !has_catch_all_arm; - if !needs_catch_all_arm && missing_pats.peek().is_none() { + if !needs_catch_all_arm + && ((has_hidden_variants && has_catch_all_arm) || missing_pats.peek().is_none()) + { return None; } @@ -179,13 +199,21 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) AssistId("add_missing_match_arms", AssistKind::QuickFix), "Fill match arms", target_range, - |builder| { + |edit| { let new_match_arm_list = match_arm_list.clone_for_update(); + + // having any hidden variants means that we need a catch-all arm + needs_catch_all_arm |= has_hidden_variants; + let missing_arms = missing_pats - .map(|(pat, hidden)| { - (make::match_arm(iter::once(pat), None, make::ext::expr_todo()), hidden) + .filter(|(_, hidden)| { + // filter out hidden patterns because they're handled by the catch-all arm + !hidden }) - .map(|(it, hidden)| (it.clone_for_update(), hidden)); + .map(|(pat, _)| { + make::match_arm(iter::once(pat), None, make::ext::expr_todo()) + .clone_for_update() + }); let catch_all_arm = new_match_arm_list .arms() @@ -204,15 +232,13 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) cov_mark::hit!(add_missing_match_arms_empty_expr); } } + let mut first_new_arm = None; - for (arm, hidden) in missing_arms { - if hidden { - needs_catch_all_arm = !has_catch_all_arm; - } else { - first_new_arm.get_or_insert_with(|| arm.clone()); - new_match_arm_list.add_arm(arm); - } + for arm in missing_arms { + first_new_arm.get_or_insert_with(|| arm.clone()); + new_match_arm_list.add_arm(arm); } + if needs_catch_all_arm && !has_catch_all_arm { cov_mark::hit!(added_wildcard_pattern); let arm = make::match_arm( @@ -225,24 +251,39 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>) new_match_arm_list.add_arm(arm); } - let old_range = 
ctx.sema.original_range(match_arm_list.syntax()).range; - match (first_new_arm, ctx.config.snippet_cap) { - (Some(first_new_arm), Some(cap)) => { - let extend_lifetime; - let cursor = - match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast) - { - Some(it) => { - extend_lifetime = it.syntax().clone(); - Cursor::Replace(&extend_lifetime) - } - None => Cursor::Before(first_new_arm.syntax()), - }; - let snippet = render_snippet(cap, new_match_arm_list.syntax(), cursor); - builder.replace_snippet(cap, old_range, snippet); + if let (Some(first_new_arm), Some(cap)) = (first_new_arm, ctx.config.snippet_cap) { + match first_new_arm.syntax().descendants().find_map(ast::WildcardPat::cast) { + Some(it) => edit.add_placeholder_snippet(cap, it), + None => edit.add_tabstop_before(cap, first_new_arm), } - _ => builder.replace(old_range, new_match_arm_list.to_string()), } + + // FIXME: Hack for mutable syntax trees not having great support for macros + // Just replace the element that the original range came from + let old_place = { + // Find the original element + let old_file_range = ctx.sema.original_range(match_arm_list.syntax()); + let file = ctx.sema.parse(old_file_range.file_id); + let old_place = file.syntax().covering_element(old_file_range.range); + + // Make `old_place` mut + match old_place { + syntax::SyntaxElement::Node(it) => { + syntax::SyntaxElement::from(edit.make_syntax_mut(it)) + } + syntax::SyntaxElement::Token(it) => { + // Don't have a way to make tokens mut, so instead make the parent mut + // and find the token again + let parent = edit.make_syntax_mut(it.parent().unwrap()); + let mut_token = + parent.covering_element(it.text_range()).into_token().unwrap(); + + syntax::SyntaxElement::from(mut_token) + } + } + }; + + syntax::ted::replace(old_place, new_match_arm_list.syntax()); }, ) } @@ -1621,10 +1662,9 @@ pub enum E { #[doc(hidden)] A, } ); } - // FIXME: I don't think the assist should be applicable in this case #[test] fn does_not_fill_wildcard_with_wildcard() { - check_assist( + check_assist_not_applicable( add_missing_match_arms, r#" //- /main.rs crate:main deps:e @@ -1636,13 +1676,6 @@ fn foo(t: ::e::E) { //- /e.rs crate:e pub enum E { #[doc(hidden)] A, } "#, - r#" -fn foo(t: ::e::E) { - match t { - _ => todo!(), - } -} -"#, ); } @@ -1777,7 +1810,7 @@ fn foo(t: ::e::E, b: bool) { #[test] fn does_not_fill_wildcard_with_partial_wildcard_and_wildcard() { - check_assist( + check_assist_not_applicable( add_missing_match_arms, r#" //- /main.rs crate:main deps:e @@ -1789,14 +1822,6 @@ fn foo(t: ::e::E, b: bool) { } //- /e.rs crate:e pub enum E { #[doc(hidden)] A, }"#, - r#" -fn foo(t: ::e::E, b: bool) { - match t { - _ if b => todo!(), - _ => todo!(), - } -} -"#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs index 226a5dd9fa8..ddc8a50ed40 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/desugar_doc_comment.rs @@ -27,7 +27,9 @@ use crate::{ pub(crate) fn desugar_doc_comment(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let comment = ctx.find_token_at_offset::<ast::Comment>()?; // Only allow doc comments - let Some(placement) = comment.kind().doc else { return None; }; + let Some(placement) = comment.kind().doc else { + return None; + }; // Only allow comments which are alone on their line if let Some(prev) = 
comment.syntax().prev_token() { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs index 5c435dd9c29..3aff5c9144f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/expand_glob_import.rs @@ -1,5 +1,5 @@ use either::Either; -use hir::{AssocItem, HasVisibility, Module, ModuleDef, Name, PathResolution, ScopeDef}; +use hir::{AssocItem, Enum, HasVisibility, Module, ModuleDef, Name, PathResolution, ScopeDef}; use ide_db::{ defs::{Definition, NameRefClass}, search::SearchScope, @@ -45,7 +45,8 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> let use_tree = star.parent().and_then(ast::UseTree::cast)?; let (parent, mod_path) = find_parent_and_path(&star)?; let target_module = match ctx.sema.resolve_path(&mod_path)? { - PathResolution::Def(ModuleDef::Module(it)) => it, + PathResolution::Def(ModuleDef::Module(it)) => Expandable::Module(it), + PathResolution::Def(ModuleDef::Adt(hir::Adt::Enum(e))) => Expandable::Enum(e), _ => return None, }; @@ -90,6 +91,11 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> ) } +enum Expandable { + Module(Module), + Enum(Enum), +} + fn find_parent_and_path( star: &SyntaxToken, ) -> Option<(Either<ast::UseTree, ast::UseTreeList>, ast::Path)> { @@ -168,23 +174,59 @@ impl Refs { } } -fn find_refs_in_mod(ctx: &AssistContext<'_>, module: Module, visible_from: Module) -> Option<Refs> { - if !is_mod_visible_from(ctx, module, visible_from) { +fn find_refs_in_mod( + ctx: &AssistContext<'_>, + expandable: Expandable, + visible_from: Module, +) -> Option<Refs> { + if !is_expandable_visible_from(ctx, &expandable, visible_from) { return None; } - let module_scope = module.scope(ctx.db(), Some(visible_from)); - let refs = module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect(); - Some(Refs(refs)) + match expandable { + Expandable::Module(module) => { + let module_scope = module.scope(ctx.db(), Some(visible_from)); + let refs = + module_scope.into_iter().filter_map(|(n, d)| Ref::from_scope_def(n, d)).collect(); + Some(Refs(refs)) + } + Expandable::Enum(enm) => Some(Refs( + enm.variants(ctx.db()) + .into_iter() + .map(|v| Ref { visible_name: v.name(ctx.db()), def: Definition::Variant(v) }) + .collect(), + )), + } } -fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool { - match module.parent(ctx.db()) { - Some(parent) => { - module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into()) - && is_mod_visible_from(ctx, parent, from) +fn is_expandable_visible_from( + ctx: &AssistContext<'_>, + expandable: &Expandable, + from: Module, +) -> bool { + fn is_mod_visible_from(ctx: &AssistContext<'_>, module: Module, from: Module) -> bool { + match module.parent(ctx.db()) { + Some(parent) => { + module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into()) + && is_mod_visible_from(ctx, parent, from) + } + None => true, + } + } + + match expandable { + Expandable::Module(module) => match module.parent(ctx.db()) { + Some(parent) => { + module.visibility(ctx.db()).is_visible_from(ctx.db(), from.into()) + && is_mod_visible_from(ctx, parent, from) + } + None => true, + }, + Expandable::Enum(enm) => { + let module = enm.module(ctx.db()); + enm.visibility(ctx.db()).is_visible_from(ctx.db(), from.into()) + && is_mod_visible_from(ctx, module, from) } - 
None => true, } } @@ -897,4 +939,98 @@ struct Baz { ", ); } + + #[test] + fn test_support_for_enums() { + check_assist( + expand_glob_import, + r#" +mod foo { + pub enum Foo { + Bar, + Baz, + } +} + +use foo::Foo; +use foo::Foo::*$0; + +struct Strukt { + bar: Foo, +} + +fn main() { + let s: Strukt = Strukt { bar: Bar }; +}"#, + r#" +mod foo { + pub enum Foo { + Bar, + Baz, + } +} + +use foo::Foo; +use foo::Foo::Bar; + +struct Strukt { + bar: Foo, +} + +fn main() { + let s: Strukt = Strukt { bar: Bar }; +}"#, + ) + } + + #[test] + fn test_expanding_multiple_variants_at_once() { + check_assist( + expand_glob_import, + r#" +mod foo { + pub enum Foo { + Bar, + Baz, + } +} + +mod abc { + use super::foo; + use super::foo::Foo::*$0; + + struct Strukt { + baz: foo::Foo, + bar: foo::Foo, + } + + fn trying_calling() { + let s: Strukt = Strukt { bar: Bar , baz : Baz }; + } + +}"#, + r#" +mod foo { + pub enum Foo { + Bar, + Baz, + } +} + +mod abc { + use super::foo; + use super::foo::Foo::{Bar, Baz}; + + struct Strukt { + baz: foo::Foo, + bar: foo::Foo, + } + + fn trying_calling() { + let s: Strukt = Strukt { bar: Bar , baz : Baz }; + } + +}"#, + ) + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs index 2a67909e637..e9db38aca0f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_function.rs @@ -1360,14 +1360,15 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> St } format_to!(buf, "{expr}"); - let insert_comma = fun - .body - .parent() - .and_then(ast::MatchArm::cast) - .map_or(false, |it| it.comma_token().is_none()); + let parent_match_arm = fun.body.parent().and_then(ast::MatchArm::cast); + let insert_comma = parent_match_arm.as_ref().is_some_and(|it| it.comma_token().is_none()); + if insert_comma { buf.push(','); - } else if fun.ret_ty.is_unit() && (!fun.outliving_locals.is_empty() || !expr.is_block_like()) { + } else if parent_match_arm.is_none() + && fun.ret_ty.is_unit() + && (!fun.outliving_locals.is_empty() || !expr.is_block_like()) + { buf.push(';'); } buf @@ -4611,6 +4612,29 @@ fn $0fun_name() -> i32 { } "#, ); + + // Makes sure no semicolon is added for unit-valued match arms + check_assist( + extract_function, + r#" +fn main() { + match () { + _ => $0()$0, + } +} +"#, + r#" +fn main() { + match () { + _ => fun_name(), + } +} + +fn $0fun_name() { + () +} +"#, + ) } #[test] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs index d6c59a9c829..c9f272474e7 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/fix_visibility.rs @@ -1,11 +1,11 @@ use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef}; use ide_db::base_db::FileId; use syntax::{ - ast::{self, HasVisibility as _}, - AstNode, TextRange, TextSize, + ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _}, + AstNode, TextRange, }; -use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists}; +use crate::{AssistContext, AssistId, AssistKind, Assists}; // FIXME: this really should be a fix for diagnostic, rather than an assist. 
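For orientation before the `fix_visibility` hunks that follow, here is a hand-written sketch of what the reworked assist does from the user's side; the module and item names (`foo`, `Bar`) are made up for illustration and are not part of the patch. Before, the cursor sits on a path to an item that is not visible from the use site:

    mod foo {
        struct Bar; // not visible from the call site below
    }
    fn main() { let _ = foo::Bar$0; }

After invoking the assist, the referenced item gains the missing visibility, `pub(crate)` when the use site is in the same crate and `pub` otherwise (this follows the `make::visibility_pub_crate()` / `make::visibility_pub()` branch in the hunks below), and the cursor is left on the new visibility as a snippet tabstop:

    mod foo {
        $0pub(crate) struct Bar;
    }
    fn main() { let _ = foo::Bar; }

The behavioral outcome is unchanged by this patch; what the hunks below change is how the edit is built, moving from text-offset snippet insertion to mutating the syntax tree (`edit.make_mut`, `set_visibility`, `add_tabstop_before`).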
@@ -40,12 +40,16 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) let qualifier = path.qualifier()?; let name_ref = path.segment()?.name_ref()?; let qualifier_res = ctx.sema.resolve_path(&qualifier)?; - let PathResolution::Def(ModuleDef::Module(module)) = qualifier_res else { return None; }; + let PathResolution::Def(ModuleDef::Module(module)) = qualifier_res else { + return None; + }; let (_, def) = module .scope(ctx.db(), None) .into_iter() .find(|(name, _)| name.to_smol_str() == name_ref.text().as_str())?; - let ScopeDef::ModuleDef(def) = def else { return None; }; + let ScopeDef::ModuleDef(def) = def else { + return None; + }; let current_module = ctx.sema.scope(path.syntax())?.module(); let target_module = def.module(ctx.db())?; @@ -54,11 +58,13 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) return None; }; - let (offset, current_visibility, target, target_file, target_name) = - target_data_for_def(ctx.db(), def)?; + let (vis_owner, target, target_file, target_name) = target_data_for_def(ctx.db(), def)?; - let missing_visibility = - if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" }; + let missing_visibility = if current_module.krate() == target_module.krate() { + make::visibility_pub_crate() + } else { + make::visibility_pub() + }; let assist_label = match target_name { None => format!("Change visibility to {missing_visibility}"), @@ -67,23 +73,14 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) } }; - acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| { - builder.edit_file(target_file); - match ctx.config.snippet_cap { - Some(cap) => match current_visibility { - Some(current_visibility) => builder.replace_snippet( - cap, - current_visibility.syntax().text_range(), - format!("$0{missing_visibility}"), - ), - None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")), - }, - None => match current_visibility { - Some(current_visibility) => { - builder.replace(current_visibility.syntax().text_range(), missing_visibility) - } - None => builder.insert(offset, format!("{missing_visibility} ")), - }, + acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| { + edit.edit_file(target_file); + + let vis_owner = edit.make_mut(vis_owner); + vis_owner.set_visibility(missing_visibility.clone_for_update()); + + if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { + edit.add_tabstop_before(cap, vis); } }) } @@ -103,19 +100,22 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> let target_module = parent.module(ctx.db()); let in_file_source = record_field_def.source(ctx.db())?; - let (offset, current_visibility, target) = match in_file_source.value { + let (vis_owner, target) = match in_file_source.value { hir::FieldSource::Named(it) => { - let s = it.syntax(); - (vis_offset(s), it.visibility(), s.text_range()) + let range = it.syntax().text_range(); + (ast::AnyHasVisibility::new(it), range) } hir::FieldSource::Pos(it) => { - let s = it.syntax(); - (vis_offset(s), it.visibility(), s.text_range()) + let range = it.syntax().text_range(); + (ast::AnyHasVisibility::new(it), range) } }; - let missing_visibility = - if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" }; + let missing_visibility = if current_module.krate() == target_module.krate() { + make::visibility_pub_crate() + } else { + 
make::visibility_pub() + }; let target_file = in_file_source.file_id.original_file(ctx.db()); let target_name = record_field_def.name(ctx.db()); @@ -125,23 +125,14 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> target_name.display(ctx.db()) ); - acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| { - builder.edit_file(target_file); - match ctx.config.snippet_cap { - Some(cap) => match current_visibility { - Some(current_visibility) => builder.replace_snippet( - cap, - current_visibility.syntax().text_range(), - format!("$0{missing_visibility}"), - ), - None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")), - }, - None => match current_visibility { - Some(current_visibility) => { - builder.replace(current_visibility.syntax().text_range(), missing_visibility) - } - None => builder.insert(offset, format!("{missing_visibility} ")), - }, + acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| { + edit.edit_file(target_file); + + let vis_owner = edit.make_mut(vis_owner); + vis_owner.set_visibility(missing_visibility.clone_for_update()); + + if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { + edit.add_tabstop_before(cap, vis); } }) } @@ -149,11 +140,11 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> fn target_data_for_def( db: &dyn HirDatabase, def: hir::ModuleDef, -) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId, Option<hir::Name>)> { +) -> Option<(ast::AnyHasVisibility, TextRange, FileId, Option<hir::Name>)> { fn offset_target_and_file_id<S, Ast>( db: &dyn HirDatabase, x: S, - ) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId)> + ) -> Option<(ast::AnyHasVisibility, TextRange, FileId)> where S: HasSource<Ast = Ast>, Ast: AstNode + ast::HasVisibility, @@ -161,18 +152,12 @@ fn target_data_for_def( let source = x.source(db)?; let in_file_syntax = source.syntax(); let file_id = in_file_syntax.file_id; - let syntax = in_file_syntax.value; - let current_visibility = source.value.visibility(); - Some(( - vis_offset(syntax), - current_visibility, - syntax.text_range(), - file_id.original_file(db.upcast()), - )) + let range = in_file_syntax.value.text_range(); + Some((ast::AnyHasVisibility::new(source.value), range, file_id.original_file(db.upcast()))) } let target_name; - let (offset, current_visibility, target, target_file) = match def { + let (offset, target, target_file) = match def { hir::ModuleDef::Function(f) => { target_name = Some(f.name(db)); offset_target_and_file_id(db, f)? 
@@ -209,8 +194,8 @@ fn target_data_for_def( target_name = m.name(db); let in_file_source = m.declaration_source(db)?; let file_id = in_file_source.file_id.original_file(db.upcast()); - let syntax = in_file_source.value.syntax(); - (vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id) + let range = in_file_source.value.syntax().text_range(); + (ast::AnyHasVisibility::new(in_file_source.value), range, file_id) } // FIXME hir::ModuleDef::Macro(_) => return None, @@ -218,7 +203,7 @@ fn target_data_for_def( hir::ModuleDef::Variant(_) | hir::ModuleDef::BuiltinType(_) => return None, }; - Some((offset, current_visibility, target, target_file, target_name)) + Some((offset, target, target_file, target_name)) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs index b68c766e647..31fc69562c9 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -1,13 +1,17 @@ use std::collections::HashSet; use hir::{self, HasCrate, HasSource, HasVisibility}; -use syntax::ast::{self, make, AstNode, HasGenericParams, HasName, HasVisibility as _}; +use syntax::{ + ast::{ + self, edit_in_place::Indent, make, AstNode, HasGenericParams, HasName, HasVisibility as _, + }, + ted, +}; use crate::{ - utils::{convert_param_list_to_arg_list, find_struct_impl, render_snippet, Cursor}, + utils::{convert_param_list_to_arg_list, find_struct_impl}, AssistContext, AssistId, AssistKind, Assists, GroupLabel, }; -use syntax::ast::edit::AstNodeEdit; // Assist: generate_delegate_methods // @@ -88,13 +92,15 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' let adt = ast::Adt::Struct(strukt.clone()); let name = name.display(ctx.db()).to_string(); // if `find_struct_impl` returns None, that means that a function named `name` already exists. - let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else { continue; }; + let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else { + continue; + }; acc.add_group( &GroupLabel("Generate delegate methods…".to_owned()), AssistId("generate_delegate_methods", AssistKind::Generate), format!("Generate delegate for `{field_name}.{name}()`",), target, - |builder| { + |edit| { // Create the function let method_source = match method.source(ctx.db()) { Some(source) => source.value, @@ -133,36 +139,12 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' is_const, is_unsafe, ) - .indent(ast::edit::IndentLevel(1)) .clone_for_update(); - let cursor = Cursor::Before(f.syntax()); - - // Create or update an impl block, attach the function to it, - // then insert into our code. - match impl_def { - Some(impl_def) => { - // Remember where in our source our `impl` block lives. - let impl_def = impl_def.clone_for_update(); - let old_range = impl_def.syntax().text_range(); - - // Attach the function to the impl block - let assoc_items = impl_def.get_or_create_assoc_item_list(); - assoc_items.add_item(f.clone().into()); - - // Update the impl block. 
- match ctx.config.snippet_cap { - Some(cap) => { - let snippet = render_snippet(cap, impl_def.syntax(), cursor); - builder.replace_snippet(cap, old_range, snippet); - } - None => { - builder.replace(old_range, impl_def.syntax().to_string()); - } - } - } + // Get the impl to update, or create one if we need to. + let impl_def = match impl_def { + Some(impl_def) => edit.make_mut(impl_def), None => { - // Attach the function to the impl block let name = &strukt_name.to_string(); let params = strukt.generic_param_list(); let ty_params = params.clone(); @@ -176,24 +158,34 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<' None, ) .clone_for_update(); - let assoc_items = impl_def.get_or_create_assoc_item_list(); - assoc_items.add_item(f.clone().into()); + + // Fixup impl_def indentation + let indent = strukt.indent_level(); + impl_def.reindent_to(indent); // Insert the impl block. - match ctx.config.snippet_cap { - Some(cap) => { - let offset = strukt.syntax().text_range().end(); - let snippet = render_snippet(cap, impl_def.syntax(), cursor); - let snippet = format!("\n\n{snippet}"); - builder.insert_snippet(cap, offset, snippet); - } - None => { - let offset = strukt.syntax().text_range().end(); - let snippet = format!("\n\n{}", impl_def.syntax()); - builder.insert(offset, snippet); - } - } + let strukt = edit.make_mut(strukt.clone()); + ted::insert_all( + ted::Position::after(strukt.syntax()), + vec![ + make::tokens::whitespace(&format!("\n\n{indent}")).into(), + impl_def.syntax().clone().into(), + ], + ); + + impl_def } + }; + + // Fixup function indentation. + // FIXME: Should really be handled by `AssocItemList::add_item` + f.reindent_to(impl_def.indent_level() + 1); + + let assoc_items = impl_def.get_or_create_assoc_item_list(); + assoc_items.add_item(f.clone().into()); + + if let Some(cap) = ctx.config.snippet_cap { + edit.add_tabstop_before(cap, f) } }, )?; @@ -243,6 +235,45 @@ impl Person { } #[test] + fn test_generate_delegate_create_impl_block_match_indent() { + check_assist( + generate_delegate_methods, + r#" +mod indent { + struct Age(u8); + impl Age { + fn age(&self) -> u8 { + self.0 + } + } + + struct Person { + ag$0e: Age, + } +}"#, + r#" +mod indent { + struct Age(u8); + impl Age { + fn age(&self) -> u8 { + self.0 + } + } + + struct Person { + age: Age, + } + + impl Person { + $0fn age(&self) -> u8 { + self.age.age() + } + } +}"#, + ); + } + + #[test] fn test_generate_delegate_update_impl_block() { check_assist( generate_delegate_methods, @@ -280,6 +311,47 @@ impl Person { } #[test] + fn test_generate_delegate_update_impl_block_match_indent() { + check_assist( + generate_delegate_methods, + r#" +mod indent { + struct Age(u8); + impl Age { + fn age(&self) -> u8 { + self.0 + } + } + + struct Person { + ag$0e: Age, + } + + impl Person {} +}"#, + r#" +mod indent { + struct Age(u8); + impl Age { + fn age(&self) -> u8 { + self.0 + } + } + + struct Person { + age: Age, + } + + impl Person { + $0fn age(&self) -> u8 { + self.age.age() + } + } +}"#, + ); + } + + #[test] fn test_generate_delegate_tuple_struct() { check_assist( generate_delegate_methods, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs new file mode 100644 index 00000000000..185f47184d4 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_delegate_trait.rs @@ -0,0 +1,1049 @@ +use std::ops::Not; + +use crate::{ + 
assist_context::{AssistContext, Assists}, + utils::convert_param_list_to_arg_list, +}; +use either::Either; +use hir::{db::HirDatabase, HasVisibility}; +use ide_db::{ + assists::{AssistId, GroupLabel}, + path_transform::PathTransform, +}; +use syntax::{ + ast::{ + self, + edit::{self, AstNodeEdit}, + make, AssocItem, HasGenericParams, HasName, HasVisibility as astHasVisibility, Path, + }, + ted::{self, Position}, + AstNode, NodeOrToken, SyntaxKind, +}; + +// Assist: generate_delegate_trait +// +// Generate delegate trait implementation for `StructField`s. +// +// ``` +// trait SomeTrait { +// type T; +// fn fn_(arg: u32) -> u32; +// fn method_(&mut self) -> bool; +// } +// struct A; +// impl SomeTrait for A { +// type T = u32; +// +// fn fn_(arg: u32) -> u32 { +// 42 +// } +// +// fn method_(&mut self) -> bool { +// false +// } +// } +// struct B { +// a$0: A, +// } +// ``` +// -> +// ``` +// trait SomeTrait { +// type T; +// fn fn_(arg: u32) -> u32; +// fn method_(&mut self) -> bool; +// } +// struct A; +// impl SomeTrait for A { +// type T = u32; +// +// fn fn_(arg: u32) -> u32 { +// 42 +// } +// +// fn method_(&mut self) -> bool { +// false +// } +// } +// struct B { +// a: A, +// } +// +// impl SomeTrait for B { +// type T = <A as SomeTrait>::T; +// +// fn fn_(arg: u32) -> u32 { +// <A as SomeTrait>::fn_(arg) +// } +// +// fn method_(&mut self) -> bool { +// <A as SomeTrait>::method_( &mut self.a ) +// } +// } +// ``` +pub(crate) fn generate_delegate_trait(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let strukt = Struct::new(ctx.find_node_at_offset::<ast::Struct>()?)?; + + let field: Field = match ctx.find_node_at_offset::<ast::RecordField>() { + Some(field) => Field::new(&ctx, Either::Left(field))?, + None => { + let field = ctx.find_node_at_offset::<ast::TupleField>()?; + let field_list = ctx.find_node_at_offset::<ast::TupleFieldList>()?; + Field::new(&ctx, either::Right((field, field_list)))? + } + }; + + strukt.delegate(field, acc, ctx); + Some(()) +} + +/// A utility object that represents a struct's field. +struct Field { + name: String, + ty: ast::Type, + range: syntax::TextRange, + impls: Vec<Delegee>, +} + +impl Field { + pub(crate) fn new( + ctx: &AssistContext<'_>, + f: Either<ast::RecordField, (ast::TupleField, ast::TupleFieldList)>, + ) -> Option<Field> { + let db = ctx.sema.db; + let name: String; + let range: syntax::TextRange; + let ty: ast::Type; + + let module = ctx.sema.to_module_def(ctx.file_id())?; + + match f { + Either::Left(f) => { + name = f.name()?.to_string(); + ty = f.ty()?; + range = f.syntax().text_range(); + } + Either::Right((f, l)) => { + name = l.fields().position(|it| it == f)?.to_string(); + ty = f.ty()?; + range = f.syntax().text_range(); + } + }; + + let hir_ty = ctx.sema.resolve_type(&ty)?; + let type_impls = hir::Impl::all_for_type(db, hir_ty.clone()); + let mut impls = Vec::with_capacity(type_impls.len()); + let type_param = hir_ty.as_type_param(db); + + if let Some(tp) = type_param { + for tb in tp.trait_bounds(db) { + impls.push(Delegee::Bound(BoundCase(tb))); + } + }; + + for imp in type_impls { + match imp.trait_(db) { + Some(tr) => { + if tr.is_visible_from(db, module) { + impls.push(Delegee::Impls(ImplCase(tr, imp))) + } + } + None => (), + } + } + + Some(Field { name, ty, range, impls }) + } +} + +/// A field that we want to delegate can offer the enclosing struct +/// trait to implement in two ways. 
The first way is when the field +/// actually implements the trait and the second way is when the field +/// has a bound type parameter. We handle these cases in different ways +/// hence the enum. +enum Delegee { + Bound(BoundCase), + Impls(ImplCase), +} + +struct BoundCase(hir::Trait); +struct ImplCase(hir::Trait, hir::Impl); + +impl Delegee { + fn signature(&self, db: &dyn HirDatabase) -> String { + let mut s = String::new(); + + let (Delegee::Bound(BoundCase(it)) | Delegee::Impls(ImplCase(it, _))) = self; + + for m in it.module(db).path_to_root(db).iter().rev() { + if let Some(name) = m.name(db) { + s.push_str(&format!("{}::", name.to_smol_str())); + } + } + + s.push_str(&it.name(db).to_smol_str()); + s + } +} + +/// A utility struct that is used for the enclosing struct. +struct Struct { + strukt: ast::Struct, + name: ast::Name, +} + +impl Struct { + pub(crate) fn new(s: ast::Struct) -> Option<Self> { + let name = s.name()?; + Some(Struct { name, strukt: s }) + } + + pub(crate) fn delegate(&self, field: Field, acc: &mut Assists, ctx: &AssistContext<'_>) { + let db = ctx.db(); + for delegee in &field.impls { + // FIXME : We can omit already implemented impl_traits + // But we don't know what the &[hir::Type] argument should look like. + + // let trait_ = match delegee { + // Delegee::Bound(b) => b.0, + // Delegee::Impls(i) => i.1, + // }; + + // if self.hir_ty.impls_trait(db, trait_, &[]) { + // continue; + // } + let signature = delegee.signature(db); + let delegate = generate_impl(ctx, self, &field.ty, &field.name, delegee); + + acc.add_group( + &GroupLabel("Delegate trait impl for field...".to_owned()), + AssistId("generate_delegate_trait", ide_db::assists::AssistKind::Generate), + format!("Generate delegate impl `{}` for `{}`", signature, field.name), + field.range, + |builder| { + builder.insert( + self.strukt.syntax().text_range().end(), + format!("\n\n{}", delegate.syntax()), + ); + }, + ); + } + } +} + +fn generate_impl( + ctx: &AssistContext<'_>, + strukt: &Struct, + field_ty: &ast::Type, + field_name: &String, + delegee: &Delegee, +) -> ast::Impl { + let delegate: ast::Impl; + let source: ast::Impl; + let genpar: Option<ast::GenericParamList>; + let db = ctx.db(); + let base_path = make::path_from_text(&field_ty.to_string().as_str()); + let s_path = make::ext::ident_path(&strukt.name.to_string()); + + match delegee { + Delegee::Bound(delegee) => { + let in_file = ctx.sema.source(delegee.0.to_owned()).unwrap(); + let source: ast::Trait = in_file.value; + + delegate = make::impl_trait( + delegee.0.is_unsafe(db), + None, + None, + strukt.strukt.generic_param_list(), + None, + delegee.0.is_auto(db), + make::ty(&delegee.0.name(db).to_smol_str()), + make::ty_path(s_path), + source.where_clause(), + strukt.strukt.where_clause(), + None, + ) + .clone_for_update(); + + genpar = source.generic_param_list(); + let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); + let gen_args: String = + genpar.map_or_else(String::new, |params| params.to_generic_args().to_string()); + + // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths + let qualified_path_type = make::path_from_text(&format!( + "<{} as {}{}>", + base_path.to_string(), + delegee.0.name(db).to_smol_str(), + gen_args.to_string() + )); + + match source.assoc_item_list() { + Some(ai) => { + ai.assoc_items() + .filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) + .for_each(|item| { + let assoc = + process_assoc_item(item, qualified_path_type.clone(), &field_name); + if let Some(assoc) = 
assoc { + delegate_assoc_items.add_item(assoc); + } + }); + } + None => {} + }; + + let target = ctx.sema.scope(strukt.strukt.syntax()).unwrap(); + let source = ctx.sema.scope(source.syntax()).unwrap(); + + let transform = + PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone()); + transform.apply(&delegate.syntax()); + } + Delegee::Impls(delegee) => { + let in_file = ctx.sema.source(delegee.1.to_owned()).unwrap(); + source = in_file.value; + delegate = make::impl_trait( + delegee.0.is_unsafe(db), + source.generic_param_list(), + None, + None, + None, + delegee.0.is_auto(db), + make::ty(&delegee.0.name(db).to_smol_str()), + make::ty_path(s_path), + source.where_clause(), + strukt.strukt.where_clause(), + None, + ) + .clone_for_update(); + genpar = source.generic_param_list(); + let delegate_assoc_items = delegate.get_or_create_assoc_item_list(); + let gen_args: String = + genpar.map_or_else(String::new, |params| params.to_generic_args().to_string()); + + // Goto link : https://doc.rust-lang.org/reference/paths.html#qualified-paths + let qualified_path_type = make::path_from_text(&format!( + "<{} as {}{}>", + base_path.to_string().as_str(), + delegee.0.name(db).to_smol_str(), + gen_args.to_string().as_str() + )); + + source + .get_or_create_assoc_item_list() + .assoc_items() + .filter(|item| matches!(item, AssocItem::MacroCall(_)).not()) + .for_each(|item| { + let assoc = process_assoc_item(item, qualified_path_type.clone(), &field_name); + if let Some(assoc) = assoc { + delegate_assoc_items.add_item(assoc); + } + }); + + let target = ctx.sema.scope(strukt.strukt.syntax()).unwrap(); + let source = ctx.sema.scope(source.syntax()).unwrap(); + + let transform = + PathTransform::trait_impl(&target, &source, delegee.0, delegate.clone()); + transform.apply(&delegate.syntax()); + } + } + + delegate +} + +fn process_assoc_item( + item: syntax::ast::AssocItem, + qual_path_ty: ast::Path, + base_name: &str, +) -> Option<ast::AssocItem> { + match item { + AssocItem::Const(c) => Some(const_assoc_item(c, qual_path_ty)), + AssocItem::Fn(f) => Some(func_assoc_item(f, qual_path_ty, base_name)), + AssocItem::MacroCall(_) => { + // FIXME : Handle MacroCall case. + // return Some(macro_assoc_item(mac, qual_path_ty)); + None + } + AssocItem::TypeAlias(ta) => Some(ty_assoc_item(ta, qual_path_ty)), + } +} + +fn const_assoc_item(item: syntax::ast::Const, qual_path_ty: ast::Path) -> AssocItem { + let path_expr_segment = make::path_from_text(item.name().unwrap().to_string().as_str()); + + // We want rhs of the const assignment to be a qualified path + // The general case for const assigment can be found [here](`https://doc.rust-lang.org/reference/items/constant-items.html`) + // The qualified will have the following generic syntax : + // <Base as Trait<GenArgs>>::ConstName; + // FIXME : We can't rely on `make::path_qualified` for now but it would be nice to replace the following with it. 
+ // make::path_qualified(qual_path_ty, path_expr_segment.as_single_segment().unwrap()); + let qualpath = qualpath(qual_path_ty, path_expr_segment); + let inner = make::item_const( + item.visibility(), + item.name().unwrap(), + item.ty().unwrap(), + make::expr_path(qualpath), + ) + .clone_for_update(); + + AssocItem::Const(inner) +} + +fn func_assoc_item(item: syntax::ast::Fn, qual_path_ty: Path, base_name: &str) -> AssocItem { + let path_expr_segment = make::path_from_text(item.name().unwrap().to_string().as_str()); + let qualpath = qualpath(qual_path_ty, path_expr_segment); + + let call = match item.param_list() { + // Methods and funcs should be handled separately. + // We ask if the func has a `self` param. + Some(l) => match l.self_param() { + Some(slf) => { + let mut self_kw = make::expr_path(make::path_from_text("self")); + self_kw = make::expr_field(self_kw, base_name); + + let tail_expr_self = match slf.kind() { + ast::SelfParamKind::Owned => self_kw, + ast::SelfParamKind::Ref => make::expr_ref(self_kw, false), + ast::SelfParamKind::MutRef => make::expr_ref(self_kw, true), + }; + + let param_count = l.params().count(); + let args = convert_param_list_to_arg_list(l).clone_for_update(); + + if param_count > 0 { + // Add SelfParam and a TOKEN::COMMA + ted::insert_all( + Position::after(args.l_paren_token().unwrap()), + vec![ + NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()), + NodeOrToken::Token(make::token(SyntaxKind::WHITESPACE)), + NodeOrToken::Token(make::token(SyntaxKind::COMMA)), + ], + ); + } else { + // Add SelfParam only + ted::insert( + Position::after(args.l_paren_token().unwrap()), + NodeOrToken::Node(tail_expr_self.syntax().clone_for_update()), + ); + } + + make::expr_call(make::expr_path(qualpath), args) + } + None => make::expr_call(make::expr_path(qualpath), convert_param_list_to_arg_list(l)), + }, + None => make::expr_call( + make::expr_path(qualpath), + convert_param_list_to_arg_list(make::param_list(None, Vec::new())), + ), + } + .clone_for_update(); + + let body = make::block_expr(vec![], Some(call)).clone_for_update(); + let func = make::fn_( + item.visibility(), + item.name().unwrap(), + item.generic_param_list(), + item.where_clause(), + item.param_list().unwrap(), + body, + item.ret_type(), + item.async_token().is_some(), + item.const_token().is_some(), + item.unsafe_token().is_some(), + ) + .clone_for_update(); + + AssocItem::Fn(func.indent(edit::IndentLevel(1)).clone_for_update()) +} + +fn ty_assoc_item(item: syntax::ast::TypeAlias, qual_path_ty: Path) -> AssocItem { + let path_expr_segment = make::path_from_text(item.name().unwrap().to_string().as_str()); + let qualpath = qualpath(qual_path_ty, path_expr_segment); + let ty = make::ty_path(qualpath); + let ident = item.name().unwrap().to_string(); + + let alias = make::ty_alias( + ident.as_str(), + item.generic_param_list(), + None, + item.where_clause(), + Some((ty, None)), + ) + .clone_for_update(); + + AssocItem::TypeAlias(alias) +} + +fn qualpath(qual_path_ty: ast::Path, path_expr_seg: ast::Path) -> ast::Path { + make::path_from_text(&format!("{}::{}", qual_path_ty.to_string(), path_expr_seg.to_string())) +} + +#[cfg(test)] +mod test { + + use super::*; + use crate::tests::{check_assist, check_assist_not_applicable}; + + #[test] + fn test_tuple_struct_basic() { + check_assist( + generate_delegate_trait, + r#" +struct Base; +struct S(B$0ase); +trait Trait {} +impl Trait for Base {} +"#, + r#" +struct Base; +struct S(Base); + +impl Trait for S {} +trait Trait {} +impl Trait for Base {} +"#, + 
); + } + + #[test] + fn test_struct_struct_basic() { + check_assist( + generate_delegate_trait, + r#" +struct Base; +struct S { + ba$0se : Base +} +trait Trait {} +impl Trait for Base {} +"#, + r#" +struct Base; +struct S { + base : Base +} + +impl Trait for S {} +trait Trait {} +impl Trait for Base {} +"#, + ) + } + + // Structs need to be by def populated with fields + // However user can invoke this assist while still editing + // We therefore assert its non-applicability + #[test] + fn test_yet_empty_struct() { + check_assist_not_applicable( + generate_delegate_trait, + r#" +struct Base; +struct S { + $0 +} + +impl Trait for S {} +trait Trait {} +impl Trait for Base {} +"#, + ) + } + + #[test] + fn test_yet_unspecified_field_type() { + check_assist_not_applicable( + generate_delegate_trait, + r#" +struct Base; +struct S { + ab$0c +} + +impl Trait for S {} +trait Trait {} +impl Trait for Base {} +"#, + ); + } + + #[test] + fn test_unsafe_trait() { + check_assist( + generate_delegate_trait, + r#" +struct Base; +struct S { + ba$0se : Base +} +unsafe trait Trait {} +unsafe impl Trait for Base {} +"#, + r#" +struct Base; +struct S { + base : Base +} + +unsafe impl Trait for S {} +unsafe trait Trait {} +unsafe impl Trait for Base {} +"#, + ); + } + + #[test] + fn test_unsafe_trait_with_unsafe_fn() { + check_assist( + generate_delegate_trait, + r#" +struct Base; +struct S { + ba$0se: Base, +} + +unsafe trait Trait { + unsafe fn a_func(); + unsafe fn a_method(&self); +} +unsafe impl Trait for Base { + unsafe fn a_func() {} + unsafe fn a_method(&self) {} +} +"#, + r#" +struct Base; +struct S { + base: Base, +} + +unsafe impl Trait for S { + unsafe fn a_func() { + <Base as Trait>::a_func() + } + + unsafe fn a_method(&self) { + <Base as Trait>::a_method( &self.base ) + } +} + +unsafe trait Trait { + unsafe fn a_func(); + unsafe fn a_method(&self); +} +unsafe impl Trait for Base { + unsafe fn a_func() {} + unsafe fn a_method(&self) {} +} +"#, + ); + } + + #[test] + fn test_struct_with_where_clause() { + check_assist( + generate_delegate_trait, + r#" +trait AnotherTrait {} +struct S<T> +where + T: AnotherTrait, +{ + b$0 : T, +}"#, + r#" +trait AnotherTrait {} +struct S<T> +where + T: AnotherTrait, +{ + b : T, +} + +impl<T> AnotherTrait for S<T> +where + T: AnotherTrait, +{}"#, + ); + } + + #[test] + fn test_complex_without_where() { + check_assist( + generate_delegate_trait, + r#" +trait Trait<'a, T, const C: usize> { + type AssocType; + const AssocConst: usize; + fn assoc_fn(p: ()); + fn assoc_method(&self, p: ()); +} + +struct Base; +struct S { + field$0: Base +} + +impl<'a, T, const C: usize> Trait<'a, T, C> for Base { + type AssocType = (); + const AssocConst: usize = 0; + fn assoc_fn(p: ()) {} + fn assoc_method(&self, p: ()) {} +} +"#, + r#" +trait Trait<'a, T, const C: usize> { + type AssocType; + const AssocConst: usize; + fn assoc_fn(p: ()); + fn assoc_method(&self, p: ()); +} + +struct Base; +struct S { + field: Base +} + +impl<'a, T, const C: usize> Trait<'a, T, C> for S { + type AssocType = <Base as Trait<'a, T, C>>::AssocType; + + const AssocConst: usize = <Base as Trait<'a, T, C>>::AssocConst; + + fn assoc_fn(p: ()) { + <Base as Trait<'a, T, C>>::assoc_fn(p) + } + + fn assoc_method(&self, p: ()) { + <Base as Trait<'a, T, C>>::assoc_method( &self.field , p) + } +} + +impl<'a, T, const C: usize> Trait<'a, T, C> for Base { + type AssocType = (); + const AssocConst: usize = 0; + fn assoc_fn(p: ()) {} + fn assoc_method(&self, p: ()) {} +} +"#, + ); + } + + #[test] + fn test_complex_two() { 
+ check_assist( + generate_delegate_trait, + r" +trait AnotherTrait {} + +trait Trait<'a, T, const C: usize> { + type AssocType; + const AssocConst: usize; + fn assoc_fn(p: ()); + fn assoc_method(&self, p: ()); +} + +struct Base; +struct S { + fi$0eld: Base, +} + +impl<'b, C, const D: usize> Trait<'b, C, D> for Base +where + C: AnotherTrait, +{ + type AssocType = (); + const AssocConst: usize = 0; + fn assoc_fn(p: ()) {} + fn assoc_method(&self, p: ()) {} +}", + r#" +trait AnotherTrait {} + +trait Trait<'a, T, const C: usize> { + type AssocType; + const AssocConst: usize; + fn assoc_fn(p: ()); + fn assoc_method(&self, p: ()); +} + +struct Base; +struct S { + field: Base, +} + +impl<'b, C, const D: usize> Trait<'b, C, D> for S +where + C: AnotherTrait, +{ + type AssocType = <Base as Trait<'b, C, D>>::AssocType; + + const AssocConst: usize = <Base as Trait<'b, C, D>>::AssocConst; + + fn assoc_fn(p: ()) { + <Base as Trait<'b, C, D>>::assoc_fn(p) + } + + fn assoc_method(&self, p: ()) { + <Base as Trait<'b, C, D>>::assoc_method( &self.field , p) + } +} + +impl<'b, C, const D: usize> Trait<'b, C, D> for Base +where + C: AnotherTrait, +{ + type AssocType = (); + const AssocConst: usize = 0; + fn assoc_fn(p: ()) {} + fn assoc_method(&self, p: ()) {} +}"#, + ) + } + + #[test] + fn test_complex_three() { + check_assist( + generate_delegate_trait, + r#" +trait AnotherTrait {} +trait YetAnotherTrait {} + +struct StructImplsAll(); +impl AnotherTrait for StructImplsAll {} +impl YetAnotherTrait for StructImplsAll {} + +trait Trait<'a, T, const C: usize> { + type A; + const ASSOC_CONST: usize = C; + fn assoc_fn(p: ()); + fn assoc_method(&self, p: ()); +} + +struct Base; +struct S { + fi$0eld: Base, +} + +impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for Base +where + A: AnotherTrait, +{ + type A = i32; + + const ASSOC_CONST: usize = B; + + fn assoc_fn(p: ()) {} + + fn assoc_method(&self, p: ()) {} +} +"#, + r#" +trait AnotherTrait {} +trait YetAnotherTrait {} + +struct StructImplsAll(); +impl AnotherTrait for StructImplsAll {} +impl YetAnotherTrait for StructImplsAll {} + +trait Trait<'a, T, const C: usize> { + type A; + const ASSOC_CONST: usize = C; + fn assoc_fn(p: ()); + fn assoc_method(&self, p: ()); +} + +struct Base; +struct S { + field: Base, +} + +impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for S +where + A: AnotherTrait, +{ + type A = <Base as Trait<'b, A, B>>::A; + + const ASSOC_CONST: usize = <Base as Trait<'b, A, B>>::ASSOC_CONST; + + fn assoc_fn(p: ()) { + <Base as Trait<'b, A, B>>::assoc_fn(p) + } + + fn assoc_method(&self, p: ()) { + <Base as Trait<'b, A, B>>::assoc_method( &self.field , p) + } +} + +impl<'b, A: AnotherTrait + YetAnotherTrait, const B: usize> Trait<'b, A, B> for Base +where + A: AnotherTrait, +{ + type A = i32; + + const ASSOC_CONST: usize = B; + + fn assoc_fn(p: ()) {} + + fn assoc_method(&self, p: ()) {} +} +"#, + ) + } + + #[test] + fn test_type_bound() { + check_assist( + generate_delegate_trait, + r#" +trait AnotherTrait {} +struct S<T> +where + T: AnotherTrait, +{ + b$0: T, +}"#, + r#" +trait AnotherTrait {} +struct S<T> +where + T: AnotherTrait, +{ + b: T, +} + +impl<T> AnotherTrait for S<T> +where + T: AnotherTrait, +{}"#, + ); + } + + #[test] + fn test_docstring_example() { + check_assist( + generate_delegate_trait, + r#" +trait SomeTrait { + type T; + fn fn_(arg: u32) -> u32; + fn method_(&mut self) -> bool; +} +struct A; +impl SomeTrait for A { + type T = u32; + fn fn_(arg: u32) -> u32 { + 42 + } + fn 
method_(&mut self) -> bool { + false + } +} +struct B { + a$0: A, +} +"#, + r#" +trait SomeTrait { + type T; + fn fn_(arg: u32) -> u32; + fn method_(&mut self) -> bool; +} +struct A; +impl SomeTrait for A { + type T = u32; + fn fn_(arg: u32) -> u32 { + 42 + } + fn method_(&mut self) -> bool { + false + } +} +struct B { + a: A, +} + +impl SomeTrait for B { + type T = <A as SomeTrait>::T; + + fn fn_(arg: u32) -> u32 { + <A as SomeTrait>::fn_(arg) + } + + fn method_(&mut self) -> bool { + <A as SomeTrait>::method_( &mut self.a ) + } +} +"#, + ); + } + + #[test] + fn import_from_other_mod() { + check_assist( + generate_delegate_trait, + r#" +mod some_module { + pub trait SomeTrait { + type T; + fn fn_(arg: u32) -> u32; + fn method_(&mut self) -> bool; + } + pub struct A; + impl SomeTrait for A { + type T = u32; + + fn fn_(arg: u32) -> u32 { + 42 + } + + fn method_(&mut self) -> bool { + false + } + } +} + +struct B { + a$0: some_module::A, +}"#, + r#" +mod some_module { + pub trait SomeTrait { + type T; + fn fn_(arg: u32) -> u32; + fn method_(&mut self) -> bool; + } + pub struct A; + impl SomeTrait for A { + type T = u32; + + fn fn_(arg: u32) -> u32 { + 42 + } + + fn method_(&mut self) -> bool { + false + } + } +} + +struct B { + a: some_module::A, +} + +impl some_module::SomeTrait for B { + type T = <some_module::A as some_module::SomeTrait>::T; + + fn fn_(arg: u32) -> u32 { + <some_module::A as some_module::SomeTrait>::fn_(arg) + } + + fn method_(&mut self) -> bool { + <some_module::A as some_module::SomeTrait>::method_( &mut self.a ) + } +}"#, + ) + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs index 78ac2eb30e5..747f70f9f6f 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_derive.rs @@ -1,7 +1,6 @@ use syntax::{ - ast::{self, edit::IndentLevel, AstNode, HasAttrs}, - SyntaxKind::{COMMENT, WHITESPACE}, - TextSize, + ast::{self, edit_in_place::AttrsOwnerEdit, make, AstNode, HasAttrs}, + T, }; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -27,48 +26,37 @@ use crate::{AssistContext, AssistId, AssistKind, Assists}; pub(crate) fn generate_derive(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { let cap = ctx.config.snippet_cap?; let nominal = ctx.find_node_at_offset::<ast::Adt>()?; - let node_start = derive_insertion_offset(&nominal)?; let target = nominal.syntax().text_range(); - acc.add( - AssistId("generate_derive", AssistKind::Generate), - "Add `#[derive]`", - target, - |builder| { - let derive_attr = nominal - .attrs() - .filter_map(|x| x.as_simple_call()) - .filter(|(name, _arg)| name == "derive") - .map(|(_name, arg)| arg) - .next(); - match derive_attr { - None => { - let indent_level = IndentLevel::from_node(nominal.syntax()); - builder.insert_snippet( - cap, - node_start, - format!("#[derive($0)]\n{indent_level}"), - ); - } - Some(tt) => { - // Just move the cursor. 
- builder.insert_snippet( - cap, - tt.syntax().text_range().end() - TextSize::of(')'), - "$0", - ) - } - }; - }, - ) -} + acc.add(AssistId("generate_derive", AssistKind::Generate), "Add `#[derive]`", target, |edit| { + let derive_attr = nominal + .attrs() + .filter_map(|x| x.as_simple_call()) + .filter(|(name, _arg)| name == "derive") + .map(|(_name, arg)| arg) + .next(); + match derive_attr { + None => { + let derive = make::attr_outer(make::meta_token_tree( + make::ext::ident_path("derive"), + make::token_tree(T!['('], vec![]).clone_for_update(), + )) + .clone_for_update(); + + let nominal = edit.make_mut(nominal); + nominal.add_attr(derive.clone()); -// Insert `derive` after doc comments. -fn derive_insertion_offset(nominal: &ast::Adt) -> Option<TextSize> { - let non_ws_child = nominal - .syntax() - .children_with_tokens() - .find(|it| it.kind() != COMMENT && it.kind() != WHITESPACE)?; - Some(non_ws_child.text_range().start()) + edit.add_tabstop_before_token( + cap, + derive.meta().unwrap().token_tree().unwrap().r_paren_token().unwrap(), + ); + } + Some(tt) => { + // Just move the cursor. + let tt = edit.make_mut(tt); + edit.add_tabstop_before_token(cap, tt.right_delimiter_token().unwrap()); + } + }; + }) } #[cfg(test)] @@ -115,6 +103,38 @@ mod m { } #[test] + fn add_derive_existing_with_brackets() { + check_assist( + generate_derive, + " +#[derive[Clone]] +struct Foo { a: i32$0, } +", + " +#[derive[Clone$0]] +struct Foo { a: i32, } +", + ); + } + + #[test] + fn add_derive_existing_missing_delimiter() { + // since `#[derive]` isn't a simple attr call (i.e. `#[derive()]`) + // we don't consider it as a proper derive attr and generate a new + // one instead + check_assist( + generate_derive, + " +#[derive] +struct Foo { a: i32$0, }", + " +#[derive] +#[derive($0)] +struct Foo { a: i32, }", + ); + } + + #[test] fn add_derive_new_with_doc_comment() { check_assist( generate_derive, diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs index c579f6780db..8085572497a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_function.rs @@ -623,7 +623,9 @@ fn fn_generic_params( fn params_and_where_preds_in_scope( ctx: &AssistContext<'_>, ) -> (Vec<ast::GenericParam>, Vec<ast::WherePred>) { - let Some(body) = containing_body(ctx) else { return Default::default(); }; + let Some(body) = containing_body(ctx) else { + return Default::default(); + }; let mut generic_params = Vec::new(); let mut where_clauses = Vec::new(); diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs index dd6bbd84afc..9c9478b040d 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_getter_or_setter.rs @@ -1,4 +1,4 @@ -use ide_db::famous_defs::FamousDefs; +use ide_db::{famous_defs::FamousDefs, source_change::SourceChangeBuilder}; use stdx::{format_to, to_lower_snake_case}; use syntax::{ ast::{self, AstNode, HasName, HasVisibility}, @@ -10,6 +10,66 @@ use crate::{ AssistContext, AssistId, AssistKind, Assists, GroupLabel, }; +// Assist: generate_setter +// +// Generate a setter method. 
+// +// ``` +// struct Person { +// nam$0e: String, +// } +// ``` +// -> +// ``` +// struct Person { +// name: String, +// } +// +// impl Person { +// fn $0set_name(&mut self, name: String) { +// self.name = name; +// } +// } +// ``` +pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + // This if condition denotes two modes this assist can work in: + // - First is acting upon selection of record fields + // - Next is acting upon a single record field + // + // This is the only part where implementation diverges a bit, + // subsequent code is generic for both of these modes + + let (strukt, info_of_record_fields, mut fn_names) = extract_and_parse(ctx, AssistType::Set)?; + + // No record fields to do work on :( + if info_of_record_fields.len() == 0 { + return None; + } + + // Prepend set_ to fn names. + fn_names.iter_mut().for_each(|name| *name = format!("set_{}", name)); + + // Return early if we've found an existing fn + let impl_def = find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &fn_names)?; + + // Computing collective text range of all record fields in selected region + let target: TextRange = info_of_record_fields + .iter() + .map(|record_field_info| record_field_info.target) + .reduce(|acc, target| acc.cover(target))?; + + let setter_info = AssistInfo { impl_def, strukt, assist_type: AssistType::Set }; + + acc.add_group( + &GroupLabel("Generate getter/setter".to_owned()), + AssistId("generate_setter", AssistKind::Generate), + "Generate a setter method", + target, + |builder| build_source_change(builder, ctx, info_of_record_fields, setter_info), + ); + Some(()) +} + // Assist: generate_getter // // Generate a getter method. @@ -83,10 +143,16 @@ struct RecordFieldInfo { target: TextRange, } -struct GetterInfo { +struct AssistInfo { impl_def: Option<ast::Impl>, strukt: ast::Struct, - mutable: bool, + assist_type: AssistType, +} + +enum AssistType { + Get, + MutGet, + Set, } pub(crate) fn generate_getter_impl( @@ -94,40 +160,8 @@ pub(crate) fn generate_getter_impl( ctx: &AssistContext<'_>, mutable: bool, ) -> Option<()> { - // This if condition denotes two modes this assist can work in: - // - First is acting upon selection of record fields - // - Next is acting upon a single record field - // - // This is the only part where implementation diverges a bit, - // subsequent code is generic for both of these modes - - let (strukt, info_of_record_fields, fn_names) = if !ctx.has_empty_selection() { - // Selection Mode - let node = ctx.covering_element(); - - let node = match node { - syntax::NodeOrToken::Node(n) => n, - syntax::NodeOrToken::Token(t) => t.parent()?, - }; - - let parent_struct = node.ancestors().find_map(ast::Struct::cast)?; - - let (info_of_record_fields, field_names) = - extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), mutable)?; - - (parent_struct, info_of_record_fields, field_names) - } else { - // Single Record Field mode - let strukt = ctx.find_node_at_offset::<ast::Struct>()?; - let field = ctx.find_node_at_offset::<ast::RecordField>()?; - - let record_field_info = parse_record_field(field, mutable)?; - - let fn_name = record_field_info.fn_name.clone(); - - (strukt, vec![record_field_info], vec![fn_name]) - }; - + let (strukt, info_of_record_fields, fn_names) = + extract_and_parse(ctx, if mutable { AssistType::MutGet } else { AssistType::Get })?; // No record fields to do work on :( if info_of_record_fields.len() == 0 { return None; @@ -147,98 +181,30 @@ pub(crate) fn generate_getter_impl( 
.map(|record_field_info| record_field_info.target) .reduce(|acc, target| acc.cover(target))?; - let getter_info = GetterInfo { impl_def, strukt, mutable }; + let getter_info = AssistInfo { + impl_def, + strukt, + assist_type: if mutable { AssistType::MutGet } else { AssistType::Get }, + }; acc.add_group( &GroupLabel("Generate getter/setter".to_owned()), AssistId(id, AssistKind::Generate), label, target, - |builder| { - let record_fields_count = info_of_record_fields.len(); - - let mut buf = String::with_capacity(512); - - // Check if an impl exists - if let Some(impl_def) = &getter_info.impl_def { - // Check if impl is empty - if let Some(assoc_item_list) = impl_def.assoc_item_list() { - if assoc_item_list.assoc_items().next().is_some() { - // If not empty then only insert a new line - buf.push('\n'); - } - } - } - - for (i, record_field_info) in info_of_record_fields.iter().enumerate() { - // this buf inserts a newline at the end of a getter - // automatically, if one wants to add one more newline - // for separating it from other assoc items, that needs - // to be handled separately - let mut getter_buf = - generate_getter_from_info(ctx, &getter_info, record_field_info); - - // Insert `$0` only for last getter we generate - if i == record_fields_count - 1 { - if ctx.config.snippet_cap.is_some() { - getter_buf = getter_buf.replacen("fn ", "fn $0", 1); - } - } - - // For first element we do not merge with '\n', as - // that can be inserted by impl_def check defined - // above, for other cases which are: - // - // - impl exists but it empty, here we would ideally - // not want to keep newline between impl <struct> { - // and fn <fn-name>() { line - // - // - next if impl itself does not exist, in this - // case we ourselves generate a new impl and that - // again ends up with the same reasoning as above - // for not keeping newline - if i == 0 { - buf = buf + &getter_buf; - } else { - buf = buf + "\n" + &getter_buf; - } - - // We don't insert a new line at the end of - // last getter as it will end up in the end - // of an impl where we would not like to keep - // getter and end of impl ( i.e. 
`}` ) with an - // extra line for no reason - if i < record_fields_count - 1 { - buf = buf + "\n"; - } - } - - let start_offset = getter_info - .impl_def - .as_ref() - .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf)) - .unwrap_or_else(|| { - buf = generate_impl_text(&ast::Adt::Struct(getter_info.strukt.clone()), &buf); - getter_info.strukt.syntax().text_range().end() - }); - - match ctx.config.snippet_cap { - Some(cap) => builder.insert_snippet(cap, start_offset, buf), - None => builder.insert(start_offset, buf), - } - }, + |builder| build_source_change(builder, ctx, info_of_record_fields, getter_info), ) } fn generate_getter_from_info( ctx: &AssistContext<'_>, - info: &GetterInfo, + info: &AssistInfo, record_field_info: &RecordFieldInfo, ) -> String { let mut buf = String::with_capacity(512); let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{v} ")); - let (ty, body) = if info.mutable { + let (ty, body) = if matches!(info.assist_type, AssistType::MutGet) { ( format!("&mut {}", record_field_info.field_ty), format!("&mut self.{}", record_field_info.field_name), @@ -273,7 +239,7 @@ fn generate_getter_from_info( }}", vis, record_field_info.fn_name, - info.mutable.then_some("mut ").unwrap_or_default(), + matches!(info.assist_type, AssistType::MutGet).then_some("mut ").unwrap_or_default(), ty, body, ); @@ -281,10 +247,58 @@ fn generate_getter_from_info( buf } +fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> String { + let mut buf = String::with_capacity(512); + let strukt = &info.strukt; + let fn_name = &record_field_info.fn_name; + let field_ty = &record_field_info.field_ty; + let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} ")); + format_to!( + buf, + " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{ + self.{fn_name} = {fn_name}; + }}" + ); + + buf +} + +fn extract_and_parse( + ctx: &AssistContext<'_>, + assist_type: AssistType, +) -> Option<(ast::Struct, Vec<RecordFieldInfo>, Vec<String>)> { + // This if condition denotes two modes assists can work in: + // - First is acting upon selection of record fields + // - Next is acting upon a single record field + if !ctx.has_empty_selection() { + // Selection Mode + let node = ctx.covering_element(); + + let node = match node { + syntax::NodeOrToken::Node(n) => n, + syntax::NodeOrToken::Token(t) => t.parent()?, + }; + + let parent_struct = node.ancestors().find_map(ast::Struct::cast)?; + + let (info_of_record_fields, field_names) = + extract_and_parse_record_fields(&parent_struct, ctx.selection_trimmed(), &assist_type)?; + + return Some((parent_struct, info_of_record_fields, field_names)); + } + + // Single Record Field mode + let strukt = ctx.find_node_at_offset::<ast::Struct>()?; + let field = ctx.find_node_at_offset::<ast::RecordField>()?; + let record_field_info = parse_record_field(field, &assist_type)?; + let fn_name = record_field_info.fn_name.clone(); + Some((strukt, vec![record_field_info], vec![fn_name])) +} + fn extract_and_parse_record_fields( node: &ast::Struct, selection_range: TextRange, - mutable: bool, + assist_type: &AssistType, ) -> Option<(Vec<RecordFieldInfo>, Vec<String>)> { let mut field_names: Vec<String> = vec![]; let field_list = node.field_list()?; @@ -295,7 +309,7 @@ fn extract_and_parse_record_fields( .fields() .filter_map(|record_field| { if selection_range.contains_range(record_field.syntax().text_range()) { - let record_field_info = parse_record_field(record_field, mutable)?; + let record_field_info = 
parse_record_field(record_field, assist_type)?; field_names.push(record_field_info.fn_name.clone()); return Some(record_field_info); } @@ -316,12 +330,15 @@ fn extract_and_parse_record_fields( } } -fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<RecordFieldInfo> { +fn parse_record_field( + record_field: ast::RecordField, + assist_type: &AssistType, +) -> Option<RecordFieldInfo> { let field_name = record_field.name()?; let field_ty = record_field.ty()?; let mut fn_name = to_lower_snake_case(&field_name.to_string()); - if mutable { + if matches!(assist_type, AssistType::MutGet) { format_to!(fn_name, "_mut"); } @@ -330,8 +347,89 @@ fn parse_record_field(record_field: ast::RecordField, mutable: bool) -> Option<R Some(RecordFieldInfo { field_name, field_ty, fn_name, target }) } +fn build_source_change( + builder: &mut SourceChangeBuilder, + ctx: &AssistContext<'_>, + info_of_record_fields: Vec<RecordFieldInfo>, + assist_info: AssistInfo, +) { + let record_fields_count = info_of_record_fields.len(); + + let mut buf = String::with_capacity(512); + + // Check if an impl exists + if let Some(impl_def) = &assist_info.impl_def { + // Check if impl is empty + if let Some(assoc_item_list) = impl_def.assoc_item_list() { + if assoc_item_list.assoc_items().next().is_some() { + // If not empty then only insert a new line + buf.push('\n'); + } + } + } + + for (i, record_field_info) in info_of_record_fields.iter().enumerate() { + // this buf inserts a newline at the end of a getter + // automatically, if one wants to add one more newline + // for separating it from other assoc items, that needs + // to be handled separately + let mut getter_buf = match assist_info.assist_type { + AssistType::Set => generate_setter_from_info(&assist_info, record_field_info), + _ => generate_getter_from_info(ctx, &assist_info, record_field_info), + }; + + // Insert `$0` only for last getter we generate + if i == record_fields_count - 1 { + if ctx.config.snippet_cap.is_some() { + getter_buf = getter_buf.replacen("fn ", "fn $0", 1); + } + } + + // For first element we do not merge with '\n', as + // that can be inserted by impl_def check defined + // above, for other cases which are: + // + // - impl exists but it empty, here we would ideally + // not want to keep newline between impl <struct> { + // and fn <fn-name>() { line + // + // - next if impl itself does not exist, in this + // case we ourselves generate a new impl and that + // again ends up with the same reasoning as above + // for not keeping newline + if i == 0 { + buf = buf + &getter_buf; + } else { + buf = buf + "\n" + &getter_buf; + } + + // We don't insert a new line at the end of + // last getter as it will end up in the end + // of an impl where we would not like to keep + // getter and end of impl ( i.e. 
`}` ) with an + // extra line for no reason + if i < record_fields_count - 1 { + buf = buf + "\n"; + } + } + + let start_offset = assist_info + .impl_def + .as_ref() + .and_then(|impl_def| find_impl_block_end(impl_def.to_owned(), &mut buf)) + .unwrap_or_else(|| { + buf = generate_impl_text(&ast::Adt::Struct(assist_info.strukt.clone()), &buf); + assist_info.strukt.syntax().text_range().end() + }); + + match ctx.config.snippet_cap { + Some(cap) => builder.insert_snippet(cap, start_offset, buf), + None => builder.insert(start_offset, buf), + } +} + #[cfg(test)] -mod tests { +mod tests_getter { use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable}; use super::*; @@ -812,3 +910,105 @@ impl Context { ); } } + +#[cfg(test)] +mod tests_setter { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + fn check_not_applicable(ra_fixture: &str) { + check_assist_not_applicable(generate_setter, ra_fixture) + } + + #[test] + fn test_generate_setter_from_field() { + check_assist( + generate_setter, + r#" +struct Person<T: Clone> { + dat$0a: T, +}"#, + r#" +struct Person<T: Clone> { + data: T, +} + +impl<T: Clone> Person<T> { + fn $0set_data(&mut self, data: T) { + self.data = data; + } +}"#, + ); + } + + #[test] + fn test_generate_setter_already_implemented() { + check_not_applicable( + r#" +struct Person<T: Clone> { + dat$0a: T, +} + +impl<T: Clone> Person<T> { + fn set_data(&mut self, data: T) { + self.data = data; + } +}"#, + ); + } + + #[test] + fn test_generate_setter_from_field_with_visibility_marker() { + check_assist( + generate_setter, + r#" +pub(crate) struct Person<T: Clone> { + dat$0a: T, +}"#, + r#" +pub(crate) struct Person<T: Clone> { + data: T, +} + +impl<T: Clone> Person<T> { + pub(crate) fn $0set_data(&mut self, data: T) { + self.data = data; + } +}"#, + ); + } + + #[test] + fn test_multiple_generate_setter() { + check_assist( + generate_setter, + r#" +struct Context<T: Clone> { + data: T, + cou$0nt: usize, +} + +impl<T: Clone> Context<T> { + fn set_data(&mut self, data: T) { + self.data = data; + } +}"#, + r#" +struct Context<T: Clone> { + data: T, + count: usize, +} + +impl<T: Clone> Context<T> { + fn set_data(&mut self, data: T) { + self.data = data; + } + + fn $0set_count(&mut self, count: usize) { + self.count = count; + } +}"#, + ); + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs deleted file mode 100644 index 62f72df1c9d..00000000000 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_setter.rs +++ /dev/null @@ -1,175 +0,0 @@ -use stdx::{format_to, to_lower_snake_case}; -use syntax::ast::{self, AstNode, HasName, HasVisibility}; - -use crate::{ - utils::{find_impl_block_end, find_struct_impl, generate_impl_text}, - AssistContext, AssistId, AssistKind, Assists, GroupLabel, -}; - -// Assist: generate_setter -// -// Generate a setter method. 
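The getter/setter unification above funnels both assists through one `extract_and_parse` step plus an `AssistType` switch. A standalone sketch of that shape, using plain strings rather than the rust-analyzer AST and builder types, for orientation:

```rust
// Toy model of the refactor: one field-info extraction, one enum deciding
// which accessor text is produced for that field.
enum AssistType {
    Get,
    MutGet,
    Set,
}

struct FieldInfo {
    name: String,
    ty: String,
}

fn generate(assist: &AssistType, f: &FieldInfo) -> String {
    match assist {
        AssistType::Get => {
            format!("fn {n}(&self) -> &{t} {{ &self.{n} }}", n = f.name, t = f.ty)
        }
        AssistType::MutGet => {
            format!("fn {n}_mut(&mut self) -> &mut {t} {{ &mut self.{n} }}", n = f.name, t = f.ty)
        }
        AssistType::Set => {
            format!("fn set_{n}(&mut self, {n}: {t}) {{ self.{n} = {n}; }}", n = f.name, t = f.ty)
        }
    }
}

fn main() {
    let field = FieldInfo { name: "data".into(), ty: "T".into() };
    for assist in [AssistType::Get, AssistType::MutGet, AssistType::Set] {
        println!("{}", generate(&assist, &field));
    }
}
```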
-// -// ``` -// struct Person { -// nam$0e: String, -// } -// ``` -// -> -// ``` -// struct Person { -// name: String, -// } -// -// impl Person { -// fn set_name(&mut self, name: String) { -// self.name = name; -// } -// } -// ``` -pub(crate) fn generate_setter(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { - let strukt = ctx.find_node_at_offset::<ast::Struct>()?; - let field = ctx.find_node_at_offset::<ast::RecordField>()?; - - let field_name = field.name()?; - let field_ty = field.ty()?; - - // Return early if we've found an existing fn - let fn_name = to_lower_snake_case(&field_name.to_string()); - let impl_def = - find_struct_impl(ctx, &ast::Adt::Struct(strukt.clone()), &[format!("set_{fn_name}")])?; - - let target = field.syntax().text_range(); - acc.add_group( - &GroupLabel("Generate getter/setter".to_owned()), - AssistId("generate_setter", AssistKind::Generate), - "Generate a setter method", - target, - |builder| { - let mut buf = String::with_capacity(512); - - if impl_def.is_some() { - buf.push('\n'); - } - - let vis = strukt.visibility().map_or(String::new(), |v| format!("{v} ")); - format_to!( - buf, - " {vis}fn set_{fn_name}(&mut self, {fn_name}: {field_ty}) {{ - self.{fn_name} = {fn_name}; - }}" - ); - - let start_offset = impl_def - .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf)) - .unwrap_or_else(|| { - buf = generate_impl_text(&ast::Adt::Struct(strukt.clone()), &buf); - strukt.syntax().text_range().end() - }); - - builder.insert(start_offset, buf); - }, - ) -} - -#[cfg(test)] -mod tests { - use crate::tests::{check_assist, check_assist_not_applicable}; - - use super::*; - - fn check_not_applicable(ra_fixture: &str) { - check_assist_not_applicable(generate_setter, ra_fixture) - } - - #[test] - fn test_generate_setter_from_field() { - check_assist( - generate_setter, - r#" -struct Person<T: Clone> { - dat$0a: T, -}"#, - r#" -struct Person<T: Clone> { - data: T, -} - -impl<T: Clone> Person<T> { - fn set_data(&mut self, data: T) { - self.data = data; - } -}"#, - ); - } - - #[test] - fn test_generate_setter_already_implemented() { - check_not_applicable( - r#" -struct Person<T: Clone> { - dat$0a: T, -} - -impl<T: Clone> Person<T> { - fn set_data(&mut self, data: T) { - self.data = data; - } -}"#, - ); - } - - #[test] - fn test_generate_setter_from_field_with_visibility_marker() { - check_assist( - generate_setter, - r#" -pub(crate) struct Person<T: Clone> { - dat$0a: T, -}"#, - r#" -pub(crate) struct Person<T: Clone> { - data: T, -} - -impl<T: Clone> Person<T> { - pub(crate) fn set_data(&mut self, data: T) { - self.data = data; - } -}"#, - ); - } - - #[test] - fn test_multiple_generate_setter() { - check_assist( - generate_setter, - r#" -struct Context<T: Clone> { - data: T, - cou$0nt: usize, -} - -impl<T: Clone> Context<T> { - fn set_data(&mut self, data: T) { - self.data = data; - } -}"#, - r#" -struct Context<T: Clone> { - data: T, - count: usize, -} - -impl<T: Clone> Context<T> { - fn set_data(&mut self, data: T) { - self.data = data; - } - - fn set_count(&mut self, count: usize) { - self.count = count; - } -}"#, - ); - } -} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs new file mode 100644 index 00000000000..0f67380d12b --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -0,0 +1,429 @@ +use crate::assist_context::{AssistContext, Assists}; +use 
ide_db::assists::AssistId; +use syntax::{ + ast::{self, edit::IndentLevel, make, HasGenericParams, HasVisibility}, + ted, AstNode, SyntaxKind, +}; + +// NOTES : +// We generate erroneous code if a function is declared const (E0379) +// This is left to the user to correct as our only option is to remove the +// function completely which we should not be doing. + +// Assist: generate_trait_from_impl +// +// Generate trait for an already defined inherent impl and convert impl to a trait impl. +// +// ``` +// struct Foo<const N: usize>([i32; N]); +// +// macro_rules! const_maker { +// ($t:ty, $v:tt) => { +// const CONST: $t = $v; +// }; +// } +// +// impl<const N: usize> Fo$0o<N> { +// // Used as an associated constant. +// const CONST_ASSOC: usize = N * 4; +// +// fn create() -> Option<()> { +// Some(()) +// } +// +// const_maker! {i32, 7} +// } +// ``` +// -> +// ``` +// struct Foo<const N: usize>([i32; N]); +// +// macro_rules! const_maker { +// ($t:ty, $v:tt) => { +// const CONST: $t = $v; +// }; +// } +// +// trait ${0:TraitName}<const N: usize> { +// // Used as an associated constant. +// const CONST_ASSOC: usize = N * 4; +// +// fn create() -> Option<()>; +// +// const_maker! {i32, 7} +// } +// +// impl<const N: usize> ${0:TraitName}<N> for Foo<N> { +// // Used as an associated constant. +// const CONST_ASSOC: usize = N * 4; +// +// fn create() -> Option<()> { +// Some(()) +// } +// +// const_maker! {i32, 7} +// } +// ``` +pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + // Get AST Node + let impl_ast = ctx.find_node_at_offset::<ast::Impl>()?; + + // Check if cursor is to the left of assoc item list's L_CURLY. + // if no L_CURLY then return. + let l_curly = impl_ast.assoc_item_list()?.l_curly_token()?; + + let cursor_offset = ctx.offset(); + let l_curly_offset = l_curly.text_range(); + if cursor_offset >= l_curly_offset.start() { + return None; + } + + // If impl is not inherent then we don't really need to go any further. + if impl_ast.for_token().is_some() { + return None; + } + + let assoc_items = impl_ast.assoc_item_list()?; + let first_element = assoc_items.assoc_items().next(); + if first_element.is_none() { + // No reason for an assist. 
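For orientation, the end state `generate_trait_from_impl` aims for, once the user fills the `${0:TraitName}` placeholder (assumed here to be `Counter`), is ordinary compiling Rust:

```rust
// Assumed placeholder name `Counter`; the items of the former inherent impl
// now live in a trait, and the original impl implements that trait.
struct Foo(i32);

trait Counter {
    fn add(&mut self, x: i32);
}

impl Counter for Foo {
    fn add(&mut self, x: i32) {
        self.0 += x;
    }
}

fn main() {
    let mut f = Foo(0);
    f.add(2);
    assert_eq!(f.0, 2);
}
```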
+ return None; + } + + let impl_name = impl_ast.self_ty()?; + + acc.add( + AssistId("generate_trait_from_impl", ide_db::assists::AssistKind::Generate), + "Generate trait from impl", + impl_ast.syntax().text_range(), + |builder| { + let trait_items = assoc_items.clone_for_update(); + let impl_items = assoc_items.clone_for_update(); + + trait_items.assoc_items().for_each(|item| { + strip_body(&item); + remove_items_visibility(&item); + }); + + impl_items.assoc_items().for_each(|item| { + remove_items_visibility(&item); + }); + + let trait_ast = make::trait_( + false, + "NewTrait", + impl_ast.generic_param_list(), + impl_ast.where_clause(), + trait_items, + ); + + // Change `impl Foo` to `impl NewTrait for Foo` + let arg_list = if let Some(genpars) = impl_ast.generic_param_list() { + genpars.to_generic_args().to_string() + } else { + "".to_string() + }; + + if let Some(snippet_cap) = ctx.config.snippet_cap { + builder.replace_snippet( + snippet_cap, + impl_name.syntax().text_range(), + format!("${{0:TraitName}}{} for {}", arg_list, impl_name.to_string()), + ); + + // Insert trait before TraitImpl + builder.insert_snippet( + snippet_cap, + impl_ast.syntax().text_range().start(), + format!( + "{}\n\n{}", + trait_ast.to_string().replace("NewTrait", "${0:TraitName}"), + IndentLevel::from_node(impl_ast.syntax()) + ), + ); + } else { + builder.replace( + impl_name.syntax().text_range(), + format!("NewTrait{} for {}", arg_list, impl_name.to_string()), + ); + + // Insert trait before TraitImpl + builder.insert( + impl_ast.syntax().text_range().start(), + format!( + "{}\n\n{}", + trait_ast.to_string(), + IndentLevel::from_node(impl_ast.syntax()) + ), + ); + } + + builder.replace(assoc_items.syntax().text_range(), impl_items.to_string()); + }, + ); + + Some(()) +} + +/// `E0449` Trait items always share the visibility of their trait +fn remove_items_visibility(item: &ast::AssocItem) { + match item { + ast::AssocItem::Const(c) => { + if let Some(vis) = c.visibility() { + ted::remove(vis.syntax()); + } + } + ast::AssocItem::Fn(f) => { + if let Some(vis) = f.visibility() { + ted::remove(vis.syntax()); + } + } + ast::AssocItem::TypeAlias(t) => { + if let Some(vis) = t.visibility() { + ted::remove(vis.syntax()); + } + } + _ => (), + } +} + +fn strip_body(item: &ast::AssocItem) { + match item { + ast::AssocItem::Fn(f) => { + if let Some(body) = f.body() { + // In constrast to function bodies, we want to see no ws before a semicolon. + // So let's remove them if we see any. 
+ if let Some(prev) = body.syntax().prev_sibling_or_token() { + if prev.kind() == SyntaxKind::WHITESPACE { + ted::remove(prev); + } + } + + ted::replace(body.syntax(), make::tokens::semicolon()); + } + } + _ => (), + }; +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::tests::{check_assist, check_assist_no_snippet_cap, check_assist_not_applicable}; + + #[test] + fn test_trigger_when_cursor_on_header() { + check_assist_not_applicable( + generate_trait_from_impl, + r#" +struct Foo(f64); + +impl Foo { $0 + fn add(&mut self, x: f64) { + self.0 += x; + } +}"#, + ); + } + + #[test] + fn test_assoc_item_fn() { + check_assist_no_snippet_cap( + generate_trait_from_impl, + r#" +struct Foo(f64); + +impl F$0oo { + fn add(&mut self, x: f64) { + self.0 += x; + } +}"#, + r#" +struct Foo(f64); + +trait NewTrait { + fn add(&mut self, x: f64); +} + +impl NewTrait for Foo { + fn add(&mut self, x: f64) { + self.0 += x; + } +}"#, + ) + } + + #[test] + fn test_assoc_item_macro() { + check_assist_no_snippet_cap( + generate_trait_from_impl, + r#" +struct Foo; + +macro_rules! const_maker { + ($t:ty, $v:tt) => { + const CONST: $t = $v; + }; +} + +impl F$0oo { + const_maker! {i32, 7} +}"#, + r#" +struct Foo; + +macro_rules! const_maker { + ($t:ty, $v:tt) => { + const CONST: $t = $v; + }; +} + +trait NewTrait { + const_maker! {i32, 7} +} + +impl NewTrait for Foo { + const_maker! {i32, 7} +}"#, + ) + } + + #[test] + fn test_assoc_item_const() { + check_assist_no_snippet_cap( + generate_trait_from_impl, + r#" +struct Foo; + +impl F$0oo { + const ABC: i32 = 3; +}"#, + r#" +struct Foo; + +trait NewTrait { + const ABC: i32 = 3; +} + +impl NewTrait for Foo { + const ABC: i32 = 3; +}"#, + ) + } + + #[test] + fn test_impl_with_generics() { + check_assist_no_snippet_cap( + generate_trait_from_impl, + r#" +struct Foo<const N: usize>([i32; N]); + +impl<const N: usize> F$0oo<N> { + // Used as an associated constant. + const CONST: usize = N * 4; +} + "#, + r#" +struct Foo<const N: usize>([i32; N]); + +trait NewTrait<const N: usize> { + // Used as an associated constant. + const CONST: usize = N * 4; +} + +impl<const N: usize> NewTrait<N> for Foo<N> { + // Used as an associated constant. + const CONST: usize = N * 4; +} + "#, + ) + } + + #[test] + fn test_trait_items_should_not_have_vis() { + check_assist_no_snippet_cap( + generate_trait_from_impl, + r#" +struct Foo; + +impl F$0oo { + pub fn a_func() -> Option<()> { + Some(()) + } +}"#, + r#" +struct Foo; + +trait NewTrait { + fn a_func() -> Option<()>; +} + +impl NewTrait for Foo { + fn a_func() -> Option<()> { + Some(()) + } +}"#, + ) + } + + #[test] + fn test_empty_inherent_impl() { + check_assist_not_applicable( + generate_trait_from_impl, + r#" +impl Emp$0tyImpl{} +"#, + ) + } + + #[test] + fn test_not_top_level_impl() { + check_assist_no_snippet_cap( + generate_trait_from_impl, + r#" +mod a { + impl S$0 { + fn foo() {} + } +}"#, + r#" +mod a { + trait NewTrait { + fn foo(); + } + + impl NewTrait for S { + fn foo() {} + } +}"#, + ) + } + + #[test] + fn test_snippet_cap_is_some() { + check_assist( + generate_trait_from_impl, + r#" +struct Foo<const N: usize>([i32; N]); + +impl<const N: usize> F$0oo<N> { + // Used as an associated constant. + const CONST: usize = N * 4; +} + "#, + r#" +struct Foo<const N: usize>([i32; N]); + +trait ${0:TraitName}<const N: usize> { + // Used as an associated constant. + const CONST: usize = N * 4; +} + +impl<const N: usize> ${0:TraitName}<N> for Foo<N> { + // Used as an associated constant. 
+ const CONST: usize = N * 4; +} + "#, + ) + } +} diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs index 797180fa189..67036029f5e 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs @@ -15,7 +15,7 @@ use ide_db::{ }; use itertools::{izip, Itertools}; use syntax::{ - ast::{self, edit_in_place::Indent, HasArgList, PathExpr}, + ast::{self, edit::IndentLevel, edit_in_place::Indent, HasArgList, PathExpr}, ted, AstNode, NodeOrToken, SyntaxKind, }; @@ -306,7 +306,7 @@ fn inline( params: &[(ast::Pat, Option<ast::Type>, hir::Param)], CallInfo { node, arguments, generic_arg_list }: &CallInfo, ) -> ast::Expr { - let body = if sema.hir_file_for(fn_body.syntax()).is_macro() { + let mut body = if sema.hir_file_for(fn_body.syntax()).is_macro() { cov_mark::hit!(inline_call_defined_in_macro); if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) { body @@ -391,19 +391,19 @@ fn inline( } } + let mut let_stmts = Vec::new(); + // Inline parameter expressions or generate `let` statements depending on whether inlining works or not. - for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() { + for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments) { // izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors let usages: &[ast::PathExpr] = &usages; let expr: &ast::Expr = expr; - let insert_let_stmt = || { + let mut insert_let_stmt = || { let ty = sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone()); - if let Some(stmt_list) = body.stmt_list() { - stmt_list.push_front( - make::let_stmt(pat.clone(), ty, Some(expr.clone())).clone_for_update().into(), - ) - } + let_stmts.push( + make::let_stmt(pat.clone(), ty, Some(expr.clone())).clone_for_update().into(), + ); }; // check if there is a local var in the function that conflicts with parameter @@ -457,6 +457,24 @@ fn inline( } } + let is_async_fn = function.is_async(sema.db); + if is_async_fn { + cov_mark::hit!(inline_call_async_fn); + body = make::async_move_block_expr(body.statements(), body.tail_expr()).clone_for_update(); + + // Arguments should be evaluated outside the async block, and then moved into it. 
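To make the comment above concrete: hoisting the argument bindings out of the `async move` block preserves eager evaluation, since a plain call `foo(expensive())` evaluates its arguments before the body runs. A minimal illustration with invented names:

```rust
// `expensive()` runs at the call site, not when the future is first polled,
// matching the semantics of the original non-inlined call.
fn expensive() -> u32 {
    41
}

fn spawn<T>(_: T) {}

fn main() {
    spawn({
        let x = expensive(); // evaluated eagerly, before the future exists
        async move { x + 1 } // the binding is then moved into the block
    });
}
```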
+ if !let_stmts.is_empty() { + cov_mark::hit!(inline_call_async_fn_with_let_stmts); + body.indent(IndentLevel(1)); + body = make::block_expr(let_stmts, Some(body.into())).clone_for_update(); + } + } else if let Some(stmt_list) = body.stmt_list() { + ted::insert_all( + ted::Position::after(stmt_list.l_curly_token().unwrap()), + let_stmts.into_iter().map(|stmt| stmt.syntax().clone().into()).collect(), + ); + } + let original_indentation = match node { ast::CallableExpr::Call(it) => it.indent_level(), ast::CallableExpr::MethodCall(it) => it.indent_level(), @@ -464,7 +482,7 @@ fn inline( body.reindent_to(original_indentation); match body.tail_expr() { - Some(expr) if body.statements().next().is_none() => expr, + Some(expr) if !is_async_fn && body.statements().next().is_none() => expr, _ => match node .syntax() .parent() @@ -1353,4 +1371,107 @@ fn main() { "#, ); } + + #[test] + fn async_fn_single_expression() { + cov_mark::check!(inline_call_async_fn); + check_assist( + inline_call, + r#" +async fn bar(x: u32) -> u32 { x + 1 } +async fn foo(arg: u32) -> u32 { + bar(arg).await * 2 +} +fn spawn<T>(_: T) {} +fn main() { + spawn(foo$0(42)); +} +"#, + r#" +async fn bar(x: u32) -> u32 { x + 1 } +async fn foo(arg: u32) -> u32 { + bar(arg).await * 2 +} +fn spawn<T>(_: T) {} +fn main() { + spawn(async move { + bar(42).await * 2 + }); +} +"#, + ); + } + + #[test] + fn async_fn_multiple_statements() { + cov_mark::check!(inline_call_async_fn); + check_assist( + inline_call, + r#" +async fn bar(x: u32) -> u32 { x + 1 } +async fn foo(arg: u32) -> u32 { + bar(arg).await; + 42 +} +fn spawn<T>(_: T) {} +fn main() { + spawn(foo$0(42)); +} +"#, + r#" +async fn bar(x: u32) -> u32 { x + 1 } +async fn foo(arg: u32) -> u32 { + bar(arg).await; + 42 +} +fn spawn<T>(_: T) {} +fn main() { + spawn(async move { + bar(42).await; + 42 + }); +} +"#, + ); + } + + #[test] + fn async_fn_with_let_statements() { + cov_mark::check!(inline_call_async_fn); + cov_mark::check!(inline_call_async_fn_with_let_stmts); + check_assist( + inline_call, + r#" +async fn bar(x: u32) -> u32 { x + 1 } +async fn foo(x: u32, y: u32, z: &u32) -> u32 { + bar(x).await; + y + y + *z +} +fn spawn<T>(_: T) {} +fn main() { + let var = 42; + spawn(foo$0(var, var + 1, &var)); +} +"#, + r#" +async fn bar(x: u32) -> u32 { x + 1 } +async fn foo(x: u32, y: u32, z: &u32) -> u32 { + bar(x).await; + y + y + *z +} +fn spawn<T>(_: T) {} +fn main() { + let var = 42; + spawn({ + let y = var + 1; + let z: &u32 = &var; + async move { + bar(var).await; + y + y + *z + } + }); +} +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs index b6027eac55d..e1849eb71d5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/move_const_to_impl.rs @@ -54,7 +54,11 @@ pub(crate) fn move_const_to_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> // NOTE: We can technically provide this assist for default methods in trait definitions, but // it's somewhat complex to handle it correctly when the const's name conflicts with // supertrait's item. We may want to consider implementing it in the future. 
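As a reminder of what `move_const_to_impl` itself does (independent of the let-else reformatting in this hunk), running it hoists a constant from a method body into the surrounding impl; a hand-written equivalent with illustrative names:

```rust
// After the assist: the const is an associated item, reachable from any
// method of the impl as `Self::LIMIT` rather than being local to one body.
struct Queue(Vec<u8>);

impl Queue {
    const LIMIT: usize = 16; // previously declared inside `is_full`

    fn is_full(&self) -> bool {
        self.0.len() >= Self::LIMIT
    }
}

fn main() {
    assert!(!Queue(Vec::new()).is_full());
}
```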
- let AssocItemContainer::Impl(impl_) = ctx.sema.to_def(&parent_fn)?.as_assoc_item(db)?.container(db) else { return None; }; + let AssocItemContainer::Impl(impl_) = + ctx.sema.to_def(&parent_fn)?.as_assoc_item(db)?.container(db) + else { + return None; + }; if impl_.trait_(db).is_some() { return None; } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs index 23153b4c566..5cc110cf12b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/promote_local_to_const.rs @@ -8,13 +8,10 @@ use ide_db::{ use stdx::to_upper_snake_case; use syntax::{ ast::{self, make, HasName}, - AstNode, WalkEvent, + ted, AstNode, WalkEvent, }; -use crate::{ - assist_context::{AssistContext, Assists}, - utils::{render_snippet, Cursor}, -}; +use crate::assist_context::{AssistContext, Assists}; // Assist: promote_local_to_const // @@ -70,29 +67,33 @@ pub(crate) fn promote_local_to_const(acc: &mut Assists, ctx: &AssistContext<'_>) cov_mark::hit!(promote_local_non_const); return None; } - let target = let_stmt.syntax().text_range(); + acc.add( AssistId("promote_local_to_const", AssistKind::Refactor), "Promote local to constant", - target, - |builder| { + let_stmt.syntax().text_range(), + |edit| { let name = to_upper_snake_case(&name.to_string()); let usages = Definition::Local(local).usages(&ctx.sema).all(); if let Some(usages) = usages.references.get(&ctx.file_id()) { + let name = make::name_ref(&name); + for usage in usages { - builder.replace(usage.range, &name); + let Some(usage) = usage.name.as_name_ref().cloned() else { continue }; + let usage = edit.make_mut(usage); + ted::replace(usage.syntax(), name.clone_for_update().syntax()); } } - let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer); - match ctx.config.snippet_cap.zip(item.name()) { - Some((cap, name)) => builder.replace_snippet( - cap, - target, - render_snippet(cap, item.syntax(), Cursor::Before(name.syntax())), - ), - None => builder.replace(target, item.to_string()), + let item = make::item_const(None, make::name(&name), make::ty(&ty), initializer) + .clone_for_update(); + let let_stmt = edit.make_mut(let_stmt); + + if let Some((cap, name)) = ctx.config.snippet_cap.zip(item.name()) { + edit.add_tabstop_before(cap, name); } + + ted::replace(let_stmt.syntax(), item.syntax()); }, ) } @@ -158,6 +159,27 @@ fn foo() { } #[test] + fn multiple_uses() { + check_assist( + promote_local_to_const, + r" +fn foo() { + let x$0 = 0; + let y = x; + let z = (x, x, x, x); +} +", + r" +fn foo() { + const $0X: i32 = 0; + let y = X; + let z = (X, X, X, X); +} +", + ); + } + + #[test] fn not_applicable_non_const_meth_call() { cov_mark::check!(promote_local_non_const); check_assist_not_applicable( diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs index a5c7fea403d..f222b3eb903 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/pull_assignment_up.rs @@ -102,7 +102,7 @@ struct AssignmentsCollector<'a> { assignments: Vec<(ast::BinExpr, ast::Expr)>, } -impl<'a> AssignmentsCollector<'a> { +impl AssignmentsCollector<'_> { fn collect_match(&mut self, match_expr: &ast::MatchExpr) -> Option<()> { for arm in 
match_expr.match_arm_list()?.arms() { match arm.expr()? { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs index 3bdd795bea8..c03bc2f41d5 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/replace_derive_with_manual_impl.rs @@ -73,7 +73,7 @@ pub(crate) fn replace_derive_with_manual_impl( &ctx.sema, current_crate, NameToImport::exact_case_sensitive(path.segments().last()?.to_string()), - items_locator::AssocItemSearch::Exclude, + items_locator::AssocSearchMode::Exclude, Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) .filter_map(|item| match item.as_module_def()? { diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs index 26f3c192617..f235b554e61 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/unwrap_result_return_type.rs @@ -38,14 +38,18 @@ pub(crate) fn unwrap_result_return_type(acc: &mut Assists, ctx: &AssistContext<' }; let type_ref = &ret_type.ty()?; - let Some(hir::Adt::Enum(ret_enum)) = ctx.sema.resolve_type(type_ref)?.as_adt() else { return None; }; + let Some(hir::Adt::Enum(ret_enum)) = ctx.sema.resolve_type(type_ref)?.as_adt() else { + return None; + }; let result_enum = FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?; if ret_enum != result_enum { return None; } - let Some(ok_type) = unwrap_result_type(type_ref) else { return None; }; + let Some(ok_type) = unwrap_result_type(type_ref) else { + return None; + }; acc.add( AssistId("unwrap_result_return_type", AssistKind::RefactorRewrite), @@ -130,12 +134,16 @@ fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) { // Tries to extract `T` from `Result<T, E>`. fn unwrap_result_type(ty: &ast::Type) -> Option<ast::Type> { - let ast::Type::PathType(path_ty) = ty else { return None; }; + let ast::Type::PathType(path_ty) = ty else { + return None; + }; let path = path_ty.path()?; let segment = path.first_segment()?; let generic_arg_list = segment.generic_arg_list()?; let generic_args: Vec<_> = generic_arg_list.generic_args().collect(); - let ast::GenericArg::TypeArg(ok_type) = generic_args.first()? else { return None; }; + let ast::GenericArg::TypeArg(ok_type) = generic_args.first()? 
else { + return None; + }; ok_type.ty() } diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs index b6c489eb62e..24c3387457a 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/handlers/wrap_return_type_in_result.rs @@ -6,7 +6,7 @@ use ide_db::{ }; use syntax::{ ast::{self, make, Expr}, - match_ast, AstNode, + match_ast, ted, AstNode, }; use crate::{AssistContext, AssistId, AssistKind, Assists}; @@ -52,8 +52,8 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext< AssistId("wrap_return_type_in_result", AssistKind::RefactorRewrite), "Wrap return type in Result", type_ref.syntax().text_range(), - |builder| { - let body = ast::Expr::BlockExpr(body); + |edit| { + let body = edit.make_mut(ast::Expr::BlockExpr(body)); let mut exprs_to_wrap = Vec::new(); let tail_cb = &mut |e: &_| tail_cb_impl(&mut exprs_to_wrap, e); @@ -70,17 +70,24 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext< let ok_wrapped = make::expr_call( make::expr_path(make::ext::ident_path("Ok")), make::arg_list(iter::once(ret_expr_arg.clone())), - ); - builder.replace_ast(ret_expr_arg, ok_wrapped); + ) + .clone_for_update(); + ted::replace(ret_expr_arg.syntax(), ok_wrapped.syntax()); } - match ctx.config.snippet_cap { - Some(cap) => { - let snippet = format!("Result<{type_ref}, ${{0:_}}>"); - builder.replace_snippet(cap, type_ref.syntax().text_range(), snippet) - } - None => builder - .replace(type_ref.syntax().text_range(), format!("Result<{type_ref}, _>")), + let new_result_ty = + make::ext::ty_result(type_ref.clone(), make::ty_placeholder()).clone_for_update(); + let old_result_ty = edit.make_mut(type_ref.clone()); + + ted::replace(old_result_ty.syntax(), new_result_ty.syntax()); + + if let Some(cap) = ctx.config.snippet_cap { + let generic_args = new_result_ty + .syntax() + .descendants() + .find_map(ast::GenericArgList::cast) + .unwrap(); + edit.add_placeholder_snippet(cap, generic_args.generic_args().last().unwrap()); } }, ) diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs index 111753bf309..a82f1f9dd8b 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/lib.rs @@ -145,6 +145,7 @@ mod handlers { mod generate_constant; mod generate_default_from_enum_variant; mod generate_default_from_new; + mod generate_delegate_trait; mod generate_deref; mod generate_derive; mod generate_documentation_template; @@ -153,12 +154,12 @@ mod handlers { mod generate_enum_variant; mod generate_from_impl_for_enum; mod generate_function; - mod generate_getter; + mod generate_getter_or_setter; mod generate_impl; mod generate_is_empty_from_len; mod generate_new; - mod generate_setter; mod generate_delegate_methods; + mod generate_trait_from_impl; mod add_return_type; mod inline_call; mod inline_const_as_literal; @@ -251,6 +252,7 @@ mod handlers { generate_constant::generate_constant, generate_default_from_enum_variant::generate_default_from_enum_variant, generate_default_from_new::generate_default_from_new, + generate_delegate_trait::generate_delegate_trait, generate_derive::generate_derive, generate_documentation_template::generate_documentation_template, generate_documentation_template::generate_doc_example, @@ -264,6 
+266,7 @@ mod handlers { generate_impl::generate_trait_impl, generate_is_empty_from_len::generate_is_empty_from_len, generate_new::generate_new, + generate_trait_from_impl::generate_trait_from_impl, inline_call::inline_call, inline_call::inline_into_callers, inline_const_as_literal::inline_const_as_literal, @@ -334,9 +337,9 @@ mod handlers { extract_function::extract_function, extract_module::extract_module, // - generate_getter::generate_getter, - generate_getter::generate_getter_mut, - generate_setter::generate_setter, + generate_getter_or_setter::generate_getter, + generate_getter_or_setter::generate_getter_mut, + generate_getter_or_setter::generate_setter, generate_delegate_methods::generate_delegate_methods, generate_deref::generate_deref, // diff --git a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs index c097e073980..4d47a199b7c 100644 --- a/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs +++ b/src/tools/rust-analyzer/crates/ide-assists/src/tests/generated.rs @@ -1016,6 +1016,69 @@ impl Person { } #[test] +fn doctest_generate_delegate_trait() { + check_doc_test( + "generate_delegate_trait", + r#####" +trait SomeTrait { + type T; + fn fn_(arg: u32) -> u32; + fn method_(&mut self) -> bool; +} +struct A; +impl SomeTrait for A { + type T = u32; + + fn fn_(arg: u32) -> u32 { + 42 + } + + fn method_(&mut self) -> bool { + false + } +} +struct B { + a$0: A, +} +"#####, + r#####" +trait SomeTrait { + type T; + fn fn_(arg: u32) -> u32; + fn method_(&mut self) -> bool; +} +struct A; +impl SomeTrait for A { + type T = u32; + + fn fn_(arg: u32) -> u32 { + 42 + } + + fn method_(&mut self) -> bool { + false + } +} +struct B { + a: A, +} + +impl SomeTrait for B { + type T = <A as SomeTrait>::T; + + fn fn_(arg: u32) -> u32 { + <A as SomeTrait>::fn_(arg) + } + + fn method_(&mut self) -> bool { + <A as SomeTrait>::method_( &mut self.a ) + } +} +"#####, + ) +} + +#[test] fn doctest_generate_deref() { check_doc_test( "generate_deref", @@ -1429,7 +1492,7 @@ struct Person { } impl Person { - fn set_name(&mut self, name: String) { + fn $0set_name(&mut self, name: String) { self.name = name; } } @@ -1438,6 +1501,62 @@ impl Person { } #[test] +fn doctest_generate_trait_from_impl() { + check_doc_test( + "generate_trait_from_impl", + r#####" +struct Foo<const N: usize>([i32; N]); + +macro_rules! const_maker { + ($t:ty, $v:tt) => { + const CONST: $t = $v; + }; +} + +impl<const N: usize> Fo$0o<N> { + // Used as an associated constant. + const CONST_ASSOC: usize = N * 4; + + fn create() -> Option<()> { + Some(()) + } + + const_maker! {i32, 7} +} +"#####, + r#####" +struct Foo<const N: usize>([i32; N]); + +macro_rules! const_maker { + ($t:ty, $v:tt) => { + const CONST: $t = $v; + }; +} + +trait ${0:TraitName}<const N: usize> { + // Used as an associated constant. + const CONST_ASSOC: usize = N * 4; + + fn create() -> Option<()>; + + const_maker! {i32, 7} +} + +impl<const N: usize> ${0:TraitName}<N> for Foo<N> { + // Used as an associated constant. + const CONST_ASSOC: usize = N * 4; + + fn create() -> Option<()> { + Some(()) + } + + const_maker! 
{i32, 7} +} +"#####, + ) +} + +#[test] fn doctest_generate_trait_impl() { check_doc_test( "generate_trait_impl", diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs index d3e75c6da47..1e09894059d 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/completions/mod_.rs @@ -42,7 +42,7 @@ pub(crate) fn complete_mod( } let module_definition_file = - current_module.definition_source(ctx.db).file_id.original_file(ctx.db); + current_module.definition_source_file_id(ctx.db).original_file(ctx.db); let source_root = ctx.db.source_root(ctx.db.file_source_root(module_definition_file)); let directory_to_look_for_submodules = directory_to_look_for_submodules( current_module, diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs index 7b145f3c14e..3cb65b2729a 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context.rs @@ -463,7 +463,9 @@ impl CompletionContext<'_> { /// Checks whether this item should be listed in regards to stability. Returns `true` if we should. pub(crate) fn check_stability(&self, attrs: Option<&hir::Attrs>) -> bool { - let Some(attrs) = attrs else { return true; }; + let Some(attrs) = attrs else { + return true; + }; !attrs.is_unstable() || self.is_nightly } diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs index cc5221cfccb..3ea50659030 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs @@ -243,10 +243,7 @@ fn analyze( let Some(name_like) = find_node_at_offset(&speculative_file, offset) else { let analysis = if let Some(original) = ast::String::cast(original_token.clone()) { - CompletionAnalysis::String { - original, - expanded: ast::String::cast(self_token.clone()), - } + CompletionAnalysis::String { original, expanded: ast::String::cast(self_token.clone()) } } else { // Fix up trailing whitespace problem // #[attr(foo = $0 @@ -736,7 +733,7 @@ fn classify_name_ref( return None; } let parent = match ast::Fn::cast(parent.parent()?) 
{ - Some(x) => x.param_list(), + Some(it) => it.param_list(), None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(), }; diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs index 106d4e1e52f..2eaa42040a0 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/lib.rs @@ -231,7 +231,7 @@ pub fn resolve_completion_edits( &sema, current_crate, NameToImport::exact_case_sensitive(imported_name), - items_locator::AssocItemSearch::Include, + items_locator::AssocSearchMode::Include, Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()), ); let import = items_with_name diff --git a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs index c97144b61b6..1aaf3958726 100644 --- a/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs +++ b/src/tools/rust-analyzer/crates/ide-completion/src/tests/attribute.rs @@ -300,6 +300,7 @@ struct Foo; at deprecated at derive macro derive at derive(…) + at derive_const macro derive_const at doc = "…" at doc(alias = "…") at doc(hidden) diff --git a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml index 4e75dc4dba5..faec7420681 100644 --- a/src/tools/rust-analyzer/crates/ide-db/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-db/Cargo.toml @@ -21,7 +21,7 @@ once_cell = "1.17.0" either = "1.7.0" itertools = "0.10.5" arrayvec = "0.7.2" -indexmap = "1.9.1" +indexmap = "2.0.0" memchr = "2.5.0" triomphe.workspace = true nohash-hasher.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs index 0dd544d0ae2..a0b05c87ae7 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/apply_change.rs @@ -99,8 +99,8 @@ impl RootDatabase { hir::db::AstIdMapQuery hir::db::ParseMacroExpansionQuery hir::db::InternMacroCallQuery - hir::db::MacroArgTextQuery - hir::db::MacroDefQuery + hir::db::MacroArgNodeQuery + hir::db::DeclMacroExpanderQuery hir::db::MacroExpandQuery hir::db::ExpandProcMacroQuery hir::db::HygieneFrameQuery diff --git a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs index eba9d8afc40..1eb8f00020b 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/helpers.rs @@ -9,7 +9,10 @@ use syntax::{ AstToken, SyntaxKind, SyntaxToken, TokenAtOffset, }; -use crate::{defs::Definition, generated, RootDatabase}; +use crate::{ + defs::{Definition, IdentClass}, + generated, RootDatabase, +}; pub fn item_name(db: &RootDatabase, item: ItemInNs) -> Option<Name> { match item { @@ -109,3 +112,16 @@ pub fn is_editable_crate(krate: Crate, db: &RootDatabase) -> bool { let source_root_id = db.file_source_root(root_file); !db.source_root(source_root_id).is_library } + +pub fn get_definition( + sema: &Semantics<'_, RootDatabase>, + token: SyntaxToken, +) -> Option<Definition> { + for token in sema.descend_into_macros(token) { + let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); + if let Some(&[x]) = def.as_deref() { + return Some(x); + } + } + None +} diff --git a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs 
b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs index 901d592c691..e52dc356775 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/imports/import_assets.rs @@ -13,7 +13,7 @@ use syntax::{ use crate::{ helpers::item_name, - items_locator::{self, AssocItemSearch, DEFAULT_QUERY_SEARCH_LIMIT}, + items_locator::{self, AssocSearchMode, DEFAULT_QUERY_SEARCH_LIMIT}, RootDatabase, }; @@ -317,7 +317,7 @@ fn path_applicable_imports( // * improve the associated completion item matching and/or scoring to ensure no noisy completions appear // // see also an ignored test under FIXME comment in the qualify_path.rs module - AssocItemSearch::Exclude, + AssocSearchMode::Exclude, Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) .filter_map(|item| { @@ -334,7 +334,7 @@ fn path_applicable_imports( sema, current_crate, path_candidate.name.clone(), - AssocItemSearch::Include, + AssocSearchMode::Include, Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) .filter_map(|item| { @@ -483,7 +483,7 @@ fn trait_applicable_items( sema, current_crate, trait_candidate.assoc_item_name.clone(), - AssocItemSearch::AssocItemsOnly, + AssocSearchMode::AssocItemsOnly, Some(DEFAULT_QUERY_SEARCH_LIMIT.inner()), ) .filter_map(|input| item_as_assoc(db, input)) diff --git a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs index 46f1353e2e1..3f7a3ec2d0f 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/items_locator.rs @@ -3,10 +3,7 @@ //! The main reason for this module to exist is the fact that project's items and dependencies' items //! are located in different caches, with different APIs. use either::Either; -use hir::{ - import_map::{self, ImportKind}, - AsAssocItem, Crate, ItemInNs, Semantics, -}; +use hir::{import_map, AsAssocItem, Crate, ItemInNs, Semantics}; use limit::Limit; use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase}; @@ -14,23 +11,14 @@ use crate::{imports::import_assets::NameToImport, symbol_index, RootDatabase}; /// A value to use, when uncertain which limit to pick. pub static DEFAULT_QUERY_SEARCH_LIMIT: Limit = Limit::new(40); -/// Three possible ways to search for the name in associated and/or other items. -#[derive(Debug, Clone, Copy)] -pub enum AssocItemSearch { - /// Search for the name in both associated and other items. - Include, - /// Search for the name in other items only. - Exclude, - /// Search for the name in the associated items only. - AssocItemsOnly, -} +pub use import_map::AssocSearchMode; /// Searches for importable items with the given name in the crate and its dependencies. 
pub fn items_with_name<'a>( sema: &'a Semantics<'_, RootDatabase>, krate: Crate, name: NameToImport, - assoc_item_search: AssocItemSearch, + assoc_item_search: AssocSearchMode, limit: Option<usize>, ) -> impl Iterator<Item = ItemInNs> + 'a { let _p = profile::span("items_with_name").detail(|| { @@ -48,9 +36,7 @@ pub fn items_with_name<'a>( let mut local_query = symbol_index::Query::new(exact_name.clone()); local_query.exact(); - let external_query = import_map::Query::new(exact_name) - .name_only() - .search_mode(import_map::SearchMode::Equals); + let external_query = import_map::Query::new(exact_name); ( local_query, @@ -61,17 +47,8 @@ pub fn items_with_name<'a>( let mut local_query = symbol_index::Query::new(fuzzy_search_string.clone()); let mut external_query = import_map::Query::new(fuzzy_search_string.clone()) - .search_mode(import_map::SearchMode::Fuzzy) - .name_only(); - match assoc_item_search { - AssocItemSearch::Include => {} - AssocItemSearch::Exclude => { - external_query = external_query.exclude_import_kind(ImportKind::AssociatedItem); - } - AssocItemSearch::AssocItemsOnly => { - external_query = external_query.assoc_items_only(); - } - } + .fuzzy() + .assoc_search_mode(assoc_item_search); if fuzzy_search_string.to_lowercase() != fuzzy_search_string { local_query.case_sensitive(); @@ -93,13 +70,15 @@ pub fn items_with_name<'a>( fn find_items<'a>( sema: &'a Semantics<'_, RootDatabase>, krate: Crate, - assoc_item_search: AssocItemSearch, + assoc_item_search: AssocSearchMode, local_query: symbol_index::Query, external_query: import_map::Query, ) -> impl Iterator<Item = ItemInNs> + 'a { let _p = profile::span("find_items"); let db = sema.db; + // NOTE: `external_query` includes `assoc_item_search`, so we don't need to + // filter on our own. 
let external_importables = krate.query_external_importables(db, external_query).map(|external_importable| { match external_importable { @@ -112,18 +91,15 @@ fn find_items<'a>( let local_results = local_query .search(&symbol_index::crate_symbols(db, krate)) .into_iter() - .filter_map(|local_candidate| match local_candidate.def { - hir::ModuleDef::Macro(macro_def) => Some(ItemInNs::Macros(macro_def)), - def => Some(ItemInNs::from(def)), + .filter(move |candidate| match assoc_item_search { + AssocSearchMode::Include => true, + AssocSearchMode::Exclude => candidate.def.as_assoc_item(db).is_none(), + AssocSearchMode::AssocItemsOnly => candidate.def.as_assoc_item(db).is_some(), + }) + .map(|local_candidate| match local_candidate.def { + hir::ModuleDef::Macro(macro_def) => ItemInNs::Macros(macro_def), + def => ItemInNs::from(def), }); - external_importables.chain(local_results).filter(move |&item| match assoc_item_search { - AssocItemSearch::Include => true, - AssocItemSearch::Exclude => !is_assoc_item(item, sema.db), - AssocItemSearch::AssocItemsOnly => is_assoc_item(item, sema.db), - }) -} - -fn is_assoc_item(item: ItemInNs, db: &RootDatabase) -> bool { - item.as_module_def().and_then(|module_def| module_def.as_assoc_item(db)).is_some() + external_importables.chain(local_results) } diff --git a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs index ff1a20f03f4..f27ed485d81 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/lib.rs @@ -200,8 +200,8 @@ impl RootDatabase { hir_db::AstIdMapQuery // hir_db::ParseMacroExpansionQuery // hir_db::InternMacroCallQuery - hir_db::MacroArgTextQuery - hir_db::MacroDefQuery + hir_db::MacroArgNodeQuery + hir_db::DeclMacroExpanderQuery // hir_db::MacroExpandQuery hir_db::ExpandProcMacroQuery hir_db::HygieneFrameQuery diff --git a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs index 73e6a920ee4..1d0cb426a57 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/path_transform.rs @@ -195,7 +195,7 @@ fn postorder(item: &SyntaxNode) -> impl Iterator<Item = SyntaxNode> { }) } -impl<'a> Ctx<'a> { +impl Ctx<'_> { fn apply(&self, item: &SyntaxNode) { // `transform_path` may update a node's parent and that would break the // tree traversal. Thus all paths in the tree are collected into a vec diff --git a/src/tools/rust-analyzer/crates/ide-db/src/search.rs b/src/tools/rust-analyzer/crates/ide-db/src/search.rs index e8ff107bd49..f3c0f79c589 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/search.rs @@ -149,10 +149,8 @@ impl SearchScope { let mut to_visit: Vec<_> = module.children(db).collect(); while let Some(module) = to_visit.pop() { - if let InFile { file_id, value: ModuleSource::SourceFile(_) } = - module.definition_source(db) - { - entries.insert(file_id.original_file(db), None); + if let Some(file_id) = module.as_source_file_id(db) { + entries.insert(file_id, None); } to_visit.extend(module.children(db)); } @@ -340,21 +338,21 @@ pub struct FindUsages<'a> { search_self_mod: bool, } -impl<'a> FindUsages<'a> { +impl FindUsages<'_> { /// Enable searching for `Self` when the definition is a type or `self` for modules. 
- pub fn include_self_refs(mut self) -> FindUsages<'a> { + pub fn include_self_refs(mut self) -> Self { self.include_self_kw_refs = def_to_ty(self.sema, &self.def); self.search_self_mod = true; self } /// Limit the search to a given [`SearchScope`]. - pub fn in_scope(self, scope: SearchScope) -> FindUsages<'a> { + pub fn in_scope(self, scope: SearchScope) -> Self { self.set_scope(Some(scope)) } /// Limit the search to a given [`SearchScope`]. - pub fn set_scope(mut self, scope: Option<SearchScope>) -> FindUsages<'a> { + pub fn set_scope(mut self, scope: Option<SearchScope>) -> Self { assert!(self.scope.is_none()); self.scope = scope; self diff --git a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs index 061fb0f05cf..fad0ca51a02 100644 --- a/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs +++ b/src/tools/rust-analyzer/crates/ide-db/src/source_change.rs @@ -9,7 +9,10 @@ use crate::SnippetCap; use base_db::{AnchoredPathBuf, FileId}; use nohash_hasher::IntMap; use stdx::never; -use syntax::{algo, ast, ted, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize}; +use syntax::{ + algo, ast, ted, AstNode, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, + TextSize, +}; use text_edit::{TextEdit, TextEditBuilder}; #[derive(Default, Debug, Clone)] @@ -237,19 +240,31 @@ impl SourceChangeBuilder { /// Adds a tabstop snippet to place the cursor before `node` pub fn add_tabstop_before(&mut self, _cap: SnippetCap, node: impl AstNode) { assert!(node.syntax().parent().is_some()); - self.add_snippet(PlaceSnippet::Before(node.syntax().clone())); + self.add_snippet(PlaceSnippet::Before(node.syntax().clone().into())); } /// Adds a tabstop snippet to place the cursor after `node` pub fn add_tabstop_after(&mut self, _cap: SnippetCap, node: impl AstNode) { assert!(node.syntax().parent().is_some()); - self.add_snippet(PlaceSnippet::After(node.syntax().clone())); + self.add_snippet(PlaceSnippet::After(node.syntax().clone().into())); + } + + /// Adds a tabstop snippet to place the cursor before `token` + pub fn add_tabstop_before_token(&mut self, _cap: SnippetCap, token: SyntaxToken) { + assert!(token.parent().is_some()); + self.add_snippet(PlaceSnippet::Before(token.clone().into())); + } + + /// Adds a tabstop snippet to place the cursor after `token` + pub fn add_tabstop_after_token(&mut self, _cap: SnippetCap, token: SyntaxToken) { + assert!(token.parent().is_some()); + self.add_snippet(PlaceSnippet::After(token.clone().into())); } /// Adds a snippet to move the cursor selected over `node` pub fn add_placeholder_snippet(&mut self, _cap: SnippetCap, node: impl AstNode) { assert!(node.syntax().parent().is_some()); - self.add_snippet(PlaceSnippet::Over(node.syntax().clone())) + self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into())) } fn add_snippet(&mut self, snippet: PlaceSnippet) { @@ -282,38 +297,40 @@ impl From<FileSystemEdit> for SourceChange { } enum PlaceSnippet { - /// Place a tabstop before a node - Before(SyntaxNode), - /// Place a tabstop before a node - After(SyntaxNode), - /// Place a placeholder snippet in place of the node - Over(SyntaxNode), + /// Place a tabstop before an element + Before(SyntaxElement), + /// Place a tabstop before an element + After(SyntaxElement), + /// Place a placeholder snippet in place of the element + Over(SyntaxElement), } impl PlaceSnippet { - /// Places the snippet before or over a node with the given tab stop index + /// Places the snippet before or over an 
element with the given tab stop index fn place(self, order: usize) { - // ensure the target node is still attached + // ensure the target element is still attached match &self { - PlaceSnippet::Before(node) | PlaceSnippet::After(node) | PlaceSnippet::Over(node) => { - // node should still be in the tree, but if it isn't + PlaceSnippet::Before(element) + | PlaceSnippet::After(element) + | PlaceSnippet::Over(element) => { + // element should still be in the tree, but if it isn't // then it's okay to just ignore this place - if stdx::never!(node.parent().is_none()) { + if stdx::never!(element.parent().is_none()) { return; } } } match self { - PlaceSnippet::Before(node) => { - ted::insert_raw(ted::Position::before(&node), Self::make_tab_stop(order)); + PlaceSnippet::Before(element) => { + ted::insert_raw(ted::Position::before(&element), Self::make_tab_stop(order)); } - PlaceSnippet::After(node) => { - ted::insert_raw(ted::Position::after(&node), Self::make_tab_stop(order)); + PlaceSnippet::After(element) => { + ted::insert_raw(ted::Position::after(&element), Self::make_tab_stop(order)); } - PlaceSnippet::Over(node) => { - let position = ted::Position::before(&node); - node.detach(); + PlaceSnippet::Over(element) => { + let position = ted::Position::before(&element); + element.detach(); let snippet = ast::SourceFile::parse(&format!("${{{order}:_}}")) .syntax_node() @@ -321,7 +338,7 @@ impl PlaceSnippet { let placeholder = snippet.descendants().find_map(ast::UnderscoreExpr::cast).unwrap(); - ted::replace(placeholder.syntax(), node); + ted::replace(placeholder.syntax(), element); ted::insert_raw(position, snippet); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml index e18624fcc26..14aa3940199 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/Cargo.toml @@ -16,6 +16,7 @@ cov-mark = "2.0.0-pre.1" either = "1.7.0" itertools = "0.10.5" serde_json = "1.0.86" +once_cell = "1.17.0" # local deps profile.workspace = true diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs index 30576c71fb7..49100540388 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/break_outside_of_loop.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: break-outside-of-loop // @@ -13,10 +13,11 @@ pub(crate) fn break_outside_of_loop( let construct = if d.is_break { "break" } else { "continue" }; format!("{construct} outside of loop") }; - Diagnostic::new( - "break-outside-of-loop", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0268"), message, - ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, + d.expr.clone().map(|it| it.into()), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs index d2f27664d6f..e1e5db91c54 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/expected_function.rs @@ -1,6 +1,6 @@ use hir::HirDisplay; -use crate::{Diagnostic, 
DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: expected-function // @@ -9,10 +9,11 @@ pub(crate) fn expected_function( ctx: &DiagnosticsContext<'_>, d: &hir::ExpectedFunction, ) -> Diagnostic { - Diagnostic::new( - "expected-function", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0618"), format!("expected function, found {}", d.found.display(ctx.sema.db)), - ctx.sema.diagnostics_display_range(d.call.clone().map(|it| it.into())).range, + d.call.clone().map(|it| it.into()), ) .experimental() } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs index 2b710536250..3b69640af9b 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/field_shorthand.rs @@ -5,7 +5,7 @@ use ide_db::{base_db::FileId, source_change::SourceChange}; use syntax::{ast, match_ast, AstNode, SyntaxNode}; use text_edit::TextEdit; -use crate::{fix, Diagnostic, Severity}; +use crate::{fix, Diagnostic, DiagnosticCode}; pub(crate) fn field_shorthand(acc: &mut Vec<Diagnostic>, file_id: FileId, node: &SyntaxNode) { match_ast! { @@ -46,14 +46,17 @@ fn check_expr_field_shorthand( let field_range = record_field.syntax().text_range(); acc.push( - Diagnostic::new("use-field-shorthand", "Shorthand struct initialization", field_range) - .severity(Severity::WeakWarning) - .with_fixes(Some(vec![fix( - "use_expr_field_shorthand", - "Use struct shorthand initialization", - SourceChange::from_text_edit(file_id, edit), - field_range, - )])), + Diagnostic::new( + DiagnosticCode::Clippy("redundant_field_names"), + "Shorthand struct initialization", + field_range, + ) + .with_fixes(Some(vec![fix( + "use_expr_field_shorthand", + "Use struct shorthand initialization", + SourceChange::from_text_edit(file_id, edit), + field_range, + )])), ); } } @@ -87,14 +90,17 @@ fn check_pat_field_shorthand( let field_range = record_pat_field.syntax().text_range(); acc.push( - Diagnostic::new("use-field-shorthand", "Shorthand struct pattern", field_range) - .severity(Severity::WeakWarning) - .with_fixes(Some(vec![fix( - "use_pat_field_shorthand", - "Use struct field shorthand", - SourceChange::from_text_edit(file_id, edit), - field_range, - )])), + Diagnostic::new( + DiagnosticCode::Clippy("redundant_field_names"), + "Shorthand struct pattern", + field_range, + ) + .with_fixes(Some(vec![fix( + "use_pat_field_shorthand", + "Use struct field shorthand", + SourceChange::from_text_edit(file_id, edit), + field_range, + )])), ); } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs index f558b7256a4..9eb763d3e2c 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -1,7 +1,7 @@ use cfg::DnfExpr; use stdx::format_to; -use crate::{Diagnostic, DiagnosticsContext, Severity}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; // Diagnostic: inactive-code // @@ -27,13 +27,12 @@ pub(crate) fn inactive_code( format_to!(message, ": {}", inactive); } } - + // FIXME: This shouldn't be a diagnostic let res = Diagnostic::new( - "inactive-code", + DiagnosticCode::Ra("inactive-code", Severity::WeakWarning), message, 
ctx.sema.diagnostics_display_range(d.node.clone()).range, ) - .severity(Severity::WeakWarning) .with_unused(true); Some(res) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs index 72af9ebfcbb..4afb4db03bd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incoherent_impl.rs @@ -1,17 +1,17 @@ use hir::InFile; -use crate::{Diagnostic, DiagnosticsContext, Severity}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: incoherent-impl // // This diagnostic is triggered if the targe type of an impl is from a foreign crate. pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentImpl) -> Diagnostic { - Diagnostic::new( - "incoherent-impl", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0210"), format!("cannot define inherent `impl` for foreign type"), - ctx.sema.diagnostics_display_range(InFile::new(d.file_id, d.impl_.clone().into())).range, + InFile::new(d.file_id, d.impl_.clone().into()), ) - .severity(Severity::Error) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs index 90279e14536..92fd4f71ca5 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -1,4 +1,4 @@ -use hir::{db::ExpandDatabase, InFile}; +use hir::{db::ExpandDatabase, CaseType, InFile}; use ide_db::{assists::Assist, defs::NameClass}; use syntax::AstNode; @@ -6,23 +6,29 @@ use crate::{ // references::rename::rename_with_semantics, unresolved_fix, Diagnostic, + DiagnosticCode, DiagnosticsContext, - Severity, }; // Diagnostic: incorrect-ident-case // // This diagnostic is triggered if an item name doesn't follow https://doc.rust-lang.org/1.0.0/style/style/naming/README.html[Rust naming convention]. pub(crate) fn incorrect_case(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Diagnostic { - Diagnostic::new( - "incorrect-ident-case", + let code = match d.expected_case { + CaseType::LowerSnakeCase => DiagnosticCode::RustcLint("non_snake_case"), + CaseType::UpperSnakeCase => DiagnosticCode::RustcLint("non_upper_case_globals"), + // The name is lying. It also covers variants, traits, ... + CaseType::UpperCamelCase => DiagnosticCode::RustcLint("non_camel_case_types"), + }; + Diagnostic::new_with_syntax_node_ptr( + ctx, + code, format!( "{} `{}` should have {} name, e.g. `{}`", d.ident_type, d.ident_text, d.expected_case, d.suggested_text ), - ctx.sema.diagnostics_display_range(InFile::new(d.file, d.ident.clone().into())).range, + InFile::new(d.file, d.ident.clone().into()), ) - .severity(Severity::WeakWarning) .with_fixes(fixes(ctx, d)) } @@ -149,7 +155,7 @@ impl TestStruct { check_diagnostics( r#" fn FOO() {} -// ^^^ 💡 weak: Function `FOO` should have snake_case name, e.g. `foo` +// ^^^ 💡 warn: Function `FOO` should have snake_case name, e.g. `foo` "#, ); check_fix(r#"fn FOO$0() {}"#, r#"fn foo() {}"#); @@ -160,7 +166,7 @@ fn FOO() {} check_diagnostics( r#" fn NonSnakeCaseName() {} -// ^^^^^^^^^^^^^^^^ 💡 weak: Function `NonSnakeCaseName` should have snake_case name, e.g. 
`non_snake_case_name` +// ^^^^^^^^^^^^^^^^ 💡 warn: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name` "#, ); } @@ -170,10 +176,10 @@ fn NonSnakeCaseName() {} check_diagnostics( r#" fn foo(SomeParam: u8) {} - // ^^^^^^^^^ 💡 weak: Parameter `SomeParam` should have snake_case name, e.g. `some_param` + // ^^^^^^^^^ 💡 warn: Parameter `SomeParam` should have snake_case name, e.g. `some_param` fn foo2(ok_param: &str, CAPS_PARAM: u8) {} - // ^^^^^^^^^^ 💡 weak: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param` + // ^^^^^^^^^^ 💡 warn: Parameter `CAPS_PARAM` should have snake_case name, e.g. `caps_param` "#, ); } @@ -184,9 +190,9 @@ fn foo2(ok_param: &str, CAPS_PARAM: u8) {} r#" fn foo() { let SOME_VALUE = 10; - // ^^^^^^^^^^ 💡 weak: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value` + // ^^^^^^^^^^ 💡 warn: Variable `SOME_VALUE` should have snake_case name, e.g. `some_value` let AnotherValue = 20; - // ^^^^^^^^^^^^ 💡 weak: Variable `AnotherValue` should have snake_case name, e.g. `another_value` + // ^^^^^^^^^^^^ 💡 warn: Variable `AnotherValue` should have snake_case name, e.g. `another_value` } "#, ); @@ -197,10 +203,10 @@ fn foo() { check_diagnostics( r#" struct non_camel_case_name {} - // ^^^^^^^^^^^^^^^^^^^ 💡 weak: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName` + // ^^^^^^^^^^^^^^^^^^^ 💡 warn: Structure `non_camel_case_name` should have CamelCase name, e.g. `NonCamelCaseName` struct SCREAMING_CASE {} - // ^^^^^^^^^^^^^^ 💡 weak: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase` + // ^^^^^^^^^^^^^^ 💡 warn: Structure `SCREAMING_CASE` should have CamelCase name, e.g. `ScreamingCase` "#, ); } @@ -219,7 +225,7 @@ struct AABB {} check_diagnostics( r#" struct SomeStruct { SomeField: u8 } - // ^^^^^^^^^ 💡 weak: Field `SomeField` should have snake_case name, e.g. `some_field` + // ^^^^^^^^^ 💡 warn: Field `SomeField` should have snake_case name, e.g. `some_field` "#, ); } @@ -229,10 +235,10 @@ struct SomeStruct { SomeField: u8 } check_diagnostics( r#" enum some_enum { Val(u8) } - // ^^^^^^^^^ 💡 weak: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum` + // ^^^^^^^^^ 💡 warn: Enum `some_enum` should have CamelCase name, e.g. `SomeEnum` enum SOME_ENUM {} - // ^^^^^^^^^ 💡 weak: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum` + // ^^^^^^^^^ 💡 warn: Enum `SOME_ENUM` should have CamelCase name, e.g. `SomeEnum` "#, ); } @@ -251,7 +257,7 @@ enum AABB {} check_diagnostics( r#" enum SomeEnum { SOME_VARIANT(u8) } - // ^^^^^^^^^^^^ 💡 weak: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant` + // ^^^^^^^^^^^^ 💡 warn: Variant `SOME_VARIANT` should have CamelCase name, e.g. `SomeVariant` "#, ); } @@ -261,7 +267,7 @@ enum SomeEnum { SOME_VARIANT(u8) } check_diagnostics( r#" const some_weird_const: u8 = 10; - // ^^^^^^^^^^^^^^^^ 💡 weak: Constant `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST` + // ^^^^^^^^^^^^^^^^ 💡 warn: Constant `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST` "#, ); } @@ -271,7 +277,7 @@ const some_weird_const: u8 = 10; check_diagnostics( r#" static some_weird_const: u8 = 10; - // ^^^^^^^^^^^^^^^^ 💡 weak: Static variable `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_WEIRD_CONST` + // ^^^^^^^^^^^^^^^^ 💡 warn: Static variable `some_weird_const` should have UPPER_SNAKE_CASE name, e.g. 
`SOME_WEIRD_CONST` "#, ); } @@ -281,13 +287,13 @@ static some_weird_const: u8 = 10; check_diagnostics( r#" struct someStruct; - // ^^^^^^^^^^ 💡 weak: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct` + // ^^^^^^^^^^ 💡 warn: Structure `someStruct` should have CamelCase name, e.g. `SomeStruct` impl someStruct { fn SomeFunc(&self) { - // ^^^^^^^^ 💡 weak: Function `SomeFunc` should have snake_case name, e.g. `some_func` + // ^^^^^^^^ 💡 warn: Function `SomeFunc` should have snake_case name, e.g. `some_func` let WHY_VAR_IS_CAPS = 10; - // ^^^^^^^^^^^^^^^ 💡 weak: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps` + // ^^^^^^^^^^^^^^^ 💡 warn: Variable `WHY_VAR_IS_CAPS` should have snake_case name, e.g. `why_var_is_caps` } } "#, @@ -319,7 +325,7 @@ enum Option { Some, None } fn main() { match Option::None { SOME_VAR @ None => (), - // ^^^^^^^^ 💡 weak: Variable `SOME_VAR` should have snake_case name, e.g. `some_var` + // ^^^^^^^^ 💡 warn: Variable `SOME_VAR` should have snake_case name, e.g. `some_var` Some => (), } } @@ -461,7 +467,7 @@ mod CheckNonstandardStyle { #[allow(bad_style)] mod CheckBadStyle { - fn HiImABadFnName() {} + struct fooo; } mod F { @@ -483,4 +489,60 @@ pub static SomeStatic: u8 = 10; "#, ); } + + #[test] + fn deny_attributes() { + check_diagnostics( + r#" +#[deny(non_snake_case)] +fn NonSnakeCaseName(some_var: u8) -> u8 { + //^^^^^^^^^^^^^^^^ 💡 error: Function `NonSnakeCaseName` should have snake_case name, e.g. `non_snake_case_name` + // cov_flags generated output from elsewhere in this file + extern "C" { + #[no_mangle] + static lower_case: u8; + } + + let OtherVar = some_var + 1; + //^^^^^^^^ 💡 error: Variable `OtherVar` should have snake_case name, e.g. `other_var` + OtherVar +} + +#[deny(nonstandard_style)] +mod CheckNonstandardStyle { + fn HiImABadFnName() {} + //^^^^^^^^^^^^^^ 💡 error: Function `HiImABadFnName` should have snake_case name, e.g. `hi_im_abad_fn_name` +} + +#[deny(warnings)] +mod CheckBadStyle { + struct fooo; + //^^^^ 💡 error: Structure `fooo` should have CamelCase name, e.g. `Fooo` +} + +mod F { + #![deny(non_snake_case)] + fn CheckItWorksWithModAttr() {} + //^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: Function `CheckItWorksWithModAttr` should have snake_case name, e.g. `check_it_works_with_mod_attr` +} + +#[deny(non_snake_case, non_camel_case_types)] +pub struct some_type { + //^^^^^^^^^ 💡 error: Structure `some_type` should have CamelCase name, e.g. `SomeType` + SOME_FIELD: u8, + //^^^^^^^^^^ 💡 error: Field `SOME_FIELD` should have snake_case name, e.g. `some_field` + SomeField: u16, + //^^^^^^^^^ 💡 error: Field `SomeField` should have snake_case name, e.g. `some_field` +} + +#[deny(non_upper_case_globals)] +pub const some_const: u8 = 10; + //^^^^^^^^^^ 💡 error: Constant `some_const` should have UPPER_SNAKE_CASE name, e.g. `SOME_CONST` + +#[deny(non_upper_case_globals)] +pub static SomeStatic: u8 = 10; + //^^^^^^^^^^ 💡 error: Static variable `SomeStatic` should have UPPER_SNAKE_CASE name, e.g. 
`SOME_STATIC` + "#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs index c779266bc97..1ec17952b23 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/invalid_derive_target.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext, Severity}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: invalid-derive-target // @@ -11,11 +11,10 @@ pub(crate) fn invalid_derive_target( let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range; Diagnostic::new( - "invalid-derive-target", + DiagnosticCode::RustcHardError("E0774"), "`derive` may only be applied to `struct`s, `enum`s and `union`s", display_range, ) - .severity(Severity::Error) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs index 04ce1e0feee..a337e2660d6 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/json_is_not_rust.rs @@ -17,7 +17,7 @@ use syntax::{ }; use text_edit::TextEdit; -use crate::{fix, Diagnostic, DiagnosticsConfig, Severity}; +use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsConfig, Severity}; #[derive(Default)] struct State { @@ -117,11 +117,10 @@ pub(crate) fn json_in_items( edit.insert(range.start(), state.result); acc.push( Diagnostic::new( - "json-is-not-rust", + DiagnosticCode::Ra("json-is-not-rust", Severity::WeakWarning), "JSON syntax is not valid as a Rust item", range, ) - .severity(Severity::WeakWarning) .with_fixes(Some(vec![{ let mut scb = SourceChangeBuilder::new(file_id); let scope = match import_scope { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs index 7547779a95c..937e2f96642 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/macro_error.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; // Diagnostic: macro-error // @@ -6,7 +6,12 @@ use crate::{Diagnostic, DiagnosticsContext}; pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic { // Use more accurate position if available. let display_range = ctx.resolve_precise_location(&d.node, d.precise_location); - Diagnostic::new("macro-error", d.message.clone(), display_range).experimental() + Diagnostic::new( + DiagnosticCode::Ra("macro-error", Severity::Error), + d.message.clone(), + display_range, + ) + .experimental() } // Diagnostic: macro-error @@ -16,7 +21,12 @@ pub(crate) fn macro_def_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroDefErr // Use more accurate position if available. 
let display_range = ctx.resolve_precise_location(&d.node.clone().map(|it| it.syntax_node_ptr()), d.name); - Diagnostic::new("macro-def-error", d.message.clone(), display_range).experimental() + Diagnostic::new( + DiagnosticCode::Ra("macro-def-error", Severity::Error), + d.message.clone(), + display_range, + ) + .experimental() } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs index cd48bdba07e..fc57dde69f2 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/malformed_derive.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext, Severity}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: malformed-derive // @@ -10,11 +10,10 @@ pub(crate) fn malformed_derive( let display_range = ctx.sema.diagnostics_display_range(d.node.clone()).range; Diagnostic::new( - "malformed-derive", + DiagnosticCode::RustcHardError("E0777"), "malformed derive input, derive attributes are of the form `#[derive(Derive1, Derive2, ...)]`", display_range, ) - .severity(Severity::Error) } #[cfg(test)] diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs index c5db8c3741b..6238c7e09e9 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mismatched_arg_count.rs @@ -3,7 +3,7 @@ use syntax::{ AstNode, TextRange, }; -use crate::{adjusted_display_range, Diagnostic, DiagnosticsContext}; +use crate::{adjusted_display_range, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: mismatched-arg-count // @@ -14,7 +14,7 @@ pub(crate) fn mismatched_arg_count( ) -> Diagnostic { let s = if d.expected == 1 { "" } else { "s" }; let message = format!("expected {} argument{s}, found {}", d.expected, d.found); - Diagnostic::new("mismatched-arg-count", message, invalid_args_range(ctx, d)) + Diagnostic::new(DiagnosticCode::RustcHardError("E0107"), message, invalid_args_range(ctx, d)) } fn invalid_args_range(ctx: &DiagnosticsContext<'_>, d: &hir::MismatchedArgCount) -> TextRange { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs index 60ccc41df01..bb0e36ff3a1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -15,7 +15,7 @@ use syntax::{ }; use text_edit::TextEdit; -use crate::{fix, Diagnostic, DiagnosticsContext}; +use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: missing-fields // @@ -42,7 +42,7 @@ pub(crate) fn missing_fields(ctx: &DiagnosticsContext<'_>, d: &hir::MissingField .unwrap_or_else(|| d.field_list_parent.clone().either(|it| it.into(), |it| it.into())), ); - Diagnostic::new("missing-fields", message, ctx.sema.diagnostics_display_range(ptr).range) + Diagnostic::new_with_syntax_node_ptr(ctx, DiagnosticCode::RustcHardError("E0063"), message, ptr) .with_fixes(fixes(ctx, d)) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs 
b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 3f13b97a447..82a9a3bd559 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: missing-match-arm // @@ -7,10 +7,11 @@ pub(crate) fn missing_match_arms( ctx: &DiagnosticsContext<'_>, d: &hir::MissingMatchArms, ) -> Diagnostic { - Diagnostic::new( - "missing-match-arm", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0004"), format!("missing match arm: {}", d.uncovered_patterns), - ctx.sema.diagnostics_display_range(d.scrutinee_expr.clone().map(Into::into)).range, + d.scrutinee_expr.clone().map(Into::into), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs index 2026b6fcef5..70b26009bae 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/missing_unsafe.rs @@ -4,16 +4,17 @@ use syntax::{ast, SyntaxNode}; use syntax::{match_ast, AstNode}; use text_edit::TextEdit; -use crate::{fix, Diagnostic, DiagnosticsContext}; +use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: missing-unsafe // // This diagnostic is triggered if an operation marked as `unsafe` is used outside of an `unsafe` function or block. pub(crate) fn missing_unsafe(ctx: &DiagnosticsContext<'_>, d: &hir::MissingUnsafe) -> Diagnostic { - Diagnostic::new( - "missing-unsafe", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0133"), "this operation is unsafe and requires an unsafe function or block", - ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, + d.expr.clone().map(|it| it.into()), ) .with_fixes(fixes(ctx, d)) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs index 32e321107e6..3aa4aa97026 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/moved_out_of_ref.rs @@ -1,14 +1,15 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; use hir::HirDisplay; // Diagnostic: moved-out-of-ref // // This diagnostic is triggered on moving non copy things out of references. 
pub(crate) fn moved_out_of_ref(ctx: &DiagnosticsContext<'_>, d: &hir::MovedOutOfRef) -> Diagnostic { - Diagnostic::new( - "moved-out-of-ref", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0507"), format!("cannot move `{}` out of reference", d.ty.display(ctx.sema.db)), - ctx.sema.diagnostics_display_range(d.span.clone()).range, + d.span.clone(), ) .experimental() // spans are broken, and I'm not sure how precise we can detect copy types } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs index f61460e317f..e0c3bedce46 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -2,7 +2,7 @@ use ide_db::source_change::SourceChange; use syntax::{AstNode, SyntaxKind, SyntaxNode, SyntaxToken, T}; use text_edit::TextEdit; -use crate::{fix, Diagnostic, DiagnosticsContext, Severity}; +use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: need-mut // @@ -29,13 +29,15 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Diagno use_range, )]) })(); - Diagnostic::new( - "need-mut", + Diagnostic::new_with_syntax_node_ptr( + ctx, + // FIXME: `E0384` is not the only error that this diagnostic handles + DiagnosticCode::RustcHardError("E0384"), format!( "cannot mutate immutable variable `{}`", d.local.name(ctx.sema.db).display(ctx.sema.db) ), - ctx.sema.diagnostics_display_range(d.span.clone()).range, + d.span.clone(), ) .with_fixes(fixes) } @@ -68,12 +70,12 @@ pub(crate) fn unused_mut(ctx: &DiagnosticsContext<'_>, d: &hir::UnusedMut) -> Di )]) })(); let ast = d.local.primary_source(ctx.sema.db).syntax_ptr(); - Diagnostic::new( - "unused-mut", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcLint("unused_mut"), "variable does not need to be mutable", - ctx.sema.diagnostics_display_range(ast).range, + ast, ) - .severity(Severity::WeakWarning) .experimental() // Not supporting `#[allow(unused_mut)]` leads to false positive. .with_fixes(fixes) } @@ -93,7 +95,7 @@ mod tests { fn f(_: i32) {} fn main() { let mut x = 2; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable f(x); } "#, @@ -268,7 +270,7 @@ fn main() { fn f(_: i32) {} fn main() { let mut x = (2, 7); - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable f(x.1); } "#, @@ -302,7 +304,7 @@ fn main() { r#" fn main() { let mut x = &mut 2; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable *x = 5; } "#, @@ -346,7 +348,7 @@ fn main() { r#" //- minicore: copy, builtin_impls fn clone(mut i: &!) -> ! 
{ - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable *i } "#, @@ -360,7 +362,7 @@ fn main() { //- minicore: option fn main() { let mut v = &mut Some(2); - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let _ = || match v { Some(k) => { *k = 5; @@ -386,7 +388,7 @@ fn main() { fn main() { match (2, 3) { (x, mut y) => { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable x = 7; //^^^^^ 💡 error: cannot mutate immutable variable `x` } @@ -407,7 +409,7 @@ fn main() { fn main() { return; let mut x = 2; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable &mut x; } "#, @@ -417,7 +419,7 @@ fn main() { fn main() { loop {} let mut x = 2; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable &mut x; } "#, @@ -438,7 +440,7 @@ fn main(b: bool) { g(); } let mut x = 2; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable &mut x; } "#, @@ -452,7 +454,7 @@ fn main(b: bool) { return; } let mut x = 2; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable &mut x; } "#, @@ -466,7 +468,7 @@ fn main(b: bool) { fn f(_: i32) {} fn main() { let mut x; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable x = 5; f(x); } @@ -477,7 +479,7 @@ fn main() { fn f(_: i32) {} fn main(b: bool) { let mut x; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable if b { x = 1; } else { @@ -552,15 +554,15 @@ fn f(_: i32) {} fn main() { loop { let mut x = 1; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable f(x); if let mut y = 2 { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable f(y); } match 3 { mut z => f(z), - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable } } } @@ -577,9 +579,9 @@ fn main() { loop { let c @ ( mut b, - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable mut d - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable ); a = 1; //^^^^^ 💡 error: cannot mutate immutable variable `a` @@ -597,7 +599,7 @@ fn main() { check_diagnostics( r#" fn f(mut x: i32) { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable } "#, ); @@ -640,7 +642,7 @@ fn f() { //- minicore: iterators, copy fn f(x: [(i32, u8); 10]) { for (a, mut b) in x { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable a = 2; //^^^^^ 💡 error: cannot mutate immutable variable `a` } @@ -657,9 +659,9 @@ fn f(x: [(i32, u8); 10]) { fn f(x: [(i32, u8); 10]) { let mut it = x.into_iter(); while let Some((a, mut b)) = it.next() { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable while let Some((c, mut d)) = it.next() { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable a = 2; //^^^^^ 💡 error: cannot mutate immutable variable `a` c = 2; @@ -683,7 
+685,7 @@ fn f() { let x = &mut x; //^^^^^^ 💡 error: cannot mutate immutable variable `x` let mut x = x; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable x[2] = 5; } "#, @@ -711,13 +713,13 @@ impl IndexMut<usize> for Foo { } fn f() { let mut x = Foo; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let y = &x[2]; let x = Foo; let y = &mut x[2]; //^💡 error: cannot mutate immutable variable `x` let mut x = &mut Foo; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let y: &mut (i32, u8) = &mut x[2]; let x = Foo; let ref mut y = x[7]; @@ -731,7 +733,7 @@ fn f() { } let mut x = Foo; let mut i = 5; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let y = &mut x[i]; } "#, @@ -759,7 +761,7 @@ impl DerefMut for Foo { } fn f() { let mut x = Foo; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let y = &*x; let x = Foo; let y = &mut *x; @@ -790,11 +792,27 @@ fn f() { fn f(_: i32) {} fn main() { let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7)); - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable f(x); } "#, ); + check_diagnostics( + r#" +struct Foo(i32); + +const X: Foo = Foo(5); +const Y: Foo = Foo(12); + +const fn f(mut a: Foo) -> bool { + //^^^^^ 💡 warn: variable does not need to be mutable + match a { + X | Y => true, + _ => false, + } +} +"#, + ); } #[test] @@ -842,7 +860,7 @@ pub struct TreeLeaf { pub fn test() { let mut tree = Tree::Leaf( - //^^^^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^^^^ 💡 warn: variable does not need to be mutable TreeLeaf { depth: 0, data: 0 @@ -859,7 +877,7 @@ pub fn test() { r#" //- minicore: fn fn fn_ref(mut x: impl Fn(u8) -> u8) -> u8 { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable x(2) } fn fn_mut(x: impl FnMut(u8) -> u8) -> u8 { @@ -867,11 +885,11 @@ fn fn_mut(x: impl FnMut(u8) -> u8) -> u8 { //^ 💡 error: cannot mutate immutable variable `x` } fn fn_borrow_mut(mut x: &mut impl FnMut(u8) -> u8) -> u8 { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable x(2) } fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable x(2) } "#, @@ -915,14 +933,14 @@ fn fn_once(mut x: impl FnOnce(u8) -> u8) -> u8 { //- minicore: copy, fn fn f() { let mut x = 5; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let mut y = 2; y = 7; let closure = || { let mut z = 8; z = 3; let mut k = z; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable }; } "#, @@ -949,7 +967,7 @@ fn f() { fn f() { struct X; let mut x = X; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let c1 = || x; let mut x = X; let c2 = || { x = X; x }; @@ -965,12 +983,12 @@ fn f() { fn f() { let mut x = &mut 5; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let closure1 = || { *x = 2; }; let _ = closure1(); //^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1` let mut x = 
&mut 5; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let closure1 = || { *x = 2; &x; }; let _ = closure1(); //^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1` @@ -979,12 +997,12 @@ fn f() { let _ = closure1(); //^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1` let mut x = &mut 5; - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let closure1 = move || { *x = 2; }; let _ = closure1(); //^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1` let mut x = &mut X(1, 2); - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable let closure1 = || { x.0 = 2; }; let _ = closure1(); //^^^^^^^^ 💡 error: cannot mutate immutable variable `closure1` @@ -1001,7 +1019,7 @@ fn f() { fn x(t: &[u8]) { match t { &[a, mut b] | &[a, _, mut b] => { - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable a = 2; //^^^^^ 💡 error: cannot mutate immutable variable `a` @@ -1055,7 +1073,7 @@ fn f() { *x = 7; //^^^^^^ 💡 error: cannot mutate immutable variable `x` let mut y = Box::new(5); - //^^^^^ 💡 weak: variable does not need to be mutable + //^^^^^ 💡 warn: variable does not need to be mutable *x = *y; //^^^^^^^ 💡 error: cannot mutate immutable variable `x` let x = Box::new(5); @@ -1067,6 +1085,33 @@ fn f() { } #[test] + fn regression_15143() { + check_diagnostics( + r#" + trait Tr { + type Ty; + } + + struct A; + + impl Tr for A { + type Ty = (u32, i64); + } + + struct B<T: Tr> { + f: <T as Tr>::Ty, + } + + fn main(b: B<A>) { + let f = b.f.0; + f = 5; + //^^^^^ 💡 error: cannot mutate immutable variable `f` + } + "#, + ); + } + + #[test] fn allow_unused_mut_for_identifiers_starting_with_underline() { check_diagnostics( r#" @@ -1080,17 +1125,51 @@ fn main() { } #[test] - fn respect_allow_unused_mut() { - // FIXME: respect + fn respect_lint_attributes_for_unused_mut() { check_diagnostics( r#" fn f(_: i32) {} fn main() { #[allow(unused_mut)] let mut x = 2; - //^^^^^ 💡 weak: variable does not need to be mutable f(x); } + +fn main2() { + #[deny(unused_mut)] + let mut x = 2; + //^^^^^ 💡 error: variable does not need to be mutable + f(x); +} +"#, + ); + check_diagnostics( + r#" +macro_rules! mac { + ($($x:expr),*$(,)*) => ({ + #[allow(unused_mut)] + let mut vec = 2; + vec + }); +} + +fn main2() { + let mut x = mac![]; + //^^^^^ 💡 warn: variable does not need to be mutable +} + "#, + ); + } + + #[test] + fn regression_15099() { + check_diagnostics( + r#" +//- minicore: iterator, range +fn f() { + loop {} + for _ in 0..2 {} +} "#, ); } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs index a39eceab243..a34a5824f29 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/no_such_field.rs @@ -6,16 +6,17 @@ use syntax::{ }; use text_edit::TextEdit; -use crate::{fix, Assist, Diagnostic, DiagnosticsContext}; +use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: no-such-field // // This diagnostic is triggered if created structure does not have field provided in record. 
pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic { - Diagnostic::new( - "no-such-field", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0559"), "no such field", - ctx.sema.diagnostics_display_range(d.field.clone().map(|it| it.into())).range, + d.field.clone().map(|it| it.into()), ) .with_fixes(fixes(ctx, d)) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs index 4cd85a479a0..c44d28e77f6 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_assoc_item.rs @@ -1,6 +1,6 @@ use either::Either; -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: private-assoc-item // @@ -16,8 +16,9 @@ pub(crate) fn private_assoc_item( .name(ctx.sema.db) .map(|name| format!("`{}` ", name.display(ctx.sema.db))) .unwrap_or_default(); - Diagnostic::new( - "private-assoc-item", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0624"), format!( "{} {}is private", match d.item { @@ -27,15 +28,13 @@ pub(crate) fn private_assoc_item( }, name, ), - ctx.sema - .diagnostics_display_range(d.expr_or_pat.clone().map(|it| match it { + d.expr_or_pat.clone().map(|it| match it { + Either::Left(it) => it.into(), + Either::Right(it) => match it { Either::Left(it) => it.into(), - Either::Right(it) => match it { - Either::Left(it) => it.into(), - Either::Right(it) => it.into(), - }, - })) - .range, + Either::Right(it) => it.into(), + }, + }), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs index de7f51f693c..553defcf990 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/private_field.rs @@ -1,18 +1,19 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: private-field // // This diagnostic is triggered if the accessed field is not visible from the current module. 
pub(crate) fn private_field(ctx: &DiagnosticsContext<'_>, d: &hir::PrivateField) -> Diagnostic { // FIXME: add quickfix - Diagnostic::new( - "private-field", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0616"), format!( "field `{}` of `{}` is private", d.field.name(ctx.sema.db).display(ctx.sema.db), d.field.parent_def(ctx.sema.db).name(ctx.sema.db).display(ctx.sema.db) ), - ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, + d.expr.clone().map(|it| it.into()), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs index d3eda3c5ebc..083ef3e8dc1 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/replace_filter_map_next_with_find_map.rs @@ -6,7 +6,7 @@ use syntax::{ }; use text_edit::TextEdit; -use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity}; +use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: replace-filter-map-next-with-find-map // @@ -15,12 +15,12 @@ pub(crate) fn replace_filter_map_next_with_find_map( ctx: &DiagnosticsContext<'_>, d: &hir::ReplaceFilterMapNextWithFindMap, ) -> Diagnostic { - Diagnostic::new( - "replace-filter-map-next-with-find-map", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::Clippy("filter_map_next"), "replace filter_map(..).next() with find_map(..)", - ctx.sema.diagnostics_display_range(InFile::new(d.file, d.next_expr.clone().into())).range, + InFile::new(d.file, d.next_expr.clone().into()), ) - .severity(Severity::WeakWarning) .with_fixes(fixes(ctx, d)) } @@ -64,7 +64,7 @@ mod tests { pub(crate) fn check_diagnostics(ra_fixture: &str) { let mut config = DiagnosticsConfig::test_sample(); config.disabled.insert("inactive-code".to_string()); - config.disabled.insert("unresolved-method".to_string()); + config.disabled.insert("E0599".to_string()); check_diagnostics_with_config(config, ra_fixture) } @@ -139,4 +139,33 @@ fn foo() { "#, ) } + + #[test] + fn respect_lint_attributes_for_clippy_equivalent() { + check_diagnostics( + r#" +//- minicore: iterators + +fn foo() { + #[allow(clippy::filter_map_next)] + let m = core::iter::repeat(()).filter_map(|()| Some(92)).next(); +} + +#[deny(clippy::filter_map_next)] +fn foo() { + let m = core::iter::repeat(()).filter_map(|()| Some(92)).next(); +} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 error: replace filter_map(..).next() with find_map(..) + +fn foo() { + let m = core::iter::repeat(()).filter_map(|()| Some(92)).next(); +} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 weak: replace filter_map(..).next() with find_map(..) + +#[warn(clippy::filter_map_next)] +fn foo() { + let m = core::iter::repeat(()).filter_map(|()| Some(92)).next(); +} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 💡 warn: replace filter_map(..).next() with find_map(..) 
+ +"#, + ); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs index c28f98d8333..15bd28c00df 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -7,7 +7,7 @@ use syntax::{ }; use text_edit::TextEdit; -use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticsContext}; +use crate::{adjusted_display_range, fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: type-mismatch // @@ -39,7 +39,7 @@ pub(crate) fn type_mismatch(ctx: &DiagnosticsContext<'_>, d: &hir::TypeMismatch) } }; let mut diag = Diagnostic::new( - "type-mismatch", + DiagnosticCode::RustcHardError("E0308"), format!( "expected {}, found {}", d.expected.display(ctx.sema.db).with_closure_style(ClosureStyle::ClosureWithId), diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs index e12bbcf6820..4af67227115 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -7,7 +7,7 @@ use ide_db::{ use syntax::AstNode; use text_edit::TextEdit; -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: typed-hole // @@ -26,7 +26,8 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di ) }; - Diagnostic::new("typed-hole", message, display_range.range).with_fixes(fixes) + Diagnostic::new(DiagnosticCode::RustcHardError("typed-hole"), message, display_range.range) + .with_fixes(fixes) } fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> { diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs index 034e4fcfb85..7de9a9a323e 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/undeclared_label.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: undeclared-label pub(crate) fn undeclared_label( @@ -6,10 +6,11 @@ pub(crate) fn undeclared_label( d: &hir::UndeclaredLabel, ) -> Diagnostic { let name = &d.name; - Diagnostic::new( - "undeclared-label", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("undeclared-label"), format!("use of undeclared label `{}`", name.display(ctx.sema.db)), - ctx.sema.diagnostics_display_range(d.node.clone().map(|it| it.into())).range, + d.node.clone().map(|it| it.into()), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs index e879de75cd8..bcce72a7d01 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unimplemented_builtin_macro.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext, Severity}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; // Diagnostic: 
unimplemented-builtin-macro // @@ -7,10 +7,10 @@ pub(crate) fn unimplemented_builtin_macro( ctx: &DiagnosticsContext<'_>, d: &hir::UnimplementedBuiltinMacro, ) -> Diagnostic { - Diagnostic::new( - "unimplemented-builtin-macro", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::Ra("unimplemented-builtin-macro", Severity::WeakWarning), "unimplemented built-in macro".to_string(), - ctx.sema.diagnostics_display_range(d.node.clone()).range, + d.node.clone(), ) - .severity(Severity::WeakWarning) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs index 271e7ce73bc..e04f27c27fd 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unlinked_file.rs @@ -14,7 +14,7 @@ use syntax::{ }; use text_edit::TextEdit; -use crate::{fix, Assist, Diagnostic, DiagnosticsContext, Severity}; +use crate::{fix, Assist, Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; // Diagnostic: unlinked-file // @@ -46,8 +46,7 @@ pub(crate) fn unlinked_file( .unwrap_or(range); acc.push( - Diagnostic::new("unlinked-file", message, range) - .severity(Severity::WeakWarning) + Diagnostic::new(DiagnosticCode::Ra("unlinked-file", Severity::WeakWarning), message, range) .with_fixes(fixes), ); } @@ -119,10 +118,11 @@ fn fixes(ctx: &DiagnosticsContext<'_>, file_id: FileId) -> Option<Vec<Assist>> { stack.pop(); 'crates: for &krate in ctx.sema.db.relevant_crates(parent_id).iter() { let crate_def_map = ctx.sema.db.crate_def_map(krate); - let Some((_, module)) = - crate_def_map.modules() - .find(|(_, module)| module.origin.file_id() == Some(parent_id) && !module.origin.is_inline()) - else { continue }; + let Some((_, module)) = crate_def_map.modules().find(|(_, module)| { + module.origin.file_id() == Some(parent_id) && !module.origin.is_inline() + }) else { + continue; + }; if stack.is_empty() { return make_fixes( diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs index 9fedadeae03..1c5d6cd0909 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unreachable_label.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unreachable-label pub(crate) fn unreachable_label( @@ -6,10 +6,11 @@ pub(crate) fn unreachable_label( d: &hir::UnreachableLabel, ) -> Diagnostic { let name = &d.name; - Diagnostic::new( - "unreachable-label", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0767"), format!("use of unreachable label `{}`", name.display(ctx.sema.db)), - ctx.sema.diagnostics_display_range(d.node.clone().map(|it| it.into())).range, + d.node.clone().map(|it| it.into()), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs index 74e4a69c64b..f8265b63275 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_extern_crate.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use 
crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-extern-crate // @@ -7,10 +7,11 @@ pub(crate) fn unresolved_extern_crate( ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedExternCrate, ) -> Diagnostic { - Diagnostic::new( - "unresolved-extern-crate", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("unresolved-extern-crate"), "unresolved extern crate", - ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range, + d.decl.clone().map(|it| it.into()), ) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 5e4efa41fd9..0758706e45a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -8,7 +8,7 @@ use ide_db::{ use syntax::{ast, AstNode, AstPtr}; use text_edit::TextEdit; -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-field // @@ -22,14 +22,15 @@ pub(crate) fn unresolved_field( } else { "" }; - Diagnostic::new( - "unresolved-field", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0559"), format!( "no field `{}` on type `{}`{method_suffix}", d.name.display(ctx.sema.db), d.receiver.display(ctx.sema.db) ), - ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, + d.expr.clone().map(|it| it.into()), ) .with_fixes(fixes(ctx, d)) .experimental() @@ -67,7 +68,10 @@ fn method_fix( } #[cfg(test)] mod tests { - use crate::tests::check_diagnostics; + use crate::{ + tests::{check_diagnostics, check_diagnostics_with_config}, + DiagnosticsConfig, + }; #[test] fn smoke_test() { @@ -145,4 +149,11 @@ fn foo() { "#, ); } + + #[test] + fn no_diagnostic_for_missing_name() { + let mut config = DiagnosticsConfig::test_sample(); + config.disabled.insert("syntax-error".to_owned()); + check_diagnostics_with_config(config, "fn foo() { (). 
}"); + } } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs index e52a88459d5..6b8026c0341 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_import.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-import // @@ -8,10 +8,11 @@ pub(crate) fn unresolved_import( ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedImport, ) -> Diagnostic { - Diagnostic::new( - "unresolved-import", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0432"), "unresolved import", - ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range, + d.decl.clone().map(|it| it.into()), ) // This currently results in false positives in the following cases: // - `cfg_if!`-generated code in libstd (we don't load the sysroot correctly) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs index 3943b51ab42..33e7c2e37c3 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_macro_call.rs @@ -1,4 +1,4 @@ -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-macro-call // @@ -12,7 +12,7 @@ pub(crate) fn unresolved_macro_call( let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location); let bang = if d.is_bang { "!" 
} else { "" }; Diagnostic::new( - "unresolved-macro-call", + DiagnosticCode::RustcHardError("unresolved-macro-call"), format!("unresolved macro `{}{bang}`", d.path.display(ctx.sema.db)), display_range, ) diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 8bbb837e670..ae9f6744c40 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -8,7 +8,7 @@ use ide_db::{ use syntax::{ast, AstNode, TextRange}; use text_edit::TextEdit; -use crate::{Diagnostic, DiagnosticsContext}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-method // @@ -22,14 +22,15 @@ pub(crate) fn unresolved_method( } else { "" }; - Diagnostic::new( - "unresolved-method", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0599"), format!( "no method `{}` on type `{}`{field_suffix}", d.name.display(ctx.sema.db), d.receiver.display(ctx.sema.db) ), - ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, + d.expr.clone().map(|it| it.into()), ) .with_fixes(fixes(ctx, d)) .experimental() diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs index 6e3fd3b42b0..be24e50c987 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_module.rs @@ -3,7 +3,7 @@ use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSyste use itertools::Itertools; use syntax::AstNode; -use crate::{fix, Diagnostic, DiagnosticsContext}; +use crate::{fix, Diagnostic, DiagnosticCode, DiagnosticsContext}; // Diagnostic: unresolved-module // @@ -12,8 +12,9 @@ pub(crate) fn unresolved_module( ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule, ) -> Diagnostic { - Diagnostic::new( - "unresolved-module", + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0583"), match &*d.candidates { [] => "unresolved module".to_string(), [candidate] => format!("unresolved module, can't find module file: {candidate}"), @@ -25,7 +26,7 @@ pub(crate) fn unresolved_module( ) } }, - ctx.sema.diagnostics_display_range(d.decl.clone().map(|it| it.into())).range, + d.decl.clone().map(|it| it.into()), ) .with_fixes(fixes(ctx, d)) } @@ -82,8 +83,8 @@ mod baz {} expect![[r#" [ Diagnostic { - code: DiagnosticCode( - "unresolved-module", + code: RustcHardError( + "E0583", ), message: "unresolved module, can't find module file: foo.rs, or foo/mod.rs", range: 0..8, @@ -148,6 +149,22 @@ mod baz {} }, ], ), + main_node: Some( + InFile { + file_id: FileId( + FileId( + 0, + ), + ), + value: MODULE@0..8 + MOD_KW@0..3 "mod" + WHITESPACE@3..4 " " + NAME@4..7 + IDENT@4..7 "foo" + SEMICOLON@7..8 ";" + , + }, + ), }, ] "#]], diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs index ae5cf135839..015a3d6b2ce 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/unresolved_proc_macro.rs @@ -1,6 +1,6 @@ use hir::db::DefDatabase; -use 
crate::{Diagnostic, DiagnosticsContext, Severity}; +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext, Severity}; // Diagnostic: unresolved-proc-macro // @@ -41,5 +41,5 @@ pub(crate) fn unresolved_proc_macro( }; let message = format!("{not_expanded_message}: {message}"); - Diagnostic::new("unresolved-proc-macro", message, display_range).severity(severity) + Diagnostic::new(DiagnosticCode::Ra("unresolved-proc-macro", severity), message, display_range) } diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs index 289ed0458c6..0aa439f797a 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -1,9 +1,9 @@ use ide_db::{base_db::FileId, source_change::SourceChange}; use itertools::Itertools; -use syntax::{ast, AstNode, SyntaxNode, TextRange}; +use syntax::{ast, AstNode, SyntaxNode}; use text_edit::TextEdit; -use crate::{fix, Diagnostic, Severity}; +use crate::{fix, Diagnostic, DiagnosticCode}; // Diagnostic: unnecessary-braces // @@ -15,6 +15,11 @@ pub(crate) fn useless_braces( ) -> Option<()> { let use_tree_list = ast::UseTreeList::cast(node.clone())?; if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { + // If there is a `self` inside the bracketed `use`, don't show diagnostic. + if single_use_tree.path()?.segment()?.self_token().is_some() { + return Some(()); + } + // If there is a comment inside the bracketed `use`, // assume it is a commented out module path and don't show diagnostic. if use_tree_list.has_inner_comment() { @@ -22,21 +27,18 @@ pub(crate) fn useless_braces( } let use_range = use_tree_list.syntax().text_range(); - let edit = remove_braces(&single_use_tree).unwrap_or_else(|| { - let to_replace = single_use_tree.syntax().text().to_string(); - let mut edit_builder = TextEdit::builder(); - edit_builder.delete(use_range); - edit_builder.insert(use_range.start(), to_replace); - edit_builder.finish() - }); + let to_replace = single_use_tree.syntax().text().to_string(); + let mut edit_builder = TextEdit::builder(); + edit_builder.delete(use_range); + edit_builder.insert(use_range.start(), to_replace); + let edit = edit_builder.finish(); acc.push( Diagnostic::new( - "unnecessary-braces", + DiagnosticCode::RustcLint("unused_braces"), "Unnecessary braces in use statement".to_string(), use_range, ) - .severity(Severity::WeakWarning) .with_fixes(Some(vec![fix( "remove_braces", "Remove unnecessary braces", @@ -49,19 +51,12 @@ pub(crate) fn useless_braces( Some(()) } -fn remove_braces(single_use_tree: &ast::UseTree) -> Option<TextEdit> { - let use_tree_list_node = single_use_tree.syntax().parent()?; - if single_use_tree.path()?.segment()?.self_token().is_some() { - let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start(); - let end = use_tree_list_node.text_range().end(); - return Some(TextEdit::delete(TextRange::new(start, end))); - } - None -} - #[cfg(test)] mod tests { - use crate::tests::{check_diagnostics, check_fix}; + use crate::{ + tests::{check_diagnostics, check_diagnostics_with_config, check_fix}, + DiagnosticsConfig, + }; #[test] fn test_check_unnecessary_braces_in_use_statement() { @@ -94,6 +89,32 @@ mod a { } "#, ); + check_diagnostics( + r#" +use a::{self}; + +mod a { +} +"#, + ); + check_diagnostics( + r#" +use a::{self as cool_name}; + +mod a { +} +"#, + ); + + let mut config = 
DiagnosticsConfig::test_sample(); + config.disabled.insert("syntax-error".to_string()); + check_diagnostics_with_config( + config, + r#" +mod a { pub mod b {} } +use a::{b::self}; +"#, + ); check_fix( r#" mod b {} @@ -126,16 +147,6 @@ use a::c; ); check_fix( r#" -mod a {} -use a::{self$0}; -"#, - r#" -mod a {} -use a; -"#, - ); - check_fix( - r#" mod a { pub mod c {} pub mod d { pub mod e {} } } use a::{c, d::{e$0}}; "#, diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs index 55a4a482d3b..b1b9b4b8ea5 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/lib.rs @@ -67,24 +67,61 @@ mod handlers { #[cfg(test)] mod tests; +use std::collections::HashMap; + use hir::{diagnostics::AnyDiagnostic, InFile, Semantics}; use ide_db::{ assists::{Assist, AssistId, AssistKind, AssistResolveStrategy}, base_db::{FileId, FileRange, SourceDatabase}, + generated::lints::{LintGroup, CLIPPY_LINT_GROUPS, DEFAULT_LINT_GROUPS}, imports::insert_use::InsertUseConfig, label::Label, source_change::SourceChange, - FxHashSet, RootDatabase, + syntax_helpers::node_ext::parse_tt_as_comma_sep_paths, + FxHashMap, FxHashSet, RootDatabase, +}; +use once_cell::sync::Lazy; +use stdx::never; +use syntax::{ + algo::find_node_at_range, + ast::{self, AstNode}, + SyntaxNode, SyntaxNodePtr, TextRange, }; -use syntax::{algo::find_node_at_range, ast::AstNode, SyntaxNodePtr, TextRange}; // FIXME: Make this an enum -#[derive(Copy, Clone, Debug, PartialEq)] -pub struct DiagnosticCode(pub &'static str); +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum DiagnosticCode { + RustcHardError(&'static str), + RustcLint(&'static str), + Clippy(&'static str), + Ra(&'static str, Severity), +} impl DiagnosticCode { - pub fn as_str(&self) -> &str { - self.0 + pub fn url(&self) -> String { + match self { + DiagnosticCode::RustcHardError(e) => { + format!("https://doc.rust-lang.org/stable/error_codes/{e}.html") + } + DiagnosticCode::RustcLint(e) => { + format!("https://doc.rust-lang.org/rustc/?search={e}") + } + DiagnosticCode::Clippy(e) => { + format!("https://rust-lang.github.io/rust-clippy/master/#/{e}") + } + DiagnosticCode::Ra(e, _) => { + format!("https://rust-analyzer.github.io/manual.html#{e}") + } + } + } + + pub fn as_str(&self) -> &'static str { + match self { + DiagnosticCode::RustcHardError(r) + | DiagnosticCode::RustcLint(r) + | DiagnosticCode::Clippy(r) + | DiagnosticCode::Ra(r, _) => r, + } } } @@ -97,29 +134,51 @@ pub struct Diagnostic { pub unused: bool, pub experimental: bool, pub fixes: Option<Vec<Assist>>, + // The node that will be affected by `#[allow]` and similar attributes. + pub main_node: Option<InFile<SyntaxNode>>, } impl Diagnostic { - fn new(code: &'static str, message: impl Into<String>, range: TextRange) -> Diagnostic { + fn new(code: DiagnosticCode, message: impl Into<String>, range: TextRange) -> Diagnostic { let message = message.into(); Diagnostic { - code: DiagnosticCode(code), + code, message, range, - severity: Severity::Error, + severity: match code { + DiagnosticCode::RustcHardError(_) => Severity::Error, + // FIXME: Rustc lints are not always warning, but the ones that are currently implemented are all warnings. + DiagnosticCode::RustcLint(_) => Severity::Warning, + // FIXME: We can make this configurable, and if the user uses `cargo clippy` on flycheck, we can + // make it normal warning. 
+ DiagnosticCode::Clippy(_) => Severity::WeakWarning, + DiagnosticCode::Ra(_, s) => s, + }, unused: false, experimental: false, fixes: None, + main_node: None, } } + fn new_with_syntax_node_ptr( + ctx: &DiagnosticsContext<'_>, + code: DiagnosticCode, + message: impl Into<String>, + node: InFile<SyntaxNodePtr>, + ) -> Diagnostic { + let file_id = node.file_id; + Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node.clone()).range) + .with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id)))) + } + fn experimental(mut self) -> Diagnostic { self.experimental = true; self } - fn severity(mut self, severity: Severity) -> Diagnostic { - self.severity = severity; + fn with_main_node(mut self, main_node: InFile<SyntaxNode>) -> Diagnostic { + self.main_node = Some(main_node); self } @@ -134,12 +193,12 @@ impl Diagnostic { } } -#[derive(Debug, Copy, Clone)] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Severity { Error, - // We don't actually emit this one yet, but we should at some point. - // Warning, + Warning, WeakWarning, + Allow, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -155,6 +214,8 @@ impl Default for ExprFillDefaultMode { #[derive(Debug, Clone)] pub struct DiagnosticsConfig { + /// Whether native diagnostics are enabled. + pub enabled: bool, pub proc_macros_enabled: bool, pub proc_attr_macros_enabled: bool, pub disable_experimental: bool, @@ -171,6 +232,7 @@ impl DiagnosticsConfig { use ide_db::imports::insert_use::ImportGranularity; Self { + enabled: true, proc_macros_enabled: Default::default(), proc_attr_macros_enabled: Default::default(), disable_experimental: Default::default(), @@ -194,7 +256,7 @@ struct DiagnosticsContext<'a> { resolve: &'a AssistResolveStrategy, } -impl<'a> DiagnosticsContext<'a> { +impl DiagnosticsContext<'_> { fn resolve_precise_location( &self, node: &InFile<SyntaxNodePtr>, @@ -228,11 +290,13 @@ pub fn diagnostics( let mut res = Vec::new(); // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily. 
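For orientation, the hunk above replaces the old string-based `DiagnosticCode` with an enum whose kind drives both the documentation URL and the default severity. A minimal standalone sketch of that pattern (plain Rust, not rust-analyzer's actual crate API; the variant names, severity mapping, and URL formats are copied from the hunk, the rest is illustrative):

```rust
// Sketch only: mirrors the DiagnosticCode -> (severity, url) mapping from the
// diff above, using plain std types rather than rust-analyzer's crates.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Severity {
    Error,
    Warning,
    WeakWarning,
    Allow,
}

#[derive(Debug, Clone, Copy)]
enum DiagnosticCode {
    RustcHardError(&'static str), // e.g. "E0583"
    RustcLint(&'static str),      // e.g. "unused_braces"
    Clippy(&'static str),         // e.g. "filter_map_next"
    Ra(&'static str, Severity),   // rust-analyzer's own diagnostics carry their severity
}

impl DiagnosticCode {
    // Default severity per code kind, as in the new Diagnostic::new.
    fn default_severity(self) -> Severity {
        match self {
            DiagnosticCode::RustcHardError(_) => Severity::Error,
            DiagnosticCode::RustcLint(_) => Severity::Warning,
            DiagnosticCode::Clippy(_) => Severity::WeakWarning,
            DiagnosticCode::Ra(_, s) => s,
        }
    }

    // Documentation URL per code kind, same formats as DiagnosticCode::url().
    fn url(self) -> String {
        match self {
            DiagnosticCode::RustcHardError(e) => {
                format!("https://doc.rust-lang.org/stable/error_codes/{e}.html")
            }
            DiagnosticCode::RustcLint(e) => {
                format!("https://doc.rust-lang.org/rustc/?search={e}")
            }
            DiagnosticCode::Clippy(e) => {
                format!("https://rust-lang.github.io/rust-clippy/master/#/{e}")
            }
            DiagnosticCode::Ra(e, _) => {
                format!("https://rust-analyzer.github.io/manual.html#{e}")
            }
        }
    }
}

fn main() {
    for code in [
        DiagnosticCode::RustcHardError("E0583"),
        DiagnosticCode::RustcLint("unused_braces"),
        DiagnosticCode::Clippy("filter_map_next"),
        DiagnosticCode::Ra("unresolved-proc-macro", Severity::Warning),
    ] {
        println!("{:?} -> {:?}, {}", code, code.default_severity(), code.url());
    }
}
```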
- res.extend( - parse.errors().iter().take(128).map(|err| { - Diagnostic::new("syntax-error", format!("Syntax Error: {err}"), err.range()) - }), - ); + res.extend(parse.errors().iter().take(128).map(|err| { + Diagnostic::new( + DiagnosticCode::RustcHardError("syntax-error"), + format!("Syntax Error: {err}"), + err.range(), + ) + })); let parse = sema.parse(file_id); @@ -271,7 +335,7 @@ pub fn diagnostics( res.extend(d.errors.iter().take(32).map(|err| { { Diagnostic::new( - "syntax-error", + DiagnosticCode::RustcHardError("syntax-error"), format!("Syntax Error in Expansion: {err}"), ctx.resolve_precise_location(&d.node.clone(), d.precise_location), ) @@ -309,14 +373,168 @@ pub fn diagnostics( res.push(d) } + let mut diagnostics_of_range = + res.iter_mut().filter_map(|x| Some((x.main_node.clone()?, x))).collect::<FxHashMap<_, _>>(); + + let mut rustc_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default(); + let mut clippy_stack: FxHashMap<String, Vec<Severity>> = FxHashMap::default(); + + handle_lint_attributes( + &ctx.sema, + parse.syntax(), + &mut rustc_stack, + &mut clippy_stack, + &mut diagnostics_of_range, + ); + res.retain(|d| { - !ctx.config.disabled.contains(d.code.as_str()) + d.severity != Severity::Allow + && !ctx.config.disabled.contains(d.code.as_str()) && !(ctx.config.disable_experimental && d.experimental) }); res } +// `__RA_EVERY_LINT` is a fake lint group to allow every lint in proc macros + +static RUSTC_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> = + Lazy::new(|| build_group_dict(DEFAULT_LINT_GROUPS, &["warnings", "__RA_EVERY_LINT"], "")); + +static CLIPPY_LINT_GROUPS_DICT: Lazy<HashMap<&str, Vec<&str>>> = + Lazy::new(|| build_group_dict(CLIPPY_LINT_GROUPS, &["__RA_EVERY_LINT"], "clippy::")); + +fn build_group_dict( + lint_group: &'static [LintGroup], + all_groups: &'static [&'static str], + prefix: &'static str, +) -> HashMap<&'static str, Vec<&'static str>> { + let mut r: HashMap<&str, Vec<&str>> = HashMap::new(); + for g in lint_group { + for child in g.children { + r.entry(child.strip_prefix(prefix).unwrap()) + .or_default() + .push(g.lint.label.strip_prefix(prefix).unwrap()); + } + } + for (lint, groups) in r.iter_mut() { + groups.push(lint); + groups.extend_from_slice(all_groups); + } + r +} + +fn handle_lint_attributes( + sema: &Semantics<'_, RootDatabase>, + root: &SyntaxNode, + rustc_stack: &mut FxHashMap<String, Vec<Severity>>, + clippy_stack: &mut FxHashMap<String, Vec<Severity>>, + diagnostics_of_range: &mut FxHashMap<InFile<SyntaxNode>, &mut Diagnostic>, +) { + let file_id = sema.hir_file_for(root); + for ev in root.preorder() { + match ev { + syntax::WalkEvent::Enter(node) => { + for attr in node.children().filter_map(ast::Attr::cast) { + parse_lint_attribute(attr, rustc_stack, clippy_stack, |stack, severity| { + stack.push(severity); + }); + } + if let Some(x) = + diagnostics_of_range.get_mut(&InFile { file_id, value: node.clone() }) + { + const EMPTY_LINTS: &[&str] = &[]; + let (names, stack) = match x.code { + DiagnosticCode::RustcLint(name) => ( + RUSTC_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |x| &**x), + &mut *rustc_stack, + ), + DiagnosticCode::Clippy(name) => ( + CLIPPY_LINT_GROUPS_DICT.get(name).map_or(EMPTY_LINTS, |x| &**x), + &mut *clippy_stack, + ), + _ => continue, + }; + for &name in names { + if let Some(s) = stack.get(name).and_then(|x| x.last()) { + x.severity = *s; + } + } + } + if let Some(item) = ast::Item::cast(node.clone()) { + if let Some(me) = sema.expand_attr_macro(&item) { + for stack in [&mut *rustc_stack, &mut 
*clippy_stack] { + stack + .entry("__RA_EVERY_LINT".to_owned()) + .or_default() + .push(Severity::Allow); + } + handle_lint_attributes( + sema, + &me, + rustc_stack, + clippy_stack, + diagnostics_of_range, + ); + for stack in [&mut *rustc_stack, &mut *clippy_stack] { + stack.entry("__RA_EVERY_LINT".to_owned()).or_default().pop(); + } + } + } + if let Some(mc) = ast::MacroCall::cast(node) { + if let Some(me) = sema.expand(&mc) { + handle_lint_attributes( + sema, + &me, + rustc_stack, + clippy_stack, + diagnostics_of_range, + ); + } + } + } + syntax::WalkEvent::Leave(node) => { + for attr in node.children().filter_map(ast::Attr::cast) { + parse_lint_attribute(attr, rustc_stack, clippy_stack, |stack, severity| { + if stack.pop() != Some(severity) { + never!("Mismatched serevity in walking lint attributes"); + } + }); + } + } + } + } +} + +fn parse_lint_attribute( + attr: ast::Attr, + rustc_stack: &mut FxHashMap<String, Vec<Severity>>, + clippy_stack: &mut FxHashMap<String, Vec<Severity>>, + job: impl Fn(&mut Vec<Severity>, Severity), +) { + let Some((tag, args_tt)) = attr.as_simple_call() else { + return; + }; + let serevity = match tag.as_str() { + "allow" => Severity::Allow, + "warn" => Severity::Warning, + "forbid" | "deny" => Severity::Error, + _ => return, + }; + for lint in parse_tt_as_comma_sep_paths(args_tt).into_iter().flatten() { + if let Some(lint) = lint.as_single_name_ref() { + job(rustc_stack.entry(lint.to_string()).or_default(), serevity); + } + if let Some(tool) = lint.qualifier().and_then(|x| x.as_single_name_ref()) { + if let Some(name_ref) = &lint.segment().and_then(|x| x.name_ref()) { + if tool.to_string() == "clippy" { + job(clippy_stack.entry(name_ref.to_string()).or_default(), serevity); + } + } + } + } +} + fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist { let mut res = unresolved_fix(id, label, target); res.source_change = Some(source_change); diff --git a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs index b5cd4e0d689..4ac9d0a9fb7 100644 --- a/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs +++ b/src/tools/rust-analyzer/crates/ide-diagnostics/src/tests.rs @@ -114,6 +114,8 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur annotation.push_str(match d.severity { Severity::Error => "error", Severity::WeakWarning => "weak", + Severity::Warning => "warn", + Severity::Allow => "allow", }); annotation.push_str(": "); annotation.push_str(&d.message); @@ -130,14 +132,19 @@ pub(crate) fn check_diagnostics_with_config(config: DiagnosticsConfig, ra_fixtur ) } } - assert_eq!(expected, actual); + if expected != actual { + let fneg = expected.iter().filter(|x| !actual.contains(x)).collect::<Vec<_>>(); + let fpos = actual.iter().filter(|x| !expected.contains(x)).collect::<Vec<_>>(); + + panic!("Diagnostic test failed.\nFalse negatives: {fneg:?}\nFalse positives: {fpos:?}"); + } } } #[test] fn test_disabled_diagnostics() { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.insert("unresolved-module".into()); + config.disabled.insert("E0583".into()); let (db, file_id) = RootDatabase::with_single_file(r#"mod foo;"#); @@ -159,7 +166,7 @@ fn minicore_smoke_test() { let source = minicore.source_code(); let mut config = DiagnosticsConfig::test_sample(); // This should be ignored since we conditionaly remove code which creates single item use with braces - 
config.disabled.insert("unnecessary-braces".to_string()); + config.disabled.insert("unused_braces".to_string()); check_diagnostics_with_config(config, &source); } diff --git a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs index 0a85569b600..96c193bd539 100644 --- a/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs +++ b/src/tools/rust-analyzer/crates/ide-ssr/src/search.rs @@ -22,7 +22,7 @@ pub(crate) struct UsageCache { usages: Vec<(Definition, UsageSearchResult)>, } -impl<'db> MatchFinder<'db> { +impl MatchFinder<'_> { /// Adds all matches for `rule` to `matches_out`. Matches may overlap in ways that make /// replacement impossible, so further processing is required in order to properly nest matches /// and remove overlapping matches. This is done in the `nesting` module. diff --git a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs index 8112c4f7259..c90ba212535 100644 --- a/src/tools/rust-analyzer/crates/ide/src/doc_links.rs +++ b/src/tools/rust-analyzer/crates/ide/src/doc_links.rs @@ -330,7 +330,9 @@ fn get_doc_links( base_url.and_then(|url| url.join(path).ok()) }; - let Some((target, file, frag)) = filename_and_frag_for_def(db, def) else { return Default::default(); }; + let Some((target, file, frag)) = filename_and_frag_for_def(db, def) else { + return Default::default(); + }; let (mut web_url, mut local_url) = get_doc_base_urls(db, target, target_dir, sysroot); diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs index 1362146413e..ef33386a7e9 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/render.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/render.rs @@ -422,10 +422,10 @@ pub(super) fn definition( |&it| { if !it.parent_enum(db).is_data_carrying(db) { match it.eval(db) { - Ok(x) => { - Some(if x >= 10 { format!("{x} ({x:#X})") } else { format!("{x}") }) + Ok(it) => { + Some(if it >= 10 { format!("{it} ({it:#X})") } else { format!("{it}") }) } - Err(_) => it.value(db).map(|x| format!("{x:?}")), + Err(_) => it.value(db).map(|it| format!("{it:?}")), } } else { None @@ -437,7 +437,7 @@ pub(super) fn definition( Definition::Const(it) => label_value_and_docs(db, it, |it| { let body = it.render_eval(db); match body { - Ok(x) => Some(x), + Ok(it) => Some(it), Err(_) => { let source = it.source(db)?; let mut body = source.value.body()?.syntax().clone(); diff --git a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs index f75ebfa12eb..00e21433daa 100644 --- a/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/hover/tests.rs @@ -674,7 +674,7 @@ struct Foo { fiel$0d_a: u8, field_b: i32, field_c: i16 } ``` ```rust - field_a: u8 // size = 1, align = 1, offset = 4 + field_a: u8 // size = 1, align = 1, offset = 6 ``` "#]], ); @@ -779,6 +779,39 @@ const foo$0: u32 = { ``` "#]], ); + + check( + r#"const FOO$0: i32 = -2147483648;"#, + expect![[r#" + *FOO* + + ```rust + test + ``` + + ```rust + const FOO: i32 = -2147483648 (0x80000000) + ``` + "#]], + ); + + check( + r#" + const FOO: i32 = -2147483648; + const BAR$0: bool = FOO > 0; + "#, + expect![[r#" + *BAR* + + ```rust + test + ``` + + ```rust + const BAR: bool = false + ``` + "#]], + ); } #[test] diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs 
b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs index 10bee2a6acc..6d6bd315ebb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/adjustment.rs @@ -259,7 +259,7 @@ fn needs_parens_for_adjustment_hints(expr: &ast::Expr, postfix: bool) -> (bool, } })() else { never!("broken syntax tree?\n{:?}\n{:?}", expr, dummy_expr); - return (true, true) + return (true, true); }; // At this point diff --git a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs index 84eac16b9f9..b621a8dda7e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs +++ b/src/tools/rust-analyzer/crates/ide/src/inlay_hints/chaining.rs @@ -474,7 +474,7 @@ fn main() { file_id: FileId( 1, ), - range: 9287..9295, + range: 9289..9297, }, ), tooltip: "", @@ -487,7 +487,7 @@ fn main() { file_id: FileId( 1, ), - range: 9319..9323, + range: 9321..9325, }, ), tooltip: "", @@ -511,7 +511,7 @@ fn main() { file_id: FileId( 1, ), - range: 9287..9295, + range: 9289..9297, }, ), tooltip: "", @@ -524,7 +524,7 @@ fn main() { file_id: FileId( 1, ), - range: 9319..9323, + range: 9321..9325, }, ), tooltip: "", @@ -548,7 +548,7 @@ fn main() { file_id: FileId( 1, ), - range: 9287..9295, + range: 9289..9297, }, ), tooltip: "", @@ -561,7 +561,7 @@ fn main() { file_id: FileId( 1, ), - range: 9319..9323, + range: 9321..9325, }, ), tooltip: "", diff --git a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs index cbcbb4b09db..d06ffd53575 100644 --- a/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs +++ b/src/tools/rust-analyzer/crates/ide/src/interpret_function.rs @@ -34,13 +34,15 @@ fn find_and_interpret(db: &RootDatabase, position: FilePosition) -> Option<Strin _ => return None, }; let span_formatter = |file_id, text_range: TextRange| { - let line_col = db.line_index(file_id).line_col(text_range.start()); let path = &db .source_root(db.file_source_root(file_id)) .path_for_file(&file_id) .map(|x| x.to_string()); let path = path.as_deref().unwrap_or("<unknown file>"); - format!("file://{path}#{}:{}", line_col.line + 1, line_col.col) + match db.line_index(file_id).try_line_col(text_range.start()) { + Some(line_col) => format!("file://{path}#{}:{}", line_col.line + 1, line_col.col), + None => format!("file://{path} range {:?}", text_range), + } }; Some(def.eval(db, span_formatter)) } diff --git a/src/tools/rust-analyzer/crates/ide/src/lib.rs b/src/tools/rust-analyzer/crates/ide/src/lib.rs index f195f78b3ab..0ad4c6c47e6 100644 --- a/src/tools/rust-analyzer/crates/ide/src/lib.rs +++ b/src/tools/rust-analyzer/crates/ide/src/lib.rs @@ -60,6 +60,7 @@ mod interpret_function; mod view_item_tree; mod shuffle_crate_graph; mod fetch_crates; +mod view_memory_layout; use std::ffi::OsStr; @@ -74,6 +75,7 @@ use ide_db::{ }; use syntax::SourceFile; use triomphe::Arc; +use view_memory_layout::{view_memory_layout, RecursiveMemoryLayout}; use crate::navigation_target::{ToNav, TryToNav}; @@ -642,7 +644,7 @@ impl Analysis { }; self.with_db(|db| { - let diagnostic_assists = if include_fixes { + let diagnostic_assists = if diagnostics_config.enabled && include_fixes { ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id) .into_iter() .flat_map(|it| it.fixes.unwrap_or_default()) @@ -724,6 +726,13 @@ impl Analysis { self.with_db(|db| move_item::move_item(db, range, direction)) 
} + pub fn get_recursive_memory_layout( + &self, + position: FilePosition, + ) -> Cancellable<Option<RecursiveMemoryLayout>> { + self.with_db(|db| view_memory_layout(db, position)) + } + /// Performs an operation on the database that may be canceled. /// /// rust-analyzer needs to be able to answer semantic questions about the diff --git a/src/tools/rust-analyzer/crates/ide/src/moniker.rs b/src/tools/rust-analyzer/crates/ide/src/moniker.rs index 0d57e63d29c..d486a794e13 100644 --- a/src/tools/rust-analyzer/crates/ide/src/moniker.rs +++ b/src/tools/rust-analyzer/crates/ide/src/moniker.rs @@ -320,7 +320,7 @@ use foo::module::func; fn main() { func$0(); } -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub fn func() {} } @@ -336,7 +336,7 @@ use foo::module::func; fn main() { func(); } -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub fn func$0() {} } @@ -351,7 +351,7 @@ pub mod module { fn moniker_for_trait() { check_moniker( r#" -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub trait MyTrait { pub fn func$0() {} @@ -368,7 +368,7 @@ pub mod module { fn moniker_for_trait_constant() { check_moniker( r#" -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub trait MyTrait { const MY_CONST$0: u8; @@ -385,7 +385,7 @@ pub mod module { fn moniker_for_trait_type() { check_moniker( r#" -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub trait MyTrait { type MyType$0; @@ -402,7 +402,7 @@ pub mod module { fn moniker_for_trait_impl_function() { check_moniker( r#" -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub trait MyTrait { pub fn func() {} @@ -430,7 +430,7 @@ use foo::St; fn main() { let x = St { a$0: 2 }; } -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub struct St { pub a: i32, } @@ -450,7 +450,7 @@ use foo::module::func; fn main() { func(); } -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub fn func() { let x$0 = 2; diff --git a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs index 385c1b0c008..c7abecb4f1e 100644 --- a/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs +++ b/src/tools/rust-analyzer/crates/ide/src/navigation_target.rs @@ -357,13 +357,11 @@ impl ToNav for hir::Module { impl TryToNav for hir::Impl { fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> { let InFile { file_id, value } = self.source(db)?; - let derive_attr = self.is_builtin_derive(db); + let derive_attr = self.as_builtin_derive(db); - let focus = if derive_attr.is_some() { None } else { value.self_ty() }; - - let syntax = match &derive_attr { - Some(attr) => attr.value.syntax(), - None => value.syntax(), + let (focus, syntax) = match &derive_attr { + Some(attr) => (None, attr.value.syntax()), + None => (value.self_ty(), value.syntax()), }; let (file_id, full_range, focus_range) = orig_range_with_focus(db, 
file_id, syntax, focus); diff --git a/src/tools/rust-analyzer/crates/ide/src/runnables.rs b/src/tools/rust-analyzer/crates/ide/src/runnables.rs index 27ad63d820d..9fa0e6449b8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/runnables.rs +++ b/src/tools/rust-analyzer/crates/ide/src/runnables.rs @@ -2,7 +2,7 @@ use std::fmt; use ast::HasName; use cfg::CfgExpr; -use hir::{AsAssocItem, HasAttrs, HasSource, Semantics}; +use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics}; use ide_assists::utils::test_related_attribute; use ide_db::{ base_db::{FilePosition, FileRange}, @@ -14,7 +14,7 @@ use ide_db::{ use itertools::Itertools; use stdx::{always, format_to}; use syntax::{ - ast::{self, AstNode, HasAttrs as _}, + ast::{self, AstNode}, SmolStr, SyntaxNode, }; @@ -307,7 +307,6 @@ pub(crate) fn runnable_fn( sema: &Semantics<'_, RootDatabase>, def: hir::Function, ) -> Option<Runnable> { - let func = def.source(sema.db)?; let name = def.name(sema.db).to_smol_str(); let root = def.module(sema.db).krate().root_module(sema.db); @@ -323,10 +322,10 @@ pub(crate) fn runnable_fn( canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name)) }; - if test_related_attribute(&func.value).is_some() { - let attr = TestAttr::from_fn(&func.value); + if def.is_test(sema.db) { + let attr = TestAttr::from_fn(sema.db, def); RunnableKind::Test { test_id: test_id(), attr } - } else if func.value.has_atom_attr("bench") { + } else if def.is_bench(sema.db) { RunnableKind::Bench { test_id: test_id() } } else { return None; @@ -335,7 +334,7 @@ pub(crate) fn runnable_fn( let nav = NavigationTarget::from_named( sema.db, - func.as_ref().map(|it| it as &dyn ast::HasName), + def.source(sema.db)?.as_ref().map(|it| it as &dyn ast::HasName), SymbolKind::Function, ); let cfg = def.attrs(sema.db).cfg(); @@ -487,12 +486,8 @@ pub struct TestAttr { } impl TestAttr { - fn from_fn(fn_def: &ast::Fn) -> TestAttr { - let ignore = fn_def - .attrs() - .filter_map(|attr| attr.simple_name()) - .any(|attribute_text| attribute_text == "ignore"); - TestAttr { ignore } + fn from_fn(db: &dyn HirDatabase, fn_def: hir::Function) -> TestAttr { + TestAttr { ignore: fn_def.is_ignore(db) } } } @@ -594,6 +589,9 @@ fn main() {} #[test] fn test_foo() {} +#[::core::prelude::v1::test] +fn test_full_path() {} + #[test] #[ignore] fn test_foo() {} @@ -605,7 +603,7 @@ mod not_a_root { fn main() {} } "#, - &[TestMod, Bin, Test, Test, Bench], + &[TestMod, Bin, Test, Test, Test, Bench], expect![[r#" [ Runnable { @@ -614,7 +612,7 @@ mod not_a_root { file_id: FileId( 0, ), - full_range: 0..137, + full_range: 0..190, name: "", kind: Module, }, @@ -664,8 +662,29 @@ mod not_a_root { file_id: FileId( 0, ), - full_range: 41..75, - focus_range: 62..70, + full_range: 41..92, + focus_range: 73..87, + name: "test_full_path", + kind: Function, + }, + kind: Test { + test_id: Path( + "test_full_path", + ), + attr: TestAttr { + ignore: false, + }, + }, + cfg: None, + }, + Runnable { + use_name_in_title: false, + nav: NavigationTarget { + file_id: FileId( + 0, + ), + full_range: 94..128, + focus_range: 115..123, name: "test_foo", kind: Function, }, @@ -685,8 +704,8 @@ mod not_a_root { file_id: FileId( 0, ), - full_range: 77..99, - focus_range: 89..94, + full_range: 130..152, + focus_range: 142..147, name: "bench", kind: Function, }, diff --git a/src/tools/rust-analyzer/crates/ide/src/static_index.rs b/src/tools/rust-analyzer/crates/ide/src/static_index.rs index 3e3d9f8f85c..59e8300dcdb 100644 --- a/src/tools/rust-analyzer/crates/ide/src/static_index.rs +++ 
b/src/tools/rust-analyzer/crates/ide/src/static_index.rs @@ -3,13 +3,14 @@ use std::collections::HashMap; -use hir::{db::HirDatabase, Crate, Module, Semantics}; +use hir::{db::HirDatabase, Crate, Module}; +use ide_db::helpers::get_definition; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, - defs::{Definition, IdentClass}, + defs::Definition, FxHashSet, RootDatabase, }; -use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T}; +use syntax::{AstNode, SyntaxKind::*, TextRange, T}; use crate::{ hover::hover_for_definition, @@ -73,7 +74,7 @@ impl TokenStore { } pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> { - self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x)) + self.0.into_iter().enumerate().map(|(id, data)| (TokenId(id), data)) } } @@ -132,9 +133,9 @@ impl StaticIndex<'_> { // hovers let sema = hir::Semantics::new(self.db); let tokens_or_nodes = sema.parse(file_id).syntax().clone(); - let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x { + let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|it| match it { syntax::NodeOrToken::Node(_) => None, - syntax::NodeOrToken::Token(x) => Some(x), + syntax::NodeOrToken::Token(it) => Some(it), }); let hover_config = HoverConfig { links_in_hover: true, @@ -154,28 +155,29 @@ impl StaticIndex<'_> { let range = token.text_range(); let node = token.parent().unwrap(); let def = match get_definition(&sema, token.clone()) { - Some(x) => x, + Some(it) => it, None => continue, }; - let id = if let Some(x) = self.def_map.get(&def) { - *x + let id = if let Some(it) = self.def_map.get(&def) { + *it } else { - let x = self.tokens.insert(TokenStaticData { + let it = self.tokens.insert(TokenStaticData { hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), - definition: def - .try_to_nav(self.db) - .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }), + definition: def.try_to_nav(self.db).map(|it| FileRange { + file_id: it.file_id, + range: it.focus_or_full_range(), + }), references: vec![], moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)), }); - self.def_map.insert(def, x); - x + self.def_map.insert(def, it); + it }; let token = self.tokens.get_mut(id).unwrap(); token.references.push(ReferenceData { range: FileRange { range, file_id }, is_definition: match def.try_to_nav(self.db) { - Some(x) => x.file_id == file_id && x.focus_or_full_range() == range, + Some(it) => it.file_id == file_id && it.focus_or_full_range() == range, None => false, }, }); @@ -187,7 +189,7 @@ impl StaticIndex<'_> { pub fn compute(analysis: &Analysis) -> StaticIndex<'_> { let db = &*analysis.db; let work = all_modules(db).into_iter().filter(|module| { - let file_id = module.definition_source(db).file_id.original_file(db); + let file_id = module.definition_source_file_id(db).original_file(db); let source_root = db.file_source_root(file_id); let source_root = db.source_root(source_root); !source_root.is_library @@ -201,7 +203,7 @@ impl StaticIndex<'_> { }; let mut visited_files = FxHashSet::default(); for module in work { - let file_id = module.definition_source(db).file_id.original_file(db); + let file_id = module.definition_source_file_id(db).original_file(db); if visited_files.contains(&file_id) { continue; } @@ -213,16 +215,6 @@ impl StaticIndex<'_> { } } -fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> { - for token in sema.descend_into_macros(token) { - let def = 
IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops); - if let Some(&[x]) = def.as_deref() { - return Some(x); - } - } - None -} - #[cfg(test)] mod tests { use crate::{fixture, StaticIndex}; @@ -233,14 +225,14 @@ mod tests { fn check_all_ranges(ra_fixture: &str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let s = StaticIndex::compute(&analysis); - let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect(); + let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect(); for f in s.files { for (range, _) in f.tokens { - let x = FileRange { file_id: f.file_id, range }; - if !range_set.contains(&x) { - panic!("additional range {x:?}"); + let it = FileRange { file_id: f.file_id, range }; + if !range_set.contains(&it) { + panic!("additional range {it:?}"); } - range_set.remove(&x); + range_set.remove(&it); } } if !range_set.is_empty() { @@ -251,17 +243,17 @@ mod tests { fn check_definitions(ra_fixture: &str) { let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let s = StaticIndex::compute(&analysis); - let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect(); + let mut range_set: HashSet<_> = ranges.iter().map(|it| it.0).collect(); for (_, t) in s.tokens.iter() { - if let Some(x) = t.definition { - if x.range.start() == TextSize::from(0) { + if let Some(t) = t.definition { + if t.range.start() == TextSize::from(0) { // ignore definitions that are whole of file continue; } - if !range_set.contains(&x) { - panic!("additional definition {x:?}"); + if !range_set.contains(&t) { + panic!("additional definition {t:?}"); } - range_set.remove(&x); + range_set.remove(&t); } } if !range_set.is_empty() { diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs index dc06591ffea..577bd2bc1f8 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs @@ -265,10 +265,14 @@ fn traverse( // set macro and attribute highlighting states match event.clone() { - Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => { + Enter(NodeOrToken::Node(node)) + if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) => + { tt_level += 1; } - Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => { + Leave(NodeOrToken::Node(node)) + if current_macro.is_none() && ast::TokenTree::can_cast(node.kind()) => + { tt_level -= 1; } Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => { @@ -387,7 +391,7 @@ fn traverse( }; let descended_element = if in_macro { // Attempt to descend tokens into macro-calls. 
- match element { + let res = match element { NodeOrToken::Token(token) if token.kind() != COMMENT => { let token = match attr_or_derive_item { Some(AttrOrDerive::Attr(_)) => { @@ -412,7 +416,8 @@ fn traverse( } } e => e, - } + }; + res } else { element }; diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html index fa374b04f19..f4f164aa1de 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_strings.html @@ -161,7 +161,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span> <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">b"</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x28</span><span class="escape_sequence">\x00</span><span class="escape_sequence">\x63</span><span class="escape_sequence">\n</span><span class="string_literal">"</span><span class="semicolon">;</span> - <span class="keyword">let</span> <span class="punctuation">_</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span> + <span class="keyword">let</span> <span class="variable declaration reference">backslash</span> <span class="operator">=</span> <span class="string_literal">r"\\"</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">A</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">ничоси</span> <span class="operator macro">=</span> <span class="numeric_literal macro">92</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> @@ -173,6 +173,6 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd <span class="macro">assert</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="bool_literal macro">true</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> asdasd"</span><span class="comma macro">,</span> <span class="numeric_literal 
macro">1</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">toho</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">fmt"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro unsafe">asm</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"mov eax, </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> - <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> - <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="operator macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> + <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="none macro">concat</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="string_literal macro">"{}"</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> + <span class="macro">format_args</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span 
class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro"> </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="none macro">backslash</span><span class="comma macro">,</span> <span class="none macro">format_args</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="none macro">foo</span><span class="comma macro">,</span> <span class="string_literal macro">"bar"</span><span class="comma macro">,</span> <span class="none macro">toho</span><span class="punctuation macro">!</span><span class="parenthesis macro">(</span><span class="parenthesis macro">)</span><span class="comma macro">,</span> <span class="none macro">backslash</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="brace">}</span></code></pre> \ No newline at end of file diff --git a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs index 497992f684c..1ee451a06d0 100644 --- a/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs +++ b/src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs @@ -507,7 +507,7 @@ fn main() { let _ = "\x28\x28\x00\x63\n"; let _ = b"\x28\x28\x00\x63\n"; - let _ = r"\\"; + let backslash = r"\\"; println!("{\x41}", A = 92); println!("{ничоси}", ничоси = 92); @@ -520,7 +520,7 @@ fn main() { toho!("{}fmt", 0); asm!("mov eax, {0}"); format_args!(concat!("{}"), "{}"); - format_args!("{}", format_args!("{}", 0)); + format_args!("{} {} {} {} {} {}", backslash, format_args!("{}", 0), foo, "bar", toho!(), backslash); }"#, expect_file!["./test_data/highlight_strings.html"], false, diff --git a/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs new file mode 100644 index 00000000000..2f6332abd25 --- /dev/null +++ b/src/tools/rust-analyzer/crates/ide/src/view_memory_layout.rs @@ -0,0 +1,409 @@ +use std::fmt; + +use hir::{Field, HirDisplay, Layout, Semantics, Type}; +use ide_db::{ + defs::Definition, + helpers::{get_definition, pick_best_token}, + RootDatabase, +}; +use syntax::{AstNode, SyntaxKind}; + +use crate::FilePosition; + +pub struct MemoryLayoutNode { + pub item_name: String, + pub typename: String, + pub size: u64, + pub alignment: u64, + pub offset: u64, + pub parent_idx: i64, + pub children_start: i64, + pub children_len: u64, +} + +pub struct RecursiveMemoryLayout { + pub nodes: Vec<MemoryLayoutNode>, +} + +// NOTE: this is currently strictly for testing and so isn't super useful as a visualization tool, however it could be adapted to become one? 
+impl fmt::Display for RecursiveMemoryLayout { + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fn process( + fmt: &mut fmt::Formatter<'_>, + nodes: &Vec<MemoryLayoutNode>, + idx: usize, + depth: usize, + ) -> fmt::Result { + let mut out = "\t".repeat(depth); + let node = &nodes[idx]; + out += &format!( + "{}: {} (size: {}, align: {}, field offset: {})\n", + node.item_name, node.typename, node.size, node.alignment, node.offset + ); + write!(fmt, "{}", out)?; + if node.children_start != -1 { + for j in nodes[idx].children_start + ..(nodes[idx].children_start + nodes[idx].children_len as i64) + { + process(fmt, nodes, j as usize, depth + 1)?; + } + } + Ok(()) + } + + process(fmt, &self.nodes, 0, 0) + } +} + +enum FieldOrTupleIdx { + Field(Field), + TupleIdx(usize), +} + +impl FieldOrTupleIdx { + fn name(&self, db: &RootDatabase) -> String { + match *self { + FieldOrTupleIdx::Field(f) => f + .name(db) + .as_str() + .map(|s| s.to_owned()) + .unwrap_or_else(|| format!(".{}", f.name(db).as_tuple_index().unwrap())), + FieldOrTupleIdx::TupleIdx(i) => format!(".{i}").to_owned(), + } + } + + fn index(&self) -> usize { + match *self { + FieldOrTupleIdx::Field(f) => f.index(), + FieldOrTupleIdx::TupleIdx(i) => i, + } + } +} + +// Feature: View Memory Layout +// +// Displays the recursive memory layout of a datatype. +// +// |=== +// | Editor | Action Name +// +// | VS Code | **rust-analyzer: View Memory Layout** +// |=== +pub(crate) fn view_memory_layout( + db: &RootDatabase, + position: FilePosition, +) -> Option<RecursiveMemoryLayout> { + let sema = Semantics::new(db); + let file = sema.parse(position.file_id); + let token = + pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind { + SyntaxKind::IDENT => 3, + _ => 0, + })?; + + let def = get_definition(&sema, token)?; + + let ty = match def { + Definition::Adt(it) => it.ty(db), + Definition::TypeAlias(it) => it.ty(db), + Definition::BuiltinType(it) => it.ty(db), + Definition::SelfType(it) => it.self_ty(db), + Definition::Local(it) => it.ty(db), + Definition::Field(it) => it.ty(db), + Definition::Const(it) => it.ty(db), + Definition::Static(it) => it.ty(db), + _ => return None, + }; + + fn read_layout( + nodes: &mut Vec<MemoryLayoutNode>, + db: &RootDatabase, + ty: &Type, + layout: &Layout, + parent_idx: usize, + ) { + let mut fields = ty + .fields(db) + .into_iter() + .map(|(f, ty)| (FieldOrTupleIdx::Field(f), ty)) + .chain( + ty.tuple_fields(db) + .into_iter() + .enumerate() + .map(|(i, ty)| (FieldOrTupleIdx::TupleIdx(i), ty)), + ) + .collect::<Vec<_>>(); + + if fields.len() == 0 { + return; + } + + fields.sort_by_key(|(f, _)| layout.field_offset(f.index()).unwrap()); + + let children_start = nodes.len(); + nodes[parent_idx].children_start = children_start as i64; + nodes[parent_idx].children_len = fields.len() as u64; + + for (field, child_ty) in fields.iter() { + if let Ok(child_layout) = child_ty.layout(db) { + nodes.push(MemoryLayoutNode { + item_name: field.name(db), + typename: child_ty.display(db).to_string(), + size: child_layout.size(), + alignment: child_layout.align(), + offset: layout.field_offset(field.index()).unwrap_or(0), + parent_idx: parent_idx as i64, + children_start: -1, + children_len: 0, + }); + } else { + nodes.push(MemoryLayoutNode { + item_name: field.name(db) + + format!("(no layout data: {:?})", child_ty.layout(db).unwrap_err()) + .as_ref(), + typename: child_ty.display(db).to_string(), + size: 0, + offset: 0, + alignment: 0, + parent_idx: parent_idx as i64, + children_start: 
-1, + children_len: 0, + }); + } + } + + for (i, (_, child_ty)) in fields.iter().enumerate() { + if let Ok(child_layout) = child_ty.layout(db) { + read_layout(nodes, db, &child_ty, &child_layout, children_start + i); + } + } + } + + ty.layout(db) + .map(|layout| { + let item_name = match def { + // def is a datatype + Definition::Adt(_) + | Definition::TypeAlias(_) + | Definition::BuiltinType(_) + | Definition::SelfType(_) => "[ROOT]".to_owned(), + + // def is an item + def => def + .name(db) + .map(|n| { + n.as_str() + .map(|s| s.to_owned()) + .unwrap_or_else(|| format!(".{}", n.as_tuple_index().unwrap())) + }) + .unwrap_or("[ROOT]".to_owned()), + }; + + let typename = ty.display(db).to_string(); + + let mut nodes = vec![MemoryLayoutNode { + item_name, + typename: typename.clone(), + size: layout.size(), + offset: 0, + alignment: layout.align(), + parent_idx: -1, + children_start: -1, + children_len: 0, + }]; + read_layout(&mut nodes, db, &ty, &layout, 0); + + RecursiveMemoryLayout { nodes } + }) + .ok() +} + +#[cfg(test)] +mod tests { + use super::*; + + use crate::fixture; + use expect_test::expect; + + fn make_memory_layout(ra_fixture: &str) -> Option<RecursiveMemoryLayout> { + let (analysis, position, _) = fixture::annotations(ra_fixture); + + view_memory_layout(&analysis.db, position) + } + + #[test] + fn view_memory_layout_none() { + assert!(make_memory_layout(r#"$0"#).is_none()); + assert!(make_memory_layout(r#"stru$0ct Blah {}"#).is_none()); + } + + #[test] + fn view_memory_layout_primitive() { + expect![[r#" + foo: i32 (size: 4, align: 4, field offset: 0) + "#]] + .assert_eq( + &make_memory_layout( + r#" +fn main() { + let foo$0 = 109; // default i32 +} +"#, + ) + .unwrap() + .to_string(), + ); + } + + #[test] + fn view_memory_layout_constant() { + expect![[r#" + BLAH: bool (size: 1, align: 1, field offset: 0) + "#]] + .assert_eq( + &make_memory_layout( + r#" +const BLAH$0: bool = 0; +"#, + ) + .unwrap() + .to_string(), + ); + } + + #[test] + fn view_memory_layout_static() { + expect![[r#" + BLAH: bool (size: 1, align: 1, field offset: 0) + "#]] + .assert_eq( + &make_memory_layout( + r#" +static BLAH$0: bool = 0; +"#, + ) + .unwrap() + .to_string(), + ); + } + + #[test] + fn view_memory_layout_tuple() { + expect![[r#" + x: (f64, u8, i64) (size: 24, align: 8, field offset: 0) + .0: f64 (size: 8, align: 8, field offset: 0) + .1: u8 (size: 1, align: 1, field offset: 8) + .2: i64 (size: 8, align: 8, field offset: 16) + "#]] + .assert_eq( + &make_memory_layout( + r#" +fn main() { + let x$0 = (101.0, 111u8, 119i64); +} +"#, + ) + .unwrap() + .to_string(), + ); + } + + #[test] + fn view_memory_layout_c_struct() { + expect![[r#" + [ROOT]: Blah (size: 16, align: 4, field offset: 0) + a: u32 (size: 4, align: 4, field offset: 0) + b: (i32, u8) (size: 8, align: 4, field offset: 4) + .0: i32 (size: 4, align: 4, field offset: 0) + .1: u8 (size: 1, align: 1, field offset: 4) + c: i8 (size: 1, align: 1, field offset: 12) + "#]] + .assert_eq( + &make_memory_layout( + r#" +#[repr(C)] +struct Blah$0 { + a: u32, + b: (i32, u8), + c: i8, +} +"#, + ) + .unwrap() + .to_string(), + ); + } + + #[test] + fn view_memory_layout_struct() { + expect![[r#" + [ROOT]: Blah (size: 16, align: 4, field offset: 0) + b: (i32, u8) (size: 8, align: 4, field offset: 0) + .0: i32 (size: 4, align: 4, field offset: 0) + .1: u8 (size: 1, align: 1, field offset: 4) + a: u32 (size: 4, align: 4, field offset: 8) + c: i8 (size: 1, align: 1, field offset: 12) + "#]] + .assert_eq( + &make_memory_layout( + r#" +struct Blah$0 { + 
a: u32, + b: (i32, u8), + c: i8, +} +"#, + ) + .unwrap() + .to_string(), + ); + } + + #[test] + fn view_memory_layout_member() { + expect![[r#" + a: bool (size: 1, align: 1, field offset: 0) + "#]] + .assert_eq( + &make_memory_layout( + r#" +#[repr(C)] +struct Oof { + a$0: bool, +} +"#, + ) + .unwrap() + .to_string(), + ); + } + + #[test] + fn view_memory_layout_alias() { + let ml_a = make_memory_layout( + r#" +struct X { + a: u32, + b: i8, + c: (f32, f32), +} + +type Foo$0 = X; +"#, + ) + .unwrap(); + + let ml_b = make_memory_layout( + r#" +struct X$0 { + a: u32, + b: i8, + c: (f32, f32), +} +"#, + ) + .unwrap(); + + assert_eq!(ml_a.to_string(), ml_b.to_string()); + } +} diff --git a/src/tools/rust-analyzer/crates/intern/Cargo.toml b/src/tools/rust-analyzer/crates/intern/Cargo.toml index dcd0d788125..4d56c771960 100644 --- a/src/tools/rust-analyzer/crates/intern/Cargo.toml +++ b/src/tools/rust-analyzer/crates/intern/Cargo.toml @@ -15,7 +15,7 @@ doctest = false [dependencies] # We need to freeze the version of the crate, as the raw-api feature is considered unstable dashmap = { version = "=5.4.0", features = ["raw-api"] } -hashbrown = { version = "0.12.1", default-features = false } +hashbrown.workspace = true once_cell = "1.17.0" rustc-hash = "1.1.0" triomphe.workspace = true diff --git a/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml new file mode 100644 index 00000000000..f041ca88ac0 --- /dev/null +++ b/src/tools/rust-analyzer/crates/load-cargo/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "load-cargo" +version = "0.0.0" +description = "TBD" + +rust-version.workspace = true +edition.workspace = true +license.workspace = true +authors.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1.0.62" +crossbeam-channel = "0.5.5" +itertools = "0.10.5" +tracing = "0.1.35" + +ide.workspace = true +ide-db.workspace =true +proc-macro-api.workspace = true +project-model.workspace = true +tt.workspace = true +vfs.workspace = true +vfs-notify.workspace = true diff --git a/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs new file mode 100644 index 00000000000..7a795dd62ab --- /dev/null +++ b/src/tools/rust-analyzer/crates/load-cargo/src/lib.rs @@ -0,0 +1,441 @@ +//! Loads a Cargo project into a static instance of analysis, without support +//! for incorporating changes. +// Note, don't remove any public api from this. This API is consumed by external tools +// to run rust-analyzer as a library. 
+use std::{collections::hash_map::Entry, mem, path::Path, sync}; + +use ::tt::token_id as tt; +use crossbeam_channel::{unbounded, Receiver}; +use ide::{AnalysisHost, Change, SourceRoot}; +use ide_db::{ + base_db::{ + CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, + ProcMacroLoadResult, ProcMacros, + }, + FxHashMap, +}; +use itertools::Itertools; +use proc_macro_api::{MacroDylib, ProcMacroServer}; +use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; +use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; + +pub struct LoadCargoConfig { + pub load_out_dirs_from_check: bool, + pub with_proc_macro_server: ProcMacroServerChoice, + pub prefill_caches: bool, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum ProcMacroServerChoice { + Sysroot, + Explicit(AbsPathBuf), + None, +} + +pub fn load_workspace_at( + root: &Path, + cargo_config: &CargoConfig, + load_config: &LoadCargoConfig, + progress: &dyn Fn(String), +) -> anyhow::Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> { + let root = AbsPathBuf::assert(std::env::current_dir()?.join(root)); + let root = ProjectManifest::discover_single(&root)?; + let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?; + + if load_config.load_out_dirs_from_check { + let build_scripts = workspace.run_build_scripts(cargo_config, progress)?; + workspace.set_build_scripts(build_scripts) + } + + load_workspace(workspace, &cargo_config.extra_env, load_config) +} + +pub fn load_workspace( + ws: ProjectWorkspace, + extra_env: &FxHashMap<String, String>, + load_config: &LoadCargoConfig, +) -> anyhow::Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> { + let (sender, receiver) = unbounded(); + let mut vfs = vfs::Vfs::default(); + let mut loader = { + let loader = + vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap())); + Box::new(loader) + }; + + let proc_macro_server = match &load_config.with_proc_macro_server { + ProcMacroServerChoice::Sysroot => ws + .find_sysroot_proc_macro_srv() + .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)), + ProcMacroServerChoice::Explicit(path) => { + ProcMacroServer::spawn(path.clone()).map_err(Into::into) + } + ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")), + }; + + let (crate_graph, proc_macros) = ws.to_crate_graph( + &mut |path: &AbsPath| { + let contents = loader.load_sync(path); + let path = vfs::VfsPath::from(path.to_path_buf()); + vfs.set_file_contents(path.clone(), contents); + vfs.file_id(&path) + }, + extra_env, + ); + let proc_macros = { + let proc_macro_server = match &proc_macro_server { + Ok(it) => Ok(it), + Err(e) => Err(e.to_string()), + }; + proc_macros + .into_iter() + .map(|(crate_id, path)| { + ( + crate_id, + path.map_or_else( + |_| Err("proc macro crate is missing dylib".to_owned()), + |(_, path)| { + proc_macro_server.as_ref().map_err(Clone::clone).and_then( + |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]), + ) + }, + ), + ) + }) + .collect() + }; + + let project_folders = ProjectFolders::new(&[ws], &[]); + loader.set_config(vfs::loader::Config { + load: project_folders.load, + watch: vec![], + version: 0, + }); + + let host = load_crate_graph( + crate_graph, + proc_macros, + project_folders.source_root_config, + &mut vfs, + &receiver, + ); + + if load_config.prefill_caches { + host.analysis().parallel_prime_caches(1, |_| {})?; + } + Ok((host, vfs, proc_macro_server.ok())) +} + 
+#[derive(Default)] +pub struct ProjectFolders { + pub load: Vec<vfs::loader::Entry>, + pub watch: Vec<usize>, + pub source_root_config: SourceRootConfig, +} + +impl ProjectFolders { + pub fn new(workspaces: &[ProjectWorkspace], global_excludes: &[AbsPathBuf]) -> ProjectFolders { + let mut res = ProjectFolders::default(); + let mut fsc = FileSetConfig::builder(); + let mut local_filesets = vec![]; + + // Dedup source roots + // Depending on the project setup, we can have duplicated source roots, or for example in + // the case of the rustc workspace, we can end up with two source roots that are almost the + // same but not quite, like: + // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] } + // PackageRoot { + // is_local: true, + // include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")], + // exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")] + // } + // + // The first one comes from the explicit rustc workspace which points to the rustc workspace itself + // The second comes from the rustc workspace that we load as the actual project workspace + // These `is_local` differing in this kind of way gives us problems, especially when trying to filter diagnostics as we don't report diagnostics for external libraries. + // So we need to deduplicate these, usually it would be enough to deduplicate by `include`, but as the rustc example shows here that doesn't work, + // so we need to also coalesce the includes if they overlap. + + let mut roots: Vec<_> = workspaces + .iter() + .flat_map(|ws| ws.to_roots()) + .update(|root| root.include.sort()) + .sorted_by(|a, b| a.include.cmp(&b.include)) + .collect(); + + // map that tracks indices of overlapping roots + let mut overlap_map = FxHashMap::<_, Vec<_>>::default(); + let mut done = false; + + while !mem::replace(&mut done, true) { + // maps include paths to indices of the corresponding root + let mut include_to_idx = FxHashMap::default(); + // Find and note down the indices of overlapping roots + for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) { + for include in &root.include { + match include_to_idx.entry(include) { + Entry::Occupied(e) => { + overlap_map.entry(*e.get()).or_default().push(idx); + } + Entry::Vacant(e) => { + e.insert(idx); + } + } + } + } + for (k, v) in overlap_map.drain() { + done = false; + for v in v { + let r = mem::replace( + &mut roots[v], + PackageRoot { is_local: false, include: vec![], exclude: vec![] }, + ); + roots[k].is_local |= r.is_local; + roots[k].include.extend(r.include); + roots[k].exclude.extend(r.exclude); + } + roots[k].include.sort(); + roots[k].exclude.sort(); + roots[k].include.dedup(); + roots[k].exclude.dedup(); + } + } + + for root in roots.into_iter().filter(|it| !it.include.is_empty()) { + let file_set_roots: Vec<VfsPath> = + root.include.iter().cloned().map(VfsPath::from).collect(); + + let entry = { + let mut dirs = vfs::loader::Directories::default(); + dirs.extensions.push("rs".into()); + dirs.include.extend(root.include); + dirs.exclude.extend(root.exclude); + for excl in global_excludes { + if dirs + .include + .iter() + .any(|incl| incl.starts_with(excl) || excl.starts_with(incl)) + { + dirs.exclude.push(excl.clone()); + } + } + + vfs::loader::Entry::Directories(dirs) + }; + + if root.is_local { 
+ res.watch.push(res.load.len()); + } + res.load.push(entry); + + if root.is_local { + local_filesets.push(fsc.len()); + } + fsc.add_file_set(file_set_roots) + } + + let fsc = fsc.build(); + res.source_root_config = SourceRootConfig { fsc, local_filesets }; + + res + } +} + +#[derive(Default, Debug)] +pub struct SourceRootConfig { + pub fsc: FileSetConfig, + pub local_filesets: Vec<usize>, +} + +impl SourceRootConfig { + pub fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> { + self.fsc + .partition(vfs) + .into_iter() + .enumerate() + .map(|(idx, file_set)| { + let is_local = self.local_filesets.contains(&idx); + if is_local { + SourceRoot::new_local(file_set) + } else { + SourceRoot::new_library(file_set) + } + }) + .collect() + } +} + +/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace` +/// with an identity dummy expander. +pub fn load_proc_macro( + server: &ProcMacroServer, + path: &AbsPath, + dummy_replace: &[Box<str>], +) -> ProcMacroLoadResult { + let res: Result<Vec<_>, String> = (|| { + let dylib = MacroDylib::new(path.to_path_buf()); + let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?; + if vec.is_empty() { + return Err("proc macro library returned no proc macros".to_string()); + } + Ok(vec + .into_iter() + .map(|expander| expander_to_proc_macro(expander, dummy_replace)) + .collect()) + })(); + match res { + Ok(proc_macros) => { + tracing::info!( + "Loaded proc-macros for {path}: {:?}", + proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>() + ); + Ok(proc_macros) + } + Err(e) => { + tracing::warn!("proc-macro loading for {path} failed: {e}"); + Err(e) + } + } +} + +fn load_crate_graph( + crate_graph: CrateGraph, + proc_macros: ProcMacros, + source_root_config: SourceRootConfig, + vfs: &mut vfs::Vfs, + receiver: &Receiver<vfs::loader::Message>, +) -> AnalysisHost { + let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok()); + let mut host = AnalysisHost::new(lru_cap); + let mut analysis_change = Change::new(); + + host.raw_database_mut().enable_proc_attr_macros(); + + // wait until Vfs has loaded all roots + for task in receiver { + match task { + vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => { + if n_done == n_total { + break; + } + } + vfs::loader::Message::Loaded { files } => { + for (path, contents) in files { + vfs.set_file_contents(path.into(), contents); + } + } + } + } + let changes = vfs.take_changes(); + for file in changes { + if file.exists() { + let contents = vfs.file_contents(file.file_id); + if let Ok(text) = std::str::from_utf8(contents) { + analysis_change.change_file(file.file_id, Some(text.into())) + } + } + } + let source_roots = source_root_config.partition(vfs); + analysis_change.set_roots(source_roots); + + analysis_change.set_crate_graph(crate_graph); + analysis_change.set_proc_macros(proc_macros); + + host.apply_change(analysis_change); + host +} + +fn expander_to_proc_macro( + expander: proc_macro_api::ProcMacro, + dummy_replace: &[Box<str>], +) -> ProcMacro { + let name = From::from(expander.name()); + let kind = match expander.kind() { + proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive, + proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike, + proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr, + }; + let expander: sync::Arc<dyn ProcMacroExpander> = + if dummy_replace.iter().any(|replace| &**replace == name) { + match kind { + ProcMacroKind::Attr => 
sync::Arc::new(IdentityExpander), + _ => sync::Arc::new(EmptyExpander), + } + } else { + sync::Arc::new(Expander(expander)) + }; + ProcMacro { name, kind, expander } +} + +#[derive(Debug)] +struct Expander(proc_macro_api::ProcMacro); + +impl ProcMacroExpander for Expander { + fn expand( + &self, + subtree: &tt::Subtree, + attrs: Option<&tt::Subtree>, + env: &Env, + ) -> Result<tt::Subtree, ProcMacroExpansionError> { + let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); + match self.0.expand(subtree, attrs, env) { + Ok(Ok(subtree)) => Ok(subtree), + Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), + Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), + } + } +} + +/// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user. +#[derive(Debug)] +struct IdentityExpander; + +impl ProcMacroExpander for IdentityExpander { + fn expand( + &self, + subtree: &tt::Subtree, + _: Option<&tt::Subtree>, + _: &Env, + ) -> Result<tt::Subtree, ProcMacroExpansionError> { + Ok(subtree.clone()) + } +} + +/// Empty expander, used for proc-macros that are deliberately ignored by the user. +#[derive(Debug)] +struct EmptyExpander; + +impl ProcMacroExpander for EmptyExpander { + fn expand( + &self, + _: &tt::Subtree, + _: Option<&tt::Subtree>, + _: &Env, + ) -> Result<tt::Subtree, ProcMacroExpansionError> { + Ok(tt::Subtree::empty()) + } +} + +#[cfg(test)] +mod tests { + use ide_db::base_db::SourceDatabase; + + use super::*; + + #[test] + fn test_loading_rust_analyzer() { + let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); + let cargo_config = CargoConfig::default(); + let load_cargo_config = LoadCargoConfig { + load_out_dirs_from_check: false, + with_proc_macro_server: ProcMacroServerChoice::None, + prefill_caches: false, + }; + let (host, _vfs, _proc_macro) = + load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap(); + + let n_crates = host.raw_database().crate_graph().iter().count(); + // RA has quite a few crates, but the exact count doesn't matter + assert!(n_crates > 20); + } +} diff --git a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs index d28dd17def3..9d43e130457 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/benchmark.rs @@ -20,10 +20,7 @@ fn benchmark_parse_macro_rules() { let rules = macro_rules_fixtures_tt(); let hash: usize = { let _pt = bench("mbe parse macro rules"); - rules - .values() - .map(|it| DeclarativeMacro::parse_macro_rules(it, true).unwrap().rules.len()) - .sum() + rules.values().map(|it| DeclarativeMacro::parse_macro_rules(it, true).rules.len()).sum() }; assert_eq!(hash, 1144); } @@ -41,7 +38,7 @@ fn benchmark_expand_macro_rules() { invocations .into_iter() .map(|(id, tt)| { - let res = rules[&id].expand(&tt); + let res = rules[&id].expand(tt); assert!(res.err.is_none()); res.value.token_trees.len() }) @@ -53,7 +50,7 @@ fn benchmark_expand_macro_rules() { fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> { macro_rules_fixtures_tt() .into_iter() - .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true).unwrap())) + .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true))) .collect() } @@ -105,7 +102,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri for op in rule.lhs.iter() { collect_from_op(op, &mut subtree, &mut seed); } - if 
it.expand(&subtree).err.is_none() { + if it.expand(subtree.clone()).err.is_none() { res.push((name.clone(), subtree)); break; } diff --git a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs index 474826079d7..1a7b7eed295 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/expander/matcher.rs @@ -884,7 +884,7 @@ impl<'a> Iterator for OpDelimitedIter<'a> { } } -impl<'a> TtIter<'a> { +impl TtIter<'_> { fn expect_separator(&mut self, separator: &Separator) -> bool { let mut fork = self.clone(); let ok = match separator { diff --git a/src/tools/rust-analyzer/crates/mbe/src/lib.rs b/src/tools/rust-analyzer/crates/mbe/src/lib.rs index 5ef20ff8a9b..665bce474a6 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/lib.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/lib.rs @@ -34,7 +34,8 @@ pub use ::parser::TopEntryPoint; pub use crate::{ syntax_bridge::{ - parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, + parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_map, + syntax_node_to_token_map_with_modifications, syntax_node_to_token_tree, syntax_node_to_token_tree_with_modifications, token_tree_to_syntax_node, SyntheticToken, SyntheticTokenId, }, @@ -131,6 +132,7 @@ pub struct DeclarativeMacro { // This is used for correctly determining the behavior of the pat fragment // FIXME: This should be tracked by hygiene of the fragment identifier! is_2021: bool, + err: Option<Box<ParseError>>, } #[derive(Clone, Debug, PartialEq, Eq)] @@ -206,81 +208,118 @@ impl Shift { } } -#[derive(Debug, Eq, PartialEq)] +#[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Origin { Def, Call, } impl DeclarativeMacro { + pub fn from_err(err: ParseError, is_2021: bool) -> DeclarativeMacro { + DeclarativeMacro { + rules: Box::default(), + shift: Shift(0), + is_2021, + err: Some(Box::new(err)), + } + } + /// The old, `macro_rules! m {}` flavor. - pub fn parse_macro_rules( - tt: &tt::Subtree, - is_2021: bool, - ) -> Result<DeclarativeMacro, ParseError> { + pub fn parse_macro_rules(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { // Note: this parsing can be implemented using mbe machinery itself, by // matching against `$($lhs:tt => $rhs:tt);*` pattern, but implementing // manually seems easier. let mut src = TtIter::new(tt); let mut rules = Vec::new(); + let mut err = None; + while src.len() > 0 { - let rule = Rule::parse(&mut src, true)?; + let rule = match Rule::parse(&mut src, true) { + Ok(it) => it, + Err(e) => { + err = Some(Box::new(e)); + break; + } + }; rules.push(rule); if let Err(()) = src.expect_char(';') { if src.len() > 0 { - return Err(ParseError::expected("expected `;`")); + err = Some(Box::new(ParseError::expected("expected `;`"))); } break; } } for Rule { lhs, .. } in &rules { - validate(lhs)?; + if let Err(e) = validate(lhs) { + err = Some(Box::new(e)); + break; + } } - Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 }) + DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } } /// The new, unstable `macro m {}` flavor. 
- pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> Result<DeclarativeMacro, ParseError> { + pub fn parse_macro2(tt: &tt::Subtree, is_2021: bool) -> DeclarativeMacro { let mut src = TtIter::new(tt); let mut rules = Vec::new(); + let mut err = None; if tt::DelimiterKind::Brace == tt.delimiter.kind { cov_mark::hit!(parse_macro_def_rules); while src.len() > 0 { - let rule = Rule::parse(&mut src, true)?; + let rule = match Rule::parse(&mut src, true) { + Ok(it) => it, + Err(e) => { + err = Some(Box::new(e)); + break; + } + }; rules.push(rule); if let Err(()) = src.expect_any_char(&[';', ',']) { if src.len() > 0 { - return Err(ParseError::expected("expected `;` or `,` to delimit rules")); + err = Some(Box::new(ParseError::expected( + "expected `;` or `,` to delimit rules", + ))); } break; } } } else { cov_mark::hit!(parse_macro_def_simple); - let rule = Rule::parse(&mut src, false)?; - if src.len() != 0 { - return Err(ParseError::expected("remaining tokens in macro def")); + match Rule::parse(&mut src, false) { + Ok(rule) => { + if src.len() != 0 { + err = Some(Box::new(ParseError::expected("remaining tokens in macro def"))); + } + rules.push(rule); + } + Err(e) => { + err = Some(Box::new(e)); + } } - rules.push(rule); } for Rule { lhs, .. } in &rules { - validate(lhs)?; + if let Err(e) = validate(lhs) { + err = Some(Box::new(e)); + break; + } } - Ok(DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021 }) + DeclarativeMacro { rules: rules.into_boxed_slice(), shift: Shift::new(tt), is_2021, err } } - pub fn expand(&self, tt: &tt::Subtree) -> ExpandResult<tt::Subtree> { - // apply shift - let mut tt = tt.clone(); + pub fn expand(&self, mut tt: tt::Subtree) -> ExpandResult<tt::Subtree> { self.shift.shift_all(&mut tt); expander::expand_rules(&self.rules, &tt, self.is_2021) } + pub fn err(&self) -> Option<&ParseError> { + self.err.as_deref() + } + pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId { self.shift.shift(id) } diff --git a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs index 8cbf0f8fc0b..62b2accf5cd 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/syntax_bridge.rs @@ -53,6 +53,37 @@ pub fn syntax_node_to_token_tree_with_modifications( (subtree, c.id_alloc.map, c.id_alloc.next_id) } +/// Convert the syntax node to a `TokenTree` (what macro +/// will consume). +pub fn syntax_node_to_token_map(node: &SyntaxNode) -> TokenMap { + syntax_node_to_token_map_with_modifications( + node, + Default::default(), + 0, + Default::default(), + Default::default(), + ) + .0 +} + +/// Convert the syntax node to a `TokenTree` (what macro will consume) +/// with the censored range excluded. 
+pub fn syntax_node_to_token_map_with_modifications( + node: &SyntaxNode, + existing_token_map: TokenMap, + next_id: u32, + replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>, + append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>, +) -> (TokenMap, u32) { + let global_offset = node.text_range().start(); + let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append); + collect_tokens(&mut c); + c.id_alloc.map.shrink_to_fit(); + always!(c.replace.is_empty(), "replace: {:?}", c.replace); + always!(c.append.is_empty(), "append: {:?}", c.append); + (c.id_alloc.map, c.id_alloc.next_id) +} + #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] pub struct SyntheticTokenId(pub u32); @@ -327,6 +358,111 @@ fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree { } } +fn collect_tokens<C: TokenConverter>(conv: &mut C) { + struct StackEntry { + idx: usize, + open_range: TextRange, + delimiter: tt::DelimiterKind, + } + + let entry = StackEntry { + delimiter: tt::DelimiterKind::Invisible, + // never used (delimiter is `None`) + idx: !0, + open_range: TextRange::empty(TextSize::of('.')), + }; + let mut stack = NonEmptyVec::new(entry); + + loop { + let StackEntry { delimiter, .. } = stack.last_mut(); + let (token, range) = match conv.bump() { + Some(it) => it, + None => break, + }; + let synth_id = token.synthetic_id(conv); + + let kind = token.kind(conv); + if kind == COMMENT { + // Since `convert_doc_comment` can fail, we need to peek the next id, so that we can + // figure out which token id to use for the doc comment, if it is converted successfully. + let next_id = conv.id_alloc().peek_next_id(); + if let Some(_tokens) = conv.convert_doc_comment(&token, next_id) { + let id = conv.id_alloc().alloc(range, synth_id); + debug_assert_eq!(id, next_id); + } + continue; + } + if kind.is_punct() && kind != UNDERSCORE { + if synth_id.is_none() { + assert_eq!(range.len(), TextSize::of('.')); + } + + let expected = match delimiter { + tt::DelimiterKind::Parenthesis => Some(T![')']), + tt::DelimiterKind::Brace => Some(T!['}']), + tt::DelimiterKind::Bracket => Some(T![']']), + tt::DelimiterKind::Invisible => None, + }; + + if let Some(expected) = expected { + if kind == expected { + if let Some(entry) = stack.pop() { + conv.id_alloc().close_delim(entry.idx, Some(range)); + } + continue; + } + } + + let delim = match kind { + T!['('] => Some(tt::DelimiterKind::Parenthesis), + T!['{'] => Some(tt::DelimiterKind::Brace), + T!['['] => Some(tt::DelimiterKind::Bracket), + _ => None, + }; + + if let Some(kind) = delim { + let (_id, idx) = conv.id_alloc().open_delim(range, synth_id); + + stack.push(StackEntry { idx, open_range: range, delimiter: kind }); + continue; + } + + conv.id_alloc().alloc(range, synth_id); + } else { + macro_rules! make_leaf { + ($i:ident) => {{ + conv.id_alloc().alloc(range, synth_id); + }}; + } + match kind { + T![true] | T![false] => make_leaf!(Ident), + IDENT => make_leaf!(Ident), + UNDERSCORE => make_leaf!(Ident), + k if k.is_keyword() => make_leaf!(Ident), + k if k.is_literal() => make_leaf!(Literal), + LIFETIME_IDENT => { + let char_unit = TextSize::of('\''); + let r = TextRange::at(range.start(), char_unit); + conv.id_alloc().alloc(r, synth_id); + + let r = TextRange::at(range.start() + char_unit, range.len() - char_unit); + conv.id_alloc().alloc(r, synth_id); + continue; + } + _ => continue, + }; + }; + + // If we get here, we've consumed all input tokens. 
+ // We might have more than one subtree in the stack, if the delimiters are improperly balanced. + // Merge them so we're left with one. + while let Some(entry) = stack.pop() { + conv.id_alloc().close_delim(entry.idx, None); + conv.id_alloc().alloc(entry.open_range, None); + } + } +} + fn is_single_token_op(kind: SyntaxKind) -> bool { matches!( kind, @@ -509,12 +645,12 @@ trait TokenConverter: Sized { fn id_alloc(&mut self) -> &mut TokenIdAlloc; } -impl<'a> SrcToken<RawConverter<'a>> for usize { - fn kind(&self, ctx: &RawConverter<'a>) -> SyntaxKind { +impl SrcToken<RawConverter<'_>> for usize { + fn kind(&self, ctx: &RawConverter<'_>) -> SyntaxKind { ctx.lexed.kind(*self) } - fn to_char(&self, ctx: &RawConverter<'a>) -> Option<char> { + fn to_char(&self, ctx: &RawConverter<'_>) -> Option<char> { ctx.lexed.text(*self).chars().next() } @@ -522,12 +658,12 @@ impl<'a> SrcToken<RawConverter<'a>> for usize { ctx.lexed.text(*self).into() } - fn synthetic_id(&self, _ctx: &RawConverter<'a>) -> Option<SyntheticTokenId> { + fn synthetic_id(&self, _ctx: &RawConverter<'_>) -> Option<SyntheticTokenId> { None } } -impl<'a> TokenConverter for RawConverter<'a> { +impl TokenConverter for RawConverter<'_> { type Token = usize; fn convert_doc_comment(&self, &token: &usize, span: tt::TokenId) -> Option<Vec<tt::TokenTree>> { @@ -800,7 +936,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> Option<&'static str> { Some(&texts[idx..texts.len() - (1 - idx)]) } -impl<'a> TtTreeSink<'a> { +impl TtTreeSink<'_> { /// Parses a float literal as if it was a one to two name ref nodes with a dot inbetween. /// This occurs when a float literal is used as a field access. fn float_split(&mut self, has_pseudo_dot: bool) { diff --git a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs index c923e7a69a1..9b2df89f9c7 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/token_map.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/token_map.rs @@ -110,4 +110,11 @@ impl TokenMap { // FIXME: This could be accidentally quadratic self.entries.remove(idx); } + + pub fn entries(&self) -> impl Iterator<Item = (tt::TokenId, TextRange)> + '_ { + self.entries.iter().filter_map(|&(tid, tr)| match tr { + TokenTextRange::Token(range) => Some((tid, range)), + TokenTextRange::Delimiter(_) => None, + }) + } } diff --git a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs index 59dbf156800..79ff8ca28e8 100644 --- a/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs +++ b/src/tools/rust-analyzer/crates/mbe/src/tt_iter.rs @@ -197,4 +197,4 @@ impl<'a> Iterator for TtIter<'a> { } } -impl<'a> std::iter::ExactSizeIterator for TtIter<'a> {} +impl std::iter::ExactSizeIterator for TtIter<'_> {} diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs index e589b69934d..211af98e6ef 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_args.rs @@ -32,6 +32,9 @@ const GENERIC_ARG_FIRST: TokenSet = TokenSet::new(&[ ]) .union(types::TYPE_FIRST); +// Despite its name, it can also be used for generic param list. 
+const GENERIC_ARG_RECOVERY_SET: TokenSet = TokenSet::new(&[T![>], T![,]]); + // test generic_arg // type T = S<i32>; fn generic_arg(p: &mut Parser<'_>) -> bool { @@ -55,6 +58,15 @@ fn generic_arg(p: &mut Parser<'_>) -> bool { // test assoc_type_eq // type T = StreamingIterator<Item<'a> = &'a T>; types::type_(p); + } else if p.at_ts(GENERIC_ARG_RECOVERY_SET) { + // Although `const_arg()` recovers as expected, we want to + // handle those here to give the following message because + // we don't know whether this associated item is a type or + // const at this point. + + // test_err recover_from_missing_assoc_item_binding + // fn f() -> impl Iterator<Item = , Item = > {} + p.error("missing associated item binding"); } else { // test assoc_const_eq // fn foo<F: Foo<N=3>>() {} @@ -141,12 +153,17 @@ pub(super) fn const_arg_expr(p: &mut Parser<'_>) { expressions::literal(p); lm.complete(p, PREFIX_EXPR); } - _ => { + _ if paths::is_use_path_start(p) => { // This shouldn't be hit by `const_arg` let lm = p.start(); paths::use_path(p); lm.complete(p, PATH_EXPR); } + _ => { + // test_err recover_from_missing_const_default + // struct A<const N: i32 = , const M: i32 =>; + p.err_recover("expected a generic const argument", GENERIC_ARG_RECOVERY_SET); + } } } diff --git a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs index 7fcf938babd..8ed1c84c4c6 100644 --- a/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs +++ b/src/tools/rust-analyzer/crates/parser/src/grammar/generic_params.rs @@ -79,10 +79,9 @@ fn const_param(p: &mut Parser<'_>, m: Marker) { p.error("missing type for const parameter"); } - if p.at(T![=]) { + if p.eat(T![=]) { // test const_param_default_literal // struct A<const N: i32 = -1>; - p.bump(T![=]); // test const_param_default_expression // struct A<const N: i32 = { 1 }>; diff --git a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs index 5cdb39700dd..6e3ae656b02 100644 --- a/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs +++ b/src/tools/rust-analyzer/crates/parser/src/shortcuts.rs @@ -24,7 +24,7 @@ pub enum StrStep<'a> { Error { msg: &'a str, pos: usize }, } -impl<'a> LexedStr<'a> { +impl LexedStr<'_> { pub fn to_input(&self) -> crate::Input { let mut res = crate::Input::default(); let mut was_joint = false; diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast new file mode 100644 index 00000000000..fc59db84e77 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rast @@ -0,0 +1,48 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "f" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + RET_TYPE + THIN_ARROW "->" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Iterator" + GENERIC_ARG_LIST + L_ANGLE "<" + ASSOC_TYPE_ARG + NAME_REF + IDENT "Item" + WHITESPACE " " + EQ "=" + WHITESPACE " " + COMMA "," + WHITESPACE " " + ASSOC_TYPE_ARG + NAME_REF + IDENT "Item" + WHITESPACE " " + EQ "=" + WHITESPACE " " + R_ANGLE ">" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" +error 30: 
missing associated item binding +error 39: missing associated item binding diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs new file mode 100644 index 00000000000..e484e433a09 --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0021_recover_from_missing_assoc_item_binding.rs @@ -0,0 +1 @@ +fn f() -> impl Iterator<Item = , Item = > {} diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast new file mode 100644 index 00000000000..809ad1b8d5b --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rast @@ -0,0 +1,44 @@ +SOURCE_FILE + STRUCT + STRUCT_KW "struct" + WHITESPACE " " + NAME + IDENT "A" + GENERIC_PARAM_LIST + L_ANGLE "<" + CONST_PARAM + CONST_KW "const" + WHITESPACE " " + NAME + IDENT "N" + COLON ":" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + WHITESPACE " " + EQ "=" + WHITESPACE " " + COMMA "," + WHITESPACE " " + CONST_PARAM + CONST_KW "const" + WHITESPACE " " + NAME + IDENT "M" + COLON ":" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + WHITESPACE " " + EQ "=" + R_ANGLE ">" + SEMICOLON ";" + WHITESPACE "\n" +error 23: expected a generic const argument +error 40: expected a generic const argument diff --git a/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs new file mode 100644 index 00000000000..5bab13da92b --- /dev/null +++ b/src/tools/rust-analyzer/crates/parser/test_data/parser/inline/err/0022_recover_from_missing_const_default.rs @@ -0,0 +1 @@ +struct A<const N: i32 = , const M: i32 =>; diff --git a/src/tools/rust-analyzer/crates/paths/src/lib.rs b/src/tools/rust-analyzer/crates/paths/src/lib.rs index e0c20a4143b..88b8d0aee3a 100644 --- a/src/tools/rust-analyzer/crates/paths/src/lib.rs +++ b/src/tools/rust-analyzer/crates/paths/src/lib.rs @@ -6,7 +6,7 @@ use std::{ borrow::Borrow, ffi::OsStr, - ops, + fmt, ops, path::{Component, Path, PathBuf}, }; @@ -95,6 +95,12 @@ impl AbsPathBuf { } } +impl fmt::Display for AbsPathBuf { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.0.display(), f) + } +} + /// Wrapper around an absolute [`Path`]. #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] #[repr(transparent)] @@ -217,6 +223,7 @@ impl AbsPath { pub fn as_os_str(&self) -> &OsStr { self.0.as_os_str() } + #[deprecated(note = "use Display instead")] pub fn display(&self) -> std::path::Display<'_> { self.0.display() } @@ -227,6 +234,12 @@ impl AbsPath { // endregion:delegate-methods } +impl fmt::Display for AbsPath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.0.display(), f) + } +} + /// Wrapper around a relative [`PathBuf`]. 
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] pub struct RelPathBuf(PathBuf); diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml index d3486e75575..4e39167136b 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-api/Cargo.toml @@ -12,7 +12,7 @@ rust-version.workspace = true doctest = false [dependencies] -object = { version = "0.30.2", default-features = false, features = [ +object = { version = "0.31.0", default-features = false, features = [ "std", "read_core", "elf", diff --git a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs index 13f67a0128a..48efbf589c6 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs +++ b/src/tools/rust-analyzer/crates/proc-macro-api/src/version.rs @@ -135,7 +135,12 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> { } }; - let mut snappy_decoder = SnapDecoder::new(snappy_portion); + let mut uncompressed: Box<dyn Read> = if &snappy_portion[0..4] == b"rust" { + // Not compressed. + Box::new(snappy_portion) + } else { + Box::new(SnapDecoder::new(snappy_portion)) + }; // the bytes before version string bytes, so this basically is: // 8 bytes for [b'r',b'u',b's',b't',0,0,0,5] @@ -144,11 +149,11 @@ pub fn read_version(dylib_path: &AbsPath) -> io::Result<String> { // so 13 bytes in total, and we should check the 13th byte // to know the length let mut bytes_before_version = [0u8; 13]; - snappy_decoder.read_exact(&mut bytes_before_version)?; + uncompressed.read_exact(&mut bytes_before_version)?; let length = bytes_before_version[12]; let mut version_string_utf8 = vec![0u8; length as usize]; - snappy_decoder.read_exact(&mut version_string_utf8)?; + uncompressed.read_exact(&mut version_string_utf8)?; let version_string = String::from_utf8(version_string_utf8); version_string.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) } diff --git a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml index d5eb157bfef..ecc6aaa0ac7 100644 --- a/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml +++ b/src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml @@ -12,14 +12,14 @@ rust-version.workspace = true doctest = false [dependencies] -object = { version = "0.30.2", default-features = false, features = [ +object = { version = "0.31.0", default-features = false, features = [ "std", "read_core", "elf", "macho", "pe", ] } -libloading = "0.7.3" +libloading = "0.8.0" memmap2 = "0.5.4" stdx.workspace = true diff --git a/src/tools/rust-analyzer/crates/profile/Cargo.toml b/src/tools/rust-analyzer/crates/profile/Cargo.toml index 602e7427510..937834a82ae 100644 --- a/src/tools/rust-analyzer/crates/profile/Cargo.toml +++ b/src/tools/rust-analyzer/crates/profile/Cargo.toml @@ -15,7 +15,7 @@ doctest = false once_cell = "1.17.0" cfg-if = "1.0.0" libc = "0.2.135" -la-arena = { version = "0.3.0", path = "../../lib/la-arena" } +la-arena.workspace = true countme = { version = "3.0.1", features = ["enable"] } jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true } diff --git a/src/tools/rust-analyzer/crates/profile/src/tree.rs b/src/tools/rust-analyzer/crates/profile/src/tree.rs index 62f0c30b529..1290fba36fa 100644 --- a/src/tools/rust-analyzer/crates/profile/src/tree.rs +++ 
b/src/tools/rust-analyzer/crates/profile/src/tree.rs @@ -72,7 +72,7 @@ struct NodeIter<'a, T> { next: Option<Idx<T>>, } -impl<'a, T> Iterator for NodeIter<'a, T> { +impl<T> Iterator for NodeIter<'_, T> { type Item = Idx<T>; fn next(&mut self) -> Option<Idx<T>> { diff --git a/src/tools/rust-analyzer/crates/project-model/Cargo.toml b/src/tools/rust-analyzer/crates/project-model/Cargo.toml index 3abff64a83b..75977fc5b04 100644 --- a/src/tools/rust-analyzer/crates/project-model/Cargo.toml +++ b/src/tools/rust-analyzer/crates/project-model/Cargo.toml @@ -20,7 +20,7 @@ serde_json.workspace = true serde.workspace = true triomphe.workspace = true anyhow = "1.0.62" -la-arena = { version = "0.3.0", path = "../../lib/la-arena" } +la-arena.workspace = true itertools = "0.10.5" # local deps diff --git a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs index 6cbf403cb2e..fb0f3ab7d17 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/build_scripts.rs @@ -225,9 +225,8 @@ impl WorkspaceBuildScripts { let package_build_data = &mut res[idx].outputs[package]; if !package_build_data.is_unchanged() { tracing::info!( - "{}: {:?}", - workspace[package].manifest.parent().display(), - package_build_data, + "{}: {package_build_data:?}", + workspace[package].manifest.parent(), ); } } @@ -270,9 +269,8 @@ impl WorkspaceBuildScripts { let package_build_data = &outputs[package]; if !package_build_data.is_unchanged() { tracing::info!( - "{}: {:?}", - workspace[package].manifest.parent().display(), - package_build_data, + "{}: {package_build_data:?}", + workspace[package].manifest.parent(), ); } } @@ -424,7 +422,7 @@ impl WorkspaceBuildScripts { let target_libdir = AbsPathBuf::try_from(PathBuf::from(target_libdir)) .map_err(|_| anyhow::format_err!("target-libdir was not an absolute path"))?; - tracing::info!("Loading rustc proc-macro paths from {}", target_libdir.display()); + tracing::info!("Loading rustc proc-macro paths from {target_libdir}"); let proc_macro_dylibs: Vec<(String, AbsPathBuf)> = std::fs::read_dir(target_libdir)? 
.filter_map(|entry| { @@ -458,9 +456,8 @@ impl WorkspaceBuildScripts { let package_build_data = &bs.outputs[package]; if !package_build_data.is_unchanged() { tracing::info!( - "{}: {:?}", - rustc[package].manifest.parent().display(), - package_build_data, + "{}: {package_build_data:?}", + rustc[package].manifest.parent(), ); } } diff --git a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs index 92b454150c3..e1117ac464b 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/cargo_workspace.rs @@ -4,7 +4,7 @@ use std::path::PathBuf; use std::str::from_utf8; use std::{ops, process::Command}; -use anyhow::{Context, Result}; +use anyhow::Context; use base_db::Edition; use cargo_metadata::{CargoOpt, MetadataCommand}; use la_arena::{Arena, Idx}; @@ -236,7 +236,7 @@ impl CargoWorkspace { current_dir: &AbsPath, config: &CargoConfig, progress: &dyn Fn(String), - ) -> Result<cargo_metadata::Metadata> { + ) -> anyhow::Result<cargo_metadata::Metadata> { let targets = find_list_of_build_targets(config, cargo_toml); let mut meta = MetadataCommand::new(); diff --git a/src/tools/rust-analyzer/crates/project-model/src/lib.rs b/src/tools/rust-analyzer/crates/project-model/src/lib.rs index 61acc646f81..901dcfd2b11 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/lib.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/lib.rs @@ -31,12 +31,13 @@ pub mod target_data_layout; mod tests; use std::{ + fmt, fs::{self, read_dir, ReadDir}, io, process::Command, }; -use anyhow::{bail, format_err, Context, Result}; +use anyhow::{bail, format_err, Context}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashSet; @@ -59,19 +60,19 @@ pub enum ProjectManifest { } impl ProjectManifest { - pub fn from_manifest_file(path: AbsPathBuf) -> Result<ProjectManifest> { + pub fn from_manifest_file(path: AbsPathBuf) -> anyhow::Result<ProjectManifest> { let path = ManifestPath::try_from(path) - .map_err(|path| format_err!("bad manifest path: {}", path.display()))?; + .map_err(|path| format_err!("bad manifest path: {path}"))?; if path.file_name().unwrap_or_default() == "rust-project.json" { return Ok(ProjectManifest::ProjectJson(path)); } if path.file_name().unwrap_or_default() == "Cargo.toml" { return Ok(ProjectManifest::CargoToml(path)); } - bail!("project root must point to Cargo.toml or rust-project.json: {}", path.display()); + bail!("project root must point to Cargo.toml or rust-project.json: {path}"); } - pub fn discover_single(path: &AbsPath) -> Result<ProjectManifest> { + pub fn discover_single(path: &AbsPath) -> anyhow::Result<ProjectManifest> { let mut candidates = ProjectManifest::discover(path)?; let res = match candidates.pop() { None => bail!("no projects"), @@ -145,7 +146,17 @@ impl ProjectManifest { } } -fn utf8_stdout(mut cmd: Command) -> Result<String> { +impl fmt::Display for ProjectManifest { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ProjectManifest::ProjectJson(it) | ProjectManifest::CargoToml(it) => { + fmt::Display::fmt(&it, f) + } + } + } +} + +fn utf8_stdout(mut cmd: Command) -> anyhow::Result<String> { let output = cmd.output().with_context(|| format!("{cmd:?} failed"))?; if !output.status.success() { match String::from_utf8(output.stderr) { diff --git a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs 
index 3f60e4dd92f..490e1a4ea88 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/manifest_path.rs @@ -1,5 +1,5 @@ //! See [`ManifestPath`]. -use std::{ops, path::Path}; +use std::{fmt, ops, path::Path}; use paths::{AbsPath, AbsPathBuf}; @@ -40,6 +40,12 @@ impl ManifestPath { } } +impl fmt::Display for ManifestPath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(&self.file, f) + } +} + impl ops::Deref for ManifestPath { type Target = AbsPath; diff --git a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs index 0066f6717ef..8392718b227 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/rustc_cfg.rs @@ -2,7 +2,6 @@ use std::process::Command; -use anyhow::Result; use rustc_hash::FxHashMap; use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath}; @@ -23,6 +22,9 @@ pub(crate) fn get( } } + // Add miri cfg, which is useful for mir eval in stdlib + res.push(CfgFlag::Atom("miri".into())); + match get_rust_cfgs(cargo_toml, target, extra_env) { Ok(rustc_cfgs) => { tracing::debug!( @@ -44,7 +46,7 @@ fn get_rust_cfgs( cargo_toml: Option<&ManifestPath>, target: Option<&str>, extra_env: &FxHashMap<String, String>, -) -> Result<String> { +) -> anyhow::Result<String> { if let Some(cargo_toml) = cargo_toml { let mut cargo_config = Command::new(toolchain::cargo()); cargo_config.envs(extra_env); diff --git a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs index e3a2de927c9..da862c9e87f 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/sysroot.rs @@ -85,9 +85,8 @@ impl Sysroot { " try running `rustup component add rust-src` to possible fix this" }; Some(format!( - "could not find libcore in loaded sysroot at `{}`{}", - self.src_root.as_path().display(), - var_note, + "could not find libcore in loaded sysroot at `{}`{var_note}", + self.src_root.as_path(), )) } else { None @@ -99,7 +98,7 @@ impl Sysroot { impl Sysroot { /// Attempts to discover the toolchain's sysroot from the given `dir`. 
pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> { - tracing::debug!("discovering sysroot for {}", dir.display()); + tracing::debug!("discovering sysroot for {dir}"); let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; let sysroot_src_dir = discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?; @@ -111,7 +110,7 @@ impl Sysroot { extra_env: &FxHashMap<String, String>, src: AbsPathBuf, ) -> Result<Sysroot> { - tracing::debug!("discovering sysroot for {}", current_dir.display()); + tracing::debug!("discovering sysroot for {current_dir}"); let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?; Ok(Sysroot::load(sysroot_dir, src)) } @@ -122,7 +121,7 @@ impl Sysroot { pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> { let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| { - format_err!("can't load standard library from sysroot path {}", sysroot_dir.display()) + format_err!("can't load standard library from sysroot path {sysroot_dir}") })?; Ok(Sysroot::load(sysroot_dir, sysroot_src_dir)) } @@ -220,10 +219,10 @@ fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> { if let Ok(path) = AbsPathBuf::try_from(path.as_str()) { let core = path.join("core"); if fs::metadata(&core).is_ok() { - tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display()); + tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {path}"); return Some(path); } - tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core); + tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {core:?}), ignoring"); } else { tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring"); } @@ -250,10 +249,9 @@ fn discover_sysroot_src_dir_or_add_component( format_err!( "\ can't load standard library from sysroot -{} +{sysroot_path} (discovered via `rustc --print sysroot`) try installing the Rust source the same way you installed rustc", - sysroot_path.display(), ) }) } @@ -261,7 +259,7 @@ try installing the Rust source the same way you installed rustc", fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> { let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml"); let rustc_src = ManifestPath::try_from(rustc_src).ok()?; - tracing::debug!("checking for rustc source code: {}", rustc_src.display()); + tracing::debug!("checking for rustc source code: {rustc_src}"); if fs::metadata(&rustc_src).is_ok() { Some(rustc_src) } else { @@ -271,7 +269,7 @@ fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> { fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> { let rust_src = sysroot_path.join("lib/rustlib/src/rust/library"); - tracing::debug!("checking sysroot library: {}", rust_src.display()); + tracing::debug!("checking sysroot library: {rust_src}"); if fs::metadata(&rust_src).is_ok() { Some(rust_src) } else { diff --git a/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs b/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs index 30ca7b348e8..cb995857ec7 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/target_data_layout.rs @@ -1,7 +1,6 @@ //! Runs `rustc --print target-spec-json` to get the target_data_layout. 
use std::process::Command; -use anyhow::Result; use rustc_hash::FxHashMap; use crate::{utf8_stdout, ManifestPath}; @@ -10,7 +9,7 @@ pub fn get( cargo_toml: Option<&ManifestPath>, target: Option<&str>, extra_env: &FxHashMap<String, String>, -) -> Result<String> { +) -> anyhow::Result<String> { let output = (|| { if let Some(cargo_toml) = cargo_toml { let mut cmd = Command::new(toolchain::rustc()); diff --git a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs index b5fe237fc4c..f51ea7eeb22 100644 --- a/src/tools/rust-analyzer/crates/project-model/src/workspace.rs +++ b/src/tools/rust-analyzer/crates/project-model/src/workspace.rs @@ -4,7 +4,7 @@ use std::{collections::VecDeque, fmt, fs, process::Command, sync}; -use anyhow::{format_err, Context, Result}; +use anyhow::{format_err, Context}; use base_db::{ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, ReleaseChannel, TargetLayoutLoadResult, @@ -151,7 +151,16 @@ impl ProjectWorkspace { manifest: ProjectManifest, config: &CargoConfig, progress: &dyn Fn(String), - ) -> Result<ProjectWorkspace> { + ) -> anyhow::Result<ProjectWorkspace> { + ProjectWorkspace::load_inner(&manifest, config, progress) + .with_context(|| format!("Failed to load the project at {manifest}")) + } + + fn load_inner( + manifest: &ProjectManifest, + config: &CargoConfig, + progress: &dyn Fn(String), + ) -> anyhow::Result<ProjectWorkspace> { let version = |current_dir, cmd_path, prefix: &str| { let cargo_version = utf8_stdout({ let mut cmd = Command::new(cmd_path); @@ -167,12 +176,10 @@ impl ProjectWorkspace { }; let res = match manifest { ProjectManifest::ProjectJson(project_json) => { - let file = fs::read_to_string(&project_json).with_context(|| { - format!("Failed to read json file {}", project_json.display()) - })?; - let data = serde_json::from_str(&file).with_context(|| { - format!("Failed to deserialize json file {}", project_json.display()) - })?; + let file = fs::read_to_string(&project_json) + .with_context(|| format!("Failed to read json file {project_json}"))?; + let data = serde_json::from_str(&file) + .with_context(|| format!("Failed to deserialize json file {project_json}"))?; let project_location = project_json.parent().to_path_buf(); let toolchain = version(&*project_location, toolchain::rustc(), "rustc ")?; let project_json = ProjectJson::new(&project_location, data); @@ -193,9 +200,7 @@ impl ProjectWorkspace { ) .with_context(|| { format!( - "Failed to read Cargo metadata from Cargo.toml file {}, {:?}", - cargo_toml.display(), - toolchain + "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}", ) })?; let cargo = CargoWorkspace::new(meta); @@ -203,12 +208,12 @@ impl ProjectWorkspace { let sysroot = match (&config.sysroot, &config.sysroot_src) { (Some(RustLibSource::Path(path)), None) => { Sysroot::with_sysroot_dir(path.clone()).map_err(|e| { - Some(format!("Failed to find sysroot at {}:{e}", path.display())) + Some(format!("Failed to find sysroot at {path}:{e}")) }) } (Some(RustLibSource::Discover), None) => { Sysroot::discover(cargo_toml.parent(), &config.extra_env).map_err(|e| { - Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display())) + Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? 
{e}")) }) } (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => { @@ -220,21 +225,19 @@ impl ProjectWorkspace { &config.extra_env, sysroot_src.clone(), ).map_err(|e| { - Some(format!("Failed to find sysroot for Cargo.toml file {}. Is rust-src installed? {e}", cargo_toml.display())) + Some(format!("Failed to find sysroot for Cargo.toml file {cargo_toml}. Is rust-src installed? {e}")) }) } (None, _) => Err(None), }; if let Ok(sysroot) = &sysroot { - tracing::info!(workspace = %cargo_toml.display(), src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + tracing::info!(workspace = %cargo_toml, src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); } let rustc_dir = match &config.rustc_source { Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) - .map_err(|p| { - Some(format!("rustc source path is not absolute: {}", p.display())) - }), + .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), Some(RustLibSource::Discover) => { sysroot.as_ref().ok().and_then(Sysroot::discover_rustc).ok_or_else(|| { Some(format!("Failed to discover rustc source for sysroot.")) @@ -244,7 +247,7 @@ impl ProjectWorkspace { }; let rustc = rustc_dir.and_then(|rustc_dir| { - tracing::info!(workspace = %cargo_toml.display(), rustc_dir = %rustc_dir.display(), "Using rustc source"); + tracing::info!(workspace = %cargo_toml, rustc_dir = %rustc_dir, "Using rustc source"); match CargoWorkspace::fetch_metadata( &rustc_dir, cargo_toml.parent(), @@ -266,13 +269,11 @@ impl ProjectWorkspace { Err(e) => { tracing::error!( %e, - "Failed to read Cargo metadata from rustc source at {}", - rustc_dir.display() + "Failed to read Cargo metadata from rustc source at {rustc_dir}", ); Err(Some(format!( - "Failed to read Cargo metadata from rustc source at {}: {e}", - rustc_dir.display()) - )) + "Failed to read Cargo metadata from rustc source at {rustc_dir}: {e}" + ))) } } }); @@ -330,7 +331,7 @@ impl ProjectWorkspace { (None, None) => Err(None), }; if let Ok(sysroot) = &sysroot { - tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); } let rustc_cfg = rustc_cfg::get(None, target, extra_env); @@ -340,26 +341,23 @@ impl ProjectWorkspace { pub fn load_detached_files( detached_files: Vec<AbsPathBuf>, config: &CargoConfig, - ) -> Result<ProjectWorkspace> { + ) -> anyhow::Result<ProjectWorkspace> { let sysroot = match &config.sysroot { Some(RustLibSource::Path(path)) => Sysroot::with_sysroot_dir(path.clone()) - .map_err(|e| Some(format!("Failed to find sysroot at {}:{e}", path.display()))), + .map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}"))), Some(RustLibSource::Discover) => { let dir = &detached_files .first() .and_then(|it| it.parent()) .ok_or_else(|| format_err!("No detached files to load"))?; Sysroot::discover(dir, &config.extra_env).map_err(|e| { - Some(format!( - "Failed to find sysroot for {}. Is rust-src installed? {e}", - dir.display() - )) + Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? 
{e}")) }) } None => Err(None), }; if let Ok(sysroot) = &sysroot { - tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot"); + tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); } let rustc_cfg = rustc_cfg::get(None, None, &Default::default()); Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) @@ -370,15 +368,12 @@ impl ProjectWorkspace { &self, config: &CargoConfig, progress: &dyn Fn(String), - ) -> Result<WorkspaceBuildScripts> { + ) -> anyhow::Result<WorkspaceBuildScripts> { match self { ProjectWorkspace::Cargo { cargo, toolchain, .. } => { WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain) .with_context(|| { - format!( - "Failed to run build scripts for {}", - &cargo.workspace_root().display() - ) + format!("Failed to run build scripts for {}", cargo.workspace_root()) }) } ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => { @@ -393,7 +388,7 @@ impl ProjectWorkspace { workspaces: &[ProjectWorkspace], config: &CargoConfig, progress: &dyn Fn(String), - ) -> Vec<Result<WorkspaceBuildScripts>> { + ) -> Vec<anyhow::Result<WorkspaceBuildScripts>> { if matches!(config.invocation_strategy, InvocationStrategy::PerWorkspace) || config.run_build_script_command.is_none() { @@ -419,10 +414,7 @@ impl ProjectWorkspace { ProjectWorkspace::Cargo { cargo, .. } => match outputs { Ok(outputs) => Ok(outputs.next().unwrap()), Err(e) => Err(e.clone()).with_context(|| { - format!( - "Failed to run build scripts for {}", - &cargo.workspace_root().display() - ) + format!("Failed to run build scripts for {}", cargo.workspace_root()) }), }, _ => Ok(WorkspaceBuildScripts::default()), @@ -447,7 +439,7 @@ impl ProjectWorkspace { } } - pub fn find_sysroot_proc_macro_srv(&self) -> Result<AbsPathBuf> { + pub fn find_sysroot_proc_macro_srv(&self) -> anyhow::Result<AbsPathBuf> { match self { ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. } | ProjectWorkspace::Json { sysroot: Ok(sysroot), .. } @@ -459,22 +451,22 @@ impl ProjectWorkspace { .map(|segment| sysroot.root().join(segment).join(&standalone_server_name)) .find(|server_path| std::fs::metadata(server_path).is_ok()) .ok_or_else(|| { - anyhow::anyhow!( + anyhow::format_err!( "cannot find proc-macro server in sysroot `{}`", - sysroot.root().display() + sysroot.root() ) }) } ProjectWorkspace::DetachedFiles { .. } => { - Err(anyhow::anyhow!("cannot find proc-macro server, no sysroot was found")) + Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found")) } - ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::anyhow!( + ProjectWorkspace::Cargo { cargo, .. } => Err(anyhow::format_err!( "cannot find proc-macro-srv, the workspace `{}` is missing a sysroot", - cargo.workspace_root().display() + cargo.workspace_root() )), - ProjectWorkspace::Json { project, .. } => Err(anyhow::anyhow!( + ProjectWorkspace::Json { project, .. 
} => Err(anyhow::format_err!( "cannot find proc-macro-srv, the workspace `{}` is missing a sysroot", - project.path().display() + project.path() )), } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml index 5b72d57560b..5bfac7ee45c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml +++ b/src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml @@ -34,10 +34,9 @@ serde.workspace = true rayon = "1.6.1" num_cpus = "1.15.0" mimalloc = { version = "0.1.30", default-features = false, optional = true } -lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" } +lsp-server.workspace = true tracing = "0.1.35" tracing-subscriber = { version = "0.3.16", default-features = false, features = [ - "env-filter", "registry", "fmt", "tracing-log", @@ -48,12 +47,8 @@ triomphe.workspace = true nohash-hasher.workspace = true always-assert = "0.1.2" -# These dependencies are unused, but we pin them to a version here to restrict them for our transitive dependencies -# so that we don't pull in duplicates of their dependencies like windows-sys and syn 1 vs 2 -# these would pull in serde 2 -thiserror = "=1.0.39" -serde_repr = "=0.1.11" -# these would pull in windows-sys 0.45.0 +# These 3 deps are not used by r-a directly, but we list them here to lock in their versions +# in our transitive deps to prevent them from pulling in windows-sys 0.45.0 mio = "=0.8.5" filetime = "=0.2.19" parking_lot_core = "=0.9.6" @@ -67,13 +62,13 @@ ide-db.workspace = true # This should only be used in CLI ide-ssr.workspace = true ide.workspace = true +load-cargo.workspace = true proc-macro-api.workspace = true profile.workspace = true project-model.workspace = true stdx.workspace = true syntax.workspace = true toolchain.workspace = true -tt.workspace = true vfs-notify.workspace = true vfs.workspace = true diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs index 8caadecd850..1f923f6cf8d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/logger.rs @@ -8,10 +8,11 @@ use std::{ sync::Arc, }; -use rust_analyzer::Result; +use anyhow::Context; use tracing::{level_filters::LevelFilter, Event, Subscriber}; use tracing_log::NormalizeEvent; use tracing_subscriber::{ + filter::Targets, fmt::{ format::Writer, writer::BoxMakeWriter, FmtContext, FormatEvent, FormatFields, FormattedFields, MakeWriter, @@ -19,81 +20,62 @@ use tracing_subscriber::{ layer::SubscriberExt, registry::LookupSpan, util::SubscriberInitExt, - EnvFilter, Registry, + Registry, }; use tracing_tree::HierarchicalLayer; -pub(crate) struct Logger { - filter: EnvFilter, - file: Option<File>, +pub(crate) struct LoggerConfig { + pub(crate) log_file: Option<File>, + pub(crate) filter: String, + pub(crate) chalk_filter: Option<String>, } struct MakeWriterStderr; -impl<'a> MakeWriter<'a> for MakeWriterStderr { +impl MakeWriter<'_> for MakeWriterStderr { type Writer = Stderr; - fn make_writer(&'a self) -> Self::Writer { + fn make_writer(&self) -> Self::Writer { io::stderr() } } -impl Logger { - pub(crate) fn new(file: Option<File>, filter: Option<&str>) -> Logger { - let filter = filter.map_or(EnvFilter::default(), EnvFilter::new); - - Logger { filter, file } - } +impl LoggerConfig { + pub(crate) fn init(self) -> anyhow::Result<()> { + let mut filter: Targets = self + .filter + .parse() + .with_context(|| 
format!("invalid log filter: `{}`", self.filter))?; + + let mut chalk_layer = None; + if let Some(chalk_filter) = self.chalk_filter { + let level: LevelFilter = + chalk_filter.parse().with_context(|| "invalid chalk log filter")?; + chalk_layer = Some( + HierarchicalLayer::default() + .with_indent_lines(true) + .with_ansi(false) + .with_indent_amount(2) + .with_writer(io::stderr), + ); + filter = filter + .with_target("chalk_solve", level) + .with_target("chalk_ir", level) + .with_target("chalk_recursive", level); + }; - pub(crate) fn install(self) -> Result<()> { - // The meaning of CHALK_DEBUG I suspected is to tell chalk crates - // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing - // logs. But now we can only have just one filter, which means we have to - // merge chalk filter to our main filter (from RA_LOG env). - // - // The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`. - // As the value should only affect chalk crates, we'd better manually - // specify the target. And for simplicity, CHALK_DEBUG only accept the value - // that specify level. - let chalk_level_dir = std::env::var("CHALK_DEBUG") - .map(|val| { - val.parse::<LevelFilter>().expect( - "invalid CHALK_DEBUG value, expect right log level (like debug or trace)", - ) - }) - .ok(); - - let chalk_layer = HierarchicalLayer::default() - .with_indent_lines(true) - .with_ansi(false) - .with_indent_amount(2) - .with_writer(io::stderr); - - let writer = match self.file { + let writer = match self.log_file { Some(file) => BoxMakeWriter::new(Arc::new(file)), None => BoxMakeWriter::new(io::stderr), }; let ra_fmt_layer = tracing_subscriber::fmt::layer().event_format(LoggerFormatter).with_writer(writer); - match chalk_level_dir { - Some(val) => { - Registry::default() - .with( - self.filter - .add_directive(format!("chalk_solve={val}").parse()?) - .add_directive(format!("chalk_ir={val}").parse()?) 
- .add_directive(format!("chalk_recursive={val}").parse()?), - ) - .with(ra_fmt_layer) - .with(chalk_layer) - .init(); - } - None => { - Registry::default().with(self.filter).with(ra_fmt_layer).init(); - } - }; - + let registry = Registry::default().with(filter).with(ra_fmt_layer); + match chalk_layer { + Some(chalk_layer) => registry.with(chalk_layer).init(), + None => registry.init(), + } Ok(()) } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs index 91911dd1809..2fa14fc7e28 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs @@ -7,14 +7,11 @@ mod logger; mod rustc_wrapper; -use std::{ - env, fs, - path::{Path, PathBuf}, - process, -}; +use std::{env, fs, path::PathBuf, process}; +use anyhow::Context; use lsp_server::Connection; -use rust_analyzer::{cli::flags, config::Config, from_json, Result}; +use rust_analyzer::{cli::flags, config::Config, from_json}; use vfs::AbsPathBuf; #[cfg(all(feature = "mimalloc"))] @@ -25,7 +22,7 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc; #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; -fn main() { +fn main() -> anyhow::Result<()> { if std::env::var("RA_RUSTC_WRAPPER").is_ok() { let mut args = std::env::args_os(); let _me = args.next().unwrap(); @@ -41,14 +38,7 @@ fn main() { } let flags = flags::RustAnalyzer::from_env_or_exit(); - if let Err(err) = try_main(flags) { - tracing::error!("Unexpected error: {}", err); - eprintln!("{err}"); - process::exit(101); - } -} -fn try_main(flags: flags::RustAnalyzer) -> Result<()> { #[cfg(debug_assertions)] if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() { #[allow(unused_mut)] @@ -58,14 +48,8 @@ fn try_main(flags: flags::RustAnalyzer) -> Result<()> { } } - let mut log_file = flags.log_file.as_deref(); - - let env_log_file = env::var("RA_LOG_FILE").ok(); - if let Some(env_log_file) = env_log_file.as_deref() { - log_file = Some(Path::new(env_log_file)); - } + setup_logging(flags.log_file.clone())?; - setup_logging(log_file)?; let verbosity = flags.verbosity(); match flags.subcommand { @@ -98,11 +82,12 @@ fn try_main(flags: flags::RustAnalyzer) -> Result<()> { flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?, flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?, + flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?, } Ok(()) } -fn setup_logging(log_file: Option<&Path>) -> Result<()> { +fn setup_logging(log_file_flag: Option<PathBuf>) -> anyhow::Result<()> { if cfg!(windows) { // This is required so that windows finds our pdb that is placed right beside the exe. // By default it doesn't look at the folder the exe resides in, only in the current working @@ -115,23 +100,42 @@ fn setup_logging(log_file: Option<&Path>) -> Result<()> { } } } + if env::var("RUST_BACKTRACE").is_err() { env::set_var("RUST_BACKTRACE", "short"); } + let log_file = env::var("RA_LOG_FILE").ok().map(PathBuf::from).or(log_file_flag); let log_file = match log_file { Some(path) => { if let Some(parent) = path.parent() { let _ = fs::create_dir_all(parent); } - Some(fs::File::create(path)?) 
+ Some( + fs::File::create(&path) + .with_context(|| format!("can't create log file at {}", path.display()))?, + ) } None => None, }; - let filter = env::var("RA_LOG").ok(); - // deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually useful - // information in there for debugging - logger::Logger::new(log_file, filter.as_deref().or(Some("error"))).install()?; + + logger::LoggerConfig { + log_file, + // Deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually + // useful information in there for debugging. + filter: env::var("RA_LOG").ok().unwrap_or_else(|| "error".to_string()), + // The meaning of CHALK_DEBUG I suspected is to tell chalk crates + // (i.e. chalk-solve, chalk-ir, chalk-recursive) how to filter tracing + // logs. But now we can only have just one filter, which means we have to + // merge chalk filter to our main filter (from RA_LOG env). + // + // The acceptable syntax of CHALK_DEBUG is `target[span{field=value}]=level`. + // As the value should only affect chalk crates, we'd better manually + // specify the target. And for simplicity, CHALK_DEBUG only accept the value + // that specify level. + chalk_filter: env::var("CHALK_DEBUG").ok(), + } + .init()?; profile::init(); @@ -146,8 +150,8 @@ const STACK_SIZE: usize = 1024 * 1024 * 8; fn with_extra_thread( thread_name: impl Into<String>, thread_intent: stdx::thread::ThreadIntent, - f: impl FnOnce() -> Result<()> + Send + 'static, -) -> Result<()> { + f: impl FnOnce() -> anyhow::Result<()> + Send + 'static, +) -> anyhow::Result<()> { let handle = stdx::thread::Builder::new(thread_intent) .name(thread_name.into()) .stack_size(STACK_SIZE) @@ -158,7 +162,7 @@ fn with_extra_thread( Ok(()) } -fn run_server() -> Result<()> { +fn run_server() -> anyhow::Result<()> { tracing::info!("server version {} will start", rust_analyzer::version()); let (connection, io_threads) = Connection::stdio(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs index e3520192110..64646b33ad4 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs @@ -1,7 +1,6 @@ //! Various batch processing tasks, intended primarily for debugging. 
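The logger rewrite above swaps tracing-subscriber's EnvFilter for a `Targets` filter parsed from RA_LOG and only adds the hierarchical chalk layer when CHALK_DEBUG is set. A rough standalone sketch of that shape, assuming the tracing / tracing-subscriber 0.3 and anyhow APIs already used in this diff; the function and its arguments are illustrative, not the actual LoggerConfig:

use anyhow::Context;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::{filter::Targets, layer::SubscriberExt, util::SubscriberInitExt, Registry};

// Sketch: build a Targets filter from an RA_LOG-style string, widen it for the
// chalk crates when a chalk level is requested, then install the registry.
fn init_logging(filter: &str, chalk_level: Option<LevelFilter>) -> anyhow::Result<()> {
    let mut filter: Targets =
        filter.parse().with_context(|| format!("invalid log filter: `{filter}`"))?;
    if let Some(level) = chalk_level {
        filter = filter
            .with_target("chalk_solve", level)
            .with_target("chalk_ir", level)
            .with_target("chalk_recursive", level);
    }
    Registry::default().with(filter).with(tracing_subscriber::fmt::layer()).init();
    Ok(())
}

fn main() -> anyhow::Result<()> {
    // Enable all `error` logs by default, matching the fallback chosen in main.rs above.
    init_logging("error", None)
}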
pub mod flags; -pub mod load_cargo; mod parse; mod symbols; mod highlight; @@ -10,13 +9,17 @@ mod diagnostics; mod ssr; mod lsif; mod scip; +mod run_tests; mod progress_report; use std::io::Read; use anyhow::Result; +use hir::{Module, Name}; +use hir_ty::db::HirDatabase; use ide::AnalysisHost; +use itertools::Itertools; use vfs::Vfs; #[derive(Clone, Copy)] @@ -36,7 +39,7 @@ impl Verbosity { } } -fn read_stdin() -> Result<String> { +fn read_stdin() -> anyhow::Result<String> { let mut buff = String::new(); std::io::stdin().read_to_string(&mut buff)?; Ok(buff) @@ -71,3 +74,14 @@ fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) { eprintln!("{remaining:>8} Remaining"); } + +fn full_name_of_item(db: &dyn HirDatabase, module: Module, name: Name) -> String { + module + .path_to_root(db) + .into_iter() + .rev() + .filter_map(|it| it.name(db)) + .chain(Some(name)) + .map(|it| it.display(db.upcast()).to_string()) + .join("::") +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs index 4cb917ce290..8d68bf160a2 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -15,7 +15,7 @@ use hir_def::{ hir::{ExprId, PatId}, }; use hir_ty::{Interner, Substitution, TyExt, TypeFlags}; -use ide::{LineCol, RootDatabase}; +use ide::{Analysis, AnnotationConfig, DiagnosticsConfig, InlayHintsConfig, LineCol, RootDatabase}; use ide_db::{ base_db::{ salsa::{self, debug::DebugQueryTable, ParallelDatabase}, @@ -24,20 +24,20 @@ use ide_db::{ LineIndexDatabase, }; use itertools::Itertools; +use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; use oorandom::Rand32; use profile::{Bytes, StopWatch}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use rayon::prelude::*; use rustc_hash::FxHashSet; use syntax::{AstNode, SyntaxNode}; -use vfs::{AbsPathBuf, Vfs, VfsPath}; +use vfs::{AbsPathBuf, FileId, Vfs, VfsPath}; use crate::cli::{ flags::{self, OutputFormat}, - load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}, - print_memory_usage, + full_name_of_item, print_memory_usage, progress_report::ProgressReport, - report_metric, Result, Verbosity, + report_metric, Verbosity, }; /// Need to wrap Snapshot to provide `Clone` impl for `map_with` @@ -49,7 +49,7 @@ impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> { } impl flags::AnalysisStats { - pub fn run(self, verbosity: Verbosity) -> Result<()> { + pub fn run(self, verbosity: Verbosity) -> anyhow::Result<()> { let mut rng = { let seed = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis() as u64; Rand32::new(seed) @@ -95,17 +95,41 @@ impl flags::AnalysisStats { eprintln!(")"); let mut analysis_sw = self.stop_watch(); - let mut num_crates = 0; - let mut visited_modules = FxHashSet::default(); - let mut visit_queue = Vec::new(); let mut krates = Crate::all(db); if self.randomize { shuffle(&mut rng, &mut krates); } + + let mut item_tree_sw = self.stop_watch(); + let mut num_item_trees = 0; + let source_roots = + krates.iter().cloned().map(|krate| db.file_source_root(krate.root_file(db))).unique(); + for source_root_id in source_roots { + let source_root = db.source_root(source_root_id); + if !source_root.is_library || self.with_deps { + for file_id in source_root.iter() { + if let Some(p) = source_root.path_for_file(&file_id) { + if let Some((_, Some("rs"))) = 
p.name_and_extension() { + db.file_item_tree(file_id.into()); + num_item_trees += 1; + } + } + } + } + } + eprintln!(" item trees: {num_item_trees}"); + let item_tree_time = item_tree_sw.elapsed(); + eprintln!("{:<20} {}", "Item Tree Collection:", item_tree_time); + report_metric("item tree time", item_tree_time.time.as_millis() as u64, "ms"); + + let mut crate_def_map_sw = self.stop_watch(); + let mut num_crates = 0; + let mut visited_modules = FxHashSet::default(); + let mut visit_queue = Vec::new(); for krate in krates { let module = krate.root_module(db); - let file_id = module.definition_source(db).file_id; + let file_id = module.definition_source_file_id(db); let file_id = file_id.original_file(db); let source_root = db.file_source_root(file_id); let source_root = db.source_root(source_root); @@ -124,8 +148,10 @@ impl flags::AnalysisStats { let mut bodies = Vec::new(); let mut adts = Vec::new(); let mut consts = Vec::new(); + let mut file_ids = Vec::new(); while let Some(module) = visit_queue.pop() { if visited_modules.insert(module) { + file_ids.extend(module.as_source_file_id(db)); visit_queue.extend(module.children(db)); for decl in module.declarations(db) { @@ -171,7 +197,9 @@ impl flags::AnalysisStats { adts.len(), consts.len(), ); - eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed()); + let crate_def_map_time = crate_def_map_sw.elapsed(); + eprintln!("{:<20} {}", "Item Collection:", crate_def_map_time); + report_metric("crate def map time", crate_def_map_time.time.as_millis() as u64, "ms"); if self.randomize { shuffle(&mut rng, &mut bodies); @@ -197,6 +225,10 @@ impl flags::AnalysisStats { self.run_const_eval(db, &consts, verbosity); } + if self.run_all_ide_things { + self.run_ide_things(host.analysis(), file_ids); + } + let total_span = analysis_sw.elapsed(); eprintln!("{:<20} {total_span}", "Total:"); report_metric("total time", total_span.time.as_millis() as u64, "ms"); @@ -242,21 +274,15 @@ impl flags::AnalysisStats { continue; } all += 1; - let Err(e) - = db.layout_of_adt(hir_def::AdtId::from(a).into(), Substitution::empty(Interner), a.krate(db).into()) - else { - continue + let Err(e) = db.layout_of_adt( + hir_def::AdtId::from(a).into(), + Substitution::empty(Interner), + a.krate(db).into(), + ) else { + continue; }; if verbosity.is_spammy() { - let full_name = a - .module(db) - .path_to_root(db) - .into_iter() - .rev() - .filter_map(|it| it.name(db)) - .chain(Some(a.name(db))) - .map(|it| it.display(db).to_string()) - .join("::"); + let full_name = full_name_of_item(db, a.module(db), a.name(db)); println!("Data layout for {full_name} failed due {e:?}"); } fail += 1; @@ -278,15 +304,8 @@ impl flags::AnalysisStats { continue; }; if verbosity.is_spammy() { - let full_name = c - .module(db) - .path_to_root(db) - .into_iter() - .rev() - .filter_map(|it| it.name(db)) - .chain(c.name(db)) - .map(|it| it.display(db).to_string()) - .join("::"); + let full_name = + full_name_of_item(db, c.module(db), c.name(db).unwrap_or(Name::missing())); println!("Const eval for {full_name} failed due {e:?}"); } fail += 1; @@ -717,6 +736,83 @@ impl flags::AnalysisStats { report_metric("body lowering time", body_lowering_time.time.as_millis() as u64, "ms"); } + fn run_ide_things(&self, analysis: Analysis, mut file_ids: Vec<FileId>) { + file_ids.sort(); + file_ids.dedup(); + let mut sw = self.stop_watch(); + + for &file_id in &file_ids { + _ = analysis.diagnostics( + &DiagnosticsConfig { + enabled: true, + proc_macros_enabled: true, + proc_attr_macros_enabled: true, + 
disable_experimental: false, + disabled: Default::default(), + expr_fill_default: Default::default(), + insert_use: ide_db::imports::insert_use::InsertUseConfig { + granularity: ide_db::imports::insert_use::ImportGranularity::Crate, + enforce_granularity: true, + prefix_kind: hir::PrefixKind::ByCrate, + group: true, + skip_glob_imports: true, + }, + prefer_no_std: Default::default(), + }, + ide::AssistResolveStrategy::All, + file_id, + ); + } + for &file_id in &file_ids { + _ = analysis.inlay_hints( + &InlayHintsConfig { + render_colons: false, + type_hints: true, + discriminant_hints: ide::DiscriminantHints::Always, + parameter_hints: true, + chaining_hints: true, + adjustment_hints: ide::AdjustmentHints::Always, + adjustment_hints_mode: ide::AdjustmentHintsMode::Postfix, + adjustment_hints_hide_outside_unsafe: false, + closure_return_type_hints: ide::ClosureReturnTypeHints::Always, + closure_capture_hints: true, + binding_mode_hints: true, + lifetime_elision_hints: ide::LifetimeElisionHints::Always, + param_names_for_lifetime_elision_hints: true, + hide_named_constructor_hints: false, + hide_closure_initialization_hints: false, + closure_style: hir::ClosureStyle::ImplFn, + max_length: Some(25), + closing_brace_hints_min_lines: Some(20), + }, + file_id, + None, + ); + } + for &file_id in &file_ids { + analysis + .annotations( + &AnnotationConfig { + binary_target: true, + annotate_runnables: true, + annotate_impls: true, + annotate_references: false, + annotate_method_references: false, + annotate_enum_variant_references: false, + location: ide::AnnotationLocation::AboveName, + }, + file_id, + ) + .unwrap() + .into_iter() + .for_each(|annotation| { + _ = analysis.resolve_annotation(annotation); + }); + } + let ide_time = sw.elapsed(); + eprintln!("{:<20} {} ({} files)", "IDE:", ide_time, file_ids.len()); + } + fn stop_watch(&self) -> StopWatch { StopWatch::start().memory(self.memory_usage) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs index 4306d721298..0db5fb4740e 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/diagnostics.rs @@ -7,11 +7,9 @@ use rustc_hash::FxHashSet; use hir::{db::HirDatabase, Crate, Module}; use ide::{AssistResolveStrategy, DiagnosticsConfig, Severity}; use ide_db::base_db::SourceDatabaseExt; +use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; -use crate::cli::{ - flags, - load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}, -}; +use crate::cli::flags; impl flags::Diagnostics { pub fn run(self) -> anyhow::Result<()> { @@ -37,14 +35,14 @@ impl flags::Diagnostics { let mut visited_files = FxHashSet::default(); let work = all_modules(db).into_iter().filter(|module| { - let file_id = module.definition_source(db).file_id.original_file(db); + let file_id = module.definition_source_file_id(db).original_file(db); let source_root = db.file_source_root(file_id); let source_root = db.source_root(source_root); !source_root.is_library }); for module in work { - let file_id = module.definition_source(db).file_id.original_file(db); + let file_id = module.definition_source_file_id(db).original_file(db); if !visited_files.contains(&file_id) { let crate_name = module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs 
b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs index 208a4e6ecd9..13b7f039bb0 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs @@ -12,7 +12,7 @@ xflags::xflags! { /// LSP server for the Rust programming language. /// /// Subcommands and their flags do not provide any stability guarantees and may be removed or - /// changed without notice. Top-level flags that are not are marked as [Unstable] provide + /// changed without notice. Top-level flags that are not marked as [Unstable] provide /// backwards-compatibility and may be relied on. cmd rust-analyzer { /// Verbosity level, can be repeated multiple times. @@ -88,6 +88,16 @@ xflags::xflags! { optional --skip-data-layout /// Skip const evaluation optional --skip-const-eval + /// Runs several IDE features after analysis, including semantics highlighting, diagnostics + /// and annotations. This is useful for benchmarking the memory usage on a project that has + /// been worked on for a bit in a longer running session. + optional --run-all-ide-things + } + + /// Run unit tests of the project using mir interpreter + cmd run-tests { + /// Directory with Cargo.toml. + required path: PathBuf } cmd diagnostics { @@ -103,7 +113,7 @@ xflags::xflags! { } cmd ssr { - /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`) + /// A structured search replace rule (`$a.foo($b) ==>> bar($a, $b)`) repeated rule: SsrRule } @@ -147,6 +157,7 @@ pub enum RustAnalyzerCmd { Symbols(Symbols), Highlight(Highlight), AnalysisStats(AnalysisStats), + RunTests(RunTests), Diagnostics(Diagnostics), Ssr(Ssr), Search(Search), @@ -182,16 +193,22 @@ pub struct AnalysisStats { pub parallel: bool, pub memory_usage: bool, pub source_stats: bool, - pub skip_lowering: bool, - pub skip_inference: bool, - pub skip_mir_stats: bool, - pub skip_data_layout: bool, - pub skip_const_eval: bool, pub only: Option<String>, pub with_deps: bool, pub no_sysroot: bool, pub disable_build_scripts: bool, pub disable_proc_macros: bool, + pub skip_lowering: bool, + pub skip_inference: bool, + pub skip_mir_stats: bool, + pub skip_data_layout: bool, + pub skip_const_eval: bool, + pub run_all_ide_things: bool, +} + +#[derive(Debug)] +pub struct RunTests { + pub path: PathBuf, } #[derive(Debug)] @@ -223,6 +240,7 @@ pub struct Lsif { #[derive(Debug)] pub struct Scip { pub path: PathBuf, + pub output: Option<PathBuf>, } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs deleted file mode 100644 index 4e8f9997167..00000000000 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/load_cargo.rs +++ /dev/null @@ -1,205 +0,0 @@ -//! Loads a Cargo project into a static instance of analysis, without support -//! for incorporating changes. -use std::path::Path; - -use anyhow::{anyhow, Result}; -use crossbeam_channel::{unbounded, Receiver}; -use ide::{AnalysisHost, Change}; -use ide_db::{ - base_db::{CrateGraph, ProcMacros}, - FxHashMap, -}; -use proc_macro_api::ProcMacroServer; -use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace}; -use triomphe::Arc; -use vfs::{loader::Handle, AbsPath, AbsPathBuf}; - -use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig}; - -// Note: Since this type is used by external tools that use rust-analyzer as a library -// what otherwise would be `pub(crate)` has to be `pub` here instead. 
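Because load_cargo.rs moves out of the rust-analyzer CLI (the deletion below) and the other hunks in this diff import it as the standalone `load-cargo` crate, an external tool would now pull these items from that crate directly. A hedged sketch of such a caller, assuming `load-cargo` exposes the same API shown in the deleted file; the project path and config values are purely illustrative:

use std::path::Path;

use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice};
use project_model::CargoConfig;

fn main() -> anyhow::Result<()> {
    let cargo_config = CargoConfig::default();
    let load_config = LoadCargoConfig {
        load_out_dirs_from_check: false,
        with_proc_macro_server: ProcMacroServerChoice::None,
        prefill_caches: false,
    };
    // Load the workspace rooted at ./my-project without starting a proc-macro server.
    let (host, _vfs, _proc_macro) =
        load_workspace_at(Path::new("./my-project"), &cargo_config, &load_config, &|_| {})?;
    let n_crates = hir::Crate::all(host.raw_database()).len();
    eprintln!("loaded {n_crates} crates");
    Ok(())
}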
-pub struct LoadCargoConfig { - pub load_out_dirs_from_check: bool, - pub with_proc_macro_server: ProcMacroServerChoice, - pub prefill_caches: bool, -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub enum ProcMacroServerChoice { - Sysroot, - Explicit(AbsPathBuf), - None, -} - -// Note: Since this function is used by external tools that use rust-analyzer as a library -// what otherwise would be `pub(crate)` has to be `pub` here instead. -pub fn load_workspace_at( - root: &Path, - cargo_config: &CargoConfig, - load_config: &LoadCargoConfig, - progress: &dyn Fn(String), -) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> { - let root = AbsPathBuf::assert(std::env::current_dir()?.join(root)); - let root = ProjectManifest::discover_single(&root)?; - let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?; - - if load_config.load_out_dirs_from_check { - let build_scripts = workspace.run_build_scripts(cargo_config, progress)?; - workspace.set_build_scripts(build_scripts) - } - - load_workspace(workspace, &cargo_config.extra_env, load_config) -} - -// Note: Since this function is used by external tools that use rust-analyzer as a library -// what otherwise would be `pub(crate)` has to be `pub` here instead. -// -// The reason both, `load_workspace_at` and `load_workspace` are `pub` is that some of -// these tools need access to `ProjectWorkspace`, too, which `load_workspace_at` hides. -pub fn load_workspace( - ws: ProjectWorkspace, - extra_env: &FxHashMap<String, String>, - load_config: &LoadCargoConfig, -) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> { - let (sender, receiver) = unbounded(); - let mut vfs = vfs::Vfs::default(); - let mut loader = { - let loader = - vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap())); - Box::new(loader) - }; - - let proc_macro_server = match &load_config.with_proc_macro_server { - ProcMacroServerChoice::Sysroot => ws - .find_sysroot_proc_macro_srv() - .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)), - ProcMacroServerChoice::Explicit(path) => { - ProcMacroServer::spawn(path.clone()).map_err(Into::into) - } - ProcMacroServerChoice::None => Err(anyhow!("proc macro server disabled")), - }; - - let (crate_graph, proc_macros) = ws.to_crate_graph( - &mut |path: &AbsPath| { - let contents = loader.load_sync(path); - let path = vfs::VfsPath::from(path.to_path_buf()); - vfs.set_file_contents(path.clone(), contents); - vfs.file_id(&path) - }, - extra_env, - ); - let proc_macros = { - let proc_macro_server = match &proc_macro_server { - Ok(it) => Ok(it), - Err(e) => Err(e.to_string()), - }; - proc_macros - .into_iter() - .map(|(crate_id, path)| { - ( - crate_id, - path.map_or_else( - |_| Err("proc macro crate is missing dylib".to_owned()), - |(_, path)| { - proc_macro_server.as_ref().map_err(Clone::clone).and_then( - |proc_macro_server| load_proc_macro(proc_macro_server, &path, &[]), - ) - }, - ), - ) - }) - .collect() - }; - - let project_folders = ProjectFolders::new(&[ws], &[]); - loader.set_config(vfs::loader::Config { - load: project_folders.load, - watch: vec![], - version: 0, - }); - - tracing::debug!("crate graph: {:?}", crate_graph); - let host = load_crate_graph( - crate_graph, - proc_macros, - project_folders.source_root_config, - &mut vfs, - &receiver, - ); - - if load_config.prefill_caches { - host.analysis().parallel_prime_caches(1, |_| {})?; - } - Ok((host, vfs, proc_macro_server.ok())) -} - -fn load_crate_graph( - crate_graph: CrateGraph, - proc_macros: ProcMacros, - 
source_root_config: SourceRootConfig, - vfs: &mut vfs::Vfs, - receiver: &Receiver<vfs::loader::Message>, -) -> AnalysisHost { - let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok()); - let mut host = AnalysisHost::new(lru_cap); - let mut analysis_change = Change::new(); - - host.raw_database_mut().enable_proc_attr_macros(); - - // wait until Vfs has loaded all roots - for task in receiver { - match task { - vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => { - if n_done == n_total { - break; - } - } - vfs::loader::Message::Loaded { files } => { - for (path, contents) in files { - vfs.set_file_contents(path.into(), contents); - } - } - } - } - let changes = vfs.take_changes(); - for file in changes { - if file.exists() { - let contents = vfs.file_contents(file.file_id); - if let Ok(text) = std::str::from_utf8(contents) { - analysis_change.change_file(file.file_id, Some(Arc::from(text))) - } - } - } - let source_roots = source_root_config.partition(vfs); - analysis_change.set_roots(source_roots); - - analysis_change.set_crate_graph(crate_graph); - analysis_change.set_proc_macros(proc_macros); - - host.apply_change(analysis_change); - host -} - -#[cfg(test)] -mod tests { - use super::*; - - use hir::Crate; - - #[test] - fn test_loading_rust_analyzer() { - let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); - let cargo_config = CargoConfig::default(); - let load_cargo_config = LoadCargoConfig { - load_out_dirs_from_check: false, - with_proc_macro_server: ProcMacroServerChoice::None, - prefill_caches: false, - }; - let (host, _vfs, _proc_macro) = - load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap(); - - let n_crates = Crate::all(host.raw_database()).len(); - // RA has quite a few crates, but the exact count doesn't matter - assert!(n_crates > 20); - } -} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs index 7f5d0844967..42d180114e5 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/lsif.rs @@ -8,23 +8,22 @@ use ide::{ Analysis, FileId, FileRange, MonikerKind, PackageInformation, RootDatabase, StaticIndex, StaticIndexedFile, TokenId, TokenStaticData, }; -use ide_db::LineIndexDatabase; - -use ide_db::base_db::salsa::{self, ParallelDatabase}; -use ide_db::line_index::WideEncoding; +use ide_db::{ + base_db::salsa::{self, ParallelDatabase}, + line_index::WideEncoding, + LineIndexDatabase, +}; +use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; use lsp_types::{self, lsif}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use vfs::{AbsPathBuf, Vfs}; -use crate::cli::load_cargo::ProcMacroServerChoice; -use crate::cli::{ - flags, - load_cargo::{load_workspace, LoadCargoConfig}, - Result, +use crate::{ + cli::flags, + line_index::{LineEndings, LineIndex, PositionEncoding}, + to_proto, + version::version, }; -use crate::line_index::{LineEndings, LineIndex, PositionEncoding}; -use crate::to_proto; -use crate::version::version; /// Need to wrap Snapshot to provide `Clone` impl for `map_with` struct Snap<DB>(DB); @@ -49,8 +48,8 @@ struct LsifManager<'a> { struct Id(i32); impl From<Id> for lsp_types::NumberOrString { - fn from(Id(x): Id) -> Self { - lsp_types::NumberOrString::Number(x) + fn from(Id(it): Id) -> Self { + lsp_types::NumberOrString::Number(it) } } @@ -89,8 
+88,8 @@ impl LsifManager<'_> { } fn get_token_id(&mut self, id: TokenId) -> Id { - if let Some(x) = self.token_map.get(&id) { - return *x; + if let Some(it) = self.token_map.get(&id) { + return *it; } let result_set_id = self.add_vertex(lsif::Vertex::ResultSet(lsif::ResultSet { key: None })); self.token_map.insert(id, result_set_id); @@ -98,8 +97,8 @@ impl LsifManager<'_> { } fn get_package_id(&mut self, package_information: PackageInformation) -> Id { - if let Some(x) = self.package_map.get(&package_information) { - return *x; + if let Some(it) = self.package_map.get(&package_information) { + return *it; } let pi = package_information.clone(); let result_set_id = @@ -120,8 +119,8 @@ impl LsifManager<'_> { } fn get_range_id(&mut self, id: FileRange) -> Id { - if let Some(x) = self.range_map.get(&id) { - return *x; + if let Some(it) = self.range_map.get(&id) { + return *it; } let file_id = id.file_id; let doc_id = self.get_file_id(file_id); @@ -143,8 +142,8 @@ impl LsifManager<'_> { } fn get_file_id(&mut self, id: FileId) -> Id { - if let Some(x) = self.file_map.get(&id) { - return *x; + if let Some(it) = self.file_map.get(&id) { + return *it; } let path = self.vfs.file_path(id); let path = path.as_path().unwrap(); @@ -217,18 +216,18 @@ impl LsifManager<'_> { })); let mut edges = token.references.iter().fold( HashMap::<_, Vec<lsp_types::NumberOrString>>::new(), - |mut edges, x| { + |mut edges, it| { let entry = - edges.entry((x.range.file_id, x.is_definition)).or_insert_with(Vec::new); - entry.push((*self.range_map.get(&x.range).unwrap()).into()); + edges.entry((it.range.file_id, it.is_definition)).or_insert_with(Vec::new); + entry.push((*self.range_map.get(&it.range).unwrap()).into()); edges }, ); - for x in token.references { - if let Some(vertices) = edges.remove(&(x.range.file_id, x.is_definition)) { + for it in token.references { + if let Some(vertices) = edges.remove(&(it.range.file_id, it.is_definition)) { self.add_edge(lsif::Edge::Item(lsif::Item { - document: (*self.file_map.get(&x.range.file_id).unwrap()).into(), - property: Some(if x.is_definition { + document: (*self.file_map.get(&it.range.file_id).unwrap()).into(), + property: Some(if it.is_definition { lsif::ItemKind::Definitions } else { lsif::ItemKind::References @@ -286,7 +285,7 @@ impl LsifManager<'_> { } impl flags::Lsif { - pub fn run(self) -> Result<()> { + pub fn run(self) -> anyhow::Result<()> { eprintln!("Generating LSIF started..."); let now = Instant::now(); let mut cargo_config = CargoConfig::default(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs new file mode 100644 index 00000000000..b63a266a57a --- /dev/null +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/run_tests.rs @@ -0,0 +1,89 @@ +//! Run all tests in a project, similar to `cargo test`, but using the mir interpreter. 
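Both the new test runner below and the analysis-stats changes above print items by their fully qualified path via the `full_name_of_item` helper added to cli.rs: walk from the item's module to the crate root, reverse the names, and join them with `::`. A toy version of that joining logic over plain strings; the function and data are illustrative only, not hir types:

// Toy stand-in for full_name_of_item: `path_to_root` is ordered innermost
// module first, like hir's Module::path_to_root, so it is reversed before
// joining and the item name is appended at the end.
fn full_name(path_to_root: &[&str], item: &str) -> String {
    path_to_root
        .iter()
        .rev()
        .copied()
        .chain(std::iter::once(item))
        .collect::<Vec<_>>()
        .join("::")
}

fn main() {
    assert_eq!(
        full_name(&["tests", "parser"], "parses_empty_file"),
        "parser::tests::parses_empty_file"
    );
    println!("ok");
}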
+ +use hir::{Crate, Module}; +use hir_ty::db::HirDatabase; +use ide_db::{base_db::SourceDatabaseExt, LineIndexDatabase}; +use profile::StopWatch; +use project_model::{CargoConfig, RustLibSource}; +use syntax::TextRange; + +use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; + +use crate::cli::{flags, full_name_of_item, Result}; + +impl flags::RunTests { + pub fn run(self) -> Result<()> { + let mut cargo_config = CargoConfig::default(); + cargo_config.sysroot = Some(RustLibSource::Discover); + let load_cargo_config = LoadCargoConfig { + load_out_dirs_from_check: true, + with_proc_macro_server: ProcMacroServerChoice::Sysroot, + prefill_caches: false, + }; + let (host, _vfs, _proc_macro) = + load_workspace_at(&self.path, &cargo_config, &load_cargo_config, &|_| {})?; + let db = host.raw_database(); + + let tests = all_modules(db) + .into_iter() + .flat_map(|x| x.declarations(db)) + .filter_map(|x| match x { + hir::ModuleDef::Function(f) => Some(f), + _ => None, + }) + .filter(|x| x.is_test(db)); + let span_formatter = |file_id, text_range: TextRange| { + let line_col = match db.line_index(file_id).try_line_col(text_range.start()) { + None => " (unknown line col)".to_string(), + Some(x) => format!("#{}:{}", x.line + 1, x.col), + }; + let path = &db + .source_root(db.file_source_root(file_id)) + .path_for_file(&file_id) + .map(|x| x.to_string()); + let path = path.as_deref().unwrap_or("<unknown file>"); + format!("file://{path}{line_col}") + }; + let mut pass_count = 0; + let mut ignore_count = 0; + let mut fail_count = 0; + let mut sw_all = StopWatch::start(); + for test in tests { + let full_name = full_name_of_item(db, test.module(db), test.name(db)); + println!("test {}", full_name); + if test.is_ignore(db) { + println!("ignored"); + ignore_count += 1; + continue; + } + let mut sw_one = StopWatch::start(); + let result = test.eval(db, span_formatter); + if result.trim() == "pass" { + pass_count += 1; + } else { + fail_count += 1; + } + println!("{}", result); + eprintln!("{:<20} {}", format!("test {}", full_name), sw_one.elapsed()); + } + println!("{pass_count} passed, {fail_count} failed, {ignore_count} ignored"); + eprintln!("{:<20} {}", "All tests", sw_all.elapsed()); + Ok(()) + } +} + +fn all_modules(db: &dyn HirDatabase) -> Vec<Module> { + let mut worklist: Vec<_> = Crate::all(db) + .into_iter() + .filter(|x| x.origin(db).is_local()) + .map(|krate| krate.root_module(db)) + .collect(); + let mut modules = Vec::new(); + + while let Some(module) = worklist.pop() { + modules.push(module); + worklist.extend(module.children(db)); + } + + modules +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs index b0b724bdfe7..4579aca3021 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs @@ -6,27 +6,23 @@ use std::{ time::Instant, }; -use crate::{ - cli::load_cargo::ProcMacroServerChoice, - line_index::{LineEndings, LineIndex, PositionEncoding}, -}; use ide::{ LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId, TokenStaticData, }; use ide_db::LineIndexDatabase; +use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use scip::types as scip_types; use std::env; -use crate::cli::{ - flags, - load_cargo::{load_workspace, LoadCargoConfig}, - Result, +use crate::{ + 
cli::flags, + line_index::{LineEndings, LineIndex, PositionEncoding}, }; impl flags::Scip { - pub fn run(self) -> Result<()> { + pub fn run(self) -> anyhow::Result<()> { eprintln!("Generating SCIP start..."); let now = Instant::now(); let mut cargo_config = CargoConfig::default(); @@ -65,7 +61,7 @@ impl flags::Scip { path.normalize() .as_os_str() .to_str() - .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))? + .ok_or(anyhow::format_err!("Unable to normalize project_root path"))? ), text_document_encoding: scip_types::TextEncoding::UTF8.into(), special_fields: Default::default(), @@ -168,7 +164,7 @@ impl flags::Scip { let out_path = self.output.unwrap_or_else(|| PathBuf::from(r"index.scip")); scip::write_message_to_file(out_path, index) - .map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?; + .map_err(|err| anyhow::format_err!("Failed to write scip to file: {}", err))?; eprintln!("Generating SCIP finished {:?}", now.elapsed()); Ok(()) @@ -276,7 +272,7 @@ mod test { let change_fixture = ChangeFixture::parse(ra_fixture); host.raw_database_mut().apply_change(change_fixture.change); let (file_id, range_or_offset) = - change_fixture.file_position.expect("expected a marker ($0)"); + change_fixture.file_position.expect("expected a marker ()"); let offset = range_or_offset.expect_offset(); (host, FilePosition { file_id, offset }) } @@ -325,7 +321,7 @@ use foo::example_mod::func; fn main() { func$0(); } -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod example_mod { pub fn func() {} } @@ -338,7 +334,7 @@ pub mod example_mod { fn symbol_for_trait() { check_symbol( r#" -//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git +//- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub trait MyTrait { pub fn func$0() {} @@ -353,7 +349,7 @@ pub mod module { fn symbol_for_trait_constant() { check_symbol( r#" - //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git + //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub trait MyTrait { const MY_CONST$0: u8; @@ -368,7 +364,7 @@ pub mod module { fn symbol_for_trait_type() { check_symbol( r#" - //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git + //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub trait MyTrait { type MyType$0; @@ -384,7 +380,7 @@ pub mod module { fn symbol_for_trait_impl_function() { check_symbol( r#" - //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git + //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub trait MyTrait { pub fn func() {} @@ -411,7 +407,7 @@ pub mod module { fn main() { let x = St { a$0: 2 }; } - //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git + //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub struct St { pub a: i32, } @@ -429,7 +425,7 @@ pub mod module { fn main() { func(); } - //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git + //- /foo/lib.rs crate:foo@0.1.0,https://a.b/foo.git library pub mod module { pub fn func() { let x$0 = 2; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs index 82a769347df..f87dcb889a4 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/cli/ssr.rs @@ -1,16 +1,14 @@ //! Applies structured search replace rules from the command line. 
+use anyhow::Context; use ide_ssr::MatchFinder; +use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; use project_model::{CargoConfig, RustLibSource}; -use crate::cli::{ - flags, - load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}, - Result, -}; +use crate::cli::flags; impl flags::Ssr { - pub fn run(self) -> Result<()> { + pub fn run(self) -> anyhow::Result<()> { use ide_db::base_db::SourceDatabaseExt; let mut cargo_config = CargoConfig::default(); cargo_config.sysroot = Some(RustLibSource::Discover); @@ -35,7 +33,8 @@ impl flags::Ssr { if let Some(path) = vfs.file_path(file_id).as_path() { let mut contents = db.file_text(file_id).to_string(); edit.apply(&mut contents); - std::fs::write(path, contents)?; + std::fs::write(path, contents) + .with_context(|| format!("failed to write {path}"))?; } } Ok(()) @@ -46,7 +45,7 @@ impl flags::Search { /// Searches for `patterns`, printing debug information for any nodes whose text exactly matches /// `debug_snippet`. This is intended for debugging and probably isn't in it's current form useful /// for much else. - pub fn run(self) -> Result<()> { + pub fn run(self) -> anyhow::Result<()> { use ide_db::base_db::SourceDatabaseExt; use ide_db::symbol_index::SymbolsDatabase; let cargo_config = CargoConfig::default(); diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs index 6355c620f78..fa20c796ec2 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs @@ -1079,6 +1079,7 @@ impl Config { pub fn diagnostics(&self) -> DiagnosticsConfig { DiagnosticsConfig { + enabled: self.data.diagnostics_enable, proc_attr_macros_enabled: self.expand_proc_attr_macros(), proc_macros_enabled: self.data.procMacro_enable, disable_experimental: !self.data.diagnostics_experimental_enable, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs index 4e57c6eb65a..5e5cd9a0269 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/dispatch.rs @@ -10,7 +10,7 @@ use crate::{ global_state::{GlobalState, GlobalStateSnapshot}, main_loop::Task, version::version, - LspError, Result, + LspError, }; /// A visitor for routing a raw JSON request to an appropriate handler function. @@ -32,13 +32,13 @@ pub(crate) struct RequestDispatcher<'a> { pub(crate) global_state: &'a mut GlobalState, } -impl<'a> RequestDispatcher<'a> { +impl RequestDispatcher<'_> { /// Dispatches the request onto the current thread, given full access to /// mutable global state. Unlike all other methods here, this one isn't /// guarded by `catch_unwind`, so, please, don't make bugs :-) pub(crate) fn on_sync_mut<R>( &mut self, - f: fn(&mut GlobalState, R::Params) -> Result<R::Result>, + f: fn(&mut GlobalState, R::Params) -> anyhow::Result<R::Result>, ) -> &mut Self where R: lsp_types::request::Request, @@ -63,7 +63,7 @@ impl<'a> RequestDispatcher<'a> { /// Dispatches the request onto the current thread. pub(crate) fn on_sync<R>( &mut self, - f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>, + f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, ) -> &mut Self where R: lsp_types::request::Request, @@ -92,7 +92,7 @@ impl<'a> RequestDispatcher<'a> { /// without retrying it if it panics. 
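The ssr.rs hunk above now attaches context to the fs::write call, so a failure names the file involved instead of surfacing a bare OS error; it is the same anyhow::Context pattern used for the log file in main.rs. A tiny standalone sketch of that pattern, with illustrative paths:

use std::path::Path;

use anyhow::Context;

fn save(path: &Path, contents: &str) -> anyhow::Result<()> {
    // with_context only builds the message if the write actually fails.
    std::fs::write(path, contents)
        .with_context(|| format!("failed to write {}", path.display()))
}

fn main() {
    if let Err(err) = save(Path::new("/no/such/dir/out.txt"), "hello") {
        // `{:#}` prints the context followed by the underlying I/O cause.
        eprintln!("{err:#}");
    }
}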
pub(crate) fn on_no_retry<R>( &mut self, - f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>, + f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, ) -> &mut Self where R: lsp_types::request::Request + 'static, @@ -128,7 +128,7 @@ impl<'a> RequestDispatcher<'a> { /// Dispatches a non-latency-sensitive request onto the thread pool. pub(crate) fn on<R>( &mut self, - f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>, + f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, ) -> &mut Self where R: lsp_types::request::Request + 'static, @@ -141,7 +141,7 @@ impl<'a> RequestDispatcher<'a> { /// Dispatches a latency-sensitive request onto the thread pool. pub(crate) fn on_latency_sensitive<R>( &mut self, - f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>, + f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, ) -> &mut Self where R: lsp_types::request::Request + 'static, @@ -156,7 +156,7 @@ impl<'a> RequestDispatcher<'a> { /// We can't run this on the main thread though as we invoke rustfmt which may take arbitrary time to complete! pub(crate) fn on_fmt_thread<R>( &mut self, - f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>, + f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, ) -> &mut Self where R: lsp_types::request::Request + 'static, @@ -181,7 +181,7 @@ impl<'a> RequestDispatcher<'a> { fn on_with_thread_intent<const MAIN_POOL: bool, R>( &mut self, intent: ThreadIntent, - f: fn(GlobalStateSnapshot, R::Params) -> Result<R::Result>, + f: fn(GlobalStateSnapshot, R::Params) -> anyhow::Result<R::Result>, ) -> &mut Self where R: lsp_types::request::Request + 'static, @@ -245,7 +245,7 @@ impl<'a> RequestDispatcher<'a> { fn thread_result_to_response<R>( id: lsp_server::RequestId, - result: thread::Result<Result<R::Result>>, + result: thread::Result<anyhow::Result<R::Result>>, ) -> Result<lsp_server::Response, Cancelled> where R: lsp_types::request::Request, @@ -277,7 +277,7 @@ where fn result_to_response<R>( id: lsp_server::RequestId, - result: Result<R::Result>, + result: anyhow::Result<R::Result>, ) -> Result<lsp_server::Response, Cancelled> where R: lsp_types::request::Request, @@ -289,7 +289,7 @@ where Err(e) => match e.downcast::<LspError>() { Ok(lsp_error) => lsp_server::Response::new_err(id, lsp_error.code, lsp_error.message), Err(e) => match e.downcast::<Cancelled>() { - Ok(cancelled) => return Err(*cancelled), + Ok(cancelled) => return Err(cancelled), Err(e) => lsp_server::Response::new_err( id, lsp_server::ErrorCode::InternalError as i32, @@ -306,11 +306,11 @@ pub(crate) struct NotificationDispatcher<'a> { pub(crate) global_state: &'a mut GlobalState, } -impl<'a> NotificationDispatcher<'a> { +impl NotificationDispatcher<'_> { pub(crate) fn on_sync_mut<N>( &mut self, - f: fn(&mut GlobalState, N::Params) -> Result<()>, - ) -> Result<&mut Self> + f: fn(&mut GlobalState, N::Params) -> anyhow::Result<()>, + ) -> anyhow::Result<&mut Self> where N: lsp_types::notification::Notification, N::Params: DeserializeOwned + Send, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs index cd74a5500d0..c247e1bb229 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/from_proto.rs @@ -14,19 +14,21 @@ use crate::{ line_index::{LineIndex, PositionEncoding}, lsp_ext, lsp_utils::invalid_params_error, - Result, }; -pub(crate) fn abs_path(url: 
&lsp_types::Url) -> Result<AbsPathBuf> { - let path = url.to_file_path().map_err(|()| "url is not a file")?; +pub(crate) fn abs_path(url: &lsp_types::Url) -> anyhow::Result<AbsPathBuf> { + let path = url.to_file_path().map_err(|()| anyhow::format_err!("url is not a file"))?; Ok(AbsPathBuf::try_from(path).unwrap()) } -pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> { +pub(crate) fn vfs_path(url: &lsp_types::Url) -> anyhow::Result<vfs::VfsPath> { abs_path(url).map(vfs::VfsPath::from) } -pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> { +pub(crate) fn offset( + line_index: &LineIndex, + position: lsp_types::Position, +) -> anyhow::Result<TextSize> { let line_col = match line_index.encoding { PositionEncoding::Utf8 => LineCol { line: position.line, col: position.character }, PositionEncoding::Wide(enc) => { @@ -42,7 +44,10 @@ pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> R Ok(text_size) } -pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Result<TextRange> { +pub(crate) fn text_range( + line_index: &LineIndex, + range: lsp_types::Range, +) -> anyhow::Result<TextRange> { let start = offset(line_index, range.start)?; let end = offset(line_index, range.end)?; match end < start { @@ -51,14 +56,14 @@ pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Res } } -pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> { +pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> anyhow::Result<FileId> { snap.url_to_file_id(url) } pub(crate) fn file_position( snap: &GlobalStateSnapshot, tdpp: lsp_types::TextDocumentPositionParams, -) -> Result<FilePosition> { +) -> anyhow::Result<FilePosition> { let file_id = file_id(snap, &tdpp.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; let offset = offset(&line_index, tdpp.position)?; @@ -69,7 +74,7 @@ pub(crate) fn file_range( snap: &GlobalStateSnapshot, text_document_identifier: lsp_types::TextDocumentIdentifier, range: lsp_types::Range, -) -> Result<FileRange> { +) -> anyhow::Result<FileRange> { file_range_uri(snap, &text_document_identifier.uri, range) } @@ -77,7 +82,7 @@ pub(crate) fn file_range_uri( snap: &GlobalStateSnapshot, document: &lsp_types::Url, range: lsp_types::Range, -) -> Result<FileRange> { +) -> anyhow::Result<FileRange> { let file_id = file_id(snap, document)?; let line_index = snap.file_line_index(file_id)?; let range = text_range(&line_index, range)?; @@ -101,7 +106,7 @@ pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> pub(crate) fn annotation( snap: &GlobalStateSnapshot, code_lens: lsp_types::CodeLens, -) -> Result<Option<Annotation>> { +) -> anyhow::Result<Option<Annotation>> { let data = code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_string()))?; let resolve = from_json::<lsp_ext::CodeLensResolveData>("CodeLensResolveData", &data)?; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs index d5b0e3a5705..ea8a6975195 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs @@ -9,6 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId}; use ide_db::base_db::{CrateId, 
FileLoader, ProcMacroPaths, SourceDatabase}; +use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; use nohash_hasher::IntMap; use parking_lot::{Mutex, RwLock}; @@ -27,10 +28,9 @@ use crate::{ main_loop::Task, mem_docs::MemDocs, op_queue::OpQueue, - reload::{self, SourceRootConfig}, + reload, task_pool::TaskPool, to_proto::url_from_abs_path, - Result, }; // Enforces drop order @@ -319,7 +319,7 @@ impl GlobalState { // crate see https://github.com/rust-lang/rust-analyzer/issues/13029 if let Some((path, force_crate_graph_reload)) = workspace_structure_change { self.fetch_workspaces_queue.request_op( - format!("workspace vfs file change: {}", path.display()), + format!("workspace vfs file change: {path}"), force_crate_graph_reload, ); } @@ -422,7 +422,7 @@ impl Drop for GlobalState { } impl GlobalStateSnapshot { - pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> { + pub(crate) fn url_to_file_id(&self, url: &Url) -> anyhow::Result<FileId> { url_to_file_id(&self.vfs.read().0, url) } @@ -481,8 +481,8 @@ pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url { url_from_abs_path(path) } -pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> { +pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> anyhow::Result<FileId> { let path = from_proto::vfs_path(url)?; - let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {path}"))?; + let res = vfs.file_id(&path).ok_or_else(|| anyhow::format_err!("file not found: {path}"))?; Ok(res) } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs index ae1dc23153c..e830e5e9a64 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/notification.rs @@ -14,10 +14,10 @@ use vfs::{AbsPathBuf, ChangeKind, VfsPath}; use crate::{ config::Config, from_proto, global_state::GlobalState, lsp_ext::RunFlycheckParams, - lsp_utils::apply_document_changes, mem_docs::DocumentData, reload, Result, + lsp_utils::apply_document_changes, mem_docs::DocumentData, reload, }; -pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Result<()> { +pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> anyhow::Result<()> { let id: lsp_server::RequestId = match params.id { lsp_types::NumberOrString::Number(id) => id.into(), lsp_types::NumberOrString::String(id) => id.into(), @@ -29,7 +29,7 @@ pub(crate) fn handle_cancel(state: &mut GlobalState, params: CancelParams) -> Re pub(crate) fn handle_work_done_progress_cancel( state: &mut GlobalState, params: WorkDoneProgressCancelParams, -) -> Result<()> { +) -> anyhow::Result<()> { if let lsp_types::NumberOrString::String(s) = ¶ms.token { if let Some(id) = s.strip_prefix("rust-analyzer/flycheck/") { if let Ok(id) = u32::from_str_radix(id, 10) { @@ -49,7 +49,7 @@ pub(crate) fn handle_work_done_progress_cancel( pub(crate) fn handle_did_open_text_document( state: &mut GlobalState, params: DidOpenTextDocumentParams, -) -> Result<()> { +) -> anyhow::Result<()> { let _p = profile::span("handle_did_open_text_document"); if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { @@ -68,7 +68,7 @@ pub(crate) fn handle_did_open_text_document( pub(crate) fn handle_did_change_text_document( state: &mut GlobalState, params: DidChangeTextDocumentParams, -) -> Result<()> { +) -> anyhow::Result<()> { let _p = 
profile::span("handle_did_change_text_document"); if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { @@ -100,7 +100,7 @@ pub(crate) fn handle_did_change_text_document( pub(crate) fn handle_did_close_text_document( state: &mut GlobalState, params: DidCloseTextDocumentParams, -) -> Result<()> { +) -> anyhow::Result<()> { let _p = profile::span("handle_did_close_text_document"); if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) { @@ -120,14 +120,14 @@ pub(crate) fn handle_did_close_text_document( pub(crate) fn handle_did_save_text_document( state: &mut GlobalState, params: DidSaveTextDocumentParams, -) -> Result<()> { +) -> anyhow::Result<()> { if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { // Re-fetch workspaces if a workspace related file has changed if let Some(abs_path) = vfs_path.as_path() { if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) { state .fetch_workspaces_queue - .request_op(format!("DidSaveTextDocument {}", abs_path.display()), false); + .request_op(format!("DidSaveTextDocument {abs_path}"), false); } } @@ -146,7 +146,7 @@ pub(crate) fn handle_did_save_text_document( pub(crate) fn handle_did_change_configuration( state: &mut GlobalState, _params: DidChangeConfigurationParams, -) -> Result<()> { +) -> anyhow::Result<()> { // As stated in https://github.com/microsoft/language-server-protocol/issues/676, // this notification's parameters should be ignored and the actual config queried separately. state.send_request::<lsp_types::request::WorkspaceConfiguration>( @@ -186,7 +186,7 @@ pub(crate) fn handle_did_change_configuration( pub(crate) fn handle_did_change_workspace_folders( state: &mut GlobalState, params: DidChangeWorkspaceFoldersParams, -) -> Result<()> { +) -> anyhow::Result<()> { let config = Arc::make_mut(&mut state.config); for workspace in params.event.removed { @@ -214,7 +214,7 @@ pub(crate) fn handle_did_change_workspace_folders( pub(crate) fn handle_did_change_watched_files( state: &mut GlobalState, params: DidChangeWatchedFilesParams, -) -> Result<()> { +) -> anyhow::Result<()> { for change in params.changes { if let Ok(path) = from_proto::abs_path(&change.uri) { state.loader.handle.invalidate(path); @@ -302,13 +302,13 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { } } -pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> { +pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { let _p = profile::span("handle_stop_flycheck"); state.flycheck.iter().for_each(|flycheck| flycheck.cancel()); Ok(()) } -pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> Result<()> { +pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { let _p = profile::span("handle_clear_flycheck"); state.diagnostics.clear_check_all(); Ok(()) @@ -317,7 +317,7 @@ pub(crate) fn handle_clear_flycheck(state: &mut GlobalState, _: ()) -> Result<() pub(crate) fn handle_run_flycheck( state: &mut GlobalState, params: RunFlycheckParams, -) -> Result<()> { +) -> anyhow::Result<()> { let _p = profile::span("handle_run_flycheck"); if let Some(text_document) = params.text_document { if let Ok(vfs_path) = from_proto::vfs_path(&text_document.uri) { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs index a6a72552d57..aad74b7466a 100644 --- 
a/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/handlers/request.rs @@ -8,6 +8,7 @@ use std::{ }; use anyhow::Context; + use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange, HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind, @@ -20,9 +21,9 @@ use lsp_types::{ CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams, CodeLens, CompletionItem, FoldingRange, FoldingRangeParams, HoverContents, InlayHint, InlayHintParams, Location, LocationLink, Position, PrepareRenameResponse, Range, RenameParams, - SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams, - SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, - SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit, + ResourceOp, ResourceOperationKind, SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, + SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult, + SemanticTokensResult, SymbolInformation, SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit, }; use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; @@ -33,7 +34,7 @@ use vfs::{AbsPath, AbsPathBuf, VfsPath}; use crate::{ cargo_target_spec::CargoTargetSpec, - config::{RustfmtConfig, WorkspaceSymbolConfig}, + config::{Config, RustfmtConfig, WorkspaceSymbolConfig}, diff::diff, from_proto, global_state::{GlobalState, GlobalStateSnapshot}, @@ -43,10 +44,10 @@ use crate::{ FetchDependencyListResult, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams, }, lsp_utils::{all_edits_are_disjoint, invalid_params_error}, - to_proto, LspError, Result, + to_proto, LspError, }; -pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> { +pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { // FIXME: use `Arc::from_iter` when it becomes available state.proc_macro_clients = Arc::from(Vec::new()); state.proc_macro_changed = false; @@ -55,7 +56,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result< Ok(()) } -pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> Result<()> { +pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { // FIXME: use `Arc::from_iter` when it becomes available state.proc_macro_clients = Arc::from(Vec::new()); state.proc_macro_changed = false; @@ -67,7 +68,7 @@ pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> Resu pub(crate) fn handle_analyzer_status( snap: GlobalStateSnapshot, params: lsp_ext::AnalyzerStatusParams, -) -> Result<String> { +) -> anyhow::Result<String> { let _p = profile::span("handle_analyzer_status"); let mut buf = String::new(); @@ -112,7 +113,7 @@ pub(crate) fn handle_analyzer_status( Ok(buf) } -pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> { +pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> anyhow::Result<String> { let _p = profile::span("handle_memory_usage"); let mem = state.analysis_host.per_query_memory_usage(); @@ -125,7 +126,7 @@ pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<Stri Ok(out) } -pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Result<()> { +pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> 
anyhow::Result<()> { state.analysis_host.shuffle_crate_graph(); Ok(()) } @@ -133,7 +134,7 @@ pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Resu pub(crate) fn handle_syntax_tree( snap: GlobalStateSnapshot, params: lsp_ext::SyntaxTreeParams, -) -> Result<String> { +) -> anyhow::Result<String> { let _p = profile::span("handle_syntax_tree"); let id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(id)?; @@ -145,7 +146,7 @@ pub(crate) fn handle_syntax_tree( pub(crate) fn handle_view_hir( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, -) -> Result<String> { +) -> anyhow::Result<String> { let _p = profile::span("handle_view_hir"); let position = from_proto::file_position(&snap, params)?; let res = snap.analysis.view_hir(position)?; @@ -155,7 +156,7 @@ pub(crate) fn handle_view_hir( pub(crate) fn handle_view_mir( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, -) -> Result<String> { +) -> anyhow::Result<String> { let _p = profile::span("handle_view_mir"); let position = from_proto::file_position(&snap, params)?; let res = snap.analysis.view_mir(position)?; @@ -165,7 +166,7 @@ pub(crate) fn handle_view_mir( pub(crate) fn handle_interpret_function( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, -) -> Result<String> { +) -> anyhow::Result<String> { let _p = profile::span("handle_interpret_function"); let position = from_proto::file_position(&snap, params)?; let res = snap.analysis.interpret_function(position)?; @@ -175,7 +176,7 @@ pub(crate) fn handle_interpret_function( pub(crate) fn handle_view_file_text( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentIdentifier, -) -> Result<String> { +) -> anyhow::Result<String> { let file_id = from_proto::file_id(&snap, ¶ms.uri)?; Ok(snap.analysis.file_text(file_id)?.to_string()) } @@ -183,7 +184,7 @@ pub(crate) fn handle_view_file_text( pub(crate) fn handle_view_item_tree( snap: GlobalStateSnapshot, params: lsp_ext::ViewItemTreeParams, -) -> Result<String> { +) -> anyhow::Result<String> { let _p = profile::span("handle_view_item_tree"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let res = snap.analysis.view_item_tree(file_id)?; @@ -193,16 +194,16 @@ pub(crate) fn handle_view_item_tree( pub(crate) fn handle_view_crate_graph( snap: GlobalStateSnapshot, params: ViewCrateGraphParams, -) -> Result<String> { +) -> anyhow::Result<String> { let _p = profile::span("handle_view_crate_graph"); - let dot = snap.analysis.view_crate_graph(params.full)??; + let dot = snap.analysis.view_crate_graph(params.full)?.map_err(anyhow::Error::msg)?; Ok(dot) } pub(crate) fn handle_expand_macro( snap: GlobalStateSnapshot, params: lsp_ext::ExpandMacroParams, -) -> Result<Option<lsp_ext::ExpandedMacro>> { +) -> anyhow::Result<Option<lsp_ext::ExpandedMacro>> { let _p = profile::span("handle_expand_macro"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; @@ -215,11 +216,11 @@ pub(crate) fn handle_expand_macro( pub(crate) fn handle_selection_range( snap: GlobalStateSnapshot, params: lsp_types::SelectionRangeParams, -) -> Result<Option<Vec<lsp_types::SelectionRange>>> { +) -> anyhow::Result<Option<Vec<lsp_types::SelectionRange>>> { let _p = profile::span("handle_selection_range"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; - let res: Result<Vec<lsp_types::SelectionRange>> = 
params + let res: anyhow::Result<Vec<lsp_types::SelectionRange>> = params .positions .into_iter() .map(|position| { @@ -258,7 +259,7 @@ pub(crate) fn handle_selection_range( pub(crate) fn handle_matching_brace( snap: GlobalStateSnapshot, params: lsp_ext::MatchingBraceParams, -) -> Result<Vec<Position>> { +) -> anyhow::Result<Vec<Position>> { let _p = profile::span("handle_matching_brace"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; @@ -281,7 +282,7 @@ pub(crate) fn handle_matching_brace( pub(crate) fn handle_join_lines( snap: GlobalStateSnapshot, params: lsp_ext::JoinLinesParams, -) -> Result<Vec<lsp_types::TextEdit>> { +) -> anyhow::Result<Vec<lsp_types::TextEdit>> { let _p = profile::span("handle_join_lines"); let config = snap.config.join_lines(); @@ -306,7 +307,7 @@ pub(crate) fn handle_join_lines( pub(crate) fn handle_on_enter( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, -) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> { +) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> { let _p = profile::span("handle_on_enter"); let position = from_proto::file_position(&snap, params)?; let edit = match snap.analysis.on_enter(position)? { @@ -321,7 +322,7 @@ pub(crate) fn handle_on_enter( pub(crate) fn handle_on_type_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentOnTypeFormattingParams, -) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> { +) -> anyhow::Result<Option<Vec<lsp_ext::SnippetTextEdit>>> { let _p = profile::span("handle_on_type_formatting"); let mut position = from_proto::file_position(&snap, params.text_document_position)?; let line_index = snap.file_line_index(position.file_id)?; @@ -361,7 +362,7 @@ pub(crate) fn handle_on_type_formatting( pub(crate) fn handle_document_symbol( snap: GlobalStateSnapshot, params: lsp_types::DocumentSymbolParams, -) -> Result<Option<lsp_types::DocumentSymbolResponse>> { +) -> anyhow::Result<Option<lsp_types::DocumentSymbolResponse>> { let _p = profile::span("handle_document_symbol"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; @@ -450,7 +451,7 @@ pub(crate) fn handle_document_symbol( pub(crate) fn handle_workspace_symbol( snap: GlobalStateSnapshot, params: WorkspaceSymbolParams, -) -> Result<Option<Vec<SymbolInformation>>> { +) -> anyhow::Result<Option<Vec<SymbolInformation>>> { let _p = profile::span("handle_workspace_symbol"); let config = snap.config.workspace_symbol(); @@ -513,7 +514,10 @@ pub(crate) fn handle_workspace_symbol( (all_symbols, libs) } - fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> { + fn exec_query( + snap: &GlobalStateSnapshot, + query: Query, + ) -> anyhow::Result<Vec<SymbolInformation>> { let mut res = Vec::new(); for nav in snap.analysis.symbol_search(query)? 
{ let container_name = nav.container_name.as_ref().map(|v| v.to_string()); @@ -542,7 +546,7 @@ pub(crate) fn handle_workspace_symbol( pub(crate) fn handle_will_rename_files( snap: GlobalStateSnapshot, params: lsp_types::RenameFilesParams, -) -> Result<Option<lsp_types::WorkspaceEdit>> { +) -> anyhow::Result<Option<lsp_types::WorkspaceEdit>> { let _p = profile::span("handle_will_rename_files"); let source_changes: Vec<SourceChange> = params @@ -604,7 +608,7 @@ pub(crate) fn handle_will_rename_files( pub(crate) fn handle_goto_definition( snap: GlobalStateSnapshot, params: lsp_types::GotoDefinitionParams, -) -> Result<Option<lsp_types::GotoDefinitionResponse>> { +) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> { let _p = profile::span("handle_goto_definition"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_definition(position)? { @@ -619,7 +623,7 @@ pub(crate) fn handle_goto_definition( pub(crate) fn handle_goto_declaration( snap: GlobalStateSnapshot, params: lsp_types::request::GotoDeclarationParams, -) -> Result<Option<lsp_types::request::GotoDeclarationResponse>> { +) -> anyhow::Result<Option<lsp_types::request::GotoDeclarationResponse>> { let _p = profile::span("handle_goto_declaration"); let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?; let nav_info = match snap.analysis.goto_declaration(position)? { @@ -634,7 +638,7 @@ pub(crate) fn handle_goto_declaration( pub(crate) fn handle_goto_implementation( snap: GlobalStateSnapshot, params: lsp_types::request::GotoImplementationParams, -) -> Result<Option<lsp_types::request::GotoImplementationResponse>> { +) -> anyhow::Result<Option<lsp_types::request::GotoImplementationResponse>> { let _p = profile::span("handle_goto_implementation"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_implementation(position)? { @@ -649,7 +653,7 @@ pub(crate) fn handle_goto_implementation( pub(crate) fn handle_goto_type_definition( snap: GlobalStateSnapshot, params: lsp_types::request::GotoTypeDefinitionParams, -) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> { +) -> anyhow::Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> { let _p = profile::span("handle_goto_type_definition"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let nav_info = match snap.analysis.goto_type_definition(position)? 
{ @@ -664,7 +668,7 @@ pub(crate) fn handle_goto_type_definition( pub(crate) fn handle_parent_module( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, -) -> Result<Option<lsp_types::GotoDefinitionResponse>> { +) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> { let _p = profile::span("handle_parent_module"); if let Ok(file_path) = ¶ms.text_document.uri.to_file_path() { if file_path.file_name().unwrap_or_default() == "Cargo.toml" { @@ -731,7 +735,7 @@ pub(crate) fn handle_parent_module( pub(crate) fn handle_runnables( snap: GlobalStateSnapshot, params: lsp_ext::RunnablesParams, -) -> Result<Vec<lsp_ext::Runnable>> { +) -> anyhow::Result<Vec<lsp_ext::Runnable>> { let _p = profile::span("handle_runnables"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let line_index = snap.file_line_index(file_id)?; @@ -826,7 +830,7 @@ fn should_skip_for_offset(runnable: &Runnable, offset: Option<TextSize>) -> bool pub(crate) fn handle_related_tests( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, -) -> Result<Vec<lsp_ext::TestInfo>> { +) -> anyhow::Result<Vec<lsp_ext::TestInfo>> { let _p = profile::span("handle_related_tests"); let position = from_proto::file_position(&snap, params)?; @@ -844,7 +848,7 @@ pub(crate) fn handle_related_tests( pub(crate) fn handle_completion( snap: GlobalStateSnapshot, params: lsp_types::CompletionParams, -) -> Result<Option<lsp_types::CompletionResponse>> { +) -> anyhow::Result<Option<lsp_types::CompletionResponse>> { let _p = profile::span("handle_completion"); let text_document_position = params.text_document_position.clone(); let position = from_proto::file_position(&snap, params.text_document_position)?; @@ -872,7 +876,7 @@ pub(crate) fn handle_completion( pub(crate) fn handle_completion_resolve( snap: GlobalStateSnapshot, mut original_completion: CompletionItem, -) -> Result<CompletionItem> { +) -> anyhow::Result<CompletionItem> { let _p = profile::span("handle_completion_resolve"); if !all_edits_are_disjoint(&original_completion, &[]) { @@ -928,7 +932,7 @@ pub(crate) fn handle_completion_resolve( pub(crate) fn handle_folding_range( snap: GlobalStateSnapshot, params: FoldingRangeParams, -) -> Result<Option<Vec<FoldingRange>>> { +) -> anyhow::Result<Option<Vec<FoldingRange>>> { let _p = profile::span("handle_folding_range"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let folds = snap.analysis.folding_ranges(file_id)?; @@ -945,7 +949,7 @@ pub(crate) fn handle_folding_range( pub(crate) fn handle_signature_help( snap: GlobalStateSnapshot, params: lsp_types::SignatureHelpParams, -) -> Result<Option<lsp_types::SignatureHelp>> { +) -> anyhow::Result<Option<lsp_types::SignatureHelp>> { let _p = profile::span("handle_signature_help"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let help = match snap.analysis.signature_help(position)? 
{ @@ -960,7 +964,7 @@ pub(crate) fn handle_signature_help( pub(crate) fn handle_hover( snap: GlobalStateSnapshot, params: lsp_ext::HoverParams, -) -> Result<Option<lsp_ext::Hover>> { +) -> anyhow::Result<Option<lsp_ext::Hover>> { let _p = profile::span("handle_hover"); let range = match params.position { PositionOrRange::Position(position) => Range::new(position, position), @@ -997,7 +1001,7 @@ pub(crate) fn handle_hover( pub(crate) fn handle_prepare_rename( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, -) -> Result<Option<PrepareRenameResponse>> { +) -> anyhow::Result<Option<PrepareRenameResponse>> { let _p = profile::span("handle_prepare_rename"); let position = from_proto::file_position(&snap, params)?; @@ -1011,7 +1015,7 @@ pub(crate) fn handle_prepare_rename( pub(crate) fn handle_rename( snap: GlobalStateSnapshot, params: RenameParams, -) -> Result<Option<WorkspaceEdit>> { +) -> anyhow::Result<Option<WorkspaceEdit>> { let _p = profile::span("handle_rename"); let position = from_proto::file_position(&snap, params.text_document_position)?; @@ -1027,14 +1031,30 @@ pub(crate) fn handle_rename( if !change.file_system_edits.is_empty() && snap.config.will_rename() { change.source_file_edits.clear(); } + let workspace_edit = to_proto::workspace_edit(&snap, change)?; + + if let Some(lsp_types::DocumentChanges::Operations(ops)) = + workspace_edit.document_changes.as_ref() + { + for op in ops { + if let lsp_types::DocumentChangeOperation::Op(doc_change_op) = op { + if let Err(err) = + resource_ops_supported(&snap.config, resolve_resource_op(doc_change_op)) + { + return Err(err); + } + } + } + } + Ok(Some(workspace_edit)) } pub(crate) fn handle_references( snap: GlobalStateSnapshot, params: lsp_types::ReferenceParams, -) -> Result<Option<Vec<Location>>> { +) -> anyhow::Result<Option<Vec<Location>>> { let _p = profile::span("handle_references"); let position = from_proto::file_position(&snap, params.text_document_position)?; @@ -1077,7 +1097,7 @@ pub(crate) fn handle_references( pub(crate) fn handle_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentFormattingParams, -) -> Result<Option<Vec<lsp_types::TextEdit>>> { +) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> { let _p = profile::span("handle_formatting"); run_rustfmt(&snap, params.text_document, None) @@ -1086,7 +1106,7 @@ pub(crate) fn handle_formatting( pub(crate) fn handle_range_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentRangeFormattingParams, -) -> Result<Option<Vec<lsp_types::TextEdit>>> { +) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> { let _p = profile::span("handle_range_formatting"); run_rustfmt(&snap, params.text_document, Some(params.range)) @@ -1095,7 +1115,7 @@ pub(crate) fn handle_range_formatting( pub(crate) fn handle_code_action( snap: GlobalStateSnapshot, params: lsp_types::CodeActionParams, -) -> Result<Option<Vec<lsp_ext::CodeAction>>> { +) -> anyhow::Result<Option<Vec<lsp_ext::CodeAction>>> { let _p = profile::span("handle_code_action"); if !snap.config.code_action_literals() { @@ -1134,6 +1154,21 @@ pub(crate) fn handle_code_action( let resolve_data = if code_action_resolve_cap { Some((index, params.clone())) } else { None }; let code_action = to_proto::code_action(&snap, assist, resolve_data)?; + + // Check if the client supports the necessary `ResourceOperation`s. 
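
The guard loop that follows (and the equivalent checks added to `handle_rename` and `handle_code_action_resolve`) funnels into the new `resource_ops_supported` helper defined later in this file. Stripped of the `lsp_types` plumbing, the check is roughly the sketch below; the enum and the `bail!` error are simplified stand-ins for `lsp_types::ResourceOperationKind` and the crate's `LspError` with `ErrorCode::RequestFailed`:

```rust
// Simplified stand-in for `resource_ops_supported`: the operation is allowed only when
// the client's `workspace.workspaceEdit.resourceOperations` capability lists it;
// otherwise a RequestFailed-style error is returned instead of a broken workspace edit.
#[derive(Debug, PartialEq, Clone, Copy)]
enum ResourceOperationKind { Create, Rename, Delete }

fn resource_ops_supported(
    advertised: Option<&[ResourceOperationKind]>,
    kind: ResourceOperationKind,
) -> anyhow::Result<()> {
    if !matches!(advertised, Some(ops) if ops.contains(&kind)) {
        anyhow::bail!("Client does not support {kind:?} capability.");
    }
    Ok(())
}

fn main() {
    let caps = [ResourceOperationKind::Create, ResourceOperationKind::Rename];
    assert!(resource_ops_supported(Some(&caps), ResourceOperationKind::Rename).is_ok());
    // A client that never sent `resourceOperations` gets the error.
    assert!(resource_ops_supported(None, ResourceOperationKind::Delete).is_err());
}
```
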
+ let changes = code_action.edit.as_ref().and_then(|it| it.document_changes.as_ref()); + if let Some(changes) = changes { + for change in changes { + if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change { + if let Err(err) = + resource_ops_supported(&snap.config, resolve_resource_op(res_op)) + { + return Err(err); + } + } + } + } + res.push(code_action) } @@ -1158,7 +1193,7 @@ pub(crate) fn handle_code_action( pub(crate) fn handle_code_action_resolve( snap: GlobalStateSnapshot, mut code_action: lsp_ext::CodeAction, -) -> Result<lsp_ext::CodeAction> { +) -> anyhow::Result<lsp_ext::CodeAction> { let _p = profile::span("handle_code_action_resolve"); let params = match code_action.data.take() { Some(it) => it, @@ -1216,10 +1251,25 @@ pub(crate) fn handle_code_action_resolve( let ca = to_proto::code_action(&snap, assist.clone(), None)?; code_action.edit = ca.edit; code_action.command = ca.command; + + if let Some(edit) = code_action.edit.as_ref() { + if let Some(changes) = edit.document_changes.as_ref() { + for change in changes { + if let lsp_ext::SnippetDocumentChangeOperation::Op(res_op) = change { + if let Err(err) = + resource_ops_supported(&snap.config, resolve_resource_op(res_op)) + { + return Err(err); + } + } + } + } + } + Ok(code_action) } -fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> { +fn parse_action_id(action_id: &str) -> anyhow::Result<(usize, SingleResolve), String> { let id_parts = action_id.split(':').collect::<Vec<_>>(); match id_parts.as_slice() { [assist_id_string, assist_kind_string, index_string] => { @@ -1237,7 +1287,7 @@ fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> { pub(crate) fn handle_code_lens( snap: GlobalStateSnapshot, params: lsp_types::CodeLensParams, -) -> Result<Option<Vec<CodeLens>>> { +) -> anyhow::Result<Option<Vec<CodeLens>>> { let _p = profile::span("handle_code_lens"); let lens_config = snap.config.lens(); @@ -1280,8 +1330,10 @@ pub(crate) fn handle_code_lens( pub(crate) fn handle_code_lens_resolve( snap: GlobalStateSnapshot, code_lens: CodeLens, -) -> Result<CodeLens> { - let Some(annotation) = from_proto::annotation(&snap, code_lens.clone())? else { return Ok(code_lens) }; +) -> anyhow::Result<CodeLens> { + let Some(annotation) = from_proto::annotation(&snap, code_lens.clone())? 
else { + return Ok(code_lens); + }; let annotation = snap.analysis.resolve_annotation(annotation)?; let mut acc = Vec::new(); @@ -1301,7 +1353,7 @@ pub(crate) fn handle_code_lens_resolve( pub(crate) fn handle_document_highlight( snap: GlobalStateSnapshot, params: lsp_types::DocumentHighlightParams, -) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> { +) -> anyhow::Result<Option<Vec<lsp_types::DocumentHighlight>>> { let _p = profile::span("handle_document_highlight"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; let line_index = snap.file_line_index(position.file_id)?; @@ -1323,7 +1375,7 @@ pub(crate) fn handle_document_highlight( pub(crate) fn handle_ssr( snap: GlobalStateSnapshot, params: lsp_ext::SsrParams, -) -> Result<lsp_types::WorkspaceEdit> { +) -> anyhow::Result<lsp_types::WorkspaceEdit> { let _p = profile::span("handle_ssr"); let selections = params .selections @@ -1343,7 +1395,7 @@ pub(crate) fn handle_ssr( pub(crate) fn handle_inlay_hints( snap: GlobalStateSnapshot, params: InlayHintParams, -) -> Result<Option<Vec<InlayHint>>> { +) -> anyhow::Result<Option<Vec<InlayHint>>> { let _p = profile::span("handle_inlay_hints"); let document_uri = ¶ms.text_document.uri; let FileRange { file_id, range } = from_proto::file_range( @@ -1365,7 +1417,7 @@ pub(crate) fn handle_inlay_hints( pub(crate) fn handle_inlay_hints_resolve( _snap: GlobalStateSnapshot, hint: InlayHint, -) -> Result<InlayHint> { +) -> anyhow::Result<InlayHint> { let _p = profile::span("handle_inlay_hints_resolve"); Ok(hint) } @@ -1373,7 +1425,7 @@ pub(crate) fn handle_inlay_hints_resolve( pub(crate) fn handle_call_hierarchy_prepare( snap: GlobalStateSnapshot, params: CallHierarchyPrepareParams, -) -> Result<Option<Vec<CallHierarchyItem>>> { +) -> anyhow::Result<Option<Vec<CallHierarchyItem>>> { let _p = profile::span("handle_call_hierarchy_prepare"); let position = from_proto::file_position(&snap, params.text_document_position_params)?; @@ -1395,7 +1447,7 @@ pub(crate) fn handle_call_hierarchy_prepare( pub(crate) fn handle_call_hierarchy_incoming( snap: GlobalStateSnapshot, params: CallHierarchyIncomingCallsParams, -) -> Result<Option<Vec<CallHierarchyIncomingCall>>> { +) -> anyhow::Result<Option<Vec<CallHierarchyIncomingCall>>> { let _p = profile::span("handle_call_hierarchy_incoming"); let item = params.item; @@ -1430,7 +1482,7 @@ pub(crate) fn handle_call_hierarchy_incoming( pub(crate) fn handle_call_hierarchy_outgoing( snap: GlobalStateSnapshot, params: CallHierarchyOutgoingCallsParams, -) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> { +) -> anyhow::Result<Option<Vec<CallHierarchyOutgoingCall>>> { let _p = profile::span("handle_call_hierarchy_outgoing"); let item = params.item; @@ -1465,7 +1517,7 @@ pub(crate) fn handle_call_hierarchy_outgoing( pub(crate) fn handle_semantic_tokens_full( snap: GlobalStateSnapshot, params: SemanticTokensParams, -) -> Result<Option<SemanticTokensResult>> { +) -> anyhow::Result<Option<SemanticTokensResult>> { let _p = profile::span("handle_semantic_tokens_full"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; @@ -1495,7 +1547,7 @@ pub(crate) fn handle_semantic_tokens_full( pub(crate) fn handle_semantic_tokens_full_delta( snap: GlobalStateSnapshot, params: SemanticTokensDeltaParams, -) -> Result<Option<SemanticTokensFullDeltaResult>> { +) -> anyhow::Result<Option<SemanticTokensFullDeltaResult>> { let _p = profile::span("handle_semantic_tokens_full_delta"); let file_id = from_proto::file_id(&snap, 
¶ms.text_document.uri)?; @@ -1535,7 +1587,7 @@ pub(crate) fn handle_semantic_tokens_full_delta( pub(crate) fn handle_semantic_tokens_range( snap: GlobalStateSnapshot, params: SemanticTokensRangeParams, -) -> Result<Option<SemanticTokensRangeResult>> { +) -> anyhow::Result<Option<SemanticTokensRangeResult>> { let _p = profile::span("handle_semantic_tokens_range"); let frange = from_proto::file_range(&snap, params.text_document, params.range)?; @@ -1561,7 +1613,7 @@ pub(crate) fn handle_semantic_tokens_range( pub(crate) fn handle_open_docs( snap: GlobalStateSnapshot, params: lsp_types::TextDocumentPositionParams, -) -> Result<ExternalDocsResponse> { +) -> anyhow::Result<ExternalDocsResponse> { let _p = profile::span("handle_open_docs"); let position = from_proto::file_position(&snap, params)?; @@ -1582,9 +1634,9 @@ pub(crate) fn handle_open_docs( let Ok(remote_urls) = snap.analysis.external_docs(position, target_dir, sysroot) else { return if snap.config.local_docs() { Ok(ExternalDocsResponse::WithLocal(Default::default())) - } else { + } else { Ok(ExternalDocsResponse::Simple(None)) - } + }; }; let web = remote_urls.web_url.and_then(|it| Url::parse(&it).ok()); @@ -1600,7 +1652,7 @@ pub(crate) fn handle_open_docs( pub(crate) fn handle_open_cargo_toml( snap: GlobalStateSnapshot, params: lsp_ext::OpenCargoTomlParams, -) -> Result<Option<lsp_types::GotoDefinitionResponse>> { +) -> anyhow::Result<Option<lsp_types::GotoDefinitionResponse>> { let _p = profile::span("handle_open_cargo_toml"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; @@ -1618,7 +1670,7 @@ pub(crate) fn handle_open_cargo_toml( pub(crate) fn handle_move_item( snap: GlobalStateSnapshot, params: lsp_ext::MoveItemParams, -) -> Result<Vec<lsp_ext::SnippetTextEdit>> { +) -> anyhow::Result<Vec<lsp_ext::SnippetTextEdit>> { let _p = profile::span("handle_move_item"); let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; let range = from_proto::file_range(&snap, params.text_document, params.range)?; @@ -1637,6 +1689,34 @@ pub(crate) fn handle_move_item( } } +pub(crate) fn handle_view_recursive_memory_layout( + snap: GlobalStateSnapshot, + params: lsp_types::TextDocumentPositionParams, +) -> anyhow::Result<Option<lsp_ext::RecursiveMemoryLayout>> { + let _p = profile::span("view_recursive_memory_layout"); + let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?; + let line_index = snap.file_line_index(file_id)?; + let offset = from_proto::offset(&line_index, params.position)?; + + let res = snap.analysis.get_recursive_memory_layout(FilePosition { file_id, offset })?; + Ok(res.map(|it| lsp_ext::RecursiveMemoryLayout { + nodes: it + .nodes + .iter() + .map(|n| lsp_ext::MemoryLayoutNode { + item_name: n.item_name.clone(), + typename: n.typename.clone(), + size: n.size, + offset: n.offset, + alignment: n.alignment, + parent_idx: n.parent_idx, + children_start: n.children_start, + children_len: n.children_len, + }) + .collect(), + })) +} + fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::CommandLink { lsp_ext::CommandLink { tooltip: Some(tooltip), command } } @@ -1792,7 +1872,7 @@ fn run_rustfmt( snap: &GlobalStateSnapshot, text_document: TextDocumentIdentifier, range: Option<lsp_types::Range>, -) -> Result<Option<Vec<lsp_types::TextEdit>>> { +) -> anyhow::Result<Option<Vec<lsp_types::TextEdit>>> { let file_id = from_proto::file_id(snap, &text_document.uri)?; let file = snap.analysis.file_text(file_id)?; @@ -1942,7 +2022,7 @@ fn run_rustfmt( pub(crate) fn 
fetch_dependency_list( state: GlobalStateSnapshot, _params: FetchDependencyListParams, -) -> Result<FetchDependencyListResult> { +) -> anyhow::Result<FetchDependencyListResult> { let crates = state.analysis.fetch_crates()?; let crate_infos = crates .into_iter() @@ -1987,3 +2067,42 @@ fn to_url(path: VfsPath) -> Option<Url> { let str_path = path.as_os_str().to_str()?; Url::from_file_path(str_path).ok() } + +fn resource_ops_supported(config: &Config, kind: ResourceOperationKind) -> anyhow::Result<()> { + #[rustfmt::skip] + let resops = (|| { + config + .caps() + .workspace + .as_ref()? + .workspace_edit + .as_ref()? + .resource_operations + .as_ref() + })(); + + if !matches!(resops, Some(resops) if resops.contains(&kind)) { + return Err(LspError::new( + ErrorCode::RequestFailed as i32, + format!( + "Client does not support {} capability.", + match kind { + ResourceOperationKind::Create => "create", + ResourceOperationKind::Rename => "rename", + ResourceOperationKind::Delete => "delete", + } + ), + ) + .into()); + } + + Ok(()) +} + +fn resolve_resource_op(op: &ResourceOp) -> ResourceOperationKind { + match op { + ResourceOp::Create(_) => ResourceOperationKind::Create, + ResourceOp::Rename(_) => ResourceOperationKind::Rename, + ResourceOp::Delete(_) => ResourceOperationKind::Delete, + } +} diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs index bd9f471a46d..5a11012b93c 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -20,7 +20,7 @@ use test_utils::project_root; use triomphe::Arc; use vfs::{AbsPathBuf, VfsPath}; -use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; +use load_cargo::{load_workspace_at, LoadCargoConfig, ProcMacroServerChoice}; #[test] fn integrated_highlighting_benchmark() { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs index 65de4366e9f..57e26c241bb 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lib.rs @@ -54,13 +54,12 @@ use serde::de::DeserializeOwned; pub use crate::{caps::server_capabilities, main_loop::main_loop, version::version}; -pub type Error = Box<dyn std::error::Error + Send + Sync>; -pub type Result<T, E = Error> = std::result::Result<T, E>; - -pub fn from_json<T: DeserializeOwned>(what: &'static str, json: &serde_json::Value) -> Result<T> { - let res = serde_json::from_value(json.clone()) - .map_err(|e| format!("Failed to deserialize {what}: {e}; {json}"))?; - Ok(res) +pub fn from_json<T: DeserializeOwned>( + what: &'static str, + json: &serde_json::Value, +) -> anyhow::Result<T> { + serde_json::from_value(json.clone()) + .map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}")) } #[derive(Debug)] diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs index 4d67c8b305f..d0989b3230d 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/lsp_ext.rs @@ -182,6 +182,33 @@ pub struct ExpandedMacro { pub expansion: String, } +pub enum ViewRecursiveMemoryLayout {} + +impl Request for ViewRecursiveMemoryLayout { + type Params = lsp_types::TextDocumentPositionParams; + type Result = 
Option<RecursiveMemoryLayout>; + const METHOD: &'static str = "rust-analyzer/viewRecursiveMemoryLayout"; +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct RecursiveMemoryLayout { + pub nodes: Vec<MemoryLayoutNode>, +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct MemoryLayoutNode { + pub item_name: String, + pub typename: String, + pub size: u64, + pub offset: u64, + pub alignment: u64, + pub parent_idx: i64, + pub children_start: i64, + pub children_len: u64, +} + pub enum CancelFlycheck {} impl Notification for CancelFlycheck { diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs index 02dd94e5fa5..74036710fa3 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs @@ -23,10 +23,9 @@ use crate::{ lsp_ext, lsp_utils::{notification_is, Progress}, reload::{BuildDataProgress, ProcMacroProgress, ProjectWorkspaceProgress}, - Result, }; -pub fn main_loop(config: Config, connection: Connection) -> Result<()> { +pub fn main_loop(config: Config, connection: Connection) -> anyhow::Result<()> { tracing::info!("initial config: {:#?}", config); // Windows scheduler implements priority boosts: if thread waits for an @@ -109,7 +108,7 @@ impl fmt::Debug for Event { } impl GlobalState { - fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> { + fn run(mut self, inbox: Receiver<lsp_server::Message>) -> anyhow::Result<()> { self.update_status_or_notify(); if self.config.did_save_text_document_dynamic_registration() { @@ -134,7 +133,7 @@ impl GlobalState { self.handle_event(event)?; } - Err("client exited without proper shutdown sequence".into()) + anyhow::bail!("client exited without proper shutdown sequence") } fn register_did_save_capability(&mut self) { @@ -191,7 +190,7 @@ impl GlobalState { } } - fn handle_event(&mut self, event: Event) -> Result<()> { + fn handle_event(&mut self, event: Event) -> anyhow::Result<()> { let loop_start = Instant::now(); // NOTE: don't count blocking select! call as a loop-turn time let _p = profile::span("GlobalState::handle_event"); @@ -754,11 +753,12 @@ impl GlobalState { ) .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files) .on::<lsp_ext::Ssr>(handlers::handle_ssr) + .on::<lsp_ext::ViewRecursiveMemoryLayout>(handlers::handle_view_recursive_memory_layout) .finish(); } /// Handles an incoming notification. - fn on_notification(&mut self, not: Notification) -> Result<()> { + fn on_notification(&mut self, not: Notification) -> anyhow::Result<()> { use crate::handlers::notification as handlers; use lsp_types::notification as notifs; @@ -843,11 +843,7 @@ impl GlobalState { d.code.as_str().to_string(), )), code_description: Some(lsp_types::CodeDescription { - href: lsp_types::Url::parse(&format!( - "https://rust-analyzer.github.io/manual.html#{}", - d.code.as_str() - )) - .unwrap(), + href: lsp_types::Url::parse(&d.code.url()).unwrap(), }), source: Some("rust-analyzer".to_string()), message: d.message, diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs index 310c6b076c0..18d9151d4aa 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/reload.rs @@ -12,26 +12,22 @@ //! correct. 
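
The `RecursiveMemoryLayout`/`MemoryLayoutNode` types added to lsp_ext.rs above back the new `rust-analyzer/viewRecursiveMemoryLayout` request and are marked `#[serde(rename_all = "camelCase")]`, so responses carry camelCase keys such as `itemName`, `parentIdx` and `childrenStart`. A small sketch of what one node serializes to (field values are made up; assumes `serde` with the derive feature and `serde_json` are available):

```rust
// Hypothetical values, mirroring the struct shape added in lsp_ext.rs.
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct MemoryLayoutNode {
    item_name: String,
    typename: String,
    size: u64,
    offset: u64,
    alignment: u64,
    parent_idx: i64,
    children_start: i64,
    children_len: u64,
}

fn main() {
    let node = MemoryLayoutNode {
        item_name: "field_a".into(),
        typename: "u32".into(),
        size: 4,
        offset: 0,
        alignment: 4,
        parent_idx: 0,
        children_start: -1,
        children_len: 0,
    };
    // Prints keys like "itemName", "parentIdx", "childrenStart", "childrenLen".
    println!("{}", serde_json::to_string_pretty(&node).unwrap());
}
```
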
Instead, we try to provide a best-effort service. Even if the //! project is currently loading and we don't have a full project model, we //! still want to respond to various requests. -use std::{collections::hash_map::Entry, iter, mem, sync}; +use std::{iter, mem}; use flycheck::{FlycheckConfig, FlycheckHandle}; use hir::db::DefDatabase; use ide::Change; use ide_db::{ - base_db::{ - salsa::Durability, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, - ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros, SourceRoot, VfsPath, - }, + base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros}, FxHashMap, }; -use itertools::Itertools; -use proc_macro_api::{MacroDylib, ProcMacroServer}; -use project_model::{PackageRoot, ProjectWorkspace, WorkspaceBuildScripts}; +use load_cargo::{load_proc_macro, ProjectFolders}; +use proc_macro_api::ProcMacroServer; +use project_model::{ProjectWorkspace, WorkspaceBuildScripts}; use rustc_hash::FxHashSet; use stdx::{format_to, thread::ThreadIntent}; -use syntax::SmolStr; use triomphe::Arc; -use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind}; +use vfs::{AbsPath, ChangeKind}; use crate::{ config::{Config, FilesWatcher, LinkedProject}, @@ -41,8 +37,6 @@ use crate::{ op_queue::Cause, }; -use ::tt::token_id as tt; - #[derive(Debug)] pub(crate) enum ProjectWorkspaceProgress { Begin, @@ -307,7 +301,7 @@ impl GlobalState { res.map_or_else( |_| Err("proc macro crate is missing dylib".to_owned()), |(crate_name, path)| { - progress(path.display().to_string()); + progress(path.to_string()); client.as_ref().map_err(Clone::clone).and_then(|client| { load_proc_macro( client, @@ -340,7 +334,11 @@ impl GlobalState { let _p = profile::span("GlobalState::switch_workspaces"); tracing::info!(%cause, "will switch workspaces"); - let Some((workspaces, force_reload_crate_graph)) = self.fetch_workspaces_queue.last_op_result() else { return; }; + let Some((workspaces, force_reload_crate_graph)) = + self.fetch_workspaces_queue.last_op_result() + else { + return; + }; if let Err(_) = self.fetch_workspace_error() { if !self.workspaces.is_empty() { @@ -407,9 +405,9 @@ impl GlobalState { .flat_map(|root| { root.include.into_iter().flat_map(|it| { [ - format!("{}/**/*.rs", it.display()), - format!("{}/**/Cargo.toml", it.display()), - format!("{}/**/Cargo.lock", it.display()), + format!("{it}/**/*.rs"), + format!("{it}/**/Cargo.toml"), + format!("{it}/**/Cargo.lock"), ] }) }) @@ -447,17 +445,13 @@ impl GlobalState { None => ws.find_sysroot_proc_macro_srv()?, }; - tracing::info!("Using proc-macro server at {}", path.display(),); + tracing::info!("Using proc-macro server at {path}"); ProcMacroServer::spawn(path.clone()).map_err(|err| { tracing::error!( - "Failed to run proc-macro server from path {}, error: {:?}", - path.display(), - err + "Failed to run proc-macro server from path {path}, error: {err:?}", ); - anyhow::anyhow!( - "Failed to run proc-macro server from path {}, error: {:?}", - path.display(), - err + anyhow::format_err!( + "Failed to run proc-macro server from path {path}, error: {err:?}", ) }) }) @@ -534,7 +528,9 @@ impl GlobalState { pub(super) fn fetch_workspace_error(&self) -> Result<(), String> { let mut buf = String::new(); - let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) }; + let Some((last_op_result, _)) = self.fetch_workspaces_queue.last_op_result() else { + return Ok(()); + }; if last_op_result.is_empty() { stdx::format_to!(buf, "rust-analyzer failed to discover 
workspace"); } else { @@ -621,254 +617,6 @@ impl GlobalState { } } -#[derive(Default)] -pub(crate) struct ProjectFolders { - pub(crate) load: Vec<vfs::loader::Entry>, - pub(crate) watch: Vec<usize>, - pub(crate) source_root_config: SourceRootConfig, -} - -impl ProjectFolders { - pub(crate) fn new( - workspaces: &[ProjectWorkspace], - global_excludes: &[AbsPathBuf], - ) -> ProjectFolders { - let mut res = ProjectFolders::default(); - let mut fsc = FileSetConfig::builder(); - let mut local_filesets = vec![]; - - // Dedup source roots - // Depending on the project setup, we can have duplicated source roots, or for example in - // the case of the rustc workspace, we can end up with two source roots that are almost the - // same but not quite, like: - // PackageRoot { is_local: false, include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri")], exclude: [] } - // PackageRoot { - // is_local: true, - // include: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri"), AbsPathBuf(".../rust/build/x86_64-pc-windows-msvc/stage0-tools/x86_64-pc-windows-msvc/release/build/cargo-miri-85801cd3d2d1dae4/out")], - // exclude: [AbsPathBuf(".../rust/src/tools/miri/cargo-miri/.git"), AbsPathBuf(".../rust/src/tools/miri/cargo-miri/target")] - // } - // - // The first one comes from the explicit rustc workspace which points to the rustc workspace itself - // The second comes from the rustc workspace that we load as the actual project workspace - // These `is_local` differing in this kind of way gives us problems, especially when trying to filter diagnostics as we don't report diagnostics for external libraries. - // So we need to deduplicate these, usually it would be enough to deduplicate by `include`, but as the rustc example shows here that doesn't work, - // so we need to also coalesce the includes if they overlap. 
- - let mut roots: Vec<_> = workspaces - .iter() - .flat_map(|ws| ws.to_roots()) - .update(|root| root.include.sort()) - .sorted_by(|a, b| a.include.cmp(&b.include)) - .collect(); - - // map that tracks indices of overlapping roots - let mut overlap_map = FxHashMap::<_, Vec<_>>::default(); - let mut done = false; - - while !mem::replace(&mut done, true) { - // maps include paths to indices of the corresponding root - let mut include_to_idx = FxHashMap::default(); - // Find and note down the indices of overlapping roots - for (idx, root) in roots.iter().enumerate().filter(|(_, it)| !it.include.is_empty()) { - for include in &root.include { - match include_to_idx.entry(include) { - Entry::Occupied(e) => { - overlap_map.entry(*e.get()).or_default().push(idx); - } - Entry::Vacant(e) => { - e.insert(idx); - } - } - } - } - for (k, v) in overlap_map.drain() { - done = false; - for v in v { - let r = mem::replace( - &mut roots[v], - PackageRoot { is_local: false, include: vec![], exclude: vec![] }, - ); - roots[k].is_local |= r.is_local; - roots[k].include.extend(r.include); - roots[k].exclude.extend(r.exclude); - } - roots[k].include.sort(); - roots[k].exclude.sort(); - roots[k].include.dedup(); - roots[k].exclude.dedup(); - } - } - - for root in roots.into_iter().filter(|it| !it.include.is_empty()) { - let file_set_roots: Vec<VfsPath> = - root.include.iter().cloned().map(VfsPath::from).collect(); - - let entry = { - let mut dirs = vfs::loader::Directories::default(); - dirs.extensions.push("rs".into()); - dirs.include.extend(root.include); - dirs.exclude.extend(root.exclude); - for excl in global_excludes { - if dirs - .include - .iter() - .any(|incl| incl.starts_with(excl) || excl.starts_with(incl)) - { - dirs.exclude.push(excl.clone()); - } - } - - vfs::loader::Entry::Directories(dirs) - }; - - if root.is_local { - res.watch.push(res.load.len()); - } - res.load.push(entry); - - if root.is_local { - local_filesets.push(fsc.len()); - } - fsc.add_file_set(file_set_roots) - } - - let fsc = fsc.build(); - res.source_root_config = SourceRootConfig { fsc, local_filesets }; - - res - } -} - -#[derive(Default, Debug)] -pub(crate) struct SourceRootConfig { - pub(crate) fsc: FileSetConfig, - pub(crate) local_filesets: Vec<usize>, -} - -impl SourceRootConfig { - pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> { - let _p = profile::span("SourceRootConfig::partition"); - self.fsc - .partition(vfs) - .into_iter() - .enumerate() - .map(|(idx, file_set)| { - let is_local = self.local_filesets.contains(&idx); - if is_local { - SourceRoot::new_local(file_set) - } else { - SourceRoot::new_library(file_set) - } - }) - .collect() - } -} - -/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace` -/// with an identity dummy expander. 
-pub(crate) fn load_proc_macro( - server: &ProcMacroServer, - path: &AbsPath, - dummy_replace: &[Box<str>], -) -> ProcMacroLoadResult { - let res: Result<Vec<_>, String> = (|| { - let dylib = MacroDylib::new(path.to_path_buf()); - let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?; - if vec.is_empty() { - return Err("proc macro library returned no proc macros".to_string()); - } - Ok(vec - .into_iter() - .map(|expander| expander_to_proc_macro(expander, dummy_replace)) - .collect()) - })(); - return match res { - Ok(proc_macros) => { - tracing::info!( - "Loaded proc-macros for {}: {:?}", - path.display(), - proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>() - ); - Ok(proc_macros) - } - Err(e) => { - tracing::warn!("proc-macro loading for {} failed: {e}", path.display()); - Err(e) - } - }; - - fn expander_to_proc_macro( - expander: proc_macro_api::ProcMacro, - dummy_replace: &[Box<str>], - ) -> ProcMacro { - let name = SmolStr::from(expander.name()); - let kind = match expander.kind() { - proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive, - proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike, - proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr, - }; - let expander: sync::Arc<dyn ProcMacroExpander> = - if dummy_replace.iter().any(|replace| &**replace == name) { - match kind { - ProcMacroKind::Attr => sync::Arc::new(IdentityExpander), - _ => sync::Arc::new(EmptyExpander), - } - } else { - sync::Arc::new(Expander(expander)) - }; - ProcMacro { name, kind, expander } - } - - #[derive(Debug)] - struct Expander(proc_macro_api::ProcMacro); - - impl ProcMacroExpander for Expander { - fn expand( - &self, - subtree: &tt::Subtree, - attrs: Option<&tt::Subtree>, - env: &Env, - ) -> Result<tt::Subtree, ProcMacroExpansionError> { - let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); - match self.0.expand(subtree, attrs, env) { - Ok(Ok(subtree)) => Ok(subtree), - Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), - Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), - } - } - } - - /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user. - #[derive(Debug)] - struct IdentityExpander; - - impl ProcMacroExpander for IdentityExpander { - fn expand( - &self, - subtree: &tt::Subtree, - _: Option<&tt::Subtree>, - _: &Env, - ) -> Result<tt::Subtree, ProcMacroExpansionError> { - Ok(subtree.clone()) - } - } - - /// Empty expander, used for proc-macros that are deliberately ignored by the user. 
- #[derive(Debug)] - struct EmptyExpander; - - impl ProcMacroExpander for EmptyExpander { - fn expand( - &self, - _: &tt::Subtree, - _: Option<&tt::Subtree>, - _: &Env, - ) -> Result<tt::Subtree, ProcMacroExpansionError> { - Ok(tt::Subtree::empty()) - } - } -} - pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool { const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"]; const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"]; diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs index 648bc995ad5..ba3421bf9e7 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs @@ -94,7 +94,10 @@ pub(crate) fn document_highlight_kind( pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity { match severity { Severity::Error => lsp_types::DiagnosticSeverity::ERROR, + Severity::Warning => lsp_types::DiagnosticSeverity::WARNING, Severity::WeakWarning => lsp_types::DiagnosticSeverity::HINT, + // unreachable + Severity::Allow => lsp_types::DiagnosticSeverity::INFORMATION, } } diff --git a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs index b2a8041ae9b..3c52ef5ef7f 100644 --- a/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/src/tools/rust-analyzer/crates/rust-analyzer/tests/slow-tests/support.rs @@ -13,6 +13,7 @@ use rust_analyzer::{config::Config, lsp_ext, main_loop}; use serde::Serialize; use serde_json::{json, to_string_pretty, Value}; use test_utils::FixtureWithProjectMeta; +use tracing_subscriber::{prelude::*, Layer}; use vfs::AbsPathBuf; use crate::testdir::TestDir; @@ -24,7 +25,7 @@ pub(crate) struct Project<'a> { config: serde_json::Value, } -impl<'a> Project<'a> { +impl Project<'_> { pub(crate) fn with_fixture(fixture: &str) -> Project<'_> { Project { fixture, @@ -47,17 +48,17 @@ impl<'a> Project<'a> { } } - pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Project<'a> { + pub(crate) fn tmp_dir(mut self, tmp_dir: TestDir) -> Self { self.tmp_dir = Some(tmp_dir); self } - pub(crate) fn root(mut self, path: &str) -> Project<'a> { + pub(crate) fn root(mut self, path: &str) -> Self { self.roots.push(path.into()); self } - pub(crate) fn with_config(mut self, config: serde_json::Value) -> Project<'a> { + pub(crate) fn with_config(mut self, config: serde_json::Value) -> Self { fn merge(dst: &mut serde_json::Value, src: serde_json::Value) { match (dst, src) { (Value::Object(dst), Value::Object(src)) => { @@ -76,10 +77,11 @@ impl<'a> Project<'a> { let tmp_dir = self.tmp_dir.unwrap_or_else(TestDir::new); static INIT: Once = Once::new(); INIT.call_once(|| { - tracing_subscriber::fmt() - .with_test_writer() - .with_env_filter(tracing_subscriber::EnvFilter::from_env("RA_LOG")) - .init(); + let filter: tracing_subscriber::filter::Targets = + std::env::var("RA_LOG").ok().and_then(|it| it.parse().ok()).unwrap_or_default(); + let layer = + tracing_subscriber::fmt::Layer::new().with_test_writer().with_filter(filter); + tracing_subscriber::Registry::default().with(layer).init(); profile::init_from(crate::PROFILE); }); @@ -111,6 +113,14 @@ impl<'a> Project<'a> { relative_pattern_support: None, }, ), + workspace_edit: Some(lsp_types::WorkspaceEditClientCapabilities { + resource_operations: Some(vec![ + 
lsp_types::ResourceOperationKind::Create, + lsp_types::ResourceOperationKind::Delete, + lsp_types::ResourceOperationKind::Rename, + ]), + ..Default::default() + }), ..Default::default() }), text_document: Some(lsp_types::TextDocumentClientCapabilities { diff --git a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs index c5da6ceb4d1..1514c6c7d4c 100644 --- a/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs +++ b/src/tools/rust-analyzer/crates/sourcegen/src/lib.rs @@ -61,7 +61,9 @@ impl CommentBlock { let mut blocks = CommentBlock::extract_untagged(text); blocks.retain_mut(|block| { let first = block.contents.remove(0); - let Some(id) = first.strip_prefix(&tag) else { return false; }; + let Some(id) = first.strip_prefix(&tag) else { + return false; + }; if block.is_doc { panic!("Use plain (non-doc) comments with tags like {tag}:\n {first}"); diff --git a/src/tools/rust-analyzer/crates/stdx/Cargo.toml b/src/tools/rust-analyzer/crates/stdx/Cargo.toml index a67f36ae900..536f000a44b 100644 --- a/src/tools/rust-analyzer/crates/stdx/Cargo.toml +++ b/src/tools/rust-analyzer/crates/stdx/Cargo.toml @@ -13,7 +13,7 @@ doctest = false [dependencies] libc = "0.2.135" -backtrace = { version = "0.3.65", optional = true } +backtrace = { version = "0.3.67", optional = true } always-assert = { version = "0.1.2", features = ["log"] } jod-thread = "0.1.2" crossbeam-channel = "0.5.5" diff --git a/src/tools/rust-analyzer/crates/syntax/Cargo.toml b/src/tools/rust-analyzer/crates/syntax/Cargo.toml index fb38d25ab54..5ee0c479284 100644 --- a/src/tools/rust-analyzer/crates/syntax/Cargo.toml +++ b/src/tools/rust-analyzer/crates/syntax/Cargo.toml @@ -19,7 +19,7 @@ itertools = "0.10.5" rowan = "0.15.11" rustc-hash = "1.1.0" once_cell = "1.17.0" -indexmap = "1.9.1" +indexmap = "2.0.0" smol_str.workspace = true triomphe.workspace = true diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs index b3ea6ca8d46..606804aea25 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/edit_in_place.rs @@ -213,6 +213,28 @@ pub trait AttrsOwnerEdit: ast::HasAttrs { } } } + + fn add_attr(&self, attr: ast::Attr) { + add_attr(self.syntax(), attr); + + fn add_attr(node: &SyntaxNode, attr: ast::Attr) { + let indent = IndentLevel::from_node(node); + attr.reindent_to(indent); + + let after_attrs_and_comments = node + .children_with_tokens() + .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) + .map_or(Position::first_child_of(node), |it| Position::before(it)); + + ted::insert_all( + after_attrs_and_comments, + vec![ + attr.syntax().clone().into(), + make::tokens::whitespace(&format!("\n{indent}")).into(), + ], + ) + } + } } impl<T: ast::HasAttrs> AttrsOwnerEdit for T {} @@ -676,12 +698,6 @@ fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken { } } -impl ast::StmtList { - pub fn push_front(&self, statement: ast::Stmt) { - ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax()); - } -} - impl ast::VariantList { pub fn add_variant(&self, variant: ast::Variant) { let (indent, position) = match self.variants().last() { @@ -732,6 +748,27 @@ fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> { Some(()) } +pub trait HasVisibilityEdit: ast::HasVisibility { + fn set_visibility(&self, visbility: ast::Visibility) { + match self.visibility() { + Some(current_visibility) => { + 
ted::replace(current_visibility.syntax(), visbility.syntax()) + } + None => { + let vis_before = self + .syntax() + .children_with_tokens() + .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) + .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); + + ted::insert(ted::Position::before(vis_before), visbility.syntax()); + } + } + } +} + +impl<T: ast::HasVisibility> HasVisibilityEdit for T {} + pub trait Indent: AstNode + Clone + Sized { fn indent_level(&self) -> IndentLevel { IndentLevel::from_node(self.syntax()) diff --git a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs index 3c2b7e56b06..4c6db0ef06c 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/ast/make.rs @@ -10,6 +10,8 @@ //! `parse(format!())` we use internally is an implementation detail -- long //! term, it will be replaced with direct tree manipulation. use itertools::Itertools; +use parser::T; +use rowan::NodeOrToken; use stdx::{format_to, never}; use crate::{ast, utils::is_raw_identifier, AstNode, SourceFile, SyntaxKind, SyntaxToken}; @@ -447,6 +449,21 @@ pub fn block_expr( ast_from_text(&format!("fn f() {buf}")) } +pub fn async_move_block_expr( + stmts: impl IntoIterator<Item = ast::Stmt>, + tail_expr: Option<ast::Expr>, +) -> ast::BlockExpr { + let mut buf = "async move {\n".to_string(); + for stmt in stmts.into_iter() { + format_to!(buf, " {stmt}\n"); + } + if let Some(tail_expr) = tail_expr { + format_to!(buf, " {tail_expr}\n"); + } + buf += "}"; + ast_from_text(&format!("const _: () = {buf};")) +} + pub fn tail_only_block_expr(tail_expr: ast::Expr) -> ast::BlockExpr { ast_from_text(&format!("fn f() {{ {tail_expr} }}")) } @@ -848,6 +865,36 @@ pub fn param_list( ast_from_text(&list) } +pub fn trait_( + is_unsafe: bool, + ident: &str, + gen_params: Option<ast::GenericParamList>, + where_clause: Option<ast::WhereClause>, + assoc_items: ast::AssocItemList, +) -> ast::Trait { + let mut text = String::new(); + + if is_unsafe { + format_to!(text, "unsafe "); + } + + format_to!(text, "trait {ident}"); + + if let Some(gen_params) = gen_params { + format_to!(text, "{} ", gen_params.to_string()); + } else { + text.push(' '); + } + + if let Some(where_clause) = where_clause { + format_to!(text, "{} ", where_clause.to_string()); + } + + format_to!(text, "{}", assoc_items.to_string()); + + ast_from_text(&text) +} + pub fn type_bound(bound: &str) -> ast::TypeBound { ast_from_text(&format!("fn f<T: {bound}>() {{ }}")) } @@ -985,6 +1032,41 @@ pub fn struct_( ast_from_text(&format!("{visibility}struct {strukt_name}{type_params}{field_list}{semicolon}",)) } +pub fn attr_outer(meta: ast::Meta) -> ast::Attr { + ast_from_text(&format!("#[{meta}]")) +} + +pub fn attr_inner(meta: ast::Meta) -> ast::Attr { + ast_from_text(&format!("#![{meta}]")) +} + +pub fn meta_expr(path: ast::Path, expr: ast::Expr) -> ast::Meta { + ast_from_text(&format!("#[{path} = {expr}]")) +} + +pub fn meta_token_tree(path: ast::Path, tt: ast::TokenTree) -> ast::Meta { + ast_from_text(&format!("#[{path}{tt}]")) +} + +pub fn meta_path(path: ast::Path) -> ast::Meta { + ast_from_text(&format!("#[{path}]")) +} + +pub fn token_tree( + delimiter: SyntaxKind, + tt: Vec<NodeOrToken<ast::TokenTree, SyntaxToken>>, +) -> ast::TokenTree { + let (l_delimiter, r_delimiter) = match delimiter { + T!['('] => ('(', ')'), + T!['['] => ('[', ']'), + T!['{'] => ('{', '}'), + _ => panic!("invalid delimiter `{delimiter:?}`"), + }; + let tt = 
tt.into_iter().join(""); + + ast_from_text(&format!("tt!{l_delimiter}{tt}{r_delimiter}")) +} + #[track_caller] fn ast_from_text<N: AstNode>(text: &str) -> N { let parse = SourceFile::parse(text); @@ -1022,6 +1104,17 @@ pub mod tokens { ) }); + pub fn semicolon() -> SyntaxToken { + SOURCE_FILE + .tree() + .syntax() + .clone_for_update() + .descendants_with_tokens() + .filter_map(|it| it.into_token()) + .find(|it| it.kind() == SEMICOLON) + .unwrap() + } + pub fn single_space() -> SyntaxToken { SOURCE_FILE .tree() diff --git a/src/tools/rust-analyzer/crates/syntax/src/lib.rs b/src/tools/rust-analyzer/crates/syntax/src/lib.rs index efbf8796644..bed240a6d73 100644 --- a/src/tools/rust-analyzer/crates/syntax/src/lib.rs +++ b/src/tools/rust-analyzer/crates/syntax/src/lib.rs @@ -171,6 +171,109 @@ impl SourceFile { } } +impl ast::TokenTree { + pub fn reparse_as_expr(self) -> Parse<ast::Expr> { + let tokens = self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); + + let mut parser_input = parser::Input::default(); + let mut was_joint = false; + for t in tokens { + let kind = t.kind(); + if kind.is_trivia() { + was_joint = false + } else { + if kind == SyntaxKind::IDENT { + let token_text = t.text(); + let contextual_kw = SyntaxKind::from_contextual_keyword(token_text) + .unwrap_or(SyntaxKind::IDENT); + parser_input.push_ident(contextual_kw); + } else { + if was_joint { + parser_input.was_joint(); + } + parser_input.push(kind); + // Tag the token as joint if it is float with a fractional part + // we use this jointness to inform the parser about what token split + // event to emit when we encounter a float literal in a field access + if kind == SyntaxKind::FLOAT_NUMBER && !t.text().ends_with('.') { + parser_input.was_joint(); + } + } + was_joint = true; + } + } + + let parser_output = parser::TopEntryPoint::Expr.parse(&parser_input); + + let mut tokens = + self.syntax().descendants_with_tokens().filter_map(NodeOrToken::into_token); + let mut text = String::new(); + let mut pos = TextSize::from(0); + let mut builder = SyntaxTreeBuilder::default(); + for event in parser_output.iter() { + match event { + parser::Step::Token { kind, n_input_tokens } => { + let mut token = tokens.next().unwrap(); + while token.kind().is_trivia() { + let text = token.text(); + pos += TextSize::from(text.len() as u32); + builder.token(token.kind(), text); + + token = tokens.next().unwrap(); + } + text.push_str(token.text()); + for _ in 1..n_input_tokens { + let token = tokens.next().unwrap(); + text.push_str(token.text()); + } + + pos += TextSize::from(text.len() as u32); + builder.token(kind, &text); + text.clear(); + } + parser::Step::FloatSplit { ends_in_dot: has_pseudo_dot } => { + let token = tokens.next().unwrap(); + let text = token.text(); + + match text.split_once('.') { + Some((left, right)) => { + assert!(!left.is_empty()); + builder.start_node(SyntaxKind::NAME_REF); + builder.token(SyntaxKind::INT_NUMBER, left); + builder.finish_node(); + + // here we move the exit up, the original exit has been deleted in process + builder.finish_node(); + + builder.token(SyntaxKind::DOT, "."); + + if has_pseudo_dot { + assert!(right.is_empty(), "{left}.{right}"); + } else { + builder.start_node(SyntaxKind::NAME_REF); + builder.token(SyntaxKind::INT_NUMBER, right); + builder.finish_node(); + + // the parser creates an unbalanced start node, we are required to close it here + builder.finish_node(); + } + } + None => unreachable!(), + } + pos += TextSize::from(text.len() as u32); + } + parser::Step::Enter { 
kind } => builder.start_node(kind), + parser::Step::Exit => builder.finish_node(), + parser::Step::Error { msg } => builder.error(msg.to_owned(), pos), + } + } + + let (green, errors) = builder.finish_raw(); + + Parse { green, errors: Arc::new(errors), _ty: PhantomData } + } +} + /// Matches a `SyntaxNode` against an `ast` type. /// /// # Example: diff --git a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs index 602baed3707..75e7a3fec00 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/fixture.rs @@ -47,13 +47,10 @@ //! ``` //! //! Metadata allows specifying all settings and variables -//! that are available in a real rust project: -//! - crate names via `crate:cratename` -//! - dependencies via `deps:dep1,dep2` -//! - configuration settings via `cfg:dbg=false,opt_level=2` -//! - environment variables via `env:PATH=/bin,RUST_LOG=debug` +//! that are available in a real rust project. See [`Fixture`] +//! for the syntax. //! -//! Example using all available metadata: +//! Example using some available metadata: //! ``` //! " //! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo @@ -68,17 +65,74 @@ use stdx::trim_indent; #[derive(Debug, Eq, PartialEq)] pub struct Fixture { + /// Specifies the path for this file. It must start with "/". pub path: String, - pub text: String, + /// Defines a new crate and make this file its root module. + /// + /// Version and repository URL of the crate can optionally be specified; if + /// either one is specified, the other must also be specified. + /// + /// Syntax: + /// - `crate:my_awesome_lib` + /// - `crate:my_awesome_lib@0.0.1,https://example.com/repo.git` pub krate: Option<String>, + /// Specifies dependencies of this crate. This must be used with `crate` meta. + /// + /// Syntax: `deps:hir-def,ide-assists` pub deps: Vec<String>, + /// Limits crates in the extern prelude. The set of crate names must be a + /// subset of `deps`. This must be used with `crate` meta. + /// + /// If this is not specified, all the dependencies will be in the extern prelude. + /// + /// Syntax: `extern-prelude:hir-def,ide-assists` pub extern_prelude: Option<Vec<String>>, - pub cfg_atoms: Vec<String>, - pub cfg_key_values: Vec<(String, String)>, + /// Specifies configuration options to be enabled. Options may have associated + /// values. + /// + /// Syntax: `cfg:test,dbg=false,opt_level=2` + pub cfgs: Vec<(String, Option<String>)>, + /// Specifies the edition of this crate. This must be used with `crate` meta. If + /// this is not specified, ([`base_db::input::Edition::CURRENT`]) will be used. + /// This must be used with `crate` meta. + /// + /// Syntax: `edition:2021` pub edition: Option<String>, + /// Specifies environment variables. + /// + /// Syntax: `env:PATH=/bin,RUST_LOG=debug` pub env: FxHashMap<String, String>, + /// Introduces a new [source root](base_db::input::SourceRoot). This file **and + /// the following files** will belong the new source root. This must be used + /// with `crate` meta. + /// + /// Use this if you want to test something that uses `SourceRoot::is_library()` + /// to check editability. + /// + /// Note that files before the first fixture with `new_source_root` meta will + /// belong to an implicitly defined local source root. 
+ /// + /// Syntax: + /// - `new_source_root:library` + /// - `new_source_root:local` pub introduce_new_source_root: Option<String>, + /// Explicitly declares this crate as a library outside current workspace. This + /// must be used with `crate` meta. + /// + /// This is implied if this file belongs to a library source root. + /// + /// Use this if you want to test something that checks if a crate is a workspace + /// member via [`CrateOrigin`](base_db::input::CrateOrigin). + /// + /// Syntax: `library` + pub library: bool, + /// Specifies LLVM data layout to be used. + /// + /// You probably don't want to manually specify this. See LLVM manual for the + /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout pub target_data_layout: Option<String>, + /// Actual file contents. All meta comments are stripped. + pub text: String, } pub struct MiniCore { @@ -178,23 +232,28 @@ impl FixtureWithProjectMeta { fn parse_meta_line(meta: &str) -> Fixture { assert!(meta.starts_with("//-")); let meta = meta["//-".len()..].trim(); - let components = meta.split_ascii_whitespace().collect::<Vec<_>>(); + let mut components = meta.split_ascii_whitespace(); - let path = components[0].to_string(); + let path = components.next().expect("fixture meta must start with a path").to_string(); assert!(path.starts_with('/'), "fixture path does not start with `/`: {path:?}"); let mut krate = None; let mut deps = Vec::new(); let mut extern_prelude = None; let mut edition = None; - let mut cfg_atoms = Vec::new(); - let mut cfg_key_values = Vec::new(); + let mut cfgs = Vec::new(); let mut env = FxHashMap::default(); let mut introduce_new_source_root = None; + let mut library = false; let mut target_data_layout = Some( "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_string(), ); - for component in components[1..].iter() { + for component in components { + if component == "library" { + library = true; + continue; + } + let (key, value) = component.split_once(':').unwrap_or_else(|| panic!("invalid meta line: {meta:?}")); match key { @@ -212,8 +271,8 @@ impl FixtureWithProjectMeta { "cfg" => { for entry in value.split(',') { match entry.split_once('=') { - Some((k, v)) => cfg_key_values.push((k.to_string(), v.to_string())), - None => cfg_atoms.push(entry.to_string()), + Some((k, v)) => cfgs.push((k.to_string(), Some(v.to_string()))), + None => cfgs.push((entry.to_string(), None)), } } } @@ -243,11 +302,11 @@ impl FixtureWithProjectMeta { krate, deps, extern_prelude, - cfg_atoms, - cfg_key_values, + cfgs, edition, env, introduce_new_source_root, + library, target_data_layout, } } diff --git a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs index 266bc2391f1..c765f42447a 100644 --- a/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs +++ b/src/tools/rust-analyzer/crates/test-utils/src/minicore.rs @@ -20,6 +20,7 @@ //! deref_mut: deref //! deref: sized //! derive: +//! discriminant: //! drop: //! eq: sized //! error: fmt @@ -36,6 +37,7 @@ //! iterator: option //! iterators: iterator, fn //! manually_drop: drop +//! non_null: //! non_zero: //! option: panic //! 
ord: eq, option @@ -129,6 +131,14 @@ pub mod marker { #[lang = "phantom_data"] pub struct PhantomData<T: ?Sized>; // endregion:phantom_data + + // region:discriminant + #[lang = "discriminant_kind"] + pub trait DiscriminantKind { + #[lang = "discriminant_type"] + type Discriminant; + } + // endregion:discriminant } // region:default @@ -354,6 +364,11 @@ pub mod mem { pub fn size_of<T>() -> usize; } // endregion:size_of + + // region:discriminant + use crate::marker::DiscriminantKind; + pub struct Discriminant<T>(<T as DiscriminantKind>::Discriminant); + // endregion:discriminant } pub mod ptr { @@ -377,6 +392,19 @@ pub mod ptr { type Metadata; } // endregion:pointee + // region:non_null + #[rustc_layout_scalar_valid_range_start(1)] + #[rustc_nonnull_optimization_guaranteed] + pub struct NonNull<T: ?Sized> { + pointer: *const T, + } + // region:coerce_unsized + impl<T: ?Sized, U: ?Sized> crate::ops::CoerceUnsized<NonNull<U>> for NonNull<T> where + T: crate::marker::Unsize<U> + { + } + // endregion:coerce_unsized + // endregion:non_null } pub mod ops { @@ -1287,6 +1315,11 @@ mod macros { pub macro derive($item:item) { /* compiler built-in */ } + + #[rustc_builtin_macro] + pub macro derive_const($item:item) { + /* compiler built-in */ + } } // endregion:derive @@ -1354,24 +1387,24 @@ pub mod error { pub mod prelude { pub mod v1 { pub use crate::{ - clone::Clone, // :clone - cmp::{Eq, PartialEq}, // :eq - cmp::{Ord, PartialOrd}, // :ord - convert::AsRef, // :as_ref - convert::{From, Into}, // :from - default::Default, // :default - iter::{IntoIterator, Iterator}, // :iterator - macros::builtin::derive, // :derive - marker::Copy, // :copy - marker::Send, // :send - marker::Sized, // :sized - marker::Sync, // :sync - mem::drop, // :drop - ops::Drop, // :drop - ops::{Fn, FnMut, FnOnce}, // :fn - option::Option::{self, None, Some}, // :option - panic, // :panic - result::Result::{self, Err, Ok}, // :result + clone::Clone, // :clone + cmp::{Eq, PartialEq}, // :eq + cmp::{Ord, PartialOrd}, // :ord + convert::AsRef, // :as_ref + convert::{From, Into}, // :from + default::Default, // :default + iter::{IntoIterator, Iterator}, // :iterator + macros::builtin::{derive, derive_const}, // :derive + marker::Copy, // :copy + marker::Send, // :send + marker::Sized, // :sized + marker::Sync, // :sync + mem::drop, // :drop + ops::Drop, // :drop + ops::{Fn, FnMut, FnOnce}, // :fn + option::Option::{self, None, Some}, // :option + panic, // :panic + result::Result::{self, Err, Ok}, // :result }; } diff --git a/src/tools/rust-analyzer/crates/tt/src/buffer.rs b/src/tools/rust-analyzer/crates/tt/src/buffer.rs index 0615a3763df..cade2e9f67a 100644 --- a/src/tools/rust-analyzer/crates/tt/src/buffer.rs +++ b/src/tools/rust-analyzer/crates/tt/src/buffer.rs @@ -134,7 +134,7 @@ pub enum TokenTreeRef<'a, Span> { Leaf(&'a Leaf<Span>, &'a TokenTree<Span>), } -impl<'a, Span: Clone> TokenTreeRef<'a, Span> { +impl<Span: Clone> TokenTreeRef<'_, Span> { pub fn cloned(&self) -> TokenTree<Span> { match self { TokenTreeRef::Subtree(subtree, tt) => match tt { @@ -153,13 +153,13 @@ pub struct Cursor<'a, Span> { ptr: EntryPtr, } -impl<'a, Span> PartialEq for Cursor<'a, Span> { +impl<Span> PartialEq for Cursor<'_, Span> { fn eq(&self, other: &Cursor<'_, Span>) -> bool { self.ptr == other.ptr && std::ptr::eq(self.buffer, other.buffer) } } -impl<'a, Span> Eq for Cursor<'a, Span> {} +impl<Span> Eq for Cursor<'_, Span> {} impl<'a, Span> Cursor<'a, Span> { /// Check whether it is eof diff --git a/src/tools/rust-analyzer/crates/tt/src/lib.rs 
b/src/tools/rust-analyzer/crates/tt/src/lib.rs index c2ebf03746a..1b8d4ba42a5 100644 --- a/src/tools/rust-analyzer/crates/tt/src/lib.rs +++ b/src/tools/rust-analyzer/crates/tt/src/lib.rs @@ -68,6 +68,21 @@ pub mod token_id { Self::Subtree(Subtree { delimiter: Delimiter::unspecified(), token_trees: vec![] }) } } + + impl Subtree { + pub fn visit_ids(&mut self, f: &impl Fn(TokenId) -> TokenId) { + self.delimiter.open = f(self.delimiter.open); + self.delimiter.close = f(self.delimiter.close); + self.token_trees.iter_mut().for_each(|tt| match tt { + crate::TokenTree::Leaf(leaf) => match leaf { + crate::Leaf::Literal(it) => it.span = f(it.span), + crate::Leaf::Punct(it) => it.span = f(it.span), + crate::Leaf::Ident(it) => it.span = f(it.span), + }, + crate::TokenTree::Subtree(s) => s.visit_ids(f), + }) + } + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] diff --git a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml index 5d61a227284..95c5142517a 100644 --- a/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml +++ b/src/tools/rust-analyzer/crates/vfs-notify/Cargo.toml @@ -15,7 +15,8 @@ doctest = false tracing = "0.1.35" walkdir = "2.3.2" crossbeam-channel = "0.5.5" -notify = "5.0" +# We demand 5.1.0 as any higher version pulls in a new windows-sys dupe +notify = "=5.1.0" stdx.workspace = true vfs.workspace = true diff --git a/src/tools/rust-analyzer/crates/vfs/Cargo.toml b/src/tools/rust-analyzer/crates/vfs/Cargo.toml index 3ae3dc83ca9..c35785cf98c 100644 --- a/src/tools/rust-analyzer/crates/vfs/Cargo.toml +++ b/src/tools/rust-analyzer/crates/vfs/Cargo.toml @@ -14,7 +14,7 @@ doctest = false [dependencies] rustc-hash = "1.1.0" fst = "0.4.7" -indexmap = "1.9.1" +indexmap = "2.0.0" nohash-hasher.workspace = true paths.workspace = true diff --git a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs index d327f2edf14..52ada32bdfd 100644 --- a/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs +++ b/src/tools/rust-analyzer/crates/vfs/src/vfs_path.rs @@ -292,8 +292,8 @@ impl From<AbsPathBuf> for VfsPath { impl fmt::Display for VfsPath { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self.0 { - VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f), - VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Display::fmt(it, f), + VfsPathRepr::PathBuf(it) => it.fmt(f), + VfsPathRepr::VirtualPath(VirtualPath(it)) => it.fmt(f), } } } @@ -307,8 +307,8 @@ impl fmt::Debug for VfsPath { impl fmt::Debug for VfsPathRepr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self { - VfsPathRepr::PathBuf(it) => fmt::Debug::fmt(&it.display(), f), - VfsPathRepr::VirtualPath(VirtualPath(it)) => fmt::Debug::fmt(&it, f), + VfsPathRepr::PathBuf(it) => it.fmt(f), + VfsPathRepr::VirtualPath(VirtualPath(it)) => it.fmt(f), } } } diff --git a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md index bc58aa7220d..024acb87709 100644 --- a/src/tools/rust-analyzer/docs/dev/lsp-extensions.md +++ b/src/tools/rust-analyzer/docs/dev/lsp-extensions.md @@ -1,5 +1,5 @@ <!--- -lsp_ext.rs hash: 2d60bbffe70ae198 +lsp_ext.rs hash: 149a5be3c5e469d1 If you need to change the above hash to make the test pass, please check if you need to adjust this doc as well and ping this issue: @@ -886,3 +886,48 @@ export interface FetchDependencyListResult { } ``` Returns all crates from this workspace, so it can be used create a viewTree to 
help navigate the dependency tree. + +## View Recursive Memory Layout + +**Method:** `rust-analyzer/viewRecursiveMemoryLayout` + +**Request:** `TextDocumentPositionParams` + +**Response:** + +```typescript +export interface RecursiveMemoryLayoutNode = { + /// Name of the item, or [ROOT], `.n` for tuples + item_name: string; + /// Full name of the type (type aliases are ignored) + typename: string; + /// Size of the type in bytes + size: number; + /// Alignment of the type in bytes + alignment: number; + /// Offset of the type relative to its parent (or 0 if its the root) + offset: number; + /// Index of the node's parent (or -1 if its the root) + parent_idx: number; + /// Index of the node's children (or -1 if it does not have children) + children_start: number; + /// Number of child nodes (unspecified it does not have children) + children_len: number; +}; + +export interface RecursiveMemoryLayout = { + nodes: RecursiveMemoryLayoutNode[]; +}; +``` + +Returns a vector of nodes representing items in the datatype as a tree, `RecursiveMemoryLayout::nodes[0]` is the root node. + +If `RecursiveMemoryLayout::nodes::length == 0` we could not find a suitable type. + +Generic Types do not give anything because they are incomplete. Fully specified generic types do not give anything if they are selected directly but do work when a child of other types [this is consistent with other behavior](https://github.com/rust-lang/rust-analyzer/issues/15010). + +### Unresolved questions: + +- How should enums/unions be represented? currently they do not produce any children because they have multiple distinct sets of children. +- Should niches be represented? currently they are not reported. +- A visual representation of the memory layout is not specified, see the provided implementation for an example, however it may not translate well to terminal based editors or other such things. diff --git a/src/tools/rust-analyzer/docs/dev/style.md b/src/tools/rust-analyzer/docs/dev/style.md index d2a03fba40d..786127639ce 100644 --- a/src/tools/rust-analyzer/docs/dev/style.md +++ b/src/tools/rust-analyzer/docs/dev/style.md @@ -869,6 +869,19 @@ type -> ty **Rationale:** consistency. +## Error Handling Trivia + +Use `anyhow::Result` rather than just `Result`. + +**Rationale:** makes it immediately clear what result that is. + +Use `anyhow::format_err!` rather than `anyhow::anyhow`. + +**Rationale:** consistent, boring, avoids stuttering. + +There's no specific guidance on the formatting of error messages, see [anyhow/#209](https://github.com/dtolnay/anyhow/issues/209). +Do not end error and context messages with `.` though. + ## Early Returns Do use early returns diff --git a/src/tools/rust-analyzer/docs/user/manual.adoc b/src/tools/rust-analyzer/docs/user/manual.adoc index b5c095fd9d8..31035c4b729 100644 --- a/src/tools/rust-analyzer/docs/user/manual.adoc +++ b/src/tools/rust-analyzer/docs/user/manual.adoc @@ -64,22 +64,8 @@ You can install the latest release of the plugin from https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace]. Note that the plugin may cause conflicts with the -https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[official Rust plugin]. -It is recommended to disable the Rust plugin when using the rust-analyzer extension. 
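
A minimal sketch of the error-handling guideline above (`anyhow::Result` spelled out, `format_err!` instead of `anyhow!`, no trailing `.` in messages); the `read_config` helper and its argument are hypothetical, not part of the patch's codebase:

```rust
use anyhow::{format_err, Context};

// Hypothetical helper; the name and path argument are illustrative only.
fn read_config(path: &std::path::Path) -> anyhow::Result<String> {
    if !path.exists() {
        // `format_err!` rather than `anyhow!`; the message does not end with `.`
        return Err(format_err!("config file {} does not exist", path.display()));
    }
    // Spell out `anyhow::Result` in signatures; context message also has no trailing `.`
    std::fs::read_to_string(path)
        .with_context(|| format!("failed to read {}", path.display()))
}
```
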
- -By default, the plugin will prompt you to download the matching version of the server as well: - -image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[] - -[NOTE] -==== -To disable this notification put the following to `settings.json` - -[source,json] ----- -{ "rust-analyzer.updates.askBeforeDownload": false } ----- -==== +https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[previous official Rust plugin]. +The latter is no longer maintained and should be uninstalled. The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under: @@ -141,6 +127,9 @@ If you're not using Code, you can compile and install only the LSP server: $ cargo xtask install --server ---- +Make sure that `.cargo/bin` is in `$PATH` and precedes paths where `rust-analyzer` may also be installed. +Specifically, `rustup` includes a proxy called `rust-analyzer`, which can cause problems if you're planning to use a source build or even a downloaded binary. + === rust-analyzer Language Server Binary Other editors generally require the `rust-analyzer` binary to be in `$PATH`. @@ -280,12 +269,12 @@ Also see the https://emacs-lsp.github.io/lsp-mode/page/lsp-rust-analyzer/[rust-a Note the excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm] on how to set-up Emacs for Rust development with LSP mode and several other packages. -=== Vim/NeoVim +=== Vim/Neovim Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>. Not needed if the extension can install/update it on its own, coc-rust-analyzer is one example. -There are several LSP client implementations for vim or neovim: +There are several LSP client implementations for Vim or Neovim: ==== coc-rust-analyzer @@ -308,7 +297,7 @@ Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected https://github.com/autozimu/LanguageClient-neovim[here] * The GitHub project wiki has extra tips on configuration -2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists): +2. Configure by adding this to your Vim/Neovim config file (replacing the existing Rust-specific line if it exists): + [source,vim] ---- @@ -335,7 +324,7 @@ let g:ale_linters = {'rust': ['analyzer']} ==== nvim-lsp -NeoVim 0.5 has built-in language server support. +Neovim 0.5 has built-in language server support. For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig]. Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`. @@ -376,7 +365,7 @@ EOF See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started. -Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for neovim. +Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for Neovim. ==== vim-lsp @@ -933,17 +922,17 @@ For example: More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here]. ==== Setting runnable environment variables -You can use "rust-analyzer.runnableEnv" setting to define runnable environment-specific substitution variables. 
+You can use "rust-analyzer.runnables.extraEnv" setting to define runnable environment-specific substitution variables. The simplest way for all runnables in a bunch: ```jsonc -"rust-analyzer.runnableEnv": { +"rust-analyzer.runnables.extraEnv": { "RUN_SLOW_TESTS": "1" } ``` Or it is possible to specify vars more granularly: ```jsonc -"rust-analyzer.runnableEnv": [ +"rust-analyzer.runnables.extraEnv": [ { // "mask": null, // null mask means that this rule will be applied for all runnables env: { diff --git a/src/tools/rust-analyzer/editors/code/.eslintrc.js b/src/tools/rust-analyzer/editors/code/.eslintrc.js index 297e9fa1e5c..9705c5f5ec6 100644 --- a/src/tools/rust-analyzer/editors/code/.eslintrc.js +++ b/src/tools/rust-analyzer/editors/code/.eslintrc.js @@ -6,7 +6,7 @@ module.exports = { extends: ["prettier"], parser: "@typescript-eslint/parser", parserOptions: { - project: "tsconfig.eslint.json", + project: true, tsconfigRootDir: __dirname, sourceType: "module", }, @@ -33,5 +33,14 @@ module.exports = { "@typescript-eslint/semi": ["error", "always"], "@typescript-eslint/no-unnecessary-type-assertion": "error", "@typescript-eslint/no-floating-promises": "error", + + "@typescript-eslint/consistent-type-imports": [ + "error", + { + prefer: "type-imports", + fixStyle: "inline-type-imports", + }, + ], + "@typescript-eslint/no-import-type-side-effects": "error", }, }; diff --git a/src/tools/rust-analyzer/editors/code/.vscodeignore b/src/tools/rust-analyzer/editors/code/.vscodeignore index 6e118f0b3ae..09dc27056b3 100644 --- a/src/tools/rust-analyzer/editors/code/.vscodeignore +++ b/src/tools/rust-analyzer/editors/code/.vscodeignore @@ -10,6 +10,5 @@ !package-lock.json !package.json !ra_syntax_tree.tmGrammar.json -!rustdoc.markdown.injection.tmGrammar.json !server !README.md diff --git a/src/tools/rust-analyzer/editors/code/package-lock.json b/src/tools/rust-analyzer/editors/code/package-lock.json index ef9be380ed1..1c94f13d745 100644 --- a/src/tools/rust-analyzer/editors/code/package-lock.json +++ b/src/tools/rust-analyzer/editors/code/package-lock.json @@ -1,7 +1,7 @@ { "name": "rust-analyzer", "version": "0.5.0-dev", - "lockfileVersion": 2, + "lockfileVersion": 3, "requires": true, "packages": { "": { @@ -9,35 +9,205 @@ "version": "0.5.0-dev", "license": "MIT OR Apache-2.0", "dependencies": { + "@hpcc-js/wasm": "^2.13.0", "anser": "^2.1.1", - "d3": "^7.6.1", + "d3": "^7.8.5", "d3-graphviz": "^5.0.2", - "vscode-languageclient": "^8.0.2" + "vscode-languageclient": "^8.1.0" }, "devDependencies": { + "@tsconfig/strictest": "^2.0.1", "@types/node": "~16.11.7", - "@types/vscode": "~1.66.0", - "@typescript-eslint/eslint-plugin": "^5.30.5", - "@typescript-eslint/parser": "^5.30.5", - "@vscode/test-electron": "^2.1.5", - "cross-env": "^7.0.3", - "esbuild": "^0.14.48", - "eslint": "^8.19.0", - "eslint-config-prettier": "^8.5.0", - "ovsx": "^0.5.2", - "prettier": "^2.7.1", - "tslib": "^2.4.0", - "typescript": "^4.7.4", - "vsce": "^2.9.2" + "@types/vscode": "~1.78.1", + "@typescript-eslint/eslint-plugin": "^6.0.0", + "@typescript-eslint/parser": "^6.0.0", + "@vscode/test-electron": "^2.3.3", + "@vscode/vsce": "^2.19.0", + "esbuild": "^0.18.12", + "eslint": "^8.44.0", + "eslint-config-prettier": "^8.8.0", + "ovsx": "^0.8.2", + "prettier": "^3.0.0", + "tslib": "^2.6.0", + "typescript": "^5.1.6" }, "engines": { - "vscode": "^1.66.0" + "vscode": "^1.78.0" + } + }, + "node_modules/@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": 
"https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.12.tgz", + "integrity": "sha512-LIxaNIQfkFZbTLb4+cX7dozHlAbAshhFE5PKdro0l+FnCpx1GDJaQ2WMcqm+ToXKMt8p8Uojk/MFRuGyz3V5Sw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.12.tgz", + "integrity": "sha512-BMAlczRqC/LUt2P97E4apTBbkvS9JTJnp2DKFbCwpZ8vBvXVbNdqmvzW/OsdtI/+mGr+apkkpqGM8WecLkPgrA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.12.tgz", + "integrity": "sha512-zU5MyluNsykf5cOJ0LZZZjgAHbhPJ1cWfdH1ZXVMXxVMhEV0VZiZXQdwBBVvmvbF28EizeK7obG9fs+fpmS0eQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.12.tgz", + "integrity": "sha512-zUZMep7YONnp6954QOOwEBwFX9svlKd3ov6PkxKd53LGTHsp/gy7vHaPGhhjBmEpqXEXShi6dddjIkmd+NgMsA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.12.tgz", + "integrity": "sha512-ohqLPc7i67yunArPj1+/FeeJ7AgwAjHqKZ512ADk3WsE3FHU9l+m5aa7NdxXr0HmN1bjDlUslBjWNbFlD9y12Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.12.tgz", + "integrity": "sha512-GIIHtQXqgeOOqdG16a/A9N28GpkvjJnjYMhOnXVbn3EDJcoItdR58v/pGN31CHjyXDc8uCcRnFWmqaJt24AYJg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.12.tgz", + "integrity": "sha512-zK0b9a1/0wZY+6FdOS3BpZcPc1kcx2G5yxxfEJtEUzVxI6n/FrC2Phsxj/YblPuBchhBZ/1wwn7AyEBUyNSa6g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.12.tgz", + "integrity": "sha512-y75OijvrBE/1XRrXq1jtrJfG26eHeMoqLJ2dwQNwviwTuTtHGCojsDO6BJNF8gU+3jTn1KzJEMETytwsFSvc+Q==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.18.12", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.12.tgz", + "integrity": "sha512-JKgG8Q/LL/9sw/iHHxQyVMoQYu3rU3+a5Z87DxC+wAu3engz+EmctIrV+FGOgI6gWG1z1+5nDDbXiRMGQZXqiw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.12.tgz", + "integrity": "sha512-yoRIAqc0B4lDIAAEFEIu9ttTRFV84iuAl0KNCN6MhKLxNPfzwCBvEMgwco2f71GxmpBcTtn7KdErueZaM2rEvw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.14.54.tgz", - "integrity": "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==", + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.12.tgz", + "integrity": "sha512-qYgt3dHPVvf/MgbIBpJ4Sup/yb9DAopZ3a2JgMpNKIHUpOdnJ2eHBo/aQdnd8dJ21X/+sS58wxHtA9lEazYtXQ==", "cpu": [ "loong64" ], @@ -50,15 +220,215 @@ "node": ">=12" } }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.12.tgz", + "integrity": "sha512-wHphlMLK4ufNOONqukELfVIbnGQJrHJ/mxZMMrP2jYrPgCRZhOtf0kC4yAXBwnfmULimV1qt5UJJOw4Kh13Yfg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.12.tgz", + "integrity": "sha512-TeN//1Ft20ZZW41+zDSdOI/Os1bEq5dbvBvYkberB7PHABbRcsteeoNVZFlI0YLpGdlBqohEpjrn06kv8heCJg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.12.tgz", + "integrity": "sha512-AgUebVS4DoAblBgiB2ACQ/8l4eGE5aWBb8ZXtkXHiET9mbj7GuWt3OnsIW/zX+XHJt2RYJZctbQ2S/mDjbp0UA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.12.tgz", + "integrity": "sha512-dJ3Rb3Ei2u/ysSXd6pzleGtfDdc2MuzKt8qc6ls8vreP1G3B7HInX3i7gXS4BGeVd24pp0yqyS7bJ5NHaI9ing==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.12.tgz", + "integrity": "sha512-OrNJMGQbPaVyHHcDF8ybNSwu7TDOfX8NGpXCbetwOSP6txOJiWlgQnRymfC9ocR1S0Y5PW0Wb1mV6pUddqmvmQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.12.tgz", + "integrity": "sha512-55FzVCAiwE9FK8wWeCRuvjazNRJ1QqLCYGZVB6E8RuQuTeStSwotpSW4xoRGwp3a1wUsaVCdYcj5LGCASVJmMg==", + "cpu": [ + 
"x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.12.tgz", + "integrity": "sha512-qnluf8rfb6Y5Lw2tirfK2quZOBbVqmwxut7GPCIJsM8lc4AEUj9L8y0YPdLaPK0TECt4IdyBdBD/KRFKorlK3g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.12.tgz", + "integrity": "sha512-+RkKpVQR7bICjTOPUpkTBTaJ4TFqQBX5Ywyd/HSdDkQGn65VPkTsR/pL4AMvuMWy+wnXgIl4EY6q4mVpJal8Kg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.12.tgz", + "integrity": "sha512-GNHuciv0mFM7ouzsU0+AwY+7eV4Mgo5WnbhfDCQGtpvOtD1vbOiRjPYG6dhmMoFyBjj+pNqQu2X+7DKn0KQ/Gw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.12.tgz", + "integrity": "sha512-kR8cezhYipbbypGkaqCTWIeu4zID17gamC8YTPXYtcN3E5BhhtTnwKBn9I0PJur/T6UVwIEGYzkffNL0lFvxEw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.12.tgz", + "integrity": "sha512-O0UYQVkvfM/jO8a4OwoV0mAKSJw+mjWTAd1MJd/1FCX6uiMdLmMRPK/w6e9OQ0ob2WGxzIm9va/KG0Ja4zIOgg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.1.tgz", + "integrity": "sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, "node_modules/@eslint/eslintrc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz", - "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.0.tgz", + "integrity": "sha512-Lj7DECXqIVCqnqjjHMPna4vn6GJcMgul/wuS0je9OZ9gsL0zzDpKPVtcG1HaDVc+9y+qgXneTeUMbCqXJNpH1A==", "dev": true, "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", - "espree": "^9.4.0", + "espree": "^9.6.0", "globals": "^13.19.0", "ignore": 
"^5.2.0", "import-fresh": "^3.2.1", @@ -73,21 +443,30 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/@eslint/js": { + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.44.0.tgz", + "integrity": "sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/@hpcc-js/wasm": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-2.5.0.tgz", - "integrity": "sha512-G26BamgaHW46f6P8bmkygapgNcy+tTDMwIvCzmMzdp39sxUS1u4gaT/vR2SSDc4x3SfL5RE4B2B8ef/wd429Hg==", + "version": "2.13.0", + "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-2.13.0.tgz", + "integrity": "sha512-MvnUPnyMlN3/2IONCXwl/SBVWIfVOFJqvw+kFfI1QcwKjNmkwTAtG+9/m3nvofTymkASUUxNULbBmRDIr2uzIA==", "dependencies": { - "yargs": "17.6.2" + "yargs": "17.7.2" }, "bin": { "dot-wasm": "bin/dot-wasm.js" } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.8", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", - "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", + "integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==", "dev": true, "dependencies": { "@humanwhocodes/object-schema": "^1.2.1", @@ -161,10 +540,16 @@ "node": ">= 6" } }, + "node_modules/@tsconfig/strictest": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@tsconfig/strictest/-/strictest-2.0.1.tgz", + "integrity": "sha512-7JHHCbyCsGUxLd0pDbp24yz3zjxw2t673W5oAP6HCEdr/UUhaRhYd3SSnUsGCk+VnPVJVA4mXROzbhI+nyIk+w==", + "dev": true + }, "node_modules/@types/json-schema": { - "version": "7.0.11", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", - "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "version": "7.0.12", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.12.tgz", + "integrity": "sha512-Hr5Jfhc9eYOQNPYO5WLDq/n4jqijdHNlDXjuAQkkt+mWdQR+XJToOHrsD4cPaMXpn6KO7y2+wM8AZEs8VpBLVA==", "dev": true }, "node_modules/@types/node": { @@ -174,43 +559,47 @@ "dev": true }, "node_modules/@types/semver": { - "version": "7.3.13", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz", - "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", + "version": "7.5.0", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.0.tgz", + "integrity": "sha512-G8hZ6XJiHnuhQKR7ZmysCeJWE08o8T0AXtk5darsCaTVsYZhhgUrq53jizaR2FvsoeCwJhlmwTjkXBY5Pn/ZHw==", "dev": true }, "node_modules/@types/vscode": { - "version": "1.66.0", - "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.66.0.tgz", - "integrity": "sha512-ZfJck4M7nrGasfs4A4YbUoxis3Vu24cETw3DERsNYtDZmYSYtk6ljKexKFKhImO/ZmY6ZMsmegu2FPkXoUFImA==", + "version": "1.78.1", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.78.1.tgz", + "integrity": "sha512-wEA+54axejHu7DhcUfnFBan1IqFD1gBDxAFz8LoX06NbNDMRJv/T6OGthOs52yZccasKfN588EyffHWABkR0fg==", "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "5.48.2", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.48.2.tgz", - "integrity": "sha512-sR0Gja9Ky1teIq4qJOl0nC+Tk64/uYdX+mi+5iB//MH8gwyx8e3SOyhEzeLZEFEEfCaLf8KJq+Bd/6je1t+CAg==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.0.0.tgz", + "integrity": "sha512-xuv6ghKGoiq856Bww/yVYnXGsKa588kY3M0XK7uUW/3fJNNULKRfZfSBkMTSpqGG/8ZCXCadfh8G/z/B4aqS/A==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "5.48.2", - "@typescript-eslint/type-utils": "5.48.2", - "@typescript-eslint/utils": "5.48.2", + "@eslint-community/regexpp": "^4.5.0", + "@typescript-eslint/scope-manager": "6.0.0", + "@typescript-eslint/type-utils": "6.0.0", + "@typescript-eslint/utils": "6.0.0", + "@typescript-eslint/visitor-keys": "6.0.0", "debug": "^4.3.4", - "ignore": "^5.2.0", + "grapheme-splitter": "^1.0.4", + "graphemer": "^1.4.0", + "ignore": "^5.2.4", + "natural-compare": "^1.4.0", "natural-compare-lite": "^1.4.0", - "regexpp": "^3.2.0", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "semver": "^7.5.0", + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^5.0.0", - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", + "eslint": "^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "typescript": { @@ -219,25 +608,26 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.48.2.tgz", - "integrity": "sha512-38zMsKsG2sIuM5Oi/olurGwYJXzmtdsHhn5mI/pQogP+BjYVkK5iRazCQ8RGS0V+YLk282uWElN70zAAUmaYHw==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.0.0.tgz", + "integrity": "sha512-TNaufYSPrr1U8n+3xN+Yp9g31vQDJqhXzzPSHfQDLcaO4tU+mCfODPxCwf4H530zo7aUBE3QIdxCXamEnG04Tg==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "5.48.2", - "@typescript-eslint/types": "5.48.2", - "@typescript-eslint/typescript-estree": "5.48.2", + "@typescript-eslint/scope-manager": "6.0.0", + "@typescript-eslint/types": "6.0.0", + "@typescript-eslint/typescript-estree": "6.0.0", + "@typescript-eslint/visitor-keys": "6.0.0", "debug": "^4.3.4" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": "^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "typescript": { @@ -246,16 +636,16 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.48.2.tgz", - "integrity": "sha512-zEUFfonQid5KRDKoI3O+uP1GnrFd4tIHlvs+sTJXiWuypUWMuDaottkJuR612wQfOkjYbsaskSIURV9xo4f+Fw==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.0.0.tgz", + "integrity": "sha512-o4q0KHlgCZTqjuaZ25nw5W57NeykZT9LiMEG4do/ovwvOcPnDO1BI5BQdCsUkjxFyrCL0cSzLjvIMfR9uo7cWg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.48.2", - "@typescript-eslint/visitor-keys": "5.48.2" + "@typescript-eslint/types": "6.0.0", + "@typescript-eslint/visitor-keys": "6.0.0" }, "engines": 
{ - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", @@ -263,25 +653,25 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.48.2.tgz", - "integrity": "sha512-QVWx7J5sPMRiOMJp5dYshPxABRoZV1xbRirqSk8yuIIsu0nvMTZesKErEA3Oix1k+uvsk8Cs8TGJ6kQ0ndAcew==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.0.0.tgz", + "integrity": "sha512-ah6LJvLgkoZ/pyJ9GAdFkzeuMZ8goV6BH7eC9FPmojrnX9yNCIsfjB+zYcnex28YO3RFvBkV6rMV6WpIqkPvoQ==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "5.48.2", - "@typescript-eslint/utils": "5.48.2", + "@typescript-eslint/typescript-estree": "6.0.0", + "@typescript-eslint/utils": "6.0.0", "debug": "^4.3.4", - "tsutils": "^3.21.0" + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "*" + "eslint": "^7.0.0 || ^8.0.0" }, "peerDependenciesMeta": { "typescript": { @@ -290,12 +680,12 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.48.2.tgz", - "integrity": "sha512-hE7dA77xxu7ByBc6KCzikgfRyBCTst6dZQpwaTy25iMYOnbNljDT4hjhrGEJJ0QoMjrfqrx+j1l1B9/LtKeuqA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.0.0.tgz", + "integrity": "sha512-Zk9KDggyZM6tj0AJWYYKgF0yQyrcnievdhG0g5FqyU3Y2DRxJn4yWY21sJC0QKBckbsdKKjYDV2yVrrEvuTgxg==", "dev": true, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", @@ -303,21 +693,21 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.48.2.tgz", - "integrity": "sha512-bibvD3z6ilnoVxUBFEgkO0k0aFvUc4Cttt0dAreEr+nrAHhWzkO83PEVVuieK3DqcgL6VAK5dkzK8XUVja5Zcg==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.0.0.tgz", + "integrity": "sha512-2zq4O7P6YCQADfmJ5OTDQTP3ktajnXIRrYAtHM9ofto/CJZV3QfJ89GEaM2BNGeSr1KgmBuLhEkz5FBkS2RQhQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.48.2", - "@typescript-eslint/visitor-keys": "5.48.2", + "@typescript-eslint/types": "6.0.0", + "@typescript-eslint/visitor-keys": "6.0.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" + "semver": "^7.5.0", + "ts-api-utils": "^1.0.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", @@ -330,42 +720,42 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.48.2.tgz", - "integrity": "sha512-2h18c0d7jgkw6tdKTlNaM7wyopbLRBiit8oAxoP89YnuBOzCZ8g8aBCaCqq7h208qUTroL7Whgzam7UY3HVLow==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.0.0.tgz", + "integrity": "sha512-SOr6l4NB6HE4H/ktz0JVVWNXqCJTOo/mHnvIte1ZhBQ0Cvd04x5uKZa3zT6tiodL06zf5xxdK8COiDvPnQ27JQ==", "dev": true, "dependencies": { - 
"@types/json-schema": "^7.0.9", + "@eslint-community/eslint-utils": "^4.3.0", + "@types/json-schema": "^7.0.11", "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.48.2", - "@typescript-eslint/types": "5.48.2", - "@typescript-eslint/typescript-estree": "5.48.2", + "@typescript-eslint/scope-manager": "6.0.0", + "@typescript-eslint/types": "6.0.0", + "@typescript-eslint/typescript-estree": "6.0.0", "eslint-scope": "^5.1.1", - "eslint-utils": "^3.0.0", - "semver": "^7.3.7" + "semver": "^7.5.0" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" + "eslint": "^7.0.0 || ^8.0.0" } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.48.2.tgz", - "integrity": "sha512-z9njZLSkwmjFWUelGEwEbdf4NwKvfHxvGC0OcGN1Hp/XNDIcJ7D5DpPNPv6x6/mFvc1tQHsaWmpD/a4gOvvCJQ==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.0.0.tgz", + "integrity": "sha512-cvJ63l8c0yXdeT5POHpL0Q1cZoRcmRKFCtSjNGJxPkcP571EfZMcNbzWAc7oK3D1dRzm/V5EwtkANTZxqvuuUA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.48.2", - "eslint-visitor-keys": "^3.3.0" + "@typescript-eslint/types": "6.0.0", + "eslint-visitor-keys": "^3.4.1" }, "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + "node": "^16.0.0 || >=18.0.0" }, "funding": { "type": "opencollective", @@ -373,24 +763,163 @@ } }, "node_modules/@vscode/test-electron": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.2.2.tgz", - "integrity": "sha512-s5d2VtMySvff0UgqkJ0BMCr1es+qREE194EAodGIefq518W53ifvv69e80l9e2MrYJEqUUKwukE/w3H9o15YEw==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.3.3.tgz", + "integrity": "sha512-hgXCkDP0ibboF1K6seqQYyHAzCURgTwHS/6QU7slhwznDLwsRwg9bhfw1CZdyUEw8vvCmlrKWnd7BlQnI0BC4w==", "dev": true, "dependencies": { "http-proxy-agent": "^4.0.1", "https-proxy-agent": "^5.0.0", - "rimraf": "^3.0.2", - "unzipper": "^0.10.11" + "jszip": "^3.10.1", + "semver": "^7.3.8" }, "engines": { "node": ">=16" } }, + "node_modules/@vscode/vsce": { + "version": "2.19.0", + "resolved": "https://registry.npmjs.org/@vscode/vsce/-/vsce-2.19.0.tgz", + "integrity": "sha512-dAlILxC5ggOutcvJY24jxz913wimGiUrHaPkk16Gm9/PGFbz1YezWtrXsTKUtJws4fIlpX2UIlVlVESWq8lkfQ==", + "dev": true, + "dependencies": { + "azure-devops-node-api": "^11.0.1", + "chalk": "^2.4.2", + "cheerio": "^1.0.0-rc.9", + "commander": "^6.1.0", + "glob": "^7.0.6", + "hosted-git-info": "^4.0.2", + "jsonc-parser": "^3.2.0", + "leven": "^3.1.0", + "markdown-it": "^12.3.2", + "mime": "^1.3.4", + "minimatch": "^3.0.3", + "parse-semver": "^1.1.1", + "read": "^1.0.7", + "semver": "^5.1.0", + "tmp": "^0.2.1", + "typed-rest-client": "^1.8.4", + "url-join": "^4.0.1", + "xml2js": "^0.5.0", + "yauzl": "^2.3.1", + "yazl": "^2.2.2" + }, + "bin": { + "vsce": "vsce" + }, + "engines": { + "node": ">= 14" + }, + "optionalDependencies": { + "keytar": "^7.7.0" + } + }, + "node_modules/@vscode/vsce/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": 
"sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@vscode/vsce/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@vscode/vsce/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@vscode/vsce/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@vscode/vsce/node_modules/commander": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "dev": true, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@vscode/vsce/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@vscode/vsce/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@vscode/vsce/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/@vscode/vsce/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@vscode/vsce/node_modules/xml2js": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz", + "integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==", + "dev": true, + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, "node_modules/acorn": { - "version": "8.8.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz", - "integrity": 
"sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==", + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", + "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -511,35 +1040,15 @@ "type": "consulting", "url": "https://feross.org/support" } - ] - }, - "node_modules/big-integer": { - "version": "1.6.51", - "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", - "integrity": "sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==", - "dev": true, - "engines": { - "node": ">=0.6" - } - }, - "node_modules/binary": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", - "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", - "dev": true, - "dependencies": { - "buffers": "~0.1.1", - "chainsaw": "~0.1.0" - }, - "engines": { - "node": "*" - } + ], + "optional": true }, "node_modules/bl": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", "dev": true, + "optional": true, "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", @@ -551,6 +1060,7 @@ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "dev": true, + "optional": true, "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -560,12 +1070,6 @@ "node": ">= 6" } }, - "node_modules/bluebird": { - "version": "3.4.7", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz", - "integrity": "sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA==", - "dev": true - }, "node_modules/boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -576,6 +1080,7 @@ "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -612,6 +1117,7 @@ "url": "https://feross.org/support" } ], + "optional": true, "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" @@ -626,24 +1132,6 @@ "node": "*" } }, - "node_modules/buffer-indexof-polyfill": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz", - "integrity": "sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A==", - "dev": true, - "engines": { - "node": ">=0.10" - } - }, - "node_modules/buffers": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", - "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", - "dev": true, - "engines": { - "node": ">=0.2.0" - } - }, "node_modules/call-bind": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", @@ -666,18 +1154,6 @@ "node": ">=6" } }, - "node_modules/chainsaw": { - "version": "0.1.0", - "resolved": 
"https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", - "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", - "dev": true, - "dependencies": { - "traverse": ">=0.3.0 <0.4" - }, - "engines": { - "node": "*" - } - }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -736,7 +1212,8 @@ "version": "1.1.4", "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", - "dev": true + "dev": true, + "optional": true }, "node_modules/ci-info": { "version": "2.0.0", @@ -784,7 +1261,8 @@ "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true }, "node_modules/core-util-is": { "version": "1.0.3", @@ -792,24 +1270,6 @@ "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", "dev": true }, - "node_modules/cross-env": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", - "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.1" - }, - "bin": { - "cross-env": "src/bin/cross-env.js", - "cross-env-shell": "src/bin/cross-env-shell.js" - }, - "engines": { - "node": ">=10.14", - "npm": ">=6", - "yarn": ">=1" - } - }, "node_modules/cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -853,9 +1313,9 @@ } }, "node_modules/d3": { - "version": "7.8.2", - "resolved": "https://registry.npmjs.org/d3/-/d3-7.8.2.tgz", - "integrity": "sha512-WXty7qOGSHb7HR7CfOzwN1Gw04MUOzN8qh9ZUsvwycIMb4DYMpY9xczZ6jUorGtO6bR9BPMPaueIKwiDxu9uiQ==", + "version": "7.8.5", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.8.5.tgz", + "integrity": "sha512-JgoahDG51ncUfJu6wX/1vWQEqOflgXyl4MaHqlcSruTez7yhaRKR9i8VjjcQGeS2en/jnFivXuaIMnseMMt0XA==", "dependencies": { "d3-array": "3", "d3-axis": "3", @@ -1083,6 +1543,34 @@ "d3-selection": "^3.0.0" } }, + "node_modules/d3-graphviz/node_modules/@hpcc-js/wasm": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-2.5.0.tgz", + "integrity": "sha512-G26BamgaHW46f6P8bmkygapgNcy+tTDMwIvCzmMzdp39sxUS1u4gaT/vR2SSDc4x3SfL5RE4B2B8ef/wd429Hg==", + "dependencies": { + "yargs": "17.6.2" + }, + "bin": { + "dot-wasm": "bin/dot-wasm.js" + } + }, + "node_modules/d3-graphviz/node_modules/yargs": { + "version": "17.6.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", + "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/d3-hierarchy": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", @@ -1265,6 +1753,7 @@ "resolved": 
"https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", "dev": true, + "optional": true, "dependencies": { "mimic-response": "^3.1.0" }, @@ -1280,6 +1769,7 @@ "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", "dev": true, + "optional": true, "engines": { "node": ">=4.0.0" } @@ -1303,6 +1793,7 @@ "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", "dev": true, + "optional": true, "engines": { "node": ">=8" } @@ -1386,15 +1877,6 @@ "url": "https://github.com/fb55/domutils?sponsor=1" } }, - "node_modules/duplexer2": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", - "integrity": "sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==", - "dev": true, - "dependencies": { - "readable-stream": "^2.0.2" - } - }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", @@ -1405,6 +1887,7 @@ "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dev": true, + "optional": true, "dependencies": { "once": "^1.4.0" } @@ -1422,9 +1905,9 @@ } }, "node_modules/esbuild": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.54.tgz", - "integrity": "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA==", + "version": "0.18.12", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.12.tgz", + "integrity": "sha512-XuOVLDdtsDslXStStduT41op21Ytmf4/BDS46aa3xPJ7X5h2eMWBF1oAe3QjUH3bDksocNXgzGUZ7XHIBya6Tg==", "dev": true, "hasInstallScript": true, "bin": { @@ -1434,347 +1917,28 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/linux-loong64": "0.14.54", - "esbuild-android-64": "0.14.54", - "esbuild-android-arm64": "0.14.54", - "esbuild-darwin-64": "0.14.54", - "esbuild-darwin-arm64": "0.14.54", - "esbuild-freebsd-64": "0.14.54", - "esbuild-freebsd-arm64": "0.14.54", - "esbuild-linux-32": "0.14.54", - "esbuild-linux-64": "0.14.54", - "esbuild-linux-arm": "0.14.54", - "esbuild-linux-arm64": "0.14.54", - "esbuild-linux-mips64le": "0.14.54", - "esbuild-linux-ppc64le": "0.14.54", - "esbuild-linux-riscv64": "0.14.54", - "esbuild-linux-s390x": "0.14.54", - "esbuild-netbsd-64": "0.14.54", - "esbuild-openbsd-64": "0.14.54", - "esbuild-sunos-64": "0.14.54", - "esbuild-windows-32": "0.14.54", - "esbuild-windows-64": "0.14.54", - "esbuild-windows-arm64": "0.14.54" - } - }, - "node_modules/esbuild-android-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.54.tgz", - "integrity": "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-android-arm64": { - "version": "0.14.54", - "resolved": 
"https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.54.tgz", - "integrity": "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-darwin-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.54.tgz", - "integrity": "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-darwin-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.54.tgz", - "integrity": "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-freebsd-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.54.tgz", - "integrity": "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-freebsd-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.54.tgz", - "integrity": "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-linux-32": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.54.tgz", - "integrity": "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-linux-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.54.tgz", - "integrity": "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-linux-arm": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.54.tgz", - "integrity": "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-linux-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.54.tgz", - "integrity": "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - 
"node_modules/esbuild-linux-mips64le": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.54.tgz", - "integrity": "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw==", - "cpu": [ - "mips64el" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-linux-ppc64le": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.54.tgz", - "integrity": "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-linux-riscv64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.54.tgz", - "integrity": "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg==", - "cpu": [ - "riscv64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-linux-s390x": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.54.tgz", - "integrity": "sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA==", - "cpu": [ - "s390x" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-netbsd-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.54.tgz", - "integrity": "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-openbsd-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.54.tgz", - "integrity": "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-sunos-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.54.tgz", - "integrity": "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-windows-32": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.54.tgz", - "integrity": "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-windows-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.54.tgz", - "integrity": "sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, 
- "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild-windows-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.54.tgz", - "integrity": "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" + "@esbuild/android-arm": "0.18.12", + "@esbuild/android-arm64": "0.18.12", + "@esbuild/android-x64": "0.18.12", + "@esbuild/darwin-arm64": "0.18.12", + "@esbuild/darwin-x64": "0.18.12", + "@esbuild/freebsd-arm64": "0.18.12", + "@esbuild/freebsd-x64": "0.18.12", + "@esbuild/linux-arm": "0.18.12", + "@esbuild/linux-arm64": "0.18.12", + "@esbuild/linux-ia32": "0.18.12", + "@esbuild/linux-loong64": "0.18.12", + "@esbuild/linux-mips64el": "0.18.12", + "@esbuild/linux-ppc64": "0.18.12", + "@esbuild/linux-riscv64": "0.18.12", + "@esbuild/linux-s390x": "0.18.12", + "@esbuild/linux-x64": "0.18.12", + "@esbuild/netbsd-x64": "0.18.12", + "@esbuild/openbsd-x64": "0.18.12", + "@esbuild/sunos-x64": "0.18.12", + "@esbuild/win32-arm64": "0.18.12", + "@esbuild/win32-ia32": "0.18.12", + "@esbuild/win32-x64": "0.18.12" } }, "node_modules/escalade": { @@ -1798,13 +1962,16 @@ } }, "node_modules/eslint": { - "version": "8.32.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.32.0.tgz", - "integrity": "sha512-nETVXpnthqKPFyuY2FNjz/bEd6nbosRgKbkgS/y1C7LJop96gYHWpiguLecMHQ2XCPxn77DS0P+68WzG6vkZSQ==", + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.44.0.tgz", + "integrity": "sha512-0wpHoUbDUHgNCyvFB5aXLiQVfK9B0at6gUvzy83k4kAsQ/u769TQDX6iKC+aO4upIHO9WSaA3QoXYQDHbNwf1A==", "dev": true, "dependencies": { - "@eslint/eslintrc": "^1.4.1", - "@humanwhocodes/config-array": "^0.11.8", + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.4.0", + "@eslint/eslintrc": "^2.1.0", + "@eslint/js": "8.44.0", + "@humanwhocodes/config-array": "^0.11.10", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "ajv": "^6.10.0", @@ -1813,32 +1980,29 @@ "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.1.1", - "eslint-utils": "^3.0.0", - "eslint-visitor-keys": "^3.3.0", - "espree": "^9.4.0", - "esquery": "^1.4.0", + "eslint-scope": "^7.2.0", + "eslint-visitor-keys": "^3.4.1", + "espree": "^9.6.0", + "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "globals": "^13.19.0", - "grapheme-splitter": "^1.0.4", + "graphemer": "^1.4.0", "ignore": "^5.2.0", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", - "js-sdsl": "^4.1.4", "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "regexpp": "^3.2.0", + "optionator": "^0.9.3", "strip-ansi": "^6.0.1", "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" @@ -1854,9 +2018,9 @@ } }, "node_modules/eslint-config-prettier": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.6.0.tgz", - "integrity": "sha512-bAF0eLpLVqP5oEVUFKpMA+NnRFICwn9X8B5jrR9FcqnYBuPbqWEjTEspPWMj5ye6czoSLDweCzSo3Ko7gGrZaA==", + "version": "8.8.0", + "resolved": 
"https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.8.0.tgz", + "integrity": "sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==", "dev": true, "bin": { "eslint-config-prettier": "bin/cli.js" @@ -1878,46 +2042,22 @@ "node": ">=8.0.0" } }, - "node_modules/eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "dependencies": { - "eslint-visitor-keys": "^2.0.0" - }, - "engines": { - "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { - "eslint": ">=5" - } - }, - "node_modules/eslint-utils/node_modules/eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true, - "engines": { - "node": ">=10" - } - }, "node_modules/eslint-visitor-keys": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", + "integrity": "sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/eslint/node_modules/eslint-scope": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", - "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", + "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", "dev": true, "dependencies": { "esrecurse": "^4.3.0", @@ -1925,6 +2065,9 @@ }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, "node_modules/eslint/node_modules/estraverse": { @@ -1937,14 +2080,14 @@ } }, "node_modules/espree": { - "version": "9.4.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz", - "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==", + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.0.tgz", + "integrity": "sha512-1FH/IiruXZ84tpUlm0aCUEwMl2Ho5ilqVh0VvQXw+byAz/4SAciyHLlfmL5WYqsvD38oymdUwBss0LtK8m4s/A==", "dev": true, "dependencies": { - "acorn": "^8.8.0", + "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.3.0" + "eslint-visitor-keys": "^3.4.1" }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1954,9 +2097,9 @@ } }, "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "version": "1.5.0", + "resolved": 
"https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, "dependencies": { "estraverse": "^5.1.0" @@ -2018,6 +2161,7 @@ "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", "dev": true, + "optional": true, "engines": { "node": ">=6" } @@ -2029,9 +2173,9 @@ "dev": true }, "node_modules/fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.0.tgz", + "integrity": "sha512-ChDuvbOypPuNjO8yIDf36x7BlZX1smcUMTTcyoIjycexOxd6DFsKsg21qVBzEmr3G7fUKIRy2/psii+CIUt7FA==", "dev": true, "dependencies": { "@nodelib/fs.stat": "^2.0.2", @@ -2169,7 +2313,8 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", - "dev": true + "dev": true, + "optional": true }, "node_modules/fs.realpath": { "version": "1.0.0", @@ -2177,33 +2322,6 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", "dev": true }, - "node_modules/fstream": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", - "integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.1.2", - "inherits": "~2.0.0", - "mkdirp": ">=0.5 0", - "rimraf": "2" - }, - "engines": { - "node": ">=0.6" - } - }, - "node_modules/fstream/node_modules/rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - } - }, "node_modules/function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -2236,7 +2354,8 @@ "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", - "dev": true + "dev": true, + "optional": true }, "node_modules/glob": { "version": "7.2.3", @@ -2271,9 +2390,9 @@ } }, "node_modules/globals": { - "version": "13.19.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.19.0.tgz", - "integrity": "sha512-dkQ957uSRWHw7CFXLUtUHQI3g3aWApYhfNR2O6jn/907riyTYKVBmxYVROkBcY614FSSeSJh7Xm7SrUWCxvJMQ==", + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -2305,18 +2424,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": 
"sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", - "dev": true - }, "node_modules/grapheme-splitter": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", "dev": true }, + "node_modules/graphemer": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", + "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", + "dev": true + }, "node_modules/has": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", @@ -2437,7 +2556,8 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "optional": true }, "node_modules/ignore": { "version": "5.2.4", @@ -2448,6 +2568,12 @@ "node": ">= 4" } }, + "node_modules/immediate": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz", + "integrity": "sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ==", + "dev": true + }, "node_modules/import-fresh": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", @@ -2493,7 +2619,8 @@ "version": "1.3.8", "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true + "dev": true, + "optional": true }, "node_modules/internmap": { "version": "2.0.3", @@ -2574,16 +2701,6 @@ "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", "dev": true }, - "node_modules/js-sdsl": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz", - "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==", - "dev": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/js-sdsl" - } - }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -2608,12 +2725,31 @@ "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", "dev": true }, + "node_modules/jsonc-parser": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.0.tgz", + "integrity": "sha512-gfFQZrcTc8CnKXp6Y4/CBT3fTc0OVuDofpre4aEeEpSBPV5X5v4+Vmx+8snU7RLPrNHPKSgLxGo9YuQzz20o+w==", + "dev": true + }, + "node_modules/jszip": { + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", + "integrity": "sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g==", + "dev": true, + "dependencies": { + "lie": "~3.3.0", + "pako": "~1.0.2", + "readable-stream": "~2.3.6", + "setimmediate": "^1.0.5" + } + }, "node_modules/keytar": { "version": "7.9.0", "resolved": "https://registry.npmjs.org/keytar/-/keytar-7.9.0.tgz", "integrity": "sha512-VPD8mtVtm5JNtA2AErl6Chp06JBfy7diFQ7TQQhdpWOl6MrCRB+eRbvAZUsbGQS9kiMq0coJsy0W0vHpDCkWsQ==", "dev": true, "hasInstallScript": true, + "optional": true, "dependencies": { "node-addon-api": "^4.3.0", "prebuild-install": "^7.0.1" @@ -2641,6 +2777,15 @@ "node": ">= 0.8.0" } }, + "node_modules/lie": { + "version": "3.3.0", + "resolved": 
"https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", + "integrity": "sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ==", + "dev": true, + "dependencies": { + "immediate": "~3.0.5" + } + }, "node_modules/linkify-it": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", @@ -2650,12 +2795,6 @@ "uc.micro": "^1.0.1" } }, - "node_modules/listenercount": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/listenercount/-/listenercount-1.0.1.tgz", - "integrity": "sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ==", - "dev": true - }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -2758,6 +2897,7 @@ "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", "dev": true, + "optional": true, "engines": { "node": ">=10" }, @@ -2769,6 +2909,7 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -2781,27 +2922,17 @@ "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==", "dev": true, + "optional": true, "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", - "dev": true, - "dependencies": { - "minimist": "^1.2.6" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, "node_modules/mkdirp-classic": { "version": "0.5.3", "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "dev": true + "dev": true, + "optional": true }, "node_modules/ms": { "version": "2.1.2", @@ -2819,7 +2950,8 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", - "dev": true + "dev": true, + "optional": true }, "node_modules/natural-compare": { "version": "1.4.0", @@ -2838,6 +2970,7 @@ "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.31.0.tgz", "integrity": "sha512-eSKV6s+APenqVh8ubJyiu/YhZgxQpGP66ntzUb3lY1xB9ukSRaGnx0AIxI+IM+1+IVYC1oWobgG5L3Lt9ARykQ==", "dev": true, + "optional": true, "dependencies": { "semver": "^7.3.5" }, @@ -2849,7 +2982,8 @@ "version": "4.3.0", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", "integrity": "sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==", - "dev": true + "dev": true, + "optional": true }, "node_modules/nth-check": { "version": "2.1.1", @@ -2882,34 +3016,35 @@ } }, "node_modules/optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": 
"sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", "dev": true, "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" + "type-check": "^0.4.0" }, "engines": { "node": ">= 0.8.0" } }, "node_modules/ovsx": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.5.2.tgz", - "integrity": "sha512-UbLultRCk46WddeA0Cly4hoRhzBJUiLgbIEViXlgOvV54LbsppClDkMLoCevUUBHoiNdMX2NuiSgURAEXgCZdw==", + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.8.2.tgz", + "integrity": "sha512-btDXZorXlmwN9+9Un3khrVygCXmhwbrtg8gifNXw92rZPXcRBAiLG/L09Kb6srhGEratsFt42AktfD8t9XhzoA==", "dev": true, "dependencies": { + "@vscode/vsce": "^2.19.0", "commander": "^6.1.0", "follow-redirects": "^1.14.6", "is-ci": "^2.0.0", "leven": "^3.1.0", - "tmp": "^0.2.1", - "vsce": "^2.6.3" + "semver": "^7.5.2", + "tmp": "^0.2.1" }, "bin": { "ovsx": "lib/ovsx" @@ -2957,6 +3092,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/pako": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", + "dev": true + }, "node_modules/parent-module": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -2979,9 +3120,9 @@ } }, "node_modules/parse-semver/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "dev": true, "bin": { "semver": "bin/semver" @@ -3071,6 +3212,7 @@ "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz", "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==", "dev": true, + "optional": true, "dependencies": { "detect-libc": "^2.0.0", "expand-template": "^2.0.3", @@ -3102,15 +3244,15 @@ } }, "node_modules/prettier": { - "version": "2.8.3", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.3.tgz", - "integrity": "sha512-tJ/oJ4amDihPoufT5sM0Z1SKEuKay8LfVAMlbbhnnkvt6BUserZylqo2PN+p9KeljLr0OHa2rXHU1T8reeoTrw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.0.0.tgz", + "integrity": "sha512-zBf5eHpwHOGPC47h0zrPyNn+eAEIdEzfywMoYn2XPi0P44Zp0tSq64rq0xAREh4auw2cJZHo9QUob+NqCQky4g==", "dev": true, "bin": { - "prettier": "bin-prettier.js" + "prettier": "bin/prettier.cjs" }, "engines": { - "node": ">=10.13.0" + "node": ">=14" }, "funding": { "url": "https://github.com/prettier/prettier?sponsor=1" @@ -3127,6 +3269,7 @@ "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", "dev": true, + "optional": true, "dependencies": { 
"end-of-stream": "^1.1.0", "once": "^1.3.1" @@ -3181,6 +3324,7 @@ "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", "dev": true, + "optional": true, "dependencies": { "deep-extend": "^0.6.0", "ini": "~1.3.0", @@ -3196,6 +3340,7 @@ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", "dev": true, + "optional": true, "engines": { "node": ">=0.10.0" } @@ -3213,9 +3358,9 @@ } }, "node_modules/readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dev": true, "dependencies": { "core-util-is": "~1.0.0", @@ -3227,18 +3372,6 @@ "util-deprecate": "~1.0.1" } }, - "node_modules/regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - } - }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -3332,9 +3465,9 @@ "dev": true }, "node_modules/semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz", + "integrity": "sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==", "dependencies": { "lru-cache": "^6.0.0" }, @@ -3404,7 +3537,8 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "optional": true }, "node_modules/simple-get": { "version": "4.0.1", @@ -3425,6 +3559,7 @@ "url": "https://feross.org/support" } ], + "optional": true, "dependencies": { "decompress-response": "^6.0.0", "once": "^1.3.1", @@ -3502,6 +3637,7 @@ "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", "dev": true, + "optional": true, "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", @@ -3514,6 +3650,7 @@ "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", "dev": true, + "optional": true, "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", @@ -3530,6 +3667,7 @@ "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "dev": true, + "optional": true, "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", @@ -3569,40 +3707,22 @@ "node": ">=8.0" } 
}, - "node_modules/traverse": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", - "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", - "dev": true, - "engines": { - "node": "*" - } - }, - "node_modules/tslib": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz", - "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==", - "dev": true - }, - "node_modules/tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "node_modules/ts-api-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.0.1.tgz", + "integrity": "sha512-lC/RGlPmwdrIBFTX59wwNzqh7aR2otPNPR/5brHZm/XKFYKsfqxihXUe9pU3JI+3vGkl+vyCoNNnPhJn3aLK1A==", "dev": true, - "dependencies": { - "tslib": "^1.8.1" - }, "engines": { - "node": ">= 6" + "node": ">=16.13.0" }, "peerDependencies": { - "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + "typescript": ">=4.2.0" } }, - "node_modules/tsutils/node_modules/tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "node_modules/tslib": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.0.tgz", + "integrity": "sha512-7At1WUettjcSRHXCyYtTselblcHl9PJFFVKiCAy/bY97+BPZXSQ2wbq0P9s8tK2G7dFQfNnlJnPAiArVBVBsfA==", "dev": true }, "node_modules/tunnel": { @@ -3619,6 +3739,7 @@ "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", "dev": true, + "optional": true, "dependencies": { "safe-buffer": "^5.0.1" }, @@ -3662,16 +3783,16 @@ } }, "node_modules/typescript": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.4.tgz", - "integrity": "sha512-Uz+dTXYzxXXbsFpM86Wh3dKCxrQqUcVMxwU54orwlJjOpO3ao8L7j5lH+dWfTwgCwIuM9GQ2kvVotzYJMXTBZg==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz", + "integrity": "sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA==", "dev": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" }, "engines": { - "node": ">=4.2.0" + "node": ">=14.17" } }, "node_modules/uc.micro": { @@ -3686,24 +3807,6 @@ "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==", "dev": true }, - "node_modules/unzipper": { - "version": "0.10.11", - "resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.10.11.tgz", - "integrity": "sha512-+BrAq2oFqWod5IESRjL3S8baohbevGcVA+teAIOYWM3pDVdseogqbzhhvvmiyQrUNKFUnDMtELW3X8ykbyDCJw==", - "dev": true, - "dependencies": { - "big-integer": "^1.6.17", - "binary": "~0.3.0", - "bluebird": "~3.4.1", - "buffer-indexof-polyfill": "~1.0.0", - "duplexer2": "~0.1.4", - "fstream": "^1.0.12", - "graceful-fs": "^4.2.2", - "listenercount": "~1.0.1", - "readable-stream": "~2.3.6", - "setimmediate": "~1.0.4" - } - }, "node_modules/uri-js": { 
"version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", @@ -3725,164 +3828,59 @@ "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "dev": true }, - "node_modules/vsce": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/vsce/-/vsce-2.15.0.tgz", - "integrity": "sha512-P8E9LAZvBCQnoGoizw65JfGvyMqNGlHdlUXD1VAuxtvYAaHBKLBdKPnpy60XKVDAkQCfmMu53g+gq9FM+ydepw==", - "deprecated": "vsce has been renamed to @vscode/vsce. Install using @vscode/vsce instead.", - "dev": true, - "dependencies": { - "azure-devops-node-api": "^11.0.1", - "chalk": "^2.4.2", - "cheerio": "^1.0.0-rc.9", - "commander": "^6.1.0", - "glob": "^7.0.6", - "hosted-git-info": "^4.0.2", - "keytar": "^7.7.0", - "leven": "^3.1.0", - "markdown-it": "^12.3.2", - "mime": "^1.3.4", - "minimatch": "^3.0.3", - "parse-semver": "^1.1.1", - "read": "^1.0.7", - "semver": "^5.1.0", - "tmp": "^0.2.1", - "typed-rest-client": "^1.8.4", - "url-join": "^4.0.1", - "xml2js": "^0.4.23", - "yauzl": "^2.3.1", - "yazl": "^2.2.2" - }, - "bin": { - "vsce": "vsce" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/vsce/node_modules/ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "dependencies": { - "color-convert": "^1.9.0" - }, + "node_modules/vscode-jsonrpc": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.1.0.tgz", + "integrity": "sha512-6TDy/abTQk+zDGYazgbIPc+4JoXdwC8NHU9Pbn4UJP1fehUyZmM4RHp5IthX7A6L5KS30PRui+j+tbbMMMafdw==", "engines": { - "node": ">=4" + "node": ">=14.0.0" } }, - "node_modules/vsce/node_modules/chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, + "node_modules/vscode-languageclient": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.1.0.tgz", + "integrity": "sha512-GL4QdbYUF/XxQlAsvYWZRV3V34kOkpRlvV60/72ghHfsYFnS/v2MANZ9P6sHmxFcZKOse8O+L9G7Czg0NUWing==", "dependencies": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" + "minimatch": "^5.1.0", + "semver": "^7.3.7", + "vscode-languageserver-protocol": "3.17.3" }, "engines": { - "node": ">=4" - } - }, - "node_modules/vsce/node_modules/color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "dependencies": { - "color-name": "1.1.3" - } - }, - "node_modules/vsce/node_modules/color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "node_modules/vsce/node_modules/commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "dev": true, - "engines": { - "node": ">= 6" - } - }, - 
"node_modules/vsce/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/vsce/node_modules/has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true, - "engines": { - "node": ">=4" - } - }, - "node_modules/vsce/node_modules/semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true, - "bin": { - "semver": "bin/semver" + "vscode": "^1.67.0" } }, - "node_modules/vsce/node_modules/supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, + "node_modules/vscode-languageclient/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dependencies": { - "has-flag": "^3.0.0" - }, - "engines": { - "node": ">=4" + "balanced-match": "^1.0.0" } }, - "node_modules/vscode-jsonrpc": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.2.tgz", - "integrity": "sha512-RY7HwI/ydoC1Wwg4gJ3y6LpU9FJRZAUnTYMXthqhFXXu77ErDd/xkREpGuk4MyYkk4a+XDWAMqe0S3KkelYQEQ==", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/vscode-languageclient": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.2.tgz", - "integrity": "sha512-lHlthJtphG9gibGb/y72CKqQUxwPsMXijJVpHEC2bvbFqxmkj9LwQ3aGU9dwjBLqsX1S4KjShYppLvg1UJDF/Q==", + "node_modules/vscode-languageclient/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", "dependencies": { - "minimatch": "^3.0.4", - "semver": "^7.3.5", - "vscode-languageserver-protocol": "3.17.2" + "brace-expansion": "^2.0.1" }, "engines": { - "vscode": "^1.67.0" + "node": ">=10" } }, "node_modules/vscode-languageserver-protocol": { - "version": "3.17.2", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.2.tgz", - "integrity": "sha512-8kYisQ3z/SQ2kyjlNeQxbkkTNmVFoQCqkmGrzLH6A9ecPlgTbp3wDTnUNqaUxYr4vlAcloxx8zwy7G5WdguYNg==", + "version": "3.17.3", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.3.tgz", + "integrity": "sha512-924/h0AqsMtA5yK22GgMtCYiMdCOtWTSGgUOkgEDX+wk2b0x4sAfLiO4NxBxqbiVtz7K7/1/RgVrVI0NClZwqA==", "dependencies": { - "vscode-jsonrpc": "8.0.2", - "vscode-languageserver-types": "3.17.2" + "vscode-jsonrpc": "8.1.0", + "vscode-languageserver-types": "3.17.3" } }, "node_modules/vscode-languageserver-types": { - "version": "3.17.2", - "resolved": 
"https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz", - "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==" + "version": "3.17.3", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.3.tgz", + "integrity": "sha512-SYU4z1dL0PyIMd4Vj8YOqFvHu7Hz/enbWtpfnVbJHU4Nd1YNYx8u0ennumc6h48GQNeOLxmwySmnADouT/AuZA==" }, "node_modules/which": { "version": "2.0.2", @@ -3899,15 +3897,6 @@ "node": ">= 8" } }, - "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -3930,19 +3919,6 @@ "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "dev": true }, - "node_modules/xml2js": { - "version": "0.4.23", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", - "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", - "dev": true, - "dependencies": { - "sax": ">=0.6.0", - "xmlbuilder": "~11.0.0" - }, - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/xmlbuilder": { "version": "11.0.1", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", @@ -3966,9 +3942,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yargs": { - "version": "17.6.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", - "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", @@ -4021,2811 +3997,5 @@ "url": "https://github.com/sponsors/sindresorhus" } } - }, - "dependencies": { - "@esbuild/linux-loong64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.14.54.tgz", - "integrity": "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==", - "dev": true, - "optional": true - }, - "@eslint/eslintrc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz", - "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==", - "dev": true, - "requires": { - "ajv": "^6.12.4", - "debug": "^4.3.2", - "espree": "^9.4.0", - "globals": "^13.19.0", - "ignore": "^5.2.0", - "import-fresh": "^3.2.1", - "js-yaml": "^4.1.0", - "minimatch": "^3.1.2", - "strip-json-comments": "^3.1.1" - } - }, - "@hpcc-js/wasm": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/@hpcc-js/wasm/-/wasm-2.5.0.tgz", - "integrity": "sha512-G26BamgaHW46f6P8bmkygapgNcy+tTDMwIvCzmMzdp39sxUS1u4gaT/vR2SSDc4x3SfL5RE4B2B8ef/wd429Hg==", - "requires": { - "yargs": "17.6.2" - } - }, - "@humanwhocodes/config-array": { - "version": "0.11.8", - "resolved": 
"https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", - "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", - "dev": true, - "requires": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", - "minimatch": "^3.0.5" - } - }, - "@humanwhocodes/module-importer": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", - "dev": true - }, - "@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", - "dev": true - }, - "@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - } - }, - "@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true - }, - "@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "requires": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - } - }, - "@tootallnate/once": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", - "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", - "dev": true - }, - "@types/json-schema": { - "version": "7.0.11", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", - "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", - "dev": true - }, - "@types/node": { - "version": "16.11.68", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.68.tgz", - "integrity": "sha512-JkRpuVz3xCNCWaeQ5EHLR/6woMbHZz/jZ7Kmc63AkU+1HxnoUugzSWMck7dsR4DvNYX8jp9wTi9K7WvnxOIQZQ==", - "dev": true - }, - "@types/semver": { - "version": "7.3.13", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.13.tgz", - "integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==", - "dev": true - }, - "@types/vscode": { - "version": "1.66.0", - "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.66.0.tgz", - "integrity": "sha512-ZfJck4M7nrGasfs4A4YbUoxis3Vu24cETw3DERsNYtDZmYSYtk6ljKexKFKhImO/ZmY6ZMsmegu2FPkXoUFImA==", - "dev": true - }, - "@typescript-eslint/eslint-plugin": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.48.2.tgz", - "integrity": "sha512-sR0Gja9Ky1teIq4qJOl0nC+Tk64/uYdX+mi+5iB//MH8gwyx8e3SOyhEzeLZEFEEfCaLf8KJq+Bd/6je1t+CAg==", - "dev": true, - "requires": { - "@typescript-eslint/scope-manager": "5.48.2", - "@typescript-eslint/type-utils": "5.48.2", - 
"@typescript-eslint/utils": "5.48.2", - "debug": "^4.3.4", - "ignore": "^5.2.0", - "natural-compare-lite": "^1.4.0", - "regexpp": "^3.2.0", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - } - }, - "@typescript-eslint/parser": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.48.2.tgz", - "integrity": "sha512-38zMsKsG2sIuM5Oi/olurGwYJXzmtdsHhn5mI/pQogP+BjYVkK5iRazCQ8RGS0V+YLk282uWElN70zAAUmaYHw==", - "dev": true, - "requires": { - "@typescript-eslint/scope-manager": "5.48.2", - "@typescript-eslint/types": "5.48.2", - "@typescript-eslint/typescript-estree": "5.48.2", - "debug": "^4.3.4" - } - }, - "@typescript-eslint/scope-manager": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.48.2.tgz", - "integrity": "sha512-zEUFfonQid5KRDKoI3O+uP1GnrFd4tIHlvs+sTJXiWuypUWMuDaottkJuR612wQfOkjYbsaskSIURV9xo4f+Fw==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.48.2", - "@typescript-eslint/visitor-keys": "5.48.2" - } - }, - "@typescript-eslint/type-utils": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.48.2.tgz", - "integrity": "sha512-QVWx7J5sPMRiOMJp5dYshPxABRoZV1xbRirqSk8yuIIsu0nvMTZesKErEA3Oix1k+uvsk8Cs8TGJ6kQ0ndAcew==", - "dev": true, - "requires": { - "@typescript-eslint/typescript-estree": "5.48.2", - "@typescript-eslint/utils": "5.48.2", - "debug": "^4.3.4", - "tsutils": "^3.21.0" - } - }, - "@typescript-eslint/types": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.48.2.tgz", - "integrity": "sha512-hE7dA77xxu7ByBc6KCzikgfRyBCTst6dZQpwaTy25iMYOnbNljDT4hjhrGEJJ0QoMjrfqrx+j1l1B9/LtKeuqA==", - "dev": true - }, - "@typescript-eslint/typescript-estree": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.48.2.tgz", - "integrity": "sha512-bibvD3z6ilnoVxUBFEgkO0k0aFvUc4Cttt0dAreEr+nrAHhWzkO83PEVVuieK3DqcgL6VAK5dkzK8XUVja5Zcg==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.48.2", - "@typescript-eslint/visitor-keys": "5.48.2", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - } - }, - "@typescript-eslint/utils": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.48.2.tgz", - "integrity": "sha512-2h18c0d7jgkw6tdKTlNaM7wyopbLRBiit8oAxoP89YnuBOzCZ8g8aBCaCqq7h208qUTroL7Whgzam7UY3HVLow==", - "dev": true, - "requires": { - "@types/json-schema": "^7.0.9", - "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.48.2", - "@typescript-eslint/types": "5.48.2", - "@typescript-eslint/typescript-estree": "5.48.2", - "eslint-scope": "^5.1.1", - "eslint-utils": "^3.0.0", - "semver": "^7.3.7" - } - }, - "@typescript-eslint/visitor-keys": { - "version": "5.48.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.48.2.tgz", - "integrity": "sha512-z9njZLSkwmjFWUelGEwEbdf4NwKvfHxvGC0OcGN1Hp/XNDIcJ7D5DpPNPv6x6/mFvc1tQHsaWmpD/a4gOvvCJQ==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.48.2", - "eslint-visitor-keys": "^3.3.0" - } - }, - "@vscode/test-electron": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.2.2.tgz", - "integrity": "sha512-s5d2VtMySvff0UgqkJ0BMCr1es+qREE194EAodGIefq518W53ifvv69e80l9e2MrYJEqUUKwukE/w3H9o15YEw==", - "dev": 
true, - "requires": { - "http-proxy-agent": "^4.0.1", - "https-proxy-agent": "^5.0.0", - "rimraf": "^3.0.2", - "unzipper": "^0.10.11" - } - }, - "acorn": { - "version": "8.8.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz", - "integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==", - "dev": true - }, - "acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true, - "requires": {} - }, - "agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "dev": true, - "requires": { - "debug": "4" - } - }, - "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "requires": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "anser": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/anser/-/anser-2.1.1.tgz", - "integrity": "sha512-nqLm4HxOTpeLOxcmB3QWmV5TcDFhW9y/fyQ+hivtDFcK4OQ+pQ5fzPnXHM1Mfcm0VkLtvVi1TCPr++Qy0Q/3EQ==" - }, - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" - }, - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "requires": { - "color-convert": "^2.0.1" - } - }, - "argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", - "dev": true - }, - "azure-devops-node-api": { - "version": "11.2.0", - "resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-11.2.0.tgz", - "integrity": "sha512-XdiGPhrpaT5J8wdERRKs5g8E0Zy1pvOYTli7z9E8nmOn3YGp4FhtjhrOyFmX/8veWCwdI69mCHKJw6l+4J/bHA==", - "dev": true, - "requires": { - "tunnel": "0.0.6", - "typed-rest-client": "^1.8.4" - } - }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true - }, - "big-integer": { - "version": "1.6.51", - "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.51.tgz", - "integrity": 
"sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg==", - "dev": true - }, - "binary": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", - "integrity": "sha512-D4H1y5KYwpJgK8wk1Cue5LLPgmwHKYSChkbspQg5JtVuR5ulGckxfR62H3AE9UDkdMC8yyXlqYihuz3Aqg2XZg==", - "dev": true, - "requires": { - "buffers": "~0.1.1", - "chainsaw": "~0.1.0" - } - }, - "bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "requires": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - }, - "dependencies": { - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - } - } - }, - "bluebird": { - "version": "3.4.7", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.4.7.tgz", - "integrity": "sha512-iD3898SR7sWVRHbiQv+sHUtHnMvC1o3nW5rAcqnq3uOn07DSAppZYUkIGslDz6gXC7HfunPe7YVBgoEJASPcHA==", - "dev": true - }, - "boolbase": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", - "dev": true - }, - "buffer-indexof-polyfill": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/buffer-indexof-polyfill/-/buffer-indexof-polyfill-1.0.2.tgz", - "integrity": "sha512-I7wzHwA3t1/lwXQh+A5PbNvJxgfo5r3xulgpYDB5zckTu/Z9oUK9biouBKQUjEqzaz3HnAT6TYoovmE+GqSf7A==", - "dev": true - }, - "buffers": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", - "integrity": "sha512-9q/rDEGSb/Qsvv2qvzIzdluL5k7AaJOTrw23z9reQthrbF7is4CtlT0DXyO1oei2DCp4uojjzQ7igaSHp1kAEQ==", - "dev": true - }, - "call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": 
"sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", - "dev": true, - "requires": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" - } - }, - "callsites": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", - "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", - "dev": true - }, - "chainsaw": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", - "integrity": "sha512-75kWfWt6MEKNC8xYXIdRpDehRYY/tNSgwKaJq+dbbDcxORuVrrQ+SEHoWsniVn9XPYfP4gmdWIeDk/4YNp1rNQ==", - "dev": true, - "requires": { - "traverse": ">=0.3.0 <0.4" - } - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - } - }, - "cheerio": { - "version": "1.0.0-rc.12", - "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz", - "integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==", - "dev": true, - "requires": { - "cheerio-select": "^2.1.0", - "dom-serializer": "^2.0.0", - "domhandler": "^5.0.3", - "domutils": "^3.0.1", - "htmlparser2": "^8.0.1", - "parse5": "^7.0.0", - "parse5-htmlparser2-tree-adapter": "^7.0.0" - } - }, - "cheerio-select": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz", - "integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==", - "dev": true, - "requires": { - "boolbase": "^1.0.0", - "css-select": "^5.1.0", - "css-what": "^6.1.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3", - "domutils": "^3.0.1" - } - }, - "chownr": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", - "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", - "dev": true - }, - "ci-info": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", - "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", - "dev": true - }, - "cliui": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", - "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", - "requires": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" - }, - "commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" - }, - "concat-map": { - 
"version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" - }, - "core-util-is": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", - "dev": true - }, - "cross-env": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-7.0.3.tgz", - "integrity": "sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==", - "dev": true, - "requires": { - "cross-spawn": "^7.0.1" - } - }, - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "css-select": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", - "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", - "dev": true, - "requires": { - "boolbase": "^1.0.0", - "css-what": "^6.1.0", - "domhandler": "^5.0.2", - "domutils": "^3.0.1", - "nth-check": "^2.0.1" - } - }, - "css-what": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", - "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", - "dev": true - }, - "d3": { - "version": "7.8.2", - "resolved": "https://registry.npmjs.org/d3/-/d3-7.8.2.tgz", - "integrity": "sha512-WXty7qOGSHb7HR7CfOzwN1Gw04MUOzN8qh9ZUsvwycIMb4DYMpY9xczZ6jUorGtO6bR9BPMPaueIKwiDxu9uiQ==", - "requires": { - "d3-array": "3", - "d3-axis": "3", - "d3-brush": "3", - "d3-chord": "3", - "d3-color": "3", - "d3-contour": "4", - "d3-delaunay": "6", - "d3-dispatch": "3", - "d3-drag": "3", - "d3-dsv": "3", - "d3-ease": "3", - "d3-fetch": "3", - "d3-force": "3", - "d3-format": "3", - "d3-geo": "3", - "d3-hierarchy": "3", - "d3-interpolate": "3", - "d3-path": "3", - "d3-polygon": "3", - "d3-quadtree": "3", - "d3-random": "3", - "d3-scale": "4", - "d3-scale-chromatic": "3", - "d3-selection": "3", - "d3-shape": "3", - "d3-time": "3", - "d3-time-format": "4", - "d3-timer": "3", - "d3-transition": "3", - "d3-zoom": "3" - } - }, - "d3-array": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.2.tgz", - "integrity": "sha512-yEEyEAbDrF8C6Ob2myOBLjwBLck1Z89jMGFee0oPsn95GqjerpaOA4ch+vc2l0FNFFwMD5N7OCSEN5eAlsUbgQ==", - "requires": { - "internmap": "1 - 2" - } - }, - "d3-axis": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", - "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==" - }, - "d3-brush": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", - "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", - "requires": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "3", - "d3-transition": "3" - } - }, - "d3-chord": { - "version": "3.0.1", - "resolved": 
"https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", - "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", - "requires": { - "d3-path": "1 - 3" - } - }, - "d3-color": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", - "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==" - }, - "d3-contour": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", - "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", - "requires": { - "d3-array": "^3.2.0" - } - }, - "d3-delaunay": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.2.tgz", - "integrity": "sha512-IMLNldruDQScrcfT+MWnazhHbDJhcRJyOEBAJfwQnHle1RPh6WDuLvxNArUju2VSMSUuKlY5BGHRJ2cYyoFLQQ==", - "requires": { - "delaunator": "5" - } - }, - "d3-dispatch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", - "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==" - }, - "d3-drag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", - "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", - "requires": { - "d3-dispatch": "1 - 3", - "d3-selection": "3" - } - }, - "d3-dsv": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", - "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", - "requires": { - "commander": "7", - "iconv-lite": "0.6", - "rw": "1" - } - }, - "d3-ease": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", - "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==" - }, - "d3-fetch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", - "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", - "requires": { - "d3-dsv": "1 - 3" - } - }, - "d3-force": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", - "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", - "requires": { - "d3-dispatch": "1 - 3", - "d3-quadtree": "1 - 3", - "d3-timer": "1 - 3" - } - }, - "d3-format": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", - "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==" - }, - "d3-geo": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.0.tgz", - "integrity": "sha512-JEo5HxXDdDYXCaWdwLRt79y7giK8SbhZJbFWXqbRTolCHFI5jRqteLzCsq51NKbUoX0PjBVSohxrx+NoOUujYA==", - "requires": { - "d3-array": "2.5.0 - 3" - } - }, - "d3-graphviz": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/d3-graphviz/-/d3-graphviz-5.0.2.tgz", - "integrity": "sha512-EVRow9rnFgm/L1trbbnu2PGOND11IcSEdWXbrDbz9hH0/Kj3YM2AqMkkTN/EAWgawD5/zryyCy+3Vm05oSJ1Kg==", - "requires": { - "@hpcc-js/wasm": "2.5.0", - "d3-dispatch": "^3.0.1", - "d3-format": "^3.1.0", - "d3-interpolate": "^3.0.1", - "d3-path": 
"^3.1.0", - "d3-timer": "^3.0.1", - "d3-transition": "^3.0.1", - "d3-zoom": "^3.0.0" - } - }, - "d3-hierarchy": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", - "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==" - }, - "d3-interpolate": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", - "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", - "requires": { - "d3-color": "1 - 3" - } - }, - "d3-path": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", - "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==" - }, - "d3-polygon": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", - "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==" - }, - "d3-quadtree": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", - "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==" - }, - "d3-random": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", - "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==" - }, - "d3-scale": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", - "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", - "requires": { - "d3-array": "2.10.0 - 3", - "d3-format": "1 - 3", - "d3-interpolate": "1.2.0 - 3", - "d3-time": "2.1.1 - 3", - "d3-time-format": "2 - 4" - } - }, - "d3-scale-chromatic": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.0.0.tgz", - "integrity": "sha512-Lx9thtxAKrO2Pq6OO2Ua474opeziKr279P/TKZsMAhYyNDD3EnCffdbgeSYN5O7m2ByQsxtuP2CSDczNUIZ22g==", - "requires": { - "d3-color": "1 - 3", - "d3-interpolate": "1 - 3" - } - }, - "d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==" - }, - "d3-shape": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", - "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", - "requires": { - "d3-path": "^3.1.0" - } - }, - "d3-time": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", - "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", - "requires": { - "d3-array": "2 - 3" - } - }, - "d3-time-format": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", - "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", - "requires": { - "d3-time": "1 - 3" - } - }, - "d3-timer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", - "integrity": 
"sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==" - }, - "d3-transition": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", - "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", - "requires": { - "d3-color": "1 - 3", - "d3-dispatch": "1 - 3", - "d3-ease": "1 - 3", - "d3-interpolate": "1 - 3", - "d3-timer": "1 - 3" - } - }, - "d3-zoom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", - "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", - "requires": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "2 - 3", - "d3-transition": "2 - 3" - } - }, - "debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", - "dev": true, - "requires": { - "ms": "2.1.2" - } - }, - "decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "dev": true, - "requires": { - "mimic-response": "^3.1.0" - } - }, - "deep-extend": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", - "dev": true - }, - "deep-is": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", - "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", - "dev": true - }, - "delaunator": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.0.tgz", - "integrity": "sha512-AyLvtyJdbv/U1GkiS6gUUzclRoAY4Gs75qkMygJJhU75LW4DNuSF2RMzpxs9jw9Oz1BobHjTdkG3zdP55VxAqw==", - "requires": { - "robust-predicates": "^3.0.0" - } - }, - "detect-libc": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", - "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", - "dev": true - }, - "dir-glob": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dev": true, - "requires": { - "path-type": "^4.0.0" - } - }, - "doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - }, - "dom-serializer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", - "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", - "dev": true, - "requires": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "entities": "^4.2.0" - } - }, - "domelementtype": { - "version": "2.3.0", - "resolved": 
"https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "dev": true - }, - "domhandler": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", - "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", - "dev": true, - "requires": { - "domelementtype": "^2.3.0" - } - }, - "domutils": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.0.1.tgz", - "integrity": "sha512-z08c1l761iKhDFtfXO04C7kTdPBLi41zwOZl00WS8b5eiaebNpY00HKbztwBq+e3vyqWNwWF3mP9YLUeqIrF+Q==", - "dev": true, - "requires": { - "dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.1" - } - }, - "duplexer2": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz", - "integrity": "sha512-asLFVfWWtJ90ZyOUHMqk7/S2w2guQKxUI2itj3d92ADHhxUSbCMGi1f1cBcJ7xM1To+pE/Khbwo1yuNbMEPKeA==", - "dev": true, - "requires": { - "readable-stream": "^2.0.2" - } - }, - "emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" - }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "requires": { - "once": "^1.4.0" - } - }, - "entities": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz", - "integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==", - "dev": true - }, - "esbuild": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.54.tgz", - "integrity": "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA==", - "dev": true, - "requires": { - "@esbuild/linux-loong64": "0.14.54", - "esbuild-android-64": "0.14.54", - "esbuild-android-arm64": "0.14.54", - "esbuild-darwin-64": "0.14.54", - "esbuild-darwin-arm64": "0.14.54", - "esbuild-freebsd-64": "0.14.54", - "esbuild-freebsd-arm64": "0.14.54", - "esbuild-linux-32": "0.14.54", - "esbuild-linux-64": "0.14.54", - "esbuild-linux-arm": "0.14.54", - "esbuild-linux-arm64": "0.14.54", - "esbuild-linux-mips64le": "0.14.54", - "esbuild-linux-ppc64le": "0.14.54", - "esbuild-linux-riscv64": "0.14.54", - "esbuild-linux-s390x": "0.14.54", - "esbuild-netbsd-64": "0.14.54", - "esbuild-openbsd-64": "0.14.54", - "esbuild-sunos-64": "0.14.54", - "esbuild-windows-32": "0.14.54", - "esbuild-windows-64": "0.14.54", - "esbuild-windows-arm64": "0.14.54" - } - }, - "esbuild-android-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.54.tgz", - "integrity": "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ==", - "dev": true, - "optional": true - }, - "esbuild-android-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.54.tgz", - "integrity": "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg==", - "dev": true, - 
"optional": true - }, - "esbuild-darwin-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.54.tgz", - "integrity": "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug==", - "dev": true, - "optional": true - }, - "esbuild-darwin-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.54.tgz", - "integrity": "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw==", - "dev": true, - "optional": true - }, - "esbuild-freebsd-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.54.tgz", - "integrity": "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg==", - "dev": true, - "optional": true - }, - "esbuild-freebsd-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.54.tgz", - "integrity": "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q==", - "dev": true, - "optional": true - }, - "esbuild-linux-32": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.54.tgz", - "integrity": "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw==", - "dev": true, - "optional": true - }, - "esbuild-linux-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.54.tgz", - "integrity": "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg==", - "dev": true, - "optional": true - }, - "esbuild-linux-arm": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.54.tgz", - "integrity": "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw==", - "dev": true, - "optional": true - }, - "esbuild-linux-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.54.tgz", - "integrity": "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig==", - "dev": true, - "optional": true - }, - "esbuild-linux-mips64le": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.54.tgz", - "integrity": "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw==", - "dev": true, - "optional": true - }, - "esbuild-linux-ppc64le": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.54.tgz", - "integrity": "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ==", - "dev": true, - "optional": true - }, - "esbuild-linux-riscv64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.54.tgz", - "integrity": "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg==", - "dev": true, - "optional": true - }, - "esbuild-linux-s390x": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.54.tgz", - "integrity": 
"sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA==", - "dev": true, - "optional": true - }, - "esbuild-netbsd-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.54.tgz", - "integrity": "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w==", - "dev": true, - "optional": true - }, - "esbuild-openbsd-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.54.tgz", - "integrity": "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw==", - "dev": true, - "optional": true - }, - "esbuild-sunos-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.54.tgz", - "integrity": "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw==", - "dev": true, - "optional": true - }, - "esbuild-windows-32": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.54.tgz", - "integrity": "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w==", - "dev": true, - "optional": true - }, - "esbuild-windows-64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.54.tgz", - "integrity": "sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ==", - "dev": true, - "optional": true - }, - "esbuild-windows-arm64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.54.tgz", - "integrity": "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg==", - "dev": true, - "optional": true - }, - "escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" - }, - "escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", - "dev": true - }, - "eslint": { - "version": "8.32.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.32.0.tgz", - "integrity": "sha512-nETVXpnthqKPFyuY2FNjz/bEd6nbosRgKbkgS/y1C7LJop96gYHWpiguLecMHQ2XCPxn77DS0P+68WzG6vkZSQ==", - "dev": true, - "requires": { - "@eslint/eslintrc": "^1.4.1", - "@humanwhocodes/config-array": "^0.11.8", - "@humanwhocodes/module-importer": "^1.0.1", - "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.10.0", - "chalk": "^4.0.0", - "cross-spawn": "^7.0.2", - "debug": "^4.3.2", - "doctrine": "^3.0.0", - "escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.1.1", - "eslint-utils": "^3.0.0", - "eslint-visitor-keys": "^3.3.0", - "espree": "^9.4.0", - "esquery": "^1.4.0", - "esutils": "^2.0.2", - "fast-deep-equal": "^3.1.3", - "file-entry-cache": "^6.0.1", - "find-up": "^5.0.0", - "glob-parent": "^6.0.2", - "globals": "^13.19.0", - "grapheme-splitter": "^1.0.4", - "ignore": "^5.2.0", - "import-fresh": "^3.0.0", - "imurmurhash": "^0.1.4", - "is-glob": "^4.0.0", - "is-path-inside": "^3.0.3", - "js-sdsl": "^4.1.4", - "js-yaml": "^4.1.0", - "json-stable-stringify-without-jsonify": 
"^1.0.1", - "levn": "^0.4.1", - "lodash.merge": "^4.6.2", - "minimatch": "^3.1.2", - "natural-compare": "^1.4.0", - "optionator": "^0.9.1", - "regexpp": "^3.2.0", - "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", - "text-table": "^0.2.0" - }, - "dependencies": { - "eslint-scope": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", - "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", - "dev": true, - "requires": { - "esrecurse": "^4.3.0", - "estraverse": "^5.2.0" - } - }, - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true - } - } - }, - "eslint-config-prettier": { - "version": "8.6.0", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-8.6.0.tgz", - "integrity": "sha512-bAF0eLpLVqP5oEVUFKpMA+NnRFICwn9X8B5jrR9FcqnYBuPbqWEjTEspPWMj5ye6czoSLDweCzSo3Ko7gGrZaA==", - "dev": true, - "requires": {} - }, - "eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dev": true, - "requires": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - } - }, - "eslint-utils": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", - "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", - "dev": true, - "requires": { - "eslint-visitor-keys": "^2.0.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", - "dev": true - } - } - }, - "eslint-visitor-keys": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz", - "integrity": "sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA==", - "dev": true - }, - "espree": { - "version": "9.4.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz", - "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==", - "dev": true, - "requires": { - "acorn": "^8.8.0", - "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^3.3.0" - } - }, - "esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", - "dev": true, - "requires": { - "estraverse": "^5.1.0" - }, - "dependencies": { - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true - } - } - }, - "esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", - "dev": true, - "requires": { - 
"estraverse": "^5.2.0" - }, - "dependencies": { - "estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "dev": true - } - } - }, - "estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", - "dev": true - }, - "esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "dev": true - }, - "expand-template": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", - "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", - "dev": true - }, - "fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", - "dev": true - }, - "fast-glob": { - "version": "3.2.12", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz", - "integrity": "sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "dependencies": { - "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - } - } - }, - "fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", - "dev": true - }, - "fast-levenshtein": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", - "dev": true - }, - "fastq": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", - "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", - "dev": true, - "requires": { - "reusify": "^1.0.4" - } - }, - "fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", - "dev": true, - "requires": { - "pend": "~1.2.0" - } - }, - "file-entry-cache": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", - "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", - "dev": true, - "requires": { - "flat-cache": "^3.0.4" - } - }, - "fill-range": { - "version": "7.0.1", 
- "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "dev": true, - "requires": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - } - }, - "flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", - "dev": true, - "requires": { - "flatted": "^3.1.0", - "rimraf": "^3.0.2" - } - }, - "flatted": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", - "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==", - "dev": true - }, - "follow-redirects": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz", - "integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==", - "dev": true - }, - "fs-constants": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", - "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", - "dev": true - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "dev": true - }, - "fstream": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", - "integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "inherits": "~2.0.0", - "mkdirp": ">=0.5 0", - "rimraf": "2" - }, - "dependencies": { - "rimraf": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", - "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - } - } - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==" - }, - "get-intrinsic": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz", - "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==", - "dev": true, - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "github-from-package": { - "version": "0.0.0", - "resolved": 
"https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", - "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", - "dev": true - }, - "glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "glob-parent": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", - "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", - "dev": true, - "requires": { - "is-glob": "^4.0.3" - } - }, - "globals": { - "version": "13.19.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.19.0.tgz", - "integrity": "sha512-dkQ957uSRWHw7CFXLUtUHQI3g3aWApYhfNR2O6jn/907riyTYKVBmxYVROkBcY614FSSeSJh7Xm7SrUWCxvJMQ==", - "dev": true, - "requires": { - "type-fest": "^0.20.2" - } - }, - "globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", - "dev": true, - "requires": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - } - }, - "graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", - "dev": true - }, - "grapheme-splitter": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", - "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", - "dev": true - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "dev": true - }, - "hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - }, - "htmlparser2": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.1.tgz", - "integrity": "sha512-4lVbmc1diZC7GUJQtRQ5yBAeUCL1exyMwmForWkRLnwyzWBFxN633SALPMGYaWZvKe9j1pRZJpauvmxENSp/EA==", - "dev": true, - "requires": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - 
"domutils": "^3.0.1", - "entities": "^4.3.0" - } - }, - "http-proxy-agent": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", - "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", - "dev": true, - "requires": { - "@tootallnate/once": "1", - "agent-base": "6", - "debug": "4" - } - }, - "https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", - "dev": true, - "requires": { - "agent-base": "6", - "debug": "4" - } - }, - "iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "requires": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - } - }, - "ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true - }, - "ignore": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", - "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", - "dev": true - }, - "import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", - "dev": true, - "requires": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - } - }, - "imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", - "dev": true - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "dev": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true - }, - "internmap": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", - "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==" - }, - "is-ci": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", - "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", - "dev": true, - "requires": { - "ci-info": "^2.0.0" - } - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": 
"sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true - }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" - }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - "is-path-inside": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", - "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", - "dev": true - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", - "dev": true - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true - }, - "js-sdsl": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz", - "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==", - "dev": true - }, - "js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "requires": { - "argparse": "^2.0.1" - } - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "json-stable-stringify-without-jsonify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", - "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true - }, - "keytar": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/keytar/-/keytar-7.9.0.tgz", - "integrity": "sha512-VPD8mtVtm5JNtA2AErl6Chp06JBfy7diFQ7TQQhdpWOl6MrCRB+eRbvAZUsbGQS9kiMq0coJsy0W0vHpDCkWsQ==", - "dev": true, - "requires": { - "node-addon-api": "^4.3.0", - "prebuild-install": "^7.0.1" - } - }, - "leven": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", - "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", - "dev": true - }, - "levn": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", - "integrity": 
"sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", - "dev": true, - "requires": { - "prelude-ls": "^1.2.1", - "type-check": "~0.4.0" - } - }, - "linkify-it": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", - "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", - "dev": true, - "requires": { - "uc.micro": "^1.0.1" - } - }, - "listenercount": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/listenercount/-/listenercount-1.0.1.tgz", - "integrity": "sha512-3mk/Zag0+IJxeDrxSgaDPy4zZ3w05PRZeJNnlWhzFz5OkX49J4krc+A8X2d2M69vGMBEX0uyl8M+W+8gH+kBqQ==", - "dev": true - }, - "locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "dev": true, - "requires": { - "p-locate": "^5.0.0" - } - }, - "lodash.merge": { - "version": "4.6.2", - "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", - "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true - }, - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "requires": { - "yallist": "^4.0.0" - } - }, - "markdown-it": { - "version": "12.3.2", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", - "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", - "dev": true, - "requires": { - "argparse": "^2.0.1", - "entities": "~2.1.0", - "linkify-it": "^3.0.1", - "mdurl": "^1.0.1", - "uc.micro": "^1.0.5" - }, - "dependencies": { - "entities": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", - "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", - "dev": true - } - } - }, - "mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", - "dev": true - }, - "merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true - }, - "micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - "requires": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - } - }, - "mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "dev": true - }, - "mimic-response": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "dev": true - }, - "minimatch": { - 
"version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz", - "integrity": "sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g==", - "dev": true - }, - "mkdirp": { - "version": "0.5.6", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz", - "integrity": "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw==", - "dev": true, - "requires": { - "minimist": "^1.2.6" - } - }, - "mkdirp-classic": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", - "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", - "dev": true - }, - "ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", - "dev": true - }, - "mute-stream": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", - "dev": true - }, - "napi-build-utils": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", - "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", - "dev": true - }, - "natural-compare": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", - "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", - "dev": true - }, - "natural-compare-lite": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/natural-compare-lite/-/natural-compare-lite-1.4.0.tgz", - "integrity": "sha512-Tj+HTDSJJKaZnfiuw+iaF9skdPpTo2GtEly5JHnWV/hfv2Qj/9RKsGISQtLh2ox3l5EAGw487hnBee0sIJ6v2g==", - "dev": true - }, - "node-abi": { - "version": "3.31.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.31.0.tgz", - "integrity": "sha512-eSKV6s+APenqVh8ubJyiu/YhZgxQpGP66ntzUb3lY1xB9ukSRaGnx0AIxI+IM+1+IVYC1oWobgG5L3Lt9ARykQ==", - "dev": true, - "requires": { - "semver": "^7.3.5" - } - }, - "node-addon-api": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", - "integrity": "sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==", - "dev": true - }, - "nth-check": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", - "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", - "dev": true, - "requires": { - "boolbase": "^1.0.0" - } - }, - "object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", - "dev": true - }, - "once": { - "version": "1.4.0", - "resolved": 
"https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "requires": { - "wrappy": "1" - } - }, - "optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", - "dev": true, - "requires": { - "deep-is": "^0.1.3", - "fast-levenshtein": "^2.0.6", - "levn": "^0.4.1", - "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" - } - }, - "ovsx": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/ovsx/-/ovsx-0.5.2.tgz", - "integrity": "sha512-UbLultRCk46WddeA0Cly4hoRhzBJUiLgbIEViXlgOvV54LbsppClDkMLoCevUUBHoiNdMX2NuiSgURAEXgCZdw==", - "dev": true, - "requires": { - "commander": "^6.1.0", - "follow-redirects": "^1.14.6", - "is-ci": "^2.0.0", - "leven": "^3.1.0", - "tmp": "^0.2.1", - "vsce": "^2.6.3" - }, - "dependencies": { - "commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "dev": true - } - } - }, - "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "dev": true, - "requires": { - "yocto-queue": "^0.1.0" - } - }, - "p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "dev": true, - "requires": { - "p-limit": "^3.0.2" - } - }, - "parent-module": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", - "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", - "dev": true, - "requires": { - "callsites": "^3.0.0" - } - }, - "parse-semver": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/parse-semver/-/parse-semver-1.1.1.tgz", - "integrity": "sha512-Eg1OuNntBMH0ojvEKSrvDSnwLmvVuUOSdylH/pSCPNMIspLlweJyIWXCE+k/5hm3cj/EBUYwmWkjhBALNP4LXQ==", - "dev": true, - "requires": { - "semver": "^5.1.0" - }, - "dependencies": { - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - } - } - }, - "parse5": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", - "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", - "dev": true, - "requires": { - "entities": "^4.4.0" - } - }, - "parse5-htmlparser2-tree-adapter": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.0.0.tgz", - "integrity": "sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==", - "dev": true, - "requires": { - "domhandler": "^5.0.2", - "parse5": "^7.0.0" - } - }, - "path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": 
"sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "dev": true - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "dev": true - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", - "dev": true - }, - "pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", - "dev": true - }, - "picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true - }, - "prebuild-install": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz", - "integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==", - "dev": true, - "requires": { - "detect-libc": "^2.0.0", - "expand-template": "^2.0.3", - "github-from-package": "0.0.0", - "minimist": "^1.2.3", - "mkdirp-classic": "^0.5.3", - "napi-build-utils": "^1.0.1", - "node-abi": "^3.3.0", - "pump": "^3.0.0", - "rc": "^1.2.7", - "simple-get": "^4.0.0", - "tar-fs": "^2.0.0", - "tunnel-agent": "^0.6.0" - } - }, - "prelude-ls": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", - "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", - "dev": true - }, - "prettier": { - "version": "2.8.3", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.3.tgz", - "integrity": "sha512-tJ/oJ4amDihPoufT5sM0Z1SKEuKay8LfVAMlbbhnnkvt6BUserZylqo2PN+p9KeljLr0OHa2rXHU1T8reeoTrw==", - "dev": true - }, - "process-nextick-args": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", - "dev": true - }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", - "dev": true - }, - "qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "dev": true, - "requires": { - "side-channel": 
"^1.0.4" - } - }, - "queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true - }, - "rc": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", - "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", - "dev": true, - "requires": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "dependencies": { - "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", - "dev": true - } - } - }, - "read": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", - "integrity": "sha512-rSOKNYUmaxy0om1BNjMN4ezNT6VKK+2xF4GBhc81mkH7L60i6dp8qPYrkndNLT3QPphoII3maL9PVC9XmhHwVQ==", - "dev": true, - "requires": { - "mute-stream": "~0.0.4" - } - }, - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "regexpp": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", - "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", - "dev": true - }, - "require-directory": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", - "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==" - }, - "resolve-from": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", - "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", - "dev": true - }, - "reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true - }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dev": true, - "requires": { - "glob": "^7.1.3" - } - }, - "robust-predicates": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.1.tgz", - "integrity": "sha512-ndEIpszUHiG4HtDsQLeIuMvRsDnn8c8rYStabochtUeCvfuvNptb5TUbVD68LRAILPX7p9nqQGh4xJgn3EHS/g==" - }, - "run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "requires": { - "queue-microtask": "^1.2.2" - } - }, - "rw": { 
- "version": "1.3.3", - "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", - "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==" - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "sax": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", - "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", - "dev": true - }, - "semver": { - "version": "7.3.8", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", - "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "requires": { - "lru-cache": "^6.0.0" - } - }, - "setimmediate": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", - "integrity": "sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==", - "dev": true - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", - "dev": true, - "requires": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" - } - }, - "simple-concat": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", - "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", - "dev": true - }, - "simple-get": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", - "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", - "dev": true, - "requires": { - "decompress-response": "^6.0.0", - "once": "^1.3.1", - "simple-concat": "^1.0.0" - } - }, - "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, - 
"string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "requires": { - "ansi-regex": "^5.0.1" - } - }, - "strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", - "dev": true - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - }, - "tar-fs": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", - "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", - "dev": true, - "requires": { - "chownr": "^1.1.1", - "mkdirp-classic": "^0.5.2", - "pump": "^3.0.0", - "tar-stream": "^2.1.4" - } - }, - "tar-stream": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", - "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", - "dev": true, - "requires": { - "bl": "^4.0.3", - "end-of-stream": "^1.4.1", - "fs-constants": "^1.0.0", - "inherits": "^2.0.3", - "readable-stream": "^3.1.1" - }, - "dependencies": { - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - } - } - }, - "text-table": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", - "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", - "dev": true - }, - "tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "dev": true, - "requires": { - "rimraf": "^3.0.0" - } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } - }, - "traverse": { - "version": "0.3.9", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", - "integrity": "sha512-iawgk0hLP3SxGKDfnDJf8wTz4p2qImnyihM5Hh/sGvQ3K37dPi/w8sRhdNIxYA1TwFwc5mDhIJq+O0RsvXBKdQ==", - "dev": true - }, - "tslib": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz", - 
"integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==", - "dev": true - }, - "tsutils": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", - "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", - "dev": true, - "requires": { - "tslib": "^1.8.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true - } - } - }, - "tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", - "dev": true - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==", - "dev": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "type-check": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", - "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", - "dev": true, - "requires": { - "prelude-ls": "^1.2.1" - } - }, - "type-fest": { - "version": "0.20.2", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", - "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", - "dev": true - }, - "typed-rest-client": { - "version": "1.8.9", - "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.8.9.tgz", - "integrity": "sha512-uSmjE38B80wjL85UFX3sTYEUlvZ1JgCRhsWj/fJ4rZ0FqDUFoIuodtiVeE+cUqiVTOKPdKrp/sdftD15MDek6g==", - "dev": true, - "requires": { - "qs": "^6.9.1", - "tunnel": "0.0.6", - "underscore": "^1.12.1" - } - }, - "typescript": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.4.tgz", - "integrity": "sha512-Uz+dTXYzxXXbsFpM86Wh3dKCxrQqUcVMxwU54orwlJjOpO3ao8L7j5lH+dWfTwgCwIuM9GQ2kvVotzYJMXTBZg==", - "dev": true - }, - "uc.micro": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", - "dev": true - }, - "underscore": { - "version": "1.13.6", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.6.tgz", - "integrity": "sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==", - "dev": true - }, - "unzipper": { - "version": "0.10.11", - "resolved": "https://registry.npmjs.org/unzipper/-/unzipper-0.10.11.tgz", - "integrity": "sha512-+BrAq2oFqWod5IESRjL3S8baohbevGcVA+teAIOYWM3pDVdseogqbzhhvvmiyQrUNKFUnDMtELW3X8ykbyDCJw==", - "dev": true, - "requires": { - "big-integer": "^1.6.17", - "binary": "~0.3.0", - "bluebird": "~3.4.1", - "buffer-indexof-polyfill": "~1.0.0", - "duplexer2": "~0.1.4", - "fstream": "^1.0.12", - "graceful-fs": "^4.2.2", - "listenercount": "~1.0.1", - "readable-stream": "~2.3.6", - "setimmediate": "~1.0.4" - } - }, - "uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": 
"sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "url-join": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", - "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", - "dev": true - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "dev": true - }, - "vsce": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/vsce/-/vsce-2.15.0.tgz", - "integrity": "sha512-P8E9LAZvBCQnoGoizw65JfGvyMqNGlHdlUXD1VAuxtvYAaHBKLBdKPnpy60XKVDAkQCfmMu53g+gq9FM+ydepw==", - "dev": true, - "requires": { - "azure-devops-node-api": "^11.0.1", - "chalk": "^2.4.2", - "cheerio": "^1.0.0-rc.9", - "commander": "^6.1.0", - "glob": "^7.0.6", - "hosted-git-info": "^4.0.2", - "keytar": "^7.7.0", - "leven": "^3.1.0", - "markdown-it": "^12.3.2", - "mime": "^1.3.4", - "minimatch": "^3.0.3", - "parse-semver": "^1.1.1", - "read": "^1.0.7", - "semver": "^5.1.0", - "tmp": "^0.2.1", - "typed-rest-client": "^1.8.4", - "url-join": "^4.0.1", - "xml2js": "^0.4.23", - "yauzl": "^2.3.1", - "yazl": "^2.2.2" - }, - "dependencies": { - "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "chalk": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", - "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", - "dev": true - }, - "commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "dev": true - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", - "dev": true - }, - "semver": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", - "integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, - "vscode-jsonrpc": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.0.2.tgz", - "integrity": "sha512-RY7HwI/ydoC1Wwg4gJ3y6LpU9FJRZAUnTYMXthqhFXXu77ErDd/xkREpGuk4MyYkk4a+XDWAMqe0S3KkelYQEQ==" - }, - "vscode-languageclient": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-8.0.2.tgz", - "integrity": "sha512-lHlthJtphG9gibGb/y72CKqQUxwPsMXijJVpHEC2bvbFqxmkj9LwQ3aGU9dwjBLqsX1S4KjShYppLvg1UJDF/Q==", - "requires": { - "minimatch": "^3.0.4", - "semver": "^7.3.5", - "vscode-languageserver-protocol": "3.17.2" - } - }, - "vscode-languageserver-protocol": { - "version": "3.17.2", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.2.tgz", - "integrity": "sha512-8kYisQ3z/SQ2kyjlNeQxbkkTNmVFoQCqkmGrzLH6A9ecPlgTbp3wDTnUNqaUxYr4vlAcloxx8zwy7G5WdguYNg==", - "requires": { - "vscode-jsonrpc": "8.0.2", - "vscode-languageserver-types": "3.17.2" - } - }, - "vscode-languageserver-types": { - "version": "3.17.2", - "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.2.tgz", - "integrity": "sha512-zHhCWatviizPIq9B7Vh9uvrH6x3sK8itC84HkamnBWoDFJtzBf7SWlpLCZUit72b3os45h6RWQNC9xHRDF8dRA==" - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true - }, - "wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true - }, - "xml2js": { - "version": "0.4.23", - "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", - "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", - "dev": true, - "requires": { - "sax": ">=0.6.0", - "xmlbuilder": "~11.0.0" - } - }, - "xmlbuilder": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", - "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", - "dev": true - }, - "y18n": { - "version": "5.0.8", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", - "integrity": 
"sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==" - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" - }, - "yargs": { - "version": "17.6.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.6.2.tgz", - "integrity": "sha512-1/9UrdHjDZc0eOU0HxOHoS78C69UD3JRMvzlJ7S79S2nTaWRA/whGCTV8o9e/N/1Va9YIV7Q4sOxD8VV4pCWOw==", - "requires": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - } - }, - "yargs-parser": { - "version": "21.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", - "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==" - }, - "yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", - "dev": true, - "requires": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - }, - "yazl": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/yazl/-/yazl-2.5.1.tgz", - "integrity": "sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw==", - "dev": true, - "requires": { - "buffer-crc32": "~0.2.3" - } - }, - "yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true - } } } diff --git a/src/tools/rust-analyzer/editors/code/package.json b/src/tools/rust-analyzer/editors/code/package.json index ee1f832d323..ffb5dd9079a 100644 --- a/src/tools/rust-analyzer/editors/code/package.json +++ b/src/tools/rust-analyzer/editors/code/package.json @@ -26,7 +26,7 @@ } }, "engines": { - "vscode": "^1.66.0" + "vscode": "^1.78.0" }, "enabledApiProposals": [], "scripts": { @@ -38,37 +38,32 @@ "lint": "prettier --check . && eslint -c .eslintrc.js --ext ts ./src ./tests", "fix": "prettier --write . 
&& eslint -c .eslintrc.js --ext ts ./src ./tests --fix", "pretest": "tsc && npm run build", - "test": "cross-env TEST_VARIABLE=test node ./out/tests/runTests.js" + "test": "node ./out/tests/runTests.js" }, "dependencies": { + "@hpcc-js/wasm": "^2.13.0", "anser": "^2.1.1", - "d3": "^7.6.1", + "d3": "^7.8.5", "d3-graphviz": "^5.0.2", - "vscode-languageclient": "^8.0.2", - "@hpcc-js/wasm": "2.5.0" + "vscode-languageclient": "^8.1.0" }, "devDependencies": { + "@tsconfig/strictest": "^2.0.1", "@types/node": "~16.11.7", - "@types/vscode": "~1.66.0", - "@typescript-eslint/eslint-plugin": "^5.30.5", - "@typescript-eslint/parser": "^5.30.5", - "@vscode/test-electron": "^2.1.5", - "cross-env": "^7.0.3", - "esbuild": "^0.14.48", - "eslint": "^8.19.0", - "eslint-config-prettier": "^8.5.0", - "ovsx": "^0.5.2", - "prettier": "^2.7.1", - "tslib": "^2.4.0", - "typescript": "^4.7.4", - "vsce": "^2.9.2" + "@types/vscode": "~1.78.1", + "@typescript-eslint/eslint-plugin": "^6.0.0", + "@typescript-eslint/parser": "^6.0.0", + "@vscode/test-electron": "^2.3.3", + "@vscode/vsce": "^2.19.0", + "esbuild": "^0.18.12", + "eslint": "^8.44.0", + "eslint-config-prettier": "^8.8.0", + "ovsx": "^0.8.2", + "prettier": "^3.0.0", + "tslib": "^2.6.0", + "typescript": "^5.1.6" }, "activationEvents": [ - "onLanguage:rust", - "onCommand:rust-analyzer.analyzerStatus", - "onCommand:rust-analyzer.memoryUsage", - "onCommand:rust-analyzer.reloadWorkspace", - "onCommand:rust-analyzer.startServer", "workspaceContains:*/Cargo.toml", "workspaceContains:*/rust-project.json" ], @@ -289,6 +284,11 @@ "command": "rust-analyzer.revealDependency", "title": "Reveal File", "category": "rust-analyzer" + }, + { + "command": "rust-analyzer.viewMemoryLayout", + "title": "View Memory Layout", + "category": "rust-analyzer" } ], "keybindings": [ @@ -298,11 +298,6 @@ "when": "editorTextFocus && editorLangId == rust" }, { - "command": "rust-analyzer.matchingBrace", - "key": "ctrl+shift+m", - "when": "editorTextFocus && editorLangId == rust" - }, - { "command": "rust-analyzer.joinLines", "key": "ctrl+shift+j", "when": "editorTextFocus && editorLangId == rust" @@ -320,7 +315,7 @@ "default": null, "description": "Custom cargo runner extension ID." }, - "rust-analyzer.runnableEnv": { + "rust-analyzer.runnables.extraEnv": { "anyOf": [ { "type": "null" @@ -349,6 +344,16 @@ "default": null, "markdownDescription": "Environment variables passed to the runnable launched using `Test` or `Debug` lens or `rust-analyzer.run` command." }, + "rust-analyzer.runnables.problemMatcher": { + "type": "array", + "items": { + "type": "string" + }, + "default": [ + "$rustc" + ], + "markdownDescription": "Problem matchers to use for `rust-analyzer.run` command, eg `[\"$rustc\", \"$rust-panic\"]`." 
+ }, "rust-analyzer.server.path": { "type": [ "null", @@ -1646,16 +1651,6 @@ "language": "ra_syntax_tree", "scopeName": "source.ra_syntax_tree", "path": "ra_syntax_tree.tmGrammar.json" - }, - { - "scopeName": "rustdoc.markdown.injection", - "path": "rustdoc.markdown.injection.tmGrammar.json", - "injectTo": [ - "source.rust" - ], - "embeddedLanguages": { - "meta.embedded.block.markdown": "text.html.markdown" - } } ], "problemMatchers": [ @@ -2077,6 +2072,10 @@ { "command": "rust-analyzer.openCargoToml", "when": "inRustProject" + }, + { + "command": "rust-analyzer.viewMemoryLayout", + "when": "inRustProject" } ], "editor/context": [ diff --git a/src/tools/rust-analyzer/editors/code/rustdoc.markdown.injection.tmGrammar.json b/src/tools/rust-analyzer/editors/code/rustdoc.markdown.injection.tmGrammar.json deleted file mode 100644 index 3ee49bff31e..00000000000 --- a/src/tools/rust-analyzer/editors/code/rustdoc.markdown.injection.tmGrammar.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "scopeName": "rustdoc.markdown.injection", - "injectionSelector": "L:source.rust", - "patterns": [ - { - "include": "#doc-comment-line" - }, - { - "include": "#doc-comment-block" - } - ], - "repository": { - "doc-comment-line": { - "name": "comment.line.documentation.rust", - "begin": "^\\s*//(/|!)", - "while": "^\\s*//(/|!)", - "contentName": "meta.embedded.block.markdown", - "patterns": [ - { - "include": "text.html.markdown" - } - ] - }, - "doc-comment-block": { - "name": "comment.block.documentation.rust", - "begin": "/\\*(\\*|!)", - "end": "\\s*\\*/", - "contentName": "meta.embedded.block.markdown", - "patterns": [ - { - "include": "text.html.markdown" - } - ] - } - } -} diff --git a/src/tools/rust-analyzer/editors/code/src/ast_inspector.ts b/src/tools/rust-analyzer/editors/code/src/ast_inspector.ts index 176040120f4..688c53a9b1f 100644 --- a/src/tools/rust-analyzer/editors/code/src/ast_inspector.ts +++ b/src/tools/rust-analyzer/editors/code/src/ast_inspector.ts @@ -1,7 +1,8 @@ import * as vscode from "vscode"; -import { Ctx, Disposable } from "./ctx"; -import { RustEditor, isRustEditor } from "./util"; +import type { Ctx, Disposable } from "./ctx"; +import { type RustEditor, isRustEditor } from "./util"; +import { unwrapUndefinable } from "./undefinable"; // FIXME: consider implementing this via the Tree View API? 
// https://code.visualstudio.com/api/extension-guides/tree-view @@ -36,23 +37,23 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv constructor(ctx: Ctx) { ctx.pushExtCleanup( - vscode.languages.registerHoverProvider({ scheme: "rust-analyzer" }, this) + vscode.languages.registerHoverProvider({ scheme: "rust-analyzer" }, this), ); ctx.pushExtCleanup(vscode.languages.registerDefinitionProvider({ language: "rust" }, this)); vscode.workspace.onDidCloseTextDocument( this.onDidCloseTextDocument, this, - ctx.subscriptions + ctx.subscriptions, ); vscode.workspace.onDidChangeTextDocument( this.onDidChangeTextDocument, this, - ctx.subscriptions + ctx.subscriptions, ); vscode.window.onDidChangeVisibleTextEditors( this.onDidChangeVisibleTextEditors, this, - ctx.subscriptions + ctx.subscriptions, ); } dispose() { @@ -84,7 +85,7 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv private findAstTextEditor(): undefined | vscode.TextEditor { return vscode.window.visibleTextEditors.find( - (it) => it.document.uri.scheme === "rust-analyzer" + (it) => it.document.uri.scheme === "rust-analyzer", ); } @@ -99,7 +100,7 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv // additional positional params are omitted provideDefinition( doc: vscode.TextDocument, - pos: vscode.Position + pos: vscode.Position, ): vscode.ProviderResult<vscode.DefinitionLink[]> { if (!this.rustEditor || doc.uri.toString() !== this.rustEditor.document.uri.toString()) { return; @@ -131,7 +132,7 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv // additional positional params are omitted provideHover( doc: vscode.TextDocument, - hoverPosition: vscode.Position + hoverPosition: vscode.Position, ): vscode.ProviderResult<vscode.Hover> { if (!this.rustEditor) return; @@ -158,14 +159,15 @@ export class AstInspector implements vscode.HoverProvider, vscode.DefinitionProv private parseRustTextRange( doc: vscode.TextDocument, - astLine: string + astLine: string, ): undefined | vscode.Range { const parsedRange = /(\d+)\.\.(\d+)/.exec(astLine); if (!parsedRange) return; const [begin, end] = parsedRange.slice(1).map((off) => this.positionAt(doc, +off)); - - return new vscode.Range(begin, end); + const actualBegin = unwrapUndefinable(begin); + const actualEnd = unwrapUndefinable(end); + return new vscode.Range(actualBegin, actualEnd); } // Memoize the last value, otherwise the CPU is at 100% single core diff --git a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts index b38fa06a85c..ef4dff095cf 100644 --- a/src/tools/rust-analyzer/editors/code/src/bootstrap.ts +++ b/src/tools/rust-analyzer/editors/code/src/bootstrap.ts @@ -1,20 +1,20 @@ import * as vscode from "vscode"; import * as os from "os"; -import { Config } from "./config"; +import type { Config } from "./config"; import { log, isValidExecutable } from "./util"; -import { PersistentState } from "./persistent_state"; +import type { PersistentState } from "./persistent_state"; import { exec } from "child_process"; export async function bootstrap( context: vscode.ExtensionContext, config: Config, - state: PersistentState + state: PersistentState, ): Promise<string> { const path = await getServer(context, config, state); if (!path) { throw new Error( "Rust Analyzer Language Server is not available. " + - "Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation)." 
+ "Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation).", ); } @@ -34,9 +34,9 @@ export async function bootstrap( async function getServer( context: vscode.ExtensionContext, config: Config, - state: PersistentState + state: PersistentState, ): Promise<string | undefined> { - const explicitPath = process.env.__RA_LSP_SERVER_DEBUG ?? config.serverPath; + const explicitPath = process.env["__RA_LSP_SERVER_DEBUG"] ?? config.serverPath; if (explicitPath) { if (explicitPath.startsWith("~/")) { return os.homedir() + explicitPath.slice("~".length); @@ -49,7 +49,7 @@ async function getServer( const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`); const bundledExists = await vscode.workspace.fs.stat(bundled).then( () => true, - () => false + () => false, ); if (bundledExists) { let server = bundled; @@ -58,7 +58,7 @@ async function getServer( const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`); let exists = await vscode.workspace.fs.stat(dest).then( () => true, - () => false + () => false, ); if (exists && config.package.version !== state.serverVersion) { await vscode.workspace.fs.delete(dest); @@ -81,7 +81,7 @@ async function getServer( "run `cargo xtask install --server` to build the language server from sources. " + "If you feel that your platform should be supported, please create an issue " + "about that [here](https://github.com/rust-lang/rust-analyzer/issues) and we " + - "will consider it." + "will consider it.", ); return undefined; } @@ -131,7 +131,7 @@ async function patchelf(dest: vscode.Uri): Promise<void> { } else { resolve(stdout); } - } + }, ); handle.stdin?.write(expression); handle.stdin?.end(); @@ -139,6 +139,6 @@ async function patchelf(dest: vscode.Uri): Promise<void> { } finally { await vscode.workspace.fs.delete(origFile); } - } + }, ); } diff --git a/src/tools/rust-analyzer/editors/code/src/client.ts b/src/tools/rust-analyzer/editors/code/src/client.ts index f721fcce766..ba8546763ec 100644 --- a/src/tools/rust-analyzer/editors/code/src/client.ts +++ b/src/tools/rust-analyzer/editors/code/src/client.ts @@ -6,9 +6,10 @@ import * as Is from "vscode-languageclient/lib/common/utils/is"; import { assert } from "./util"; import * as diagnostics from "./diagnostics"; import { WorkspaceEdit } from "vscode"; -import { Config, prepareVSCodeConfig } from "./config"; +import { type Config, prepareVSCodeConfig } from "./config"; import { randomUUID } from "crypto"; import { sep as pathSeparator } from "path"; +import { unwrapUndefinable } from "./undefinable"; export interface Env { [name: string]: string; @@ -33,21 +34,24 @@ export const LINKED_COMMANDS = new Map<string, ra.CommandLink>(); // add code to remove a target command from the map after the link is // clicked, but assuming most links in hover sheets won't be clicked anyway // this code won't change the overall memory use much. 
-setInterval(function cleanupOlderCommandLinks() { - // keys are returned in insertion order, we'll keep a few - // of recent keys available, and clean the rest - const keys = [...LINKED_COMMANDS.keys()]; - const keysToRemove = keys.slice(0, keys.length - 10); - for (const key of keysToRemove) { - LINKED_COMMANDS.delete(key); - } -}, 10 * 60 * 1000); +setInterval( + function cleanupOlderCommandLinks() { + // keys are returned in insertion order, we'll keep a few + // of recent keys available, and clean the rest + const keys = [...LINKED_COMMANDS.keys()]; + const keysToRemove = keys.slice(0, keys.length - 10); + for (const key of keysToRemove) { + LINKED_COMMANDS.delete(key); + } + }, + 10 * 60 * 1000, +); function renderCommand(cmd: ra.CommandLink): string { const commandId = randomUUID(); LINKED_COMMANDS.set(commandId, cmd); return `[${cmd.title}](command:rust-analyzer.linkToCommand?${encodeURIComponent( - JSON.stringify([commandId]) + JSON.stringify([commandId]), )} '${cmd.tooltip}')`; } @@ -56,7 +60,7 @@ function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownStri .map( (group) => (group.title ? group.title + " " : "") + - group.commands.map(renderCommand).join(" | ") + group.commands.map(renderCommand).join(" | "), ) .join("___"); @@ -71,7 +75,7 @@ export async function createClient( initializationOptions: vscode.WorkspaceConfiguration, serverOptions: lc.ServerOptions, config: Config, - unlinkedFiles: vscode.Uri[] + unlinkedFiles: vscode.Uri[], ): Promise<lc.LanguageClient> { const clientOptions: lc.LanguageClientOptions = { documentSelector: [{ scheme: "file", language: "rust" }], @@ -92,7 +96,7 @@ export async function createClient( async configuration( params: lc.ConfigurationParams, token: vscode.CancellationToken, - next: lc.ConfigurationRequest.HandlerSignature + next: lc.ConfigurationRequest.HandlerSignature, ) { const resp = await next(params, token); if (resp && Array.isArray(resp)) { @@ -116,7 +120,7 @@ export async function createClient( async handleDiagnostics( uri: vscode.Uri, diagnosticList: vscode.Diagnostic[], - next: lc.HandleDiagnosticsSignature + next: lc.HandleDiagnosticsSignature, ) { const preview = config.previewRustcOutput; const errorCode = config.useRustcErrorCode; @@ -136,20 +140,20 @@ export async function createClient( const folder = vscode.workspace.getWorkspaceFolder(uri)?.uri.fsPath; if (folder) { const parentBackslash = uri.fsPath.lastIndexOf( - pathSeparator + "src" + pathSeparator + "src", ); const parent = uri.fsPath.substring(0, parentBackslash); if (parent.startsWith(folder)) { const path = vscode.Uri.file( - parent + pathSeparator + "Cargo.toml" + parent + pathSeparator + "Cargo.toml", ); void vscode.workspace.fs.stat(path).then(async () => { const choice = await vscode.window.showInformationMessage( `This rust file does not belong to a loaded cargo project. 
It looks like it might belong to the workspace at ${path.path}, do you want to add it to the linked Projects?`, "Yes", "No", - "Don't show this again" + "Don't show this again", ); switch (choice) { case undefined: @@ -167,14 +171,14 @@ export async function createClient( config .get<any[]>("linkedProjects") ?.concat(pathToInsert), - false + false, ); break; case "Don't show this again": await config.update( "showUnlinkedFileNotification", false, - false + false, ); break; } @@ -221,7 +225,7 @@ export async function createClient( document: vscode.TextDocument, position: vscode.Position, token: vscode.CancellationToken, - _next: lc.ProvideHoverSignature + _next: lc.ProvideHoverSignature, ) { const editor = vscode.window.activeTextEditor; const positionOrRange = editor?.selection?.contains(position) @@ -235,7 +239,7 @@ export async function createClient( client.code2ProtocolConverter.asTextDocumentIdentifier(document), position: positionOrRange, }, - token + token, ) .then( (result) => { @@ -249,7 +253,7 @@ export async function createClient( (error) => { client.handleFailedRequest(lc.HoverRequest.type, token, error, null); return Promise.resolve(null); - } + }, ); }, // Using custom handling of CodeActions to support action groups and snippet edits. @@ -259,14 +263,14 @@ export async function createClient( range: vscode.Range, context: vscode.CodeActionContext, token: vscode.CancellationToken, - _next: lc.ProvideCodeActionsSignature + _next: lc.ProvideCodeActionsSignature, ) { const params: lc.CodeActionParams = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document), range: client.code2ProtocolConverter.asRange(range), context: await client.code2ProtocolConverter.asCodeActionContext( context, - token + token, ), }; return client.sendRequest(lc.CodeActionRequest.type, params, token).then( @@ -282,21 +286,21 @@ export async function createClient( if (lc.CodeAction.is(item)) { assert( !item.command, - "We don't expect to receive commands in CodeActions" + "We don't expect to receive commands in CodeActions", ); const action = await client.protocol2CodeConverter.asCodeAction( item, - token + token, ); result.push(action); continue; } assert( isCodeActionWithoutEditsAndCommands(item), - "We don't expect edits or commands here" + "We don't expect edits or commands here", ); const kind = client.protocol2CodeConverter.asCodeActionKind( - (item as any).kind + (item as any).kind, ); const action = new vscode.CodeAction(item.title, kind); const group = (item as any).group; @@ -323,10 +327,12 @@ export async function createClient( } for (const [group, { index, items }] of groups) { if (items.length === 1) { - result[index] = items[0]; + const item = unwrapUndefinable(items[0]); + result[index] = item; } else { const action = new vscode.CodeAction(group); - action.kind = items[0].kind; + const item = unwrapUndefinable(items[0]); + action.kind = item.kind; action.command = { command: "rust-analyzer.applyActionGroup", title: "", @@ -348,7 +354,7 @@ export async function createClient( } return result; }, - (_error) => undefined + (_error) => undefined, ); }, }, @@ -361,7 +367,7 @@ export async function createClient( "rust-analyzer", "Rust Analyzer Language Server", serverOptions, - clientOptions + clientOptions, ); // To turn on all proposed features use: client.registerProposedFeatures(); @@ -397,7 +403,7 @@ class ExperimentalFeatures implements lc.StaticFeature { } initialize( _capabilities: lc.ServerCapabilities, - _documentSelector: lc.DocumentSelector | undefined + 
_documentSelector: lc.DocumentSelector | undefined, ): void {} dispose(): void {} } @@ -416,7 +422,7 @@ class OverrideFeatures implements lc.StaticFeature { } initialize( _capabilities: lc.ServerCapabilities, - _documentSelector: lc.DocumentSelector | undefined + _documentSelector: lc.DocumentSelector | undefined, ): void {} dispose(): void {} } diff --git a/src/tools/rust-analyzer/editors/code/src/commands.ts b/src/tools/rust-analyzer/editors/code/src/commands.ts index 98ccd50dc04..e21f536f26a 100644 --- a/src/tools/rust-analyzer/editors/code/src/commands.ts +++ b/src/tools/rust-analyzer/editors/code/src/commands.ts @@ -3,23 +3,24 @@ import * as lc from "vscode-languageclient"; import * as ra from "./lsp_ext"; import * as path from "path"; -import { Ctx, Cmd, CtxInit, discoverWorkspace } from "./ctx"; +import { type Ctx, type Cmd, type CtxInit, discoverWorkspace } from "./ctx"; import { applySnippetWorkspaceEdit, applySnippetTextEdits } from "./snippets"; import { spawnSync } from "child_process"; -import { RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run"; +import { type RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run"; import { AstInspector } from "./ast_inspector"; import { isRustDocument, isCargoTomlDocument, sleep, isRustEditor, - RustEditor, - RustDocument, + type RustEditor, + type RustDocument, } from "./util"; import { startDebugSession, makeDebugConfig } from "./debug"; -import { LanguageClient } from "vscode-languageclient/node"; +import type { LanguageClient } from "vscode-languageclient/node"; import { LINKED_COMMANDS } from "./client"; -import { DependencyId } from "./dependencies_provider"; +import type { DependencyId } from "./dependencies_provider"; +import { unwrapUndefinable } from "./undefinable"; export * from "./ast_inspector"; export * from "./run"; @@ -47,7 +48,7 @@ export function analyzerStatus(ctx: CtxInit): Cmd { })(); ctx.pushExtCleanup( - vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-status", tdcp) + vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-status", tdcp), ); return async () => { @@ -79,7 +80,7 @@ export function memoryUsage(ctx: CtxInit): Cmd { })(); ctx.pushExtCleanup( - vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-memory", tdcp) + vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-memory", tdcp), ); return async () => { @@ -125,11 +126,12 @@ export function matchingBrace(ctx: CtxInit): Cmd { const response = await client.sendRequest(ra.matchingBrace, { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document), positions: editor.selections.map((s) => - client.code2ProtocolConverter.asPosition(s.active) + client.code2ProtocolConverter.asPosition(s.active), ), }); editor.selections = editor.selections.map((sel, idx) => { - const active = client.protocol2CodeConverter.asPosition(response[idx]); + const position = unwrapUndefinable(response[idx]); + const active = client.protocol2CodeConverter.asPosition(position); const anchor = sel.isEmpty ? 
active : sel.anchor; return new vscode.Selection(anchor, active); }); @@ -194,7 +196,7 @@ export function onEnter(ctx: CtxInit): Cmd { const lcEdits = await client .sendRequest(ra.onEnter, { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( - editor.document + editor.document, ), position: client.code2ProtocolConverter.asPosition(editor.selection.active), }) @@ -231,7 +233,7 @@ export function parentModule(ctx: CtxInit): Cmd { if (!locations) return; if (locations.length === 1) { - const loc = locations[0]; + const loc = unwrapUndefinable(locations[0]); const uri = client.protocol2CodeConverter.asUri(loc.targetUri); const range = client.protocol2CodeConverter.asRange(loc.targetRange); @@ -247,7 +249,7 @@ export function parentModule(ctx: CtxInit): Cmd { client, uri, position, - locations.map((loc) => lc.Location.create(loc.targetUri, loc.targetRange)) + locations.map((loc) => lc.Location.create(loc.targetUri, loc.targetRange)), ); } }; @@ -331,7 +333,13 @@ async function revealParentChain(document: RustDocument, ctx: CtxInit) { } while (!ctx.dependencies?.contains(documentPath)); parentChain.reverse(); for (const idx in parentChain) { - await ctx.treeView?.reveal(parentChain[idx], { select: true, expand: true }); + const treeView = ctx.treeView; + if (!treeView) { + continue; + } + + const dependency = unwrapUndefinable(parentChain[idx]); + await treeView.reveal(dependency, { select: true, expand: true }); } } @@ -349,7 +357,7 @@ export function ssr(ctx: CtxInit): Cmd { const position = editor.selection.active; const selections = editor.selections; const textDocument = client.code2ProtocolConverter.asTextDocumentIdentifier( - editor.document + editor.document, ); const options: vscode.InputBoxOptions = { @@ -365,7 +373,7 @@ export function ssr(ctx: CtxInit): Cmd { selections, }); } catch (e) { - return e.toString(); + return String(e); } return null; }, @@ -389,9 +397,9 @@ export function ssr(ctx: CtxInit): Cmd { }); await vscode.workspace.applyEdit( - await client.protocol2CodeConverter.asWorkspaceEdit(edit, token) + await client.protocol2CodeConverter.asWorkspaceEdit(edit, token), ); - } + }, ); }; } @@ -420,12 +428,12 @@ export function syntaxTree(ctx: CtxInit): Cmd { vscode.workspace.onDidChangeTextDocument( this.onDidChangeTextDocument, this, - ctx.subscriptions + ctx.subscriptions, ); vscode.window.onDidChangeActiveTextEditor( this.onDidChangeActiveTextEditor, this, - ctx.subscriptions + ctx.subscriptions, ); } @@ -444,7 +452,7 @@ export function syntaxTree(ctx: CtxInit): Cmd { async provideTextDocumentContent( uri: vscode.Uri, - ct: vscode.CancellationToken + ct: vscode.CancellationToken, ): Promise<string> { const rustEditor = ctx.activeRustEditor; if (!rustEditor) return ""; @@ -467,12 +475,12 @@ export function syntaxTree(ctx: CtxInit): Cmd { ctx.pushExtCleanup(new AstInspector(ctx)); ctx.pushExtCleanup( - vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-syntax-tree", tdcp) + vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-syntax-tree", tdcp), ); ctx.pushExtCleanup( vscode.languages.setLanguageConfiguration("ra_syntax_tree", { brackets: [["[", ")"]], - }) + }), ); return async () => { @@ -505,7 +513,7 @@ function viewFileUsingTextDocumentContentProvider( requestType: lc.RequestType<lc.TextDocumentPositionParams, string, void>, uri: string, scheme: string, - shouldUpdate: boolean + shouldUpdate: boolean, ): Cmd { const tdcp = new (class implements vscode.TextDocumentContentProvider { readonly uri = vscode.Uri.parse(uri); 
@@ -514,12 +522,12 @@ function viewFileUsingTextDocumentContentProvider( vscode.workspace.onDidChangeTextDocument( this.onDidChangeTextDocument, this, - ctx.subscriptions + ctx.subscriptions, ); vscode.window.onDidChangeActiveTextEditor( this.onDidChangeActiveTextEditor, this, - ctx.subscriptions + ctx.subscriptions, ); } @@ -538,7 +546,7 @@ function viewFileUsingTextDocumentContentProvider( async provideTextDocumentContent( _uri: vscode.Uri, - ct: vscode.CancellationToken + ct: vscode.CancellationToken, ): Promise<string> { const rustEditor = ctx.activeRustEditor; if (!rustEditor) return ""; @@ -546,7 +554,7 @@ function viewFileUsingTextDocumentContentProvider( const client = ctx.client; const params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( - rustEditor.document + rustEditor.document, ), position: client.code2ProtocolConverter.asPosition(rustEditor.selection.active), }; @@ -594,7 +602,7 @@ export function interpretFunction(ctx: CtxInit): Cmd { ra.interpretFunction, uri, `rust-analyzer-interpret-function`, - false + false, ); } @@ -606,12 +614,12 @@ export function viewFileText(ctx: CtxInit): Cmd { vscode.workspace.onDidChangeTextDocument( this.onDidChangeTextDocument, this, - ctx.subscriptions + ctx.subscriptions, ); vscode.window.onDidChangeActiveTextEditor( this.onDidChangeActiveTextEditor, this, - ctx.subscriptions + ctx.subscriptions, ); } @@ -630,14 +638,14 @@ export function viewFileText(ctx: CtxInit): Cmd { async provideTextDocumentContent( _uri: vscode.Uri, - ct: vscode.CancellationToken + ct: vscode.CancellationToken, ): Promise<string> { const rustEditor = ctx.activeRustEditor; if (!rustEditor) return ""; const client = ctx.client; const params = client.code2ProtocolConverter.asTextDocumentIdentifier( - rustEditor.document + rustEditor.document, ); return client.sendRequest(ra.viewFileText, params, ct); } @@ -648,7 +656,7 @@ export function viewFileText(ctx: CtxInit): Cmd { })(); ctx.pushExtCleanup( - vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-file-text", tdcp) + vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-file-text", tdcp), ); return async () => { @@ -669,12 +677,12 @@ export function viewItemTree(ctx: CtxInit): Cmd { vscode.workspace.onDidChangeTextDocument( this.onDidChangeTextDocument, this, - ctx.subscriptions + ctx.subscriptions, ); vscode.window.onDidChangeActiveTextEditor( this.onDidChangeActiveTextEditor, this, - ctx.subscriptions + ctx.subscriptions, ); } @@ -693,7 +701,7 @@ export function viewItemTree(ctx: CtxInit): Cmd { async provideTextDocumentContent( _uri: vscode.Uri, - ct: vscode.CancellationToken + ct: vscode.CancellationToken, ): Promise<string> { const rustEditor = ctx.activeRustEditor; if (!rustEditor) return ""; @@ -701,7 +709,7 @@ export function viewItemTree(ctx: CtxInit): Cmd { const params = { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( - rustEditor.document + rustEditor.document, ), }; return client.sendRequest(ra.viewItemTree, params, ct); @@ -713,7 +721,7 @@ export function viewItemTree(ctx: CtxInit): Cmd { })(); ctx.pushExtCleanup( - vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-item-tree", tdcp) + vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-item-tree", tdcp), ); return async () => { @@ -738,7 +746,7 @@ function crateGraph(ctx: CtxInit, full: boolean): Cmd { enableScripts: true, retainContextWhenHidden: true, localResourceRoots: [nodeModulesPath], - } + }, ); const params = { full: full, @@ -827,7 
+835,7 @@ export function expandMacro(ctx: CtxInit): Cmd { const expanded = await client.sendRequest(ra.expandMacro, { textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier( - editor.document + editor.document, ), position, }); @@ -843,7 +851,7 @@ export function expandMacro(ctx: CtxInit): Cmd { })(); ctx.pushExtCleanup( - vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-expand-macro", tdcp) + vscode.workspace.registerTextDocumentContentProvider("rust-analyzer-expand-macro", tdcp), ); return async () => { @@ -875,7 +883,7 @@ export function addProject(ctx: CtxInit): Cmd { return discoverWorkspace([file], discoverProjectCommand, { cwd: path.dirname(file.uri.fsPath), }); - }) + }), ); ctx.addToDiscoveredWorkspaces(workspaces); @@ -893,14 +901,14 @@ async function showReferencesImpl( client: LanguageClient | undefined, uri: string, position: lc.Position, - locations: lc.Location[] + locations: lc.Location[], ) { if (client) { await vscode.commands.executeCommand( "editor.action.showReferences", vscode.Uri.parse(uri), client.protocol2CodeConverter.asPosition(position), - locations.map(client.protocol2CodeConverter.asLocation) + locations.map(client.protocol2CodeConverter.asLocation), ); } } @@ -917,7 +925,7 @@ export function applyActionGroup(_ctx: CtxInit): Cmd { if (!selectedAction) return; await vscode.commands.executeCommand( "rust-analyzer.resolveCodeAction", - selectedAction.arguments + selectedAction.arguments, ); }; } @@ -992,7 +1000,7 @@ export function resolveCodeAction(ctx: CtxInit): Cmd { documentChanges: itemEdit.documentChanges?.filter((change) => "kind" in change), }; const fileSystemEdit = await client.protocol2CodeConverter.asWorkspaceEdit( - lcFileSystemEdit + lcFileSystemEdit, ); await vscode.workspace.applyEdit(fileSystemEdit); await applySnippetWorkspaceEdit(edit); @@ -1045,12 +1053,12 @@ export function peekTests(ctx: CtxInit): Cmd { const locations: lc.Location[] = tests.map((it) => lc.Location.create( it.runnable.location!.targetUri, - it.runnable.location!.targetSelectionRange - ) + it.runnable.location!.targetSelectionRange, + ), ); await showReferencesImpl(client, uri, position, locations); - } + }, ); }; } @@ -1121,3 +1129,281 @@ export function linkToCommand(_: Ctx): Cmd { } }; } + +export function viewMemoryLayout(ctx: CtxInit): Cmd { + return async () => { + const editor = vscode.window.activeTextEditor; + if (!editor) return; + const client = ctx.client; + + const position = editor.selection.active; + const expanded = await client.sendRequest(ra.viewRecursiveMemoryLayout, { + textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document), + position, + }); + + const document = vscode.window.createWebviewPanel( + "memory_layout", + "[Memory Layout]", + vscode.ViewColumn.Two, + { enableScripts: true }, + ); + + document.webview.html = `<!DOCTYPE html> +<html lang="en"> +<head> + <meta charset="UTF-8"> + <meta http-equiv="X-UA-Compatible" content="IE=edge"> + <meta name="viewport" content="width=device-width, initial-scale=1.0"> + <title>Document</title> + <style> + * { + box-sizing: border-box; + } + + body { + margin: 0; + overflow: hidden; + min-height: 100%; + height: 100vh; + padding: 32px; + position: relative; + display: block; + + background-color: var(--vscode-editor-background); + font-family: var(--vscode-editor-font-family); + font-size: var(--vscode-editor-font-size); + color: var(--vscode-editor-foreground); + } + + .container { + position: relative; + } + + .trans { + transition: all 0.2s 
ease-in-out; + } + + .grid { + height: 100%; + position: relative; + color: var(--vscode-commandCenter-activeBorder); + pointer-events: none; + } + + .grid-line { + position: absolute; + width: 100%; + height: 1px; + background-color: var(--vscode-commandCenter-activeBorder); + } + + #tooltip { + position: fixed; + display: none; + z-index: 1; + pointer-events: none; + padding: 4px 8px; + z-index: 2; + + color: var(--vscode-editorHoverWidget-foreground); + background-color: var(--vscode-editorHoverWidget-background); + border: 1px solid var(--vscode-editorHoverWidget-border); + } + + #tooltip b { + color: var(--vscode-editorInlayHint-typeForeground); + } + + #tooltip ul { + margin-left: 0; + padding-left: 20px; + } + + table { + position: absolute; + transform: rotateZ(90deg) rotateX(180deg); + transform-origin: top left; + border-collapse: collapse; + table-layout: fixed; + left: 48px; + top: 0; + max-height: calc(100vw - 64px - 48px); + z-index: 1; + } + + td { + border: 1px solid var(--vscode-focusBorder); + writing-mode: vertical-rl; + text-orientation: sideways-right; + + height: 80px; + } + + td p { + height: calc(100% - 16px); + width: calc(100% - 8px); + margin: 8px 4px; + display: inline-block; + transform: rotateY(180deg); + pointer-events: none; + overflow: hidden; + } + + td p * { + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + display: inline-block; + height: 100%; + } + + td p b { + color: var(--vscode-editorInlayHint-typeForeground); + } + + td:hover { + background-color: var(--vscode-editor-hoverHighlightBackground); + } + + td:empty { + visibility: hidden; + border: 0; + } + </style> +</head> +<body> + <div id="tooltip"></div> +</body> +<script>(function() { + +const data = ${JSON.stringify(expanded)} + +if (!(data && data.nodes.length)) { + document.body.innerText = "Not Available" + return +} + +data.nodes.map(n => { + n.typename = n.typename.replaceAll('&', '&amp;').replaceAll('<', '&lt;').replaceAll('>', '&gt;').replaceAll('"', '&quot;').replaceAll("'", '&#039;') + return n +}) + +let height = window.innerHeight - 64 + +addEventListener("resize", e => { + const new_height = window.innerHeight - 64 + height = new_height + container.classList.remove("trans") + table.classList.remove("trans") + locate() + setTimeout(() => { // give delay to redraw, annoying but needed + container.classList.add("trans") + table.classList.add("trans") + }, 0) +}) + +const container = document.createElement("div") +container.classList.add("container") +container.classList.add("trans") +document.body.appendChild(container) + +const tooltip = document.getElementById("tooltip") + +let y = 0 +let zoom = 1.0 + +const table = document.createElement("table") +table.classList.add("trans") +container.appendChild(table) +const rows = [] + +function node_t(idx, depth, offset) { + if (!rows[depth]) { + rows[depth] = { el: document.createElement("tr"), offset: 0 } + } + + if (rows[depth].offset < offset) { + const pad = document.createElement("td") + pad.colSpan = offset - rows[depth].offset + rows[depth].el.appendChild(pad) + rows[depth].offset += offset - rows[depth].offset + } + + const td = document.createElement("td") + td.innerHTML = '<p><span>' + data.nodes[idx].itemName + ':</span> <b>' + data.nodes[idx].typename + '</b></p>' + + td.colSpan = data.nodes[idx].size + + td.addEventListener("mouseover", e => { + const node = data.nodes[idx] + tooltip.innerHTML = node.itemName + ": <b>" + node.typename + "</b><br/>" + + "<ul>" + + "<li>size = " + node.size + "</li>" + + "<li>align = " +
node.alignment + "</li>" + + "<li>field offset = " + node.offset + "</li>" + + "</ul>" + + "<i>double click to focus</i>" + + tooltip.style.display = "block" + }) + td.addEventListener("mouseleave", _ => tooltip.style.display = "none") + const total_offset = rows[depth].offset + td.addEventListener("dblclick", e => { + const node = data.nodes[idx] + zoom = data.nodes[0].size / node.size + y = -(total_offset) / data.nodes[0].size * zoom + x = 0 + locate() + }) + + rows[depth].el.appendChild(td) + rows[depth].offset += data.nodes[idx].size + + + if (data.nodes[idx].childrenStart != -1) { + for (let i = 0; i < data.nodes[idx].childrenLen; i++) { + if (data.nodes[data.nodes[idx].childrenStart + i].size) { + node_t(data.nodes[idx].childrenStart + i, depth + 1, offset + data.nodes[data.nodes[idx].childrenStart + i].offset) + } + } + } +} + +node_t(0, 0, 0) + +for (const row of rows) table.appendChild(row.el) + +const grid = document.createElement("div") +grid.classList.add("grid") +container.appendChild(grid) + +for (let i = 0; i < data.nodes[0].size / 8 + 1; i++) { + const el = document.createElement("div") + el.classList.add("grid-line") + el.style.top = (i / (data.nodes[0].size / 8) * 100) + "%" + el.innerText = i * 8 + grid.appendChild(el) +} + +addEventListener("mousemove", e => { + tooltip.style.top = e.clientY + 10 + "px" + tooltip.style.left = e.clientX + 10 + "px" +}) + +function locate() { + container.style.top = height * y + "px" + container.style.height = (height * zoom) + "px" + + table.style.width = container.style.height +} + +locate() + +})() +</script> +</html>`; + + ctx.pushExtCleanup(document); + }; +} diff --git a/src/tools/rust-analyzer/editors/code/src/config.ts b/src/tools/rust-analyzer/editors/code/src/config.ts index c6d2bcc2b2a..a047f9659a9 100644 --- a/src/tools/rust-analyzer/editors/code/src/config.ts +++ b/src/tools/rust-analyzer/editors/code/src/config.ts @@ -2,8 +2,9 @@ import * as Is from "vscode-languageclient/lib/common/utils/is"; import * as os from "os"; import * as path from "path"; import * as vscode from "vscode"; -import { Env } from "./client"; +import type { Env } from "./client"; import { log } from "./util"; +import { expectNotUndefined, unwrapUndefinable } from "./undefinable"; export type RunnableEnvCfg = | undefined @@ -37,7 +38,7 @@ export class Config { vscode.workspace.onDidChangeConfiguration( this.onDidChangeConfiguration, this, - ctx.subscriptions + ctx.subscriptions, ); this.refreshLogging(); this.configureLanguage(); @@ -63,7 +64,7 @@ export class Config { this.configureLanguage(); const requiresReloadOpt = this.requiresReloadOpts.find((opt) => - event.affectsConfiguration(opt) + event.affectsConfiguration(opt), ); if (!requiresReloadOpt) return; @@ -98,7 +99,8 @@ export class Config { let onEnterRules: vscode.OnEnterRule[] = [ { // Carry indentation from the previous line - beforeText: /^\s*$/, + // if it's only whitespace + beforeText: /^\s+$/, action: { indentAction: vscode.IndentAction.None }, }, { @@ -208,8 +210,8 @@ export class Config { Object.entries(extraEnv).map(([k, v]) => [ k, typeof v !== "string" ? 
v.toString() : v, - ]) - ) + ]), + ), ); } get traceExtension() { @@ -220,12 +222,16 @@ export class Config { return this.get<string[] | undefined>("discoverProjectCommand"); } + get problemMatcher(): string[] { + return this.get<string[]>("runnables.problemMatcher") || []; + } + get cargoRunner() { return this.get<string | undefined>("cargoRunner"); } - get runnableEnv() { - const item = this.get<any>("runnableEnv"); + get runnablesExtraEnv() { + const item = this.get<any>("runnables.extraEnv") ?? this.get<any>("runnableEnv"); if (!item) return item; const fixRecord = (r: Record<string, any>) => { for (const key in r) { @@ -300,7 +306,7 @@ export class Config { // to interact with. export function prepareVSCodeConfig<T>( resp: T, - cb?: (key: Extract<keyof T, string>, res: { [key: string]: any }) => void + cb?: (key: Extract<keyof T, string>, res: { [key: string]: any }) => void, ): T { if (Is.string(resp)) { return substituteVSCodeVariableInString(resp) as T; @@ -334,7 +340,7 @@ export function substituteVariablesInEnv(env: Env): Env { const depRe = new RegExp(/\${(?<depName>.+?)}/g); let match = undefined; while ((match = depRe.exec(value))) { - const depName = match.groups!.depName; + const depName = unwrapUndefinable(match.groups?.["depName"]); deps.add(depName); // `depName` at this point can have a form of `expression` or // `prefix:expression` @@ -343,7 +349,7 @@ export function substituteVariablesInEnv(env: Env): Env { } } return [`env:${key}`, { deps: [...deps], value }]; - }) + }), ); const resolved = new Set<string>(); @@ -352,7 +358,7 @@ export function substituteVariablesInEnv(env: Env): Env { if (match) { const { prefix, body } = match.groups!; if (prefix === "env") { - const envName = body; + const envName = unwrapUndefinable(body); envWithDeps[dep] = { value: process.env[envName] ?? "", deps: [], @@ -380,13 +386,12 @@ export function substituteVariablesInEnv(env: Env): Env { do { leftToResolveSize = toResolve.size; for (const key of toResolve) { - if (envWithDeps[key].deps.every((dep) => resolved.has(dep))) { - envWithDeps[key].value = envWithDeps[key].value.replace( - /\${(?<depName>.+?)}/g, - (_wholeMatch, depName) => { - return envWithDeps[depName].value; - } - ); + const item = unwrapUndefinable(envWithDeps[key]); + if (item.deps.every((dep) => resolved.has(dep))) { + item.value = item.value.replace(/\${(?<depName>.+?)}/g, (_wholeMatch, depName) => { + const item = unwrapUndefinable(envWithDeps[depName]); + return item.value; + }); resolved.add(key); toResolve.delete(key); } @@ -395,7 +400,8 @@ export function substituteVariablesInEnv(env: Env): Env { const resolvedEnv: Env = {}; for (const key of Object.keys(env)) { - resolvedEnv[key] = envWithDeps[`env:${key}`].value; + const item = unwrapUndefinable(envWithDeps[`env:${key}`]); + resolvedEnv[key] = item.value; } return resolvedEnv; } @@ -414,20 +420,19 @@ function substituteVSCodeVariableInString(val: string): string { function computeVscodeVar(varName: string): string | null { const workspaceFolder = () => { const folders = vscode.workspace.workspaceFolders ?? []; - if (folders.length === 1) { - // TODO: support for remote workspaces? - return folders[0].uri.fsPath; - } else if (folders.length > 1) { - // could use currently opened document to detect the correct - // workspace. However, that would be determined by the document - // user has opened on Editor startup. Could lead to - // unpredictable workspace selection in practice. 
- // It's better to pick the first one - return folders[0].uri.fsPath; - } else { - // no workspace opened - return ""; - } + const folder = folders[0]; + // TODO: support for remote workspaces? + const fsPath: string = + folder === undefined + ? // no workspace opened + "" + : // could use currently opened document to detect the correct + // workspace. However, that would be determined by the document + // user has opened on Editor startup. Could lead to + // unpredictable workspace selection in practice. + // It's better to pick the first one + folder.uri.fsPath; + return fsPath; }; // https://code.visualstudio.com/docs/editor/variables-reference const supportedVariables: { [k: string]: () => string } = { @@ -444,13 +449,17 @@ function computeVscodeVar(varName: string): string | null { // https://github.com/microsoft/vscode/blob/08ac1bb67ca2459496b272d8f4a908757f24f56f/src/vs/workbench/api/common/extHostVariableResolverService.ts#L81 // or // https://github.com/microsoft/vscode/blob/29eb316bb9f154b7870eb5204ec7f2e7cf649bec/src/vs/server/node/remoteTerminalChannel.ts#L56 - execPath: () => process.env.VSCODE_EXEC_PATH ?? process.execPath, + execPath: () => process.env["VSCODE_EXEC_PATH"] ?? process.execPath, pathSeparator: () => path.sep, }; if (varName in supportedVariables) { - return supportedVariables[varName](); + const fn = expectNotUndefined( + supportedVariables[varName], + `${varName} should not be undefined here`, + ); + return fn(); } else { // return "${" + varName + "}"; return null; diff --git a/src/tools/rust-analyzer/editors/code/src/ctx.ts b/src/tools/rust-analyzer/editors/code/src/ctx.ts index a72b5391ff1..ac144cbebb2 100644 --- a/src/tools/rust-analyzer/editors/code/src/ctx.ts +++ b/src/tools/rust-analyzer/editors/code/src/ctx.ts @@ -1,5 +1,5 @@ import * as vscode from "vscode"; -import * as lc from "vscode-languageclient/node"; +import type * as lc from "vscode-languageclient/node"; import * as ra from "./lsp_ext"; import * as path from "path"; @@ -12,19 +12,19 @@ import { isRustEditor, LazyOutputChannel, log, - RustEditor, + type RustEditor, } from "./util"; -import { ServerStatusParams } from "./lsp_ext"; +import type { ServerStatusParams } from "./lsp_ext"; import { - Dependency, - DependencyFile, + type Dependency, + type DependencyFile, RustDependenciesProvider, - DependencyId, + type DependencyId, } from "./dependencies_provider"; import { execRevealDependency } from "./commands"; import { PersistentState } from "./persistent_state"; import { bootstrap } from "./bootstrap"; -import { ExecOptions } from "child_process"; +import type { ExecOptions } from "child_process"; // We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if // only those are in use. 
We use "Empty" to represent these scenarios @@ -42,10 +42,10 @@ export type Workspace = export function fetchWorkspace(): Workspace { const folders = (vscode.workspace.workspaceFolders || []).filter( - (folder) => folder.uri.scheme === "file" + (folder) => folder.uri.scheme === "file", ); const rustDocuments = vscode.workspace.textDocuments.filter((document) => - isRustDocument(document) + isRustDocument(document), ); return folders.length === 0 @@ -61,7 +61,7 @@ export function fetchWorkspace(): Workspace { export async function discoverWorkspace( files: readonly vscode.TextDocument[], command: string[], - options: ExecOptions + options: ExecOptions, ): Promise<JsonProject> { const paths = files.map((f) => `"${f.uri.fsPath}"`).join(" "); const joinedCommand = command.join(" "); @@ -110,7 +110,7 @@ export class Ctx { constructor( readonly extCtx: vscode.ExtensionContext, commandFactories: Record<string, CommandFactory>, - workspace: Workspace + workspace: Workspace, ) { extCtx.subscriptions.push(this); this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left); @@ -186,7 +186,7 @@ export class Ctx { log.error("Bootstrap error", err); throw new Error(message); - } + }, ); const newEnv = Object.assign({}, process.env, this.config.serverExtraEnv); const run: lc.Executable = { @@ -216,7 +216,7 @@ export class Ctx { return discoverWorkspace([file], discoverProjectCommand, { cwd: path.dirname(file.uri.fsPath), }); - }) + }), ); this.addToDiscoveredWorkspaces(workspaces); @@ -230,7 +230,7 @@ export class Ctx { if (key === "linkedProjects" && this.config.discoveredWorkspaces.length > 0) { obj["linkedProjects"] = this.config.discoveredWorkspaces; } - } + }, ); this._client = await createClient( @@ -239,17 +239,17 @@ export class Ctx { initializationOptions, serverOptions, this.config, - this.unlinkedFiles + this.unlinkedFiles, ); this.pushClientCleanup( this._client.onNotification(ra.serverStatus, (params) => - this.setServerStatus(params) - ) + this.setServerStatus(params), + ), ); this.pushClientCleanup( this._client.onNotification(ra.openServerLogs, () => { this.outputChannel!.show(); - }) + }), ); } return this._client; @@ -395,7 +395,7 @@ export class Ctx { } else { callback = () => vscode.window.showErrorMessage( - `command ${fullName} failed: rust-analyzer server is not running` + `command ${fullName} failed: rust-analyzer server is not running`, ); } @@ -423,7 +423,7 @@ export class Ctx { } statusBar.color = new vscode.ThemeColor("statusBarItem.warningForeground"); statusBar.backgroundColor = new vscode.ThemeColor( - "statusBarItem.warningBackground" + "statusBarItem.warningBackground", ); statusBar.command = "rust-analyzer.openLogs"; icon = "$(warning) "; @@ -440,7 +440,7 @@ export class Ctx { case "stopped": statusBar.tooltip.appendText("Server is stopped"); statusBar.tooltip.appendMarkdown( - "\n\n[Start server](command:rust-analyzer.startServer)" + "\n\n[Start server](command:rust-analyzer.startServer)", ); statusBar.color = undefined; statusBar.backgroundColor = undefined; @@ -453,13 +453,13 @@ export class Ctx { } statusBar.tooltip.appendMarkdown("\n\n[Open logs](command:rust-analyzer.openLogs)"); statusBar.tooltip.appendMarkdown( - "\n\n[Reload Workspace](command:rust-analyzer.reloadWorkspace)" + "\n\n[Reload Workspace](command:rust-analyzer.reloadWorkspace)", ); statusBar.tooltip.appendMarkdown( - "\n\n[Rebuild Proc Macros](command:rust-analyzer.rebuildProcMacros)" + "\n\n[Rebuild Proc Macros](command:rust-analyzer.rebuildProcMacros)", ); 
statusBar.tooltip.appendMarkdown( - "\n\n[Restart server](command:rust-analyzer.restartServer)" + "\n\n[Restart server](command:rust-analyzer.restartServer)", ); statusBar.tooltip.appendMarkdown("\n\n[Stop server](command:rust-analyzer.stopServer)"); if (!status.quiescent) icon = "$(sync~spin) "; diff --git a/src/tools/rust-analyzer/editors/code/src/debug.ts b/src/tools/rust-analyzer/editors/code/src/debug.ts index cffee1de6af..f3d6238d51b 100644 --- a/src/tools/rust-analyzer/editors/code/src/debug.ts +++ b/src/tools/rust-analyzer/editors/code/src/debug.ts @@ -1,18 +1,19 @@ import * as os from "os"; import * as vscode from "vscode"; import * as path from "path"; -import * as ra from "./lsp_ext"; +import type * as ra from "./lsp_ext"; import { Cargo, getRustcId, getSysroot } from "./toolchain"; -import { Ctx } from "./ctx"; +import type { Ctx } from "./ctx"; import { prepareEnv } from "./run"; +import { unwrapUndefinable } from "./undefinable"; const debugOutput = vscode.window.createOutputChannel("Debug"); type DebugConfigProvider = ( config: ra.Runnable, executable: string, env: Record<string, string>, - sourceFileMap?: Record<string, string> + sourceFileMap?: Record<string, string>, ) => vscode.DebugConfiguration; export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise<void> { @@ -30,7 +31,7 @@ export async function makeDebugConfig(ctx: Ctx, runnable: ra.Runnable): Promise< const answer = await vscode.window.showErrorMessage( `Launch configuration '${debugConfig.name}' already exists!`, "Cancel", - "Update" + "Update", ); if (answer === "Cancel") return; @@ -67,7 +68,7 @@ export async function startDebugSession(ctx: Ctx, runnable: ra.Runnable): Promis async function getDebugConfiguration( ctx: Ctx, - runnable: ra.Runnable + runnable: ra.Runnable, ): Promise<vscode.DebugConfiguration | undefined> { const editor = ctx.activeRustEditor; if (!editor) return; @@ -91,7 +92,7 @@ async function getDebugConfiguration( if (!debugEngine) { await vscode.window.showErrorMessage( `Install [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb)` + - ` or [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools) extension for debugging.` + ` or [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools) extension for debugging.`, ); return; } @@ -105,12 +106,13 @@ async function getDebugConfiguration( const workspaceFolders = vscode.workspace.workspaceFolders!; const isMultiFolderWorkspace = workspaceFolders.length > 1; const firstWorkspace = workspaceFolders[0]; - const workspace = + const maybeWorkspace = !isMultiFolderWorkspace || !runnable.args.workspaceRoot ? firstWorkspace : workspaceFolders.find((w) => runnable.args.workspaceRoot?.includes(w.uri.fsPath)) || firstWorkspace; + const workspace = unwrapUndefinable(maybeWorkspace); const wsFolder = path.normalize(workspace.uri.fsPath); const workspaceQualifier = isMultiFolderWorkspace ? 
`:${workspace.name}` : ""; function simplifyPath(p: string): string { @@ -118,7 +120,7 @@ async function getDebugConfiguration( return path.normalize(p).replace(wsFolder, "${workspaceFolder" + workspaceQualifier + "}"); } - const env = prepareEnv(runnable, ctx.config.runnableEnv); + const env = prepareEnv(runnable, ctx.config.runnablesExtraEnv); const executable = await getDebugExecutable(runnable, env); let sourceFileMap = debugOptions.sourceFileMap; if (sourceFileMap === "auto") { @@ -130,12 +132,8 @@ async function getDebugConfiguration( sourceFileMap[`/rustc/${commitHash}/`] = rustlib; } - const debugConfig = knownEngines[debugEngine.id]( - runnable, - simplifyPath(executable), - env, - sourceFileMap - ); + const provider = unwrapUndefinable(knownEngines[debugEngine.id]); + const debugConfig = provider(runnable, simplifyPath(executable), env, sourceFileMap); if (debugConfig.type in debugOptions.engineSettings) { const settingsMap = (debugOptions.engineSettings as any)[debugConfig.type]; for (var key in settingsMap) { @@ -149,8 +147,9 @@ async function getDebugConfiguration( debugConfig.name = `run ${path.basename(executable)}`; } - if (debugConfig.cwd) { - debugConfig.cwd = simplifyPath(debugConfig.cwd); + const cwd = debugConfig["cwd"]; + if (cwd) { + debugConfig["cwd"] = simplifyPath(cwd); } return debugConfig; @@ -158,7 +157,7 @@ async function getDebugConfiguration( async function getDebugExecutable( runnable: ra.Runnable, - env: Record<string, string> + env: Record<string, string>, ): Promise<string> { const cargo = new Cargo(runnable.args.workspaceRoot || ".", debugOutput, env); const executable = await cargo.executableFromArgs(runnable.args.cargoArgs); @@ -171,7 +170,7 @@ function getLldbDebugConfig( runnable: ra.Runnable, executable: string, env: Record<string, string>, - sourceFileMap?: Record<string, string> + sourceFileMap?: Record<string, string>, ): vscode.DebugConfiguration { return { type: "lldb", @@ -190,7 +189,7 @@ function getCppvsDebugConfig( runnable: ra.Runnable, executable: string, env: Record<string, string>, - sourceFileMap?: Record<string, string> + sourceFileMap?: Record<string, string>, ): vscode.DebugConfiguration { return { type: os.platform() === "win32" ? "cppvsdbg" : "cppdbg", diff --git a/src/tools/rust-analyzer/editors/code/src/dependencies_provider.ts b/src/tools/rust-analyzer/editors/code/src/dependencies_provider.ts index 8900aa9a5f3..863ace07801 100644 --- a/src/tools/rust-analyzer/editors/code/src/dependencies_provider.ts +++ b/src/tools/rust-analyzer/editors/code/src/dependencies_provider.ts @@ -1,9 +1,10 @@ import * as vscode from "vscode"; import * as fspath from "path"; import * as fs from "fs"; -import { CtxInit } from "./ctx"; +import type { CtxInit } from "./ctx"; import * as ra from "./lsp_ext"; -import { FetchDependencyListResult } from "./lsp_ext"; +import type { FetchDependencyListResult } from "./lsp_ext"; +import { unwrapUndefinable } from "./undefinable"; export class RustDependenciesProvider implements vscode.TreeDataProvider<Dependency | DependencyFile> @@ -42,19 +43,24 @@ export class RustDependenciesProvider } getParent?( - element: Dependency | DependencyFile + element: Dependency | DependencyFile, ): vscode.ProviderResult<Dependency | DependencyFile> { if (element instanceof Dependency) return undefined; return element.parent; } getTreeItem(element: Dependency | DependencyFile): vscode.TreeItem | Thenable<vscode.TreeItem> { - if (element.id! 
in this.dependenciesMap) return this.dependenciesMap[element.id!]; + const dependenciesMap = this.dependenciesMap; + const elementId = element.id!; + if (elementId in dependenciesMap) { + const dependency = unwrapUndefinable(dependenciesMap[elementId]); + return dependency; + } return element; } getChildren( - element?: Dependency | DependencyFile + element?: Dependency | DependencyFile, ): vscode.ProviderResult<Dependency[] | DependencyFile[]> { return new Promise((resolve, _reject) => { if (!vscode.workspace.workspaceFolders) { @@ -81,7 +87,7 @@ export class RustDependenciesProvider private async getRootDependencies(): Promise<Dependency[]> { const dependenciesResult: FetchDependencyListResult = await this.ctx.client.sendRequest( ra.fetchDependencyList, - {} + {}, ); const crates = dependenciesResult.crates; @@ -101,17 +107,17 @@ export class RustDependenciesProvider moduleName, version, vscode.Uri.parse(path).fsPath, - vscode.TreeItemCollapsibleState.Collapsed + vscode.TreeItemCollapsibleState.Collapsed, ); } } export class Dependency extends vscode.TreeItem { constructor( - public readonly label: string, + public override readonly label: string, private version: string, readonly dependencyPath: string, - public readonly collapsibleState: vscode.TreeItemCollapsibleState + public override readonly collapsibleState: vscode.TreeItemCollapsibleState, ) { super(label, collapsibleState); this.resourceUri = vscode.Uri.file(dependencyPath); @@ -127,10 +133,10 @@ export class Dependency extends vscode.TreeItem { export class DependencyFile extends vscode.TreeItem { constructor( - readonly label: string, + override readonly label: string, readonly dependencyPath: string, readonly parent: Dependency | DependencyFile, - public readonly collapsibleState: vscode.TreeItemCollapsibleState + public override readonly collapsibleState: vscode.TreeItemCollapsibleState, ) { super(vscode.Uri.file(dependencyPath), collapsibleState); this.id = this.resourceUri!.fsPath.toLowerCase(); diff --git a/src/tools/rust-analyzer/editors/code/src/diagnostics.ts b/src/tools/rust-analyzer/editors/code/src/diagnostics.ts index 9695d8bf26d..e31a1cdcef9 100644 --- a/src/tools/rust-analyzer/editors/code/src/diagnostics.ts +++ b/src/tools/rust-analyzer/editors/code/src/diagnostics.ts @@ -1,7 +1,14 @@ import * as anser from "anser"; import * as vscode from "vscode"; -import { ProviderResult, Range, TextEditorDecorationType, ThemeColor, window } from "vscode"; -import { Ctx } from "./ctx"; +import { + type ProviderResult, + Range, + type TextEditorDecorationType, + ThemeColor, + window, +} from "vscode"; +import type { Ctx } from "./ctx"; +import { unwrapUndefinable } from "./undefinable"; export const URI_SCHEME = "rust-analyzer-diagnostics-view"; @@ -82,7 +89,7 @@ export class AnsiDecorationProvider implements vscode.Disposable { } private _getDecorations( - uri: vscode.Uri + uri: vscode.Uri, ): ProviderResult<[TextEditorDecorationType, Range[]][]> { const stringContents = getRenderedDiagnostic(this.ctx, uri); const lines = stringContents.split("\n"); @@ -110,7 +117,7 @@ export class AnsiDecorationProvider implements vscode.Disposable { lineNumber, offset - totalEscapeLength, lineNumber, - offset + content.length - totalEscapeLength + offset + content.length - totalEscapeLength, ); offset += content.length; @@ -176,7 +183,7 @@ export class AnsiDecorationProvider implements vscode.Disposable { private static _convertColor( color?: string, - truecolor?: string + truecolor?: string, ): ThemeColor | string | undefined { if (!color) 
{ return undefined; @@ -195,7 +202,8 @@ export class AnsiDecorationProvider implements vscode.Disposable { // anser won't return both the RGB and the color name at the same time, // so just fake a single foreground control char with the palette number: const spans = anser.ansiToJson(`\x1b[38;5;${paletteColor}m`); - const rgb = spans[1].fg; + const span = unwrapUndefinable(spans[1]); + const rgb = span.fg; if (rgb) { return `rgb(${rgb})`; diff --git a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts index b72804e510c..bb7896973f1 100644 --- a/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts +++ b/src/tools/rust-analyzer/editors/code/src/lsp_ext.ts @@ -27,17 +27,17 @@ export type CommandLinkGroup = { // rust-analyzer extensions export const analyzerStatus = new lc.RequestType<AnalyzerStatusParams, string, void>( - "rust-analyzer/analyzerStatus" + "rust-analyzer/analyzerStatus", ); export const cancelFlycheck = new lc.NotificationType0("rust-analyzer/cancelFlycheck"); export const clearFlycheck = new lc.NotificationType0("rust-analyzer/clearFlycheck"); export const expandMacro = new lc.RequestType<ExpandMacroParams, ExpandedMacro | null, void>( - "rust-analyzer/expandMacro" + "rust-analyzer/expandMacro", ); export const memoryUsage = new lc.RequestType0<string, void>("rust-analyzer/memoryUsage"); export const openServerLogs = new lc.NotificationType0("rust-analyzer/openServerLogs"); export const relatedTests = new lc.RequestType<lc.TextDocumentPositionParams, TestInfo[], void>( - "rust-analyzer/relatedTests" + "rust-analyzer/relatedTests", ); export const reloadWorkspace = new lc.RequestType0<null, void>("rust-analyzer/reloadWorkspace"); export const rebuildProcMacros = new lc.RequestType0<null, void>("rust-analyzer/rebuildProcMacros"); @@ -47,25 +47,25 @@ export const runFlycheck = new lc.NotificationType<{ }>("rust-analyzer/runFlycheck"); export const shuffleCrateGraph = new lc.RequestType0<null, void>("rust-analyzer/shuffleCrateGraph"); export const syntaxTree = new lc.RequestType<SyntaxTreeParams, string, void>( - "rust-analyzer/syntaxTree" + "rust-analyzer/syntaxTree", ); export const viewCrateGraph = new lc.RequestType<ViewCrateGraphParams, string, void>( - "rust-analyzer/viewCrateGraph" + "rust-analyzer/viewCrateGraph", ); export const viewFileText = new lc.RequestType<lc.TextDocumentIdentifier, string, void>( - "rust-analyzer/viewFileText" + "rust-analyzer/viewFileText", ); export const viewHir = new lc.RequestType<lc.TextDocumentPositionParams, string, void>( - "rust-analyzer/viewHir" + "rust-analyzer/viewHir", ); export const viewMir = new lc.RequestType<lc.TextDocumentPositionParams, string, void>( - "rust-analyzer/viewMir" + "rust-analyzer/viewMir", ); export const interpretFunction = new lc.RequestType<lc.TextDocumentPositionParams, string, void>( - "rust-analyzer/interpretFunction" + "rust-analyzer/interpretFunction", ); export const viewItemTree = new lc.RequestType<ViewItemTreeParams, string, void>( - "rust-analyzer/viewItemTree" + "rust-analyzer/viewItemTree", ); export type AnalyzerStatusParams = { textDocument?: lc.TextDocumentIdentifier }; @@ -121,22 +121,22 @@ export type ViewItemTreeParams = { textDocument: lc.TextDocumentIdentifier }; // experimental extensions export const joinLines = new lc.RequestType<JoinLinesParams, lc.TextEdit[], void>( - "experimental/joinLines" + "experimental/joinLines", ); export const matchingBrace = new lc.RequestType<MatchingBraceParams, lc.Position[], void>( - 
"experimental/matchingBrace" + "experimental/matchingBrace", ); export const moveItem = new lc.RequestType<MoveItemParams, lc.TextEdit[], void>( - "experimental/moveItem" + "experimental/moveItem", ); export const onEnter = new lc.RequestType<lc.TextDocumentPositionParams, lc.TextEdit[], void>( - "experimental/onEnter" + "experimental/onEnter", ); export const openCargoToml = new lc.RequestType<OpenCargoTomlParams, lc.Location, void>( - "experimental/openCargoToml" + "experimental/openCargoToml", ); export const openDocs = new lc.RequestType<lc.TextDocumentPositionParams, string | void, void>( - "experimental/externalDocs" + "experimental/externalDocs", ); export const parentModule = new lc.RequestType< lc.TextDocumentPositionParams, @@ -144,12 +144,17 @@ export const parentModule = new lc.RequestType< void >("experimental/parentModule"); export const runnables = new lc.RequestType<RunnablesParams, Runnable[], void>( - "experimental/runnables" + "experimental/runnables", ); export const serverStatus = new lc.NotificationType<ServerStatusParams>( - "experimental/serverStatus" + "experimental/serverStatus", ); export const ssr = new lc.RequestType<SsrParams, lc.WorkspaceEdit, void>("experimental/ssr"); +export const viewRecursiveMemoryLayout = new lc.RequestType< + lc.TextDocumentPositionParams, + RecursiveMemoryLayout | null, + void +>("rust-analyzer/viewRecursiveMemoryLayout"); export type JoinLinesParams = { textDocument: lc.TextDocumentIdentifier; @@ -197,3 +202,17 @@ export type SsrParams = { position: lc.Position; selections: readonly lc.Range[]; }; + +export type RecursiveMemoryLayoutNode = { + item_name: string; + typename: string; + size: number; + alignment: number; + offset: number; + parent_idx: number; + children_start: number; + children_len: number; +}; +export type RecursiveMemoryLayout = { + nodes: RecursiveMemoryLayoutNode[]; +}; diff --git a/src/tools/rust-analyzer/editors/code/src/main.ts b/src/tools/rust-analyzer/editors/code/src/main.ts index be9bc9d363c..448150bac06 100644 --- a/src/tools/rust-analyzer/editors/code/src/main.ts +++ b/src/tools/rust-analyzer/editors/code/src/main.ts @@ -2,7 +2,7 @@ import * as vscode from "vscode"; import * as lc from "vscode-languageclient/node"; import * as commands from "./commands"; -import { CommandFactory, Ctx, fetchWorkspace } from "./ctx"; +import { type CommandFactory, Ctx, fetchWorkspace } from "./ctx"; import * as diagnostics from "./diagnostics"; import { activateTaskProvider } from "./tasks"; import { setContextValue } from "./util"; @@ -18,7 +18,7 @@ export async function deactivate() { } export async function activate( - context: vscode.ExtensionContext + context: vscode.ExtensionContext, ): Promise<RustAnalyzerExtensionApi> { if (vscode.extensions.getExtension("rust-lang.rust")) { vscode.window @@ -26,7 +26,7 @@ export async function activate( `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` + "plugins enabled. These are known to conflict and cause various functions of " + "both plugins to not work correctly. You should disable one of them.", - "Got it" + "Got it", ) .then(() => {}, console.error); } @@ -36,7 +36,7 @@ export async function activate( // so we do it ourselves. 
const api = await activateServer(ctx).catch((err) => { void vscode.window.showErrorMessage( - `Cannot activate rust-analyzer extension: ${err.message}` + `Cannot activate rust-analyzer extension: ${err.message}`, ); throw err; }); @@ -53,8 +53,8 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> { ctx.pushExtCleanup( vscode.workspace.registerTextDocumentContentProvider( diagnostics.URI_SCHEME, - diagnosticProvider - ) + diagnosticProvider, + ), ); const decorationProvider = new diagnostics.AnsiDecorationProvider(ctx); @@ -71,7 +71,7 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> { vscode.workspace.onDidChangeTextDocument( async (event) => await decorateVisibleEditors(event.document), null, - ctx.subscriptions + ctx.subscriptions, ); vscode.workspace.onDidOpenTextDocument(decorateVisibleEditors, null, ctx.subscriptions); vscode.window.onDidChangeActiveTextEditor( @@ -82,7 +82,7 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> { } }, null, - ctx.subscriptions + ctx.subscriptions, ); vscode.window.onDidChangeVisibleTextEditors( async (visibleEditors) => { @@ -92,13 +92,13 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> { } }, null, - ctx.subscriptions + ctx.subscriptions, ); vscode.workspace.onDidChangeWorkspaceFolders( async (_) => ctx.onWorkspaceFolderChanges(), null, - ctx.subscriptions + ctx.subscriptions, ); vscode.workspace.onDidChangeConfiguration( async (_) => { @@ -107,7 +107,7 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> { }); }, null, - ctx.subscriptions + ctx.subscriptions, ); await ctx.start(); @@ -179,6 +179,7 @@ function createCommands(): Record<string, CommandFactory> { runFlycheck: { enabled: commands.runFlycheck }, ssr: { enabled: commands.ssr }, serverVersion: { enabled: commands.serverVersion }, + viewMemoryLayout: { enabled: commands.viewMemoryLayout }, // Internal commands which are invoked by the server. applyActionGroup: { enabled: commands.applyActionGroup }, applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand }, diff --git a/src/tools/rust-analyzer/editors/code/src/nullable.ts b/src/tools/rust-analyzer/editors/code/src/nullable.ts new file mode 100644 index 00000000000..e973e162907 --- /dev/null +++ b/src/tools/rust-analyzer/editors/code/src/nullable.ts @@ -0,0 +1,19 @@ +export type NotNull<T> = T extends null ? 
never : T; + +export type Nullable<T> = T | null; + +function isNotNull<T>(input: Nullable<T>): input is NotNull<T> { + return input !== null; +} + +function expectNotNull<T>(input: Nullable<T>, msg: string): NotNull<T> { + if (isNotNull(input)) { + return input; + } + + throw new TypeError(msg); +} + +export function unwrapNullable<T>(input: Nullable<T>): NotNull<T> { + return expectNotNull(input, `unwrapping \`null\``); +} diff --git a/src/tools/rust-analyzer/editors/code/src/persistent_state.ts b/src/tools/rust-analyzer/editors/code/src/persistent_state.ts index 8964a78dc32..cebd16a3c90 100644 --- a/src/tools/rust-analyzer/editors/code/src/persistent_state.ts +++ b/src/tools/rust-analyzer/editors/code/src/persistent_state.ts @@ -1,4 +1,4 @@ -import * as vscode from "vscode"; +import type * as vscode from "vscode"; import { log } from "./util"; export class PersistentState { diff --git a/src/tools/rust-analyzer/editors/code/src/run.ts b/src/tools/rust-analyzer/editors/code/src/run.ts index 623fb102953..c893d390554 100644 --- a/src/tools/rust-analyzer/editors/code/src/run.ts +++ b/src/tools/rust-analyzer/editors/code/src/run.ts @@ -1,11 +1,12 @@ import * as vscode from "vscode"; -import * as lc from "vscode-languageclient"; +import type * as lc from "vscode-languageclient"; import * as ra from "./lsp_ext"; import * as tasks from "./tasks"; -import { CtxInit } from "./ctx"; +import type { CtxInit } from "./ctx"; import { makeDebugConfig } from "./debug"; -import { Config, RunnableEnvCfg } from "./config"; +import type { Config, RunnableEnvCfg } from "./config"; +import { unwrapUndefinable } from "./undefinable"; const quickPickButtons = [ { iconPath: new vscode.ThemeIcon("save"), tooltip: "Save as a launch.json configuration." }, @@ -15,7 +16,7 @@ export async function selectRunnable( ctx: CtxInit, prevRunnable?: RunnableQuickPick, debuggeeOnly = false, - showButtons: boolean = true + showButtons: boolean = true, ): Promise<RunnableQuickPick | undefined> { const editor = ctx.activeRustEditor; if (!editor) return; @@ -68,12 +69,14 @@ export async function selectRunnable( quickPick.onDidHide(() => close()), quickPick.onDidAccept(() => close(quickPick.selectedItems[0])), quickPick.onDidTriggerButton(async (_button) => { - await makeDebugConfig(ctx, quickPick.activeItems[0].runnable); + const runnable = unwrapUndefinable(quickPick.activeItems[0]).runnable; + await makeDebugConfig(ctx, runnable); close(); }), - quickPick.onDidChangeActive((active) => { - if (showButtons && active.length > 0) { - if (active[0].label.startsWith("cargo")) { + quickPick.onDidChangeActive((activeList) => { + if (showButtons && activeList.length > 0) { + const active = unwrapUndefinable(activeList[0]); + if (active.label.startsWith("cargo")) { // save button makes no sense for `cargo test` or `cargo check` quickPick.buttons = []; } else if (quickPick.buttons.length === 0) { @@ -81,7 +84,7 @@ export async function selectRunnable( } } }), - quickPick + quickPick, ); quickPick.show(); }); @@ -100,7 +103,7 @@ export class RunnableQuickPick implements vscode.QuickPickItem { export function prepareEnv( runnable: ra.Runnable, - runnableEnvCfg: RunnableEnvCfg + runnableEnvCfg: RunnableEnvCfg, ): Record<string, string> { const env: Record<string, string> = { RUST_BACKTRACE: "short" }; @@ -140,7 +143,7 @@ export async function createTask(runnable: ra.Runnable, config: Config): Promise command: args[0], // run, test, etc... 
args: args.slice(1), cwd: runnable.args.workspaceRoot || ".", - env: prepareEnv(runnable, config.runnableEnv), + env: prepareEnv(runnable, config.runnablesExtraEnv), overrideCargo: runnable.args.overrideCargo, }; @@ -151,8 +154,9 @@ export async function createTask(runnable: ra.Runnable, config: Config): Promise definition, runnable.label, args, + config.problemMatcher, config.cargoRunner, - true + true, ); cargoTask.presentationOptions.clear = true; diff --git a/src/tools/rust-analyzer/editors/code/src/snippets.ts b/src/tools/rust-analyzer/editors/code/src/snippets.ts index 299d29c27ee..d81765649ff 100644 --- a/src/tools/rust-analyzer/editors/code/src/snippets.ts +++ b/src/tools/rust-analyzer/editors/code/src/snippets.ts @@ -1,10 +1,11 @@ import * as vscode from "vscode"; import { assert } from "./util"; +import { unwrapUndefinable } from "./undefinable"; export async function applySnippetWorkspaceEdit(edit: vscode.WorkspaceEdit) { if (edit.entries().length === 1) { - const [uri, edits] = edit.entries()[0]; + const [uri, edits] = unwrapUndefinable(edit.entries()[0]); const editor = await editorFromUri(uri); if (editor) await applySnippetTextEdits(editor, edits); return; @@ -16,7 +17,9 @@ export async function applySnippetWorkspaceEdit(edit: vscode.WorkspaceEdit) { for (const indel of edits) { assert( !parseSnippet(indel.newText), - `bad ws edit: snippet received with multiple edits: ${JSON.stringify(edit)}` + `bad ws edit: snippet received with multiple edits: ${JSON.stringify( + edit, + )}`, ); builder.replace(indel.range, indel.newText); } @@ -31,7 +34,7 @@ async function editorFromUri(uri: vscode.Uri): Promise<vscode.TextEditor | undef await vscode.window.showTextDocument(uri, {}); } return vscode.window.visibleTextEditors.find( - (it) => it.document.uri.toString() === uri.toString() + (it) => it.document.uri.toString() === uri.toString(), ); } @@ -55,8 +58,8 @@ export async function applySnippetTextEdits(editor: vscode.TextEditor, edits: vs selections.push( new vscode.Selection( new vscode.Position(startLine, startColumn), - new vscode.Position(startLine, endColumn) - ) + new vscode.Position(startLine, endColumn), + ), ); builder.replace(indel.range, newText); } else { @@ -68,7 +71,8 @@ export async function applySnippetTextEdits(editor: vscode.TextEditor, edits: vs }); if (selections.length > 0) editor.selections = selections; if (selections.length === 1) { - editor.revealRange(selections[0], vscode.TextEditorRevealType.InCenterIfOutsideViewport); + const selection = unwrapUndefinable(selections[0]); + editor.revealRange(selection, vscode.TextEditorRevealType.InCenterIfOutsideViewport); } } diff --git a/src/tools/rust-analyzer/editors/code/src/tasks.ts b/src/tools/rust-analyzer/editors/code/src/tasks.ts index d6509d9aa6e..1d5ab82aa04 100644 --- a/src/tools/rust-analyzer/editors/code/src/tasks.ts +++ b/src/tools/rust-analyzer/editors/code/src/tasks.ts @@ -1,7 +1,8 @@ import * as vscode from "vscode"; import * as toolchain from "./toolchain"; -import { Config } from "./config"; +import type { Config } from "./config"; import { log } from "./util"; +import { unwrapUndefinable } from "./undefinable"; // This ends up as the `type` key in tasks.json. RLS also uses `cargo` and // our configuration should be compatible with it so use the same key. 
@@ -46,7 +47,8 @@ class CargoTaskProvider implements vscode.TaskProvider { { type: TASK_TYPE, command: def.command }, `cargo ${def.command}`, [def.command], - this.config.cargoRunner + this.config.problemMatcher, + this.config.cargoRunner, ); vscodeTask.group = def.group; tasks.push(vscodeTask); @@ -70,7 +72,8 @@ class CargoTaskProvider implements vscode.TaskProvider { definition, task.name, args, - this.config.cargoRunner + this.config.problemMatcher, + this.config.cargoRunner, ); } @@ -83,8 +86,9 @@ export async function buildCargoTask( definition: CargoTaskDefinition, name: string, args: string[], + problemMatcher: string[], customRunner?: string, - throwOnError: boolean = false + throwOnError: boolean = false, ): Promise<vscode.Task> { let exec: vscode.ProcessExecution | vscode.ShellExecution | undefined = undefined; @@ -117,7 +121,8 @@ export async function buildCargoTask( const fullCommand = [...cargoCommand, ...args]; - exec = new vscode.ProcessExecution(fullCommand[0], fullCommand.slice(1), definition); + const processName = unwrapUndefinable(fullCommand[0]); + exec = new vscode.ProcessExecution(processName, fullCommand.slice(1), definition); } return new vscode.Task( @@ -128,7 +133,7 @@ export async function buildCargoTask( name, TASK_SOURCE, exec, - ["$rustc", "$rust-panic"] + problemMatcher, ); } diff --git a/src/tools/rust-analyzer/editors/code/src/toolchain.ts b/src/tools/rust-analyzer/editors/code/src/toolchain.ts index 917a1d6b099..58e5fc747a1 100644 --- a/src/tools/rust-analyzer/editors/code/src/toolchain.ts +++ b/src/tools/rust-analyzer/editors/code/src/toolchain.ts @@ -4,6 +4,8 @@ import * as path from "path"; import * as readline from "readline"; import * as vscode from "vscode"; import { execute, log, memoizeAsync } from "./util"; +import { unwrapNullable } from "./nullable"; +import { unwrapUndefinable } from "./undefinable"; interface CompilationArtifact { fileName: string; @@ -21,7 +23,7 @@ export class Cargo { constructor( readonly rootFolder: string, readonly output: vscode.OutputChannel, - readonly env: Record<string, string> + readonly env: Record<string, string>, ) {} // Made public for testing purposes @@ -74,7 +76,7 @@ export class Cargo { this.output.append(message.message.rendered); } }, - (stderr) => this.output.append(stderr) + (stderr) => this.output.append(stderr), ); } catch (err) { this.output.show(true); @@ -93,13 +95,14 @@ export class Cargo { throw new Error("Multiple compilation artifacts are not supported."); } - return artifacts[0].fileName; + const artifact = unwrapUndefinable(artifacts[0]); + return artifact.fileName; } private async runCargo( cargoArgs: string[], onStdoutJson: (obj: any) => void, - onStderrString: (data: string) => void + onStderrString: (data: string) => void, ): Promise<number> { const path = await cargoPath(); return await new Promise((resolve, reject) => { @@ -142,7 +145,9 @@ export async function getRustcId(dir: string): Promise<string> { const data = await execute(`${rustcPath} -V -v`, { cwd: dir }); const rx = /commit-hash:\s(.*)$/m; - return rx.exec(data)![1]; + const result = unwrapNullable(rx.exec(data)); + const first = unwrapUndefinable(result[1]); + return first; } /** Mirrors `toolchain::cargo()` implementation */ @@ -167,11 +172,11 @@ export const getPathForExecutable = memoizeAsync( if (await isFileAtUri(standardPath)) return standardPath.fsPath; } return executableName; - } + }, ); async function lookupInPath(exec: string): Promise<boolean> { - const paths = process.env.PATH ?? 
""; + const paths = process.env["PATH"] ?? ""; const candidates = paths.split(path.delimiter).flatMap((dirInPath) => { const candidate = path.join(dirInPath, exec); diff --git a/src/tools/rust-analyzer/editors/code/src/undefinable.ts b/src/tools/rust-analyzer/editors/code/src/undefinable.ts new file mode 100644 index 00000000000..813bac5a123 --- /dev/null +++ b/src/tools/rust-analyzer/editors/code/src/undefinable.ts @@ -0,0 +1,19 @@ +export type NotUndefined<T> = T extends undefined ? never : T; + +export type Undefinable<T> = T | undefined; + +function isNotUndefined<T>(input: Undefinable<T>): input is NotUndefined<T> { + return input !== undefined; +} + +export function expectNotUndefined<T>(input: Undefinable<T>, msg: string): NotUndefined<T> { + if (isNotUndefined(input)) { + return input; + } + + throw new TypeError(msg); +} + +export function unwrapUndefinable<T>(input: Undefinable<T>): NotUndefined<T> { + return expectNotUndefined(input, `unwrapping \`undefined\``); +} diff --git a/src/tools/rust-analyzer/editors/code/src/util.ts b/src/tools/rust-analyzer/editors/code/src/util.ts index b6b779e2660..38ce6761578 100644 --- a/src/tools/rust-analyzer/editors/code/src/util.ts +++ b/src/tools/rust-analyzer/editors/code/src/util.ts @@ -1,7 +1,6 @@ -import * as lc from "vscode-languageclient/node"; import * as vscode from "vscode"; import { strict as nativeAssert } from "assert"; -import { exec, ExecOptions, spawnSync } from "child_process"; +import { exec, type ExecOptions, spawnSync } from "child_process"; import { inspect } from "util"; export function assert(condition: boolean, explanation: string): asserts condition { @@ -57,37 +56,6 @@ export const log = new (class { } })(); -export async function sendRequestWithRetry<TParam, TRet>( - client: lc.LanguageClient, - reqType: lc.RequestType<TParam, TRet, unknown>, - param: TParam, - token?: vscode.CancellationToken -): Promise<TRet> { - // The sequence is `10 * (2 ** (2 * n))` where n is 1, 2, 3... - for (const delay of [40, 160, 640, 2560, 10240, null]) { - try { - return await (token - ? client.sendRequest(reqType, param, token) - : client.sendRequest(reqType, param)); - } catch (error) { - if (delay === null) { - log.warn("LSP request timed out", { method: reqType.method, param, error }); - throw error; - } - if (error.code === lc.LSPErrorCodes.RequestCancelled) { - throw error; - } - - if (error.code !== lc.LSPErrorCodes.ContentModified) { - log.warn("LSP request failed", { method: reqType.method, param, error }); - throw error; - } - await sleep(delay); - } - } - throw "unreachable"; -} - export function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); } @@ -146,7 +114,7 @@ export function setContextValue(key: string, value: any): Thenable<void> { * underlying async function. 
*/ export function memoizeAsync<Ret, TThis, Param extends string>( - func: (this: TThis, arg: Param) => Promise<Ret> + func: (this: TThis, arg: Param) => Promise<Ret>, ) { const cache = new Map<string, Ret>(); diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/index.ts b/src/tools/rust-analyzer/editors/code/tests/unit/index.ts index 2fa223bed4a..8ad46546abd 100644 --- a/src/tools/rust-analyzer/editors/code/tests/unit/index.ts +++ b/src/tools/rust-analyzer/editors/code/tests/unit/index.ts @@ -1,3 +1,4 @@ +import * as assert from "node:assert/strict"; import { readdir } from "fs/promises"; import * as path from "path"; @@ -30,6 +31,7 @@ class Suite { await test.promise; ok(` ✔ ${test.name}`); } catch (e) { + assert.ok(e instanceof Error); error(` ✖︎ ${test.name}\n ${e.stack}`); failed += 1; } @@ -50,6 +52,7 @@ export class Context { await ctx.run(); ok(`✔ ${name}`); } catch (e) { + assert.ok(e instanceof Error); error(`✖︎ ${name}\n ${e.stack}`); throw e; } @@ -60,7 +63,7 @@ export async function run(): Promise<void> { const context = new Context(); const testFiles = (await readdir(path.resolve(__dirname))).filter((name) => - name.endsWith(".test.js") + name.endsWith(".test.js"), ); for (const testFile of testFiles) { try { diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/launch_config.test.ts b/src/tools/rust-analyzer/editors/code/tests/unit/launch_config.test.ts index 0531e064d2d..eeb8608a42d 100644 --- a/src/tools/rust-analyzer/editors/code/tests/unit/launch_config.test.ts +++ b/src/tools/rust-analyzer/editors/code/tests/unit/launch_config.test.ts @@ -1,6 +1,6 @@ import * as assert from "assert"; import { Cargo } from "../../src/toolchain"; -import { Context } from "."; +import type { Context } from "."; export async function getTests(ctx: Context) { await ctx.suite("Launch configuration/Lens", (suite) => { diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts b/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts index b7d59e399dd..b1407ce0193 100644 --- a/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts +++ b/src/tools/rust-analyzer/editors/code/tests/unit/runnable_env.test.ts @@ -1,8 +1,8 @@ import * as assert from "assert"; import { prepareEnv } from "../../src/run"; -import { RunnableEnvCfg } from "../../src/config"; -import { Context } from "."; -import * as ra from "../../src/lsp_ext"; +import type { RunnableEnvCfg } from "../../src/config"; +import type { Context } from "."; +import type * as ra from "../../src/lsp_ext"; function makeRunnable(label: string): ra.Runnable { return { diff --git a/src/tools/rust-analyzer/editors/code/tests/unit/settings.test.ts b/src/tools/rust-analyzer/editors/code/tests/unit/settings.test.ts index 224cea5a232..bafb9569a1c 100644 --- a/src/tools/rust-analyzer/editors/code/tests/unit/settings.test.ts +++ b/src/tools/rust-analyzer/editors/code/tests/unit/settings.test.ts @@ -1,5 +1,5 @@ import * as assert from "assert"; -import { Context } from "."; +import type { Context } from "."; import { substituteVariablesInEnv } from "../../src/config"; export async function getTests(ctx: Context) { @@ -39,6 +39,7 @@ export async function getTests(ctx: Context) { }); suite.addTest("Should support external variables", async () => { + process.env["TEST_VARIABLE"] = "test"; const envJson = { USING_EXTERNAL_VAR: "${env:TEST_VARIABLE} test ${env:TEST_VARIABLE}", }; @@ -48,6 +49,7 @@ export async function getTests(ctx: Context) { const actualEnv = await 
substituteVariablesInEnv(envJson); assert.deepStrictEqual(actualEnv, expectedEnv); + delete process.env["TEST_VARIABLE"]; }); suite.addTest("should support VSCode variables", async () => { @@ -55,7 +57,7 @@ export async function getTests(ctx: Context) { USING_VSCODE_VAR: "${workspaceFolderBasename}", }; const actualEnv = await substituteVariablesInEnv(envJson); - assert.deepStrictEqual(actualEnv.USING_VSCODE_VAR, "code"); + assert.deepStrictEqual(actualEnv["USING_VSCODE_VAR"], "code"); }); }); } diff --git a/src/tools/rust-analyzer/editors/code/tsconfig.json b/src/tools/rust-analyzer/editors/code/tsconfig.json index 42e2846858a..ee353c28dd6 100644 --- a/src/tools/rust-analyzer/editors/code/tsconfig.json +++ b/src/tools/rust-analyzer/editors/code/tsconfig.json @@ -1,18 +1,18 @@ { + "extends": "@tsconfig/strictest/tsconfig.json", "compilerOptions": { + "esModuleInterop": false, "module": "commonjs", + "moduleResolution": "node16", "target": "es2021", "outDir": "out", "lib": ["es2021"], "sourceMap": true, "rootDir": ".", - "strict": true, - "useUnknownInCatchVariables": false, - "noUnusedLocals": true, - "noUnusedParameters": true, - "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true, - "newLine": "LF" + "newLine": "LF", + + // FIXME: https://github.com/rust-lang/rust-analyzer/issues/15253 + "exactOptionalPropertyTypes": false }, "exclude": ["node_modules", ".vscode-test"], "include": ["src", "tests"] diff --git a/src/tools/rust-analyzer/lib/README.md b/src/tools/rust-analyzer/lib/README.md index ed55e31d6bb..d420eeb9605 100644 --- a/src/tools/rust-analyzer/lib/README.md +++ b/src/tools/rust-analyzer/lib/README.md @@ -3,3 +3,12 @@ Crates in this directory are published to [crates.io](https://crates.io) and obey semver. They _could_ live in a separate repo, but we want to experiment with a monorepo setup. + +We use these crates from crates.io, not the local copies because we want to ensure that +rust-analyzer works with the versions that are published. This means if you add a new API to these +crates, you need to release a new version to crates.io before you can use that API in rust-analyzer. + +To release new versions of these packages, change their version in Cargo.toml. Once your PR is merged into master a workflow will automatically publish the new version to crates.io. + +While prototyping, the local versions can be used by uncommenting the relevant lines in the +`[patch.'crates-io']` section in Cargo.toml diff --git a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml index ec5ba8ba00c..01f2b87b39d 100644 --- a/src/tools/rust-analyzer/lib/la-arena/Cargo.toml +++ b/src/tools/rust-analyzer/lib/la-arena/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "la-arena" -version = "0.3.0" +version = "0.3.1" description = "Simple index-based arena without deletion." license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/la-arena" diff --git a/src/tools/rust-analyzer/lib/line-index/Cargo.toml b/src/tools/rust-analyzer/lib/line-index/Cargo.toml index 019ad3a53ba..6c0d06f4717 100644 --- a/src/tools/rust-analyzer/lib/line-index/Cargo.toml +++ b/src/tools/rust-analyzer/lib/line-index/Cargo.toml @@ -1,11 +1,11 @@ [package] name = "line-index" -version = "0.1.0-pre.1" +version = "0.1.0" description = "Maps flat `TextSize` offsets to/from `(line, column)` representation." 
license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/line-index" edition = "2021" [dependencies] -text-size.workspace = true -nohash-hasher.workspace = true +text-size = "1.1.0" +nohash-hasher = "0.2.0" diff --git a/src/tools/rust-analyzer/lib/line-index/src/lib.rs b/src/tools/rust-analyzer/lib/line-index/src/lib.rs index ad67d3f246e..03371c9c87a 100644 --- a/src/tools/rust-analyzer/lib/line-index/src/lib.rs +++ b/src/tools/rust-analyzer/lib/line-index/src/lib.rs @@ -94,44 +94,7 @@ pub struct LineIndex { impl LineIndex { /// Returns a `LineIndex` for the `text`. pub fn new(text: &str) -> LineIndex { - let mut newlines = Vec::<TextSize>::with_capacity(16); - let mut line_wide_chars = IntMap::<u32, Box<[WideChar]>>::default(); - - let mut wide_chars = Vec::<WideChar>::new(); - let mut cur_row = TextSize::from(0); - let mut cur_col = TextSize::from(0); - let mut line = 0u32; - - for c in text.chars() { - let c_len = TextSize::of(c); - cur_row += c_len; - if c == '\n' { - newlines.push(cur_row); - - // Save any wide characters seen in the previous line - if !wide_chars.is_empty() { - let cs = std::mem::take(&mut wide_chars).into_boxed_slice(); - line_wide_chars.insert(line, cs); - } - - // Prepare for processing the next line - cur_col = TextSize::from(0); - line += 1; - continue; - } - - if !c.is_ascii() { - wide_chars.push(WideChar { start: cur_col, end: cur_col + c_len }); - } - - cur_col += c_len; - } - - // Save any wide characters seen in the last line - if !wide_chars.is_empty() { - line_wide_chars.insert(line, wide_chars.into_boxed_slice()); - } - + let (newlines, line_wide_chars) = analyze_source_file(text); LineIndex { newlines: newlines.into_boxed_slice(), line_wide_chars, @@ -235,3 +198,182 @@ impl LineIndex { self.len } } + +/// This is adapted from the rustc_span crate, https://github.com/rust-lang/rust/blob/de59844c98f7925242a798a72c59dc3610dd0e2c/compiler/rustc_span/src/analyze_source_file.rs +fn analyze_source_file(src: &str) -> (Vec<TextSize>, IntMap<u32, Box<[WideChar]>>) { + assert!(src.len() < !0u32 as usize); + let mut lines = vec![]; + let mut line_wide_chars = IntMap::<u32, Vec<WideChar>>::default(); + + // Calls the right implementation, depending on hardware support available. + analyze_source_file_dispatch(src, &mut lines, &mut line_wide_chars); + + (lines, line_wide_chars.into_iter().map(|(k, v)| (k, v.into_boxed_slice())).collect()) +} + +#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] +fn analyze_source_file_dispatch( + src: &str, + lines: &mut Vec<TextSize>, + multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>, +) { + if is_x86_feature_detected!("sse2") { + // SAFETY: SSE2 support was checked + unsafe { + analyze_source_file_sse2(src, lines, multi_byte_chars); + } + } else { + analyze_source_file_generic(src, src.len(), TextSize::from(0), lines, multi_byte_chars); + } +} + +/// Checks 16 byte chunks of text at a time. If the chunk contains +/// something other than printable ASCII characters and newlines, the +/// function falls back to the generic implementation. Otherwise it uses +/// SSE2 intrinsics to quickly find all newlines. 
+#[target_feature(enable = "sse2")] +#[cfg(any(target_arch = "x86", target_arch = "x86_64"))] +unsafe fn analyze_source_file_sse2( + src: &str, + lines: &mut Vec<TextSize>, + multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>, +) { + #[cfg(target_arch = "x86")] + use std::arch::x86::*; + #[cfg(target_arch = "x86_64")] + use std::arch::x86_64::*; + + const CHUNK_SIZE: usize = 16; + + let src_bytes = src.as_bytes(); + + let chunk_count = src.len() / CHUNK_SIZE; + + // This variable keeps track of where we should start decoding a + // chunk. If a multi-byte character spans across chunk boundaries, + // we need to skip that part in the next chunk because we already + // handled it. + let mut intra_chunk_offset = 0; + + for chunk_index in 0..chunk_count { + let ptr = src_bytes.as_ptr() as *const __m128i; + // We don't know if the pointer is aligned to 16 bytes, so we + // use `loadu`, which supports unaligned loading. + let chunk = _mm_loadu_si128(ptr.add(chunk_index)); + + // For character in the chunk, see if its byte value is < 0, which + // indicates that it's part of a UTF-8 char. + let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0)); + // Create a bit mask from the comparison results. + let multibyte_mask = _mm_movemask_epi8(multibyte_test); + + // If the bit mask is all zero, we only have ASCII chars here: + if multibyte_mask == 0 { + assert!(intra_chunk_offset == 0); + + // Check for newlines in the chunk + let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8)); + let newlines_mask = _mm_movemask_epi8(newlines_test); + + if newlines_mask != 0 { + // All control characters are newlines, record them + let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32; + let output_offset = TextSize::from((chunk_index * CHUNK_SIZE + 1) as u32); + + loop { + let index = newlines_mask.trailing_zeros(); + + if index >= CHUNK_SIZE as u32 { + // We have arrived at the end of the chunk. + break; + } + + lines.push(TextSize::from(index) + output_offset); + + // Clear the bit, so we can find the next one. + newlines_mask &= (!1) << index; + } + } + continue; + } + + // The slow path. + // There are control chars in here, fallback to generic decoding. + let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset; + intra_chunk_offset = analyze_source_file_generic( + &src[scan_start..], + CHUNK_SIZE - intra_chunk_offset, + TextSize::from(scan_start as u32), + lines, + multi_byte_chars, + ); + } + + // There might still be a tail left to analyze + let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset; + if tail_start < src.len() { + analyze_source_file_generic( + &src[tail_start..], + src.len() - tail_start, + TextSize::from(tail_start as u32), + lines, + multi_byte_chars, + ); + } +} + +#[cfg(not(any(target_arch = "x86", target_arch = "x86_64")))] +// The target (or compiler version) does not support SSE2 ... +fn analyze_source_file_dispatch( + src: &str, + lines: &mut Vec<TextSize>, + multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>, +) { + analyze_source_file_generic(src, src.len(), TextSize::from(0), lines, multi_byte_chars); +} + +// `scan_len` determines the number of bytes in `src` to scan. Note that the +// function can read past `scan_len` if a multi-byte character start within the +// range but extends past it. The overflow is returned by the function. 
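// (Editorial sketch, not part of the patch.) The overflow contract described above, in
// isolation: if a multi-byte character starts inside the scan window but extends past
// it, the scan advances beyond `scan_len` and reports by how much. `scan_overflow` is
// a hypothetical helper and assumes `scan_len <= src.len()`, like the assert below.
fn scan_overflow(src: &str, scan_len: usize) -> usize {
    let mut i = 0;
    while i < scan_len {
        // Always advance by whole UTF-8 characters, even past `scan_len`.
        let c = src[i..].chars().next().unwrap();
        i += c.len_utf8();
    }
    i - scan_len
}
// e.g. scan_overflow("abcdé", 5) == 1: the two-byte 'é' starts at byte 4 and ends at byte 6.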
+fn analyze_source_file_generic( + src: &str, + scan_len: usize, + output_offset: TextSize, + lines: &mut Vec<TextSize>, + multi_byte_chars: &mut IntMap<u32, Vec<WideChar>>, +) -> usize { + assert!(src.len() >= scan_len); + let mut i = 0; + let src_bytes = src.as_bytes(); + + while i < scan_len { + let byte = unsafe { + // We verified that i < scan_len <= src.len() + *src_bytes.get_unchecked(i) + }; + + // How much to advance in order to get to the next UTF-8 char in the + // string. + let mut char_len = 1; + + if byte == b'\n' { + lines.push(TextSize::from(i as u32 + 1) + output_offset); + } else if byte >= 127 { + // The slow path: Just decode to `char`. + let c = src[i..].chars().next().unwrap(); + char_len = c.len_utf8(); + + let pos = TextSize::from(i as u32) + output_offset; + + if char_len > 1 { + assert!((2..=4).contains(&char_len)); + let mbc = WideChar { start: pos, end: pos + TextSize::from(char_len as u32) }; + multi_byte_chars.entry(lines.len() as u32).or_default().push(mbc); + } + } + + i += char_len; + } + + i - scan_len +} diff --git a/src/tools/rust-analyzer/lib/line-index/src/tests.rs b/src/tools/rust-analyzer/lib/line-index/src/tests.rs index 31c01c20ee3..8f3762d1910 100644 --- a/src/tools/rust-analyzer/lib/line-index/src/tests.rs +++ b/src/tools/rust-analyzer/lib/line-index/src/tests.rs @@ -1,11 +1,120 @@ -use super::LineIndex; - -#[test] -fn test_empty_index() { - let col_index = LineIndex::new( - " -const C: char = 'x'; -", - ); - assert_eq!(col_index.line_wide_chars.len(), 0); +use crate::{LineIndex, TextSize, WideChar}; + +macro_rules! test { + ( + case: $test_name:ident, + text: $text:expr, + lines: $lines:expr, + multi_byte_chars: $multi_byte_chars:expr, + ) => { + #[test] + fn $test_name() { + let line_index = LineIndex::new($text); + + let expected_lines: Vec<TextSize> = + $lines.into_iter().map(<TextSize as From<u32>>::from).collect(); + + assert_eq!(&*line_index.newlines, &*expected_lines); + + let expected_mbcs: Vec<_> = $multi_byte_chars + .into_iter() + .map(|(line, (pos, end)): (u32, (u32, u32))| { + (line, WideChar { start: TextSize::from(pos), end: TextSize::from(end) }) + }) + .collect(); + + assert_eq!( + line_index + .line_wide_chars + .iter() + .flat_map(|(line, val)| std::iter::repeat(*line).zip(val.iter().copied())) + .collect::<Vec<_>>(), + expected_mbcs + ); + } + }; } + +test!( + case: empty_text, + text: "", + lines: vec![], + multi_byte_chars: vec![], +); + +test!( + case: newlines_short, + text: "a\nc", + lines: vec![2], + multi_byte_chars: vec![], +); + +test!( + case: newlines_long, + text: "012345678\nabcdef012345678\na", + lines: vec![10, 26], + multi_byte_chars: vec![], +); + +test!( + case: newline_and_multi_byte_char_in_same_chunk, + text: "01234β789\nbcdef0123456789abcdef", + lines: vec![11], + multi_byte_chars: vec![(0, (5, 7))], +); + +test!( + case: newline_and_control_char_in_same_chunk, + text: "01234\u{07}6789\nbcdef0123456789abcdef", + lines: vec![11], + multi_byte_chars: vec![], +); + +test!( + case: multi_byte_char_short, + text: "aβc", + lines: vec![], + multi_byte_chars: vec![(0, (1, 3))], +); + +test!( + case: multi_byte_char_long, + text: "0123456789abcΔf012345β", + lines: vec![], + multi_byte_chars: vec![(0, (13, 15)), (0, (22, 24))], +); + +test!( + case: multi_byte_char_across_chunk_boundary, + text: "0123456789abcdeΔ123456789abcdef01234", + lines: vec![], + multi_byte_chars: vec![(0, (15, 17))], +); + +test!( + case: multi_byte_char_across_chunk_boundary_tail, + text: "0123456789abcdeΔ....", + lines: vec![], + 
multi_byte_chars: vec![(0, (15, 17))], +); + +test!( + case: multi_byte_with_new_lines, + text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf", + lines: vec![7, 27], + multi_byte_chars: vec![(1, (13, 15)), (2, (29, 31))], +); + +test!( + case: trailing_newline, + text: "0123456789\n", + lines: vec![11], + multi_byte_chars: vec![], +); + +test!( + case: trailing_newline_chunk_boundary, + text: "0123456789abcde\n", + lines: vec![16], + multi_byte_chars: vec![], +); diff --git a/src/tools/rust-analyzer/lib/line-index/tests/it.rs b/src/tools/rust-analyzer/lib/line-index/tests/it.rs deleted file mode 100644 index ce1c0bc6f14..00000000000 --- a/src/tools/rust-analyzer/lib/line-index/tests/it.rs +++ /dev/null @@ -1,62 +0,0 @@ -use line_index::{LineCol, LineIndex, TextRange}; - -#[test] -fn test_line_index() { - let text = "hello\nworld"; - let table = [ - (00, 0, 0), - (01, 0, 1), - (05, 0, 5), - (06, 1, 0), - (07, 1, 1), - (08, 1, 2), - (10, 1, 4), - (11, 1, 5), - ]; - - let index = LineIndex::new(text); - for (offset, line, col) in table { - assert_eq!(index.line_col(offset.into()), LineCol { line, col }); - } - - let text = "\nhello\nworld"; - let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)]; - let index = LineIndex::new(text); - for (offset, line, col) in table { - assert_eq!(index.line_col(offset.into()), LineCol { line, col }); - } -} - -#[test] -fn test_char_len() { - assert_eq!('メ'.len_utf8(), 3); - assert_eq!('メ'.len_utf16(), 1); -} - -#[test] -fn test_splitlines() { - fn r(lo: u32, hi: u32) -> TextRange { - TextRange::new(lo.into(), hi.into()) - } - - let text = "a\nbb\nccc\n"; - let line_index = LineIndex::new(text); - - let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>(); - let expected = vec![r(0, 2), r(2, 5), r(5, 9)]; - assert_eq!(actual, expected); - - let text = ""; - let line_index = LineIndex::new(text); - - let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>(); - let expected = vec![]; - assert_eq!(actual, expected); - - let text = "\n"; - let line_index = LineIndex::new(text); - - let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>(); - let expected = vec![r(0, 1)]; - assert_eq!(actual, expected) -} diff --git a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml index e78a9d2eb16..01707d30191 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml +++ b/src/tools/rust-analyzer/lib/lsp-server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "lsp-server" -version = "0.7.0" +version = "0.7.2" description = "Generic LSP server scaffold." 
license = "MIT OR Apache-2.0" repository = "https://github.com/rust-lang/rust-analyzer/tree/master/lib/lsp-server" @@ -8,8 +8,8 @@ edition = "2021" [dependencies] log = "0.4.17" -serde_json.workspace = true -serde.workspace = true +serde_json = "1.0.96" +serde = { version = "1.0.156", features = ["derive"] } crossbeam-channel = "0.5.6" [dev-dependencies] diff --git a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs index b241561f9c0..730ad51f424 100644 --- a/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs +++ b/src/tools/rust-analyzer/lib/lsp-server/src/msg.rs @@ -265,7 +265,7 @@ fn read_msg_text(inp: &mut dyn BufRead) -> io::Result<Option<String>> { let header_name = parts.next().unwrap(); let header_value = parts.next().ok_or_else(|| invalid_data!("malformed header: {:?}", buf))?; - if header_name == "Content-Length" { + if header_name.eq_ignore_ascii_case("Content-Length") { size = Some(header_value.parse::<usize>().map_err(invalid_data)?); } } diff --git a/src/tools/rust-analyzer/xtask/Cargo.toml b/src/tools/rust-analyzer/xtask/Cargo.toml index b4b294c3099..7a34617e255 100644 --- a/src/tools/rust-analyzer/xtask/Cargo.toml +++ b/src/tools/rust-analyzer/xtask/Cargo.toml @@ -4,7 +4,7 @@ version = "0.1.0" publish = false license = "MIT OR Apache-2.0" edition = "2021" -rust-version = "1.65" +rust-version.workspace = true [dependencies] anyhow = "1.0.62" diff --git a/src/tools/rust-analyzer/xtask/src/flags.rs b/src/tools/rust-analyzer/xtask/src/flags.rs index 21004797014..7720ad69fe0 100644 --- a/src/tools/rust-analyzer/xtask/src/flags.rs +++ b/src/tools/rust-analyzer/xtask/src/flags.rs @@ -1,5 +1,7 @@ #![allow(unreachable_pub)] +use std::str::FromStr; + use crate::install::{ClientOpt, Malloc, ServerOpt}; xflags::xflags! { @@ -42,7 +44,7 @@ xflags::xflags! { required changelog: String } cmd metrics { - optional --dry-run + optional measurement_type: MeasurementType } /// Builds a benchmark version of rust-analyzer and puts it into `./target`. 
cmd bb { @@ -106,8 +108,31 @@ pub struct PublishReleaseNotes { } #[derive(Debug)] +pub enum MeasurementType { + Build, + AnalyzeSelf, + AnalyzeRipgrep, + AnalyzeWebRender, + AnalyzeDiesel, +} + +impl FromStr for MeasurementType { + type Err = String; + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "build" => Ok(Self::Build), + "self" => Ok(Self::AnalyzeSelf), + "ripgrep" => Ok(Self::AnalyzeRipgrep), + "webrender" => Ok(Self::AnalyzeWebRender), + "diesel" => Ok(Self::AnalyzeDiesel), + _ => Err("Invalid option".to_string()), + } + } +} + +#[derive(Debug)] pub struct Metrics { - pub dry_run: bool, + pub measurement_type: Option<MeasurementType>, } #[derive(Debug)] diff --git a/src/tools/rust-analyzer/xtask/src/install.rs b/src/tools/rust-analyzer/xtask/src/install.rs index 83223a551d1..e8c00c72e07 100644 --- a/src/tools/rust-analyzer/xtask/src/install.rs +++ b/src/tools/rust-analyzer/xtask/src/install.rs @@ -2,13 +2,13 @@ use std::{env, path::PathBuf, str}; -use anyhow::{bail, format_err, Context, Result}; +use anyhow::{bail, format_err, Context}; use xshell::{cmd, Shell}; use crate::flags; impl flags::Install { - pub(crate) fn run(self, sh: &Shell) -> Result<()> { + pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { if cfg!(target_os = "macos") { fix_path_for_mac(sh).context("Fix path for mac")?; } @@ -39,7 +39,7 @@ pub(crate) enum Malloc { Jemalloc, } -fn fix_path_for_mac(sh: &Shell) -> Result<()> { +fn fix_path_for_mac(sh: &Shell) -> anyhow::Result<()> { let mut vscode_path: Vec<PathBuf> = { const COMMON_APP_PATH: &str = r"/Applications/Visual Studio Code.app/Contents/Resources/app/bin"; @@ -68,7 +68,7 @@ fn fix_path_for_mac(sh: &Shell) -> Result<()> { Ok(()) } -fn install_client(sh: &Shell, client_opt: ClientOpt) -> Result<()> { +fn install_client(sh: &Shell, client_opt: ClientOpt) -> anyhow::Result<()> { let _dir = sh.push_dir("./editors/code"); // Package extension. 
@@ -129,7 +129,7 @@ fn install_client(sh: &Shell, client_opt: ClientOpt) -> Result<()> { Ok(()) } -fn install_server(sh: &Shell, opts: ServerOpt) -> Result<()> { +fn install_server(sh: &Shell, opts: ServerOpt) -> anyhow::Result<()> { let features = match opts.malloc { Malloc::System => &[][..], Malloc::Mimalloc => &["--features", "mimalloc"], diff --git a/src/tools/rust-analyzer/xtask/src/metrics.rs b/src/tools/rust-analyzer/xtask/src/metrics.rs index b6f730dbf12..68537423195 100644 --- a/src/tools/rust-analyzer/xtask/src/metrics.rs +++ b/src/tools/rust-analyzer/xtask/src/metrics.rs @@ -1,6 +1,6 @@ use std::{ collections::BTreeMap, - env, fs, + fs, io::Write as _, path::Path, time::{Instant, SystemTime, UNIX_EPOCH}, @@ -9,16 +9,13 @@ use std::{ use anyhow::{bail, format_err}; use xshell::{cmd, Shell}; -use crate::flags; +use crate::flags::{self, MeasurementType}; type Unit = String; impl flags::Metrics { pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { let mut metrics = Metrics::new(sh)?; - if !self.dry_run { - sh.remove_path("./target/release")?; - } if !Path::new("./target/rustc-perf").exists() { sh.create_dir("./target/rustc-perf")?; cmd!(sh, "git clone https://github.com/rust-lang/rustc-perf.git ./target/rustc-perf") @@ -32,38 +29,47 @@ impl flags::Metrics { let _env = sh.push_env("RA_METRICS", "1"); - { - // https://github.com/rust-lang/rust-analyzer/issues/9997 - let _d = sh.push_dir("target/rustc-perf/collector/benchmarks/webrender"); - cmd!(sh, "cargo update -p url --precise 1.6.1").run()?; - } - metrics.measure_build(sh)?; - metrics.measure_analysis_stats_self(sh)?; - metrics.measure_analysis_stats(sh, "ripgrep")?; - metrics.measure_analysis_stats(sh, "webrender")?; - metrics.measure_analysis_stats(sh, "diesel/diesel")?; - - if !self.dry_run { - let _d = sh.push_dir("target"); - let metrics_token = env::var("METRICS_TOKEN").unwrap(); - cmd!( - sh, - "git clone --depth 1 https://{metrics_token}@github.com/rust-analyzer/metrics.git" - ) - .run()?; - - { - let mut file = - fs::File::options().append(true).open("target/metrics/metrics.json")?; - writeln!(file, "{}", metrics.json())?; + let filename = match self.measurement_type { + Some(ms) => match ms { + MeasurementType::Build => { + metrics.measure_build(sh)?; + "build.json" + } + MeasurementType::AnalyzeSelf => { + metrics.measure_analysis_stats_self(sh)?; + "self.json" + } + MeasurementType::AnalyzeRipgrep => { + metrics.measure_analysis_stats(sh, "ripgrep")?; + "ripgrep.json" + } + MeasurementType::AnalyzeWebRender => { + { + // https://github.com/rust-lang/rust-analyzer/issues/9997 + let _d = sh.push_dir("target/rustc-perf/collector/benchmarks/webrender"); + cmd!(sh, "cargo update -p url --precise 1.6.1").run()?; + } + metrics.measure_analysis_stats(sh, "webrender")?; + "webrender.json" + } + MeasurementType::AnalyzeDiesel => { + metrics.measure_analysis_stats(sh, "diesel/diesel")?; + "diesel.json" + } + }, + None => { + metrics.measure_build(sh)?; + metrics.measure_analysis_stats_self(sh)?; + metrics.measure_analysis_stats(sh, "ripgrep")?; + metrics.measure_analysis_stats(sh, "webrender")?; + metrics.measure_analysis_stats(sh, "diesel/diesel")?; + "all.json" } + }; - let _d = sh.push_dir("metrics"); - cmd!(sh, "git add .").run()?; - cmd!(sh, "git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈") - .run()?; - cmd!(sh, "git push origin master").run()?; - } + let mut file = + fs::File::options().write(true).create(true).open(format!("target/{}", filename))?; + writeln!(file, "{}", metrics.json())?; 
eprintln!("{metrics:#?}"); Ok(()) } diff --git a/src/tools/rust-analyzer/xtask/src/publish.rs b/src/tools/rust-analyzer/xtask/src/publish.rs index cdb7d8fac89..7faae9b20c4 100644 --- a/src/tools/rust-analyzer/xtask/src/publish.rs +++ b/src/tools/rust-analyzer/xtask/src/publish.rs @@ -1,12 +1,12 @@ mod notes; use crate::flags; -use anyhow::{anyhow, bail, Result}; +use anyhow::bail; use std::env; use xshell::{cmd, Shell}; impl flags::PublishReleaseNotes { - pub(crate) fn run(self, sh: &Shell) -> Result<()> { + pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> { let asciidoc = sh.read_file(&self.changelog)?; let mut markdown = notes::convert_asciidoc_to_markdown(std::io::Cursor::new(&asciidoc))?; let file_name = check_file_name(self.changelog)?; @@ -24,11 +24,11 @@ impl flags::PublishReleaseNotes { } } -fn check_file_name<P: AsRef<std::path::Path>>(path: P) -> Result<String> { +fn check_file_name<P: AsRef<std::path::Path>>(path: P) -> anyhow::Result<String> { let file_name = path .as_ref() .file_name() - .ok_or_else(|| anyhow!("file name is not specified as `changelog`"))? + .ok_or_else(|| anyhow::format_err!("file name is not specified as `changelog`"))? .to_string_lossy(); let mut chars = file_name.chars(); @@ -61,7 +61,7 @@ fn create_original_changelog_url(file_name: &str) -> String { format!("https://rust-analyzer.github.io/thisweek/{year}/{month}/{day}/{stem}.html") } -fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()> { +fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> anyhow::Result<()> { let token = match env::var("GITHUB_TOKEN") { Ok(token) => token, Err(_) => bail!("Please obtain a personal access token from https://github.com/settings/tokens and set the `GITHUB_TOKEN` environment variable."), @@ -79,6 +79,7 @@ fn update_release(sh: &Shell, tag_name: &str, release_notes: &str) -> Result<()> let release_id = cmd!(sh, "jq .id").stdin(release_json).read()?; let mut patch = String::new(); + // note: the GitHub API doesn't update the target commit if the tag already exists write_json::object(&mut patch) .string("tag_name", tag_name) .string("target_commitish", "master") diff --git a/tests/codegen/aarch64-struct-align-128.rs b/tests/codegen/aarch64-struct-align-128.rs new file mode 100644 index 00000000000..bf34717786d --- /dev/null +++ b/tests/codegen/aarch64-struct-align-128.rs @@ -0,0 +1,150 @@ +// Test that structs aligned to 128 bits are passed with the correct ABI on aarch64. + +// revisions:linux darwin windows +//[linux] compile-flags: --target aarch64-unknown-linux-gnu +//[darwin] compile-flags: --target aarch64-apple-darwin +//[windows] compile-flags: --target aarch64-pc-windows-msvc +//[linux] needs-llvm-components: aarch64 +//[darwin] needs-llvm-components: aarch64 +//[windows] needs-llvm-components: aarch64 + +#![feature(no_core, lang_items)] +#![crate_type = "lib"] +#![no_core] + +#[lang="sized"] +trait Sized { } +#[lang="freeze"] +trait Freeze { } +#[lang="copy"] +trait Copy { } + + + +// Passed as `[i64 x 2]`, since it's an aggregate with size <= 128 bits, align < 128 bits. +#[repr(C)] +pub struct Align8 { + pub a: u64, + pub b: u64, +} + +// repr(transparent), so same as above. +#[repr(transparent)] +pub struct Transparent8 { + a: Align8 +} + +// Passed as `[i64 x 2]`, since it's an aggregate with size <= 128 bits, align < 128 bits. 
+#[repr(C)] +pub struct Wrapped8 { + a: Align8, +} + +extern "C" { + // linux: declare void @test_8([2 x i64], [2 x i64], [2 x i64]) + // darwin: declare void @test_8([2 x i64], [2 x i64], [2 x i64]) + // windows: declare void @test_8([2 x i64], [2 x i64], [2 x i64]) + fn test_8(a: Align8, b: Transparent8, c: Wrapped8); +} + + + +// Passed as `i128`, since it's an aggregate with size <= 128 bits, align = 128 bits. +// EXCEPT on Linux, where there's a special case to use its unadjusted alignment, +// making it the same as `Align8`, so it's be passed as `[i64 x 2]`. +#[repr(C)] +#[repr(align(16))] +pub struct Align16 { + pub a: u64, + pub b: u64, +} + +// repr(transparent), so same as above. +#[repr(transparent)] +pub struct Transparent16 { + a: Align16 +} + +// Passed as `i128`, since it's an aggregate with size <= 128 bits, align = 128 bits. +// On Linux, the "unadjustedness" doesn't recurse into fields, so this is passed as `i128`. +#[repr(C)] +pub struct Wrapped16 { + pub a: Align16, +} + +extern "C" { + // linux: declare void @test_16([2 x i64], [2 x i64], i128) + // darwin: declare void @test_16(i128, i128, i128) + // windows: declare void @test_16(i128, i128, i128) + fn test_16(a: Align16, b: Transparent16, c: Wrapped16); +} + + + +// Passed as `i128`, since it's an aggregate with size <= 128 bits, align = 128 bits. +#[repr(C)] +pub struct I128 { + pub a: i128, +} + +// repr(transparent), so same as above. +#[repr(transparent)] +pub struct TransparentI128 { + a: I128 +} + +// Passed as `i128`, since it's an aggregate with size <= 128 bits, align = 128 bits. +#[repr(C)] +pub struct WrappedI128 { + pub a: I128 +} + +extern "C" { + // linux: declare void @test_i128(i128, i128, i128) + // darwin: declare void @test_i128(i128, i128, i128) + // windows: declare void @test_i128(i128, i128, i128) + fn test_i128(a: I128, b: TransparentI128, c: WrappedI128); +} + + + +// Passed as `[2 x i64]`, since it's an aggregate with size <= 128 bits, align < 128 bits. +// Note that the Linux special case does not apply, because packing is not considered "adjustment". +#[repr(C)] +#[repr(packed)] +pub struct Packed { + pub a: i128, +} + +// repr(transparent), so same as above. +#[repr(transparent)] +pub struct TransparentPacked { + a: Packed +} + +// Passed as `[2 x i64]`, since it's an aggregate with size <= 128 bits, align < 128 bits. +#[repr(C)] +pub struct WrappedPacked { + pub a: Packed +} + +extern "C" { + // linux: declare void @test_packed([2 x i64], [2 x i64], [2 x i64]) + // darwin: declare void @test_packed([2 x i64], [2 x i64], [2 x i64]) + // windows: declare void @test_packed([2 x i64], [2 x i64], [2 x i64]) + fn test_packed(a: Packed, b: TransparentPacked, c: WrappedPacked); +} + + + +pub unsafe fn main( + a1: Align8, a2: Transparent8, a3: Wrapped8, + b1: Align16, b2: Transparent16, b3: Wrapped16, + c1: I128, c2: TransparentI128, c3: WrappedI128, + d1: Packed, d2: TransparentPacked, d3: WrappedPacked, +) { + test_8(a1, a2, a3); + test_16(b1, b2, b3); + test_i128(c1, c2, c3); + test_packed(d1, d2, d3); +} diff --git a/tests/codegen/addr-of-mutate.rs b/tests/codegen/addr-of-mutate.rs index bea1aad2352..6dfc1825015 100644 --- a/tests/codegen/addr-of-mutate.rs +++ b/tests/codegen/addr-of-mutate.rs @@ -6,7 +6,7 @@ // Test for the absence of `readonly` on the argument when it is mutated via `&raw const`. // See <https://github.com/rust-lang/rust/issues/111502>. 
-// CHECK: i8 @foo(ptr noalias nocapture noundef dereferenceable(128) %x) +// CHECK: i8 @foo(ptr noalias nocapture noundef align 1 dereferenceable(128) %x) #[no_mangle] pub fn foo(x: [u8; 128]) -> u8 { let ptr = core::ptr::addr_of!(x).cast_mut(); @@ -16,7 +16,7 @@ pub fn foo(x: [u8; 128]) -> u8 { x[0] } -// CHECK: i1 @second(ptr noalias nocapture noundef dereferenceable({{[0-9]+}}) %a_ptr_and_b) +// CHECK: i1 @second(ptr noalias nocapture noundef align {{[0-9]+}} dereferenceable({{[0-9]+}}) %a_ptr_and_b) #[no_mangle] pub unsafe fn second(a_ptr_and_b: (*mut (i32, bool), (i64, bool))) -> bool { let b_bool_ptr = core::ptr::addr_of!(a_ptr_and_b.1.1).cast_mut(); @@ -25,7 +25,7 @@ pub unsafe fn second(a_ptr_and_b: (*mut (i32, bool), (i64, bool))) -> bool { } // If going through a deref (and there are no other mutating accesses), then `readonly` is fine. -// CHECK: i1 @third(ptr noalias nocapture noundef readonly dereferenceable({{[0-9]+}}) %a_ptr_and_b) +// CHECK: i1 @third(ptr noalias nocapture noundef readonly align {{[0-9]+}} dereferenceable({{[0-9]+}}) %a_ptr_and_b) #[no_mangle] pub unsafe fn third(a_ptr_and_b: (*mut (i32, bool), (i64, bool))) -> bool { let b_bool_ptr = core::ptr::addr_of!((*a_ptr_and_b.0).1).cast_mut(); diff --git a/tests/codegen/align-byval-vector.rs b/tests/codegen/align-byval-vector.rs new file mode 100644 index 00000000000..3c8be659671 --- /dev/null +++ b/tests/codegen/align-byval-vector.rs @@ -0,0 +1,58 @@ +// revisions:x86-linux x86-darwin + +//[x86-linux] compile-flags: --target i686-unknown-linux-gnu +//[x86-linux] needs-llvm-components: x86 +//[x86-darwin] compile-flags: --target i686-apple-darwin +//[x86-darwin] needs-llvm-components: x86 + +// Tests that aggregates containing vector types get their alignment increased to 16 on Darwin. + +#![feature(no_core, lang_items, repr_simd, simd_ffi)] +#![crate_type = "lib"] +#![no_std] +#![no_core] +#![allow(non_camel_case_types)] + +#[lang = "sized"] +trait Sized {} +#[lang = "freeze"] +trait Freeze {} +#[lang = "copy"] +trait Copy {} + +#[repr(simd)] +pub struct i32x4(i32, i32, i32, i32); + +#[repr(C)] +pub struct Foo { + a: i32x4, + b: i8, +} + +// This tests that we recursively check for vector types, not just at the top level. 
+#[repr(C)] +pub struct DoubleFoo { + one: Foo, + two: Foo, +} + +extern "C" { + // x86-linux: declare void @f({{.*}}byval(%Foo) align 4{{.*}}) + // x86-darwin: declare void @f({{.*}}byval(%Foo) align 16{{.*}}) + fn f(foo: Foo); + + // x86-linux: declare void @g({{.*}}byval(%DoubleFoo) align 4{{.*}}) + // x86-darwin: declare void @g({{.*}}byval(%DoubleFoo) align 16{{.*}}) + fn g(foo: DoubleFoo); +} + +pub fn main() { + unsafe { f(Foo { a: i32x4(1, 2, 3, 4), b: 0 }) } + + unsafe { + g(DoubleFoo { + one: Foo { a: i32x4(1, 2, 3, 4), b: 0 }, + two: Foo { a: i32x4(1, 2, 3, 4), b: 0 }, + }) + } +} diff --git a/tests/codegen/align-byval.rs b/tests/codegen/align-byval.rs new file mode 100644 index 00000000000..e2446e02ef4 --- /dev/null +++ b/tests/codegen/align-byval.rs @@ -0,0 +1,342 @@ +// ignore-tidy-linelength +// revisions:m68k wasm x86_64-linux x86_64-windows i686-linux i686-windows + +//[m68k] compile-flags: --target m68k-unknown-linux-gnu +//[m68k] needs-llvm-components: m68k +//[wasm] compile-flags: --target wasm32-unknown-emscripten +//[wasm] needs-llvm-components: webassembly +//[x86_64-linux] compile-flags: --target x86_64-unknown-linux-gnu +//[x86_64-linux] needs-llvm-components: x86 +//[x86_64-windows] compile-flags: --target x86_64-pc-windows-msvc +//[x86_64-windows] needs-llvm-components: x86 +//[i686-linux] compile-flags: --target i686-unknown-linux-gnu +//[i686-linux] needs-llvm-components: x86 +//[i686-windows] compile-flags: --target i686-pc-windows-msvc +//[i686-windows] needs-llvm-components: x86 + +// Tests that `byval` alignment is properly specified (#80127). +// The only targets that use `byval` are m68k, wasm, x86-64, and x86. +// Note also that Windows mandates a by-ref ABI here, so it does not use byval. + +#![feature(no_core, lang_items)] +#![crate_type = "lib"] +#![no_std] +#![no_core] + +#[lang="sized"] trait Sized { } +#[lang="freeze"] trait Freeze { } +#[lang="copy"] trait Copy { } + +impl Copy for i32 {} +impl Copy for i64 {} + +// This struct can be represented as a pair, so it exercises the OperandValue::Pair +// codepath in `codegen_argument`. +#[repr(C)] +pub struct NaturalAlign1 { + a: i8, + b: i8, +} + +// This struct cannot be represented as an immediate, so it exercises the OperandValue::Ref +// codepath in `codegen_argument`. +#[repr(C)] +pub struct NaturalAlign2 { + a: [i16; 16], + b: i16, +} + +#[repr(C)] +#[repr(align(4))] +pub struct ForceAlign4 { + a: [i8; 16], + b: i8, +} + +// On i686-windows, this is passed on stack using `byval` +#[repr(C)] +pub struct NaturalAlign8 { + a: i64, + b: i64, + c: i64 +} + +// On i686-windows, this is passed by reference (because alignment is >4 and requested/forced), +// even though it has the exact same layout as `NaturalAlign8`! +#[repr(C)] +#[repr(align(8))] +pub struct ForceAlign8 { + a: i64, + b: i64, + c: i64 +} + +// On i686-windows, this is passed on stack, because requested alignment is <=4. +#[repr(C)] +#[repr(align(4))] +pub struct LowerFA8 { + a: i64, + b: i64, + c: i64 +} + +// On i686-windows, this is passed by reference, because it contains a field with +// requested/forced alignment. +#[repr(C)] +pub struct WrappedFA8 { + a: ForceAlign8 +} + +// On i686-windows, this has the same ABI as ForceAlign8, i.e. passed by reference. 
+#[repr(transparent)] +pub struct TransparentFA8 { + _0: (), + a: ForceAlign8 +} + +#[repr(C)] +#[repr(align(16))] +pub struct ForceAlign16 { + a: [i32; 16], + b: i8 +} + +// CHECK-LABEL: @call_na1 +#[no_mangle] +pub unsafe fn call_na1(x: NaturalAlign1) { + // CHECK: start: + + // m68k: [[ALLOCA:%[a-z0-9+]]] = alloca { i8, i8 }, align 1 + // m68k: call void @natural_align_1({{.*}}byval({ i8, i8 }) align 1{{.*}} [[ALLOCA]]) + + // wasm: [[ALLOCA:%[a-z0-9+]]] = alloca { i8, i8 }, align 1 + // wasm: call void @natural_align_1({{.*}}byval({ i8, i8 }) align 1{{.*}} [[ALLOCA]]) + + // x86_64-linux: call void @natural_align_1(i16 + + // x86_64-windows: call void @natural_align_1(i16 + + // i686-linux: [[ALLOCA:%[a-z0-9+]]] = alloca { i8, i8 }, align 4 + // i686-linux: call void @natural_align_1({{.*}}byval({ i8, i8 }) align 4{{.*}} [[ALLOCA]]) + + // i686-windows: [[ALLOCA:%[a-z0-9+]]] = alloca { i8, i8 }, align 4 + // i686-windows: call void @natural_align_1({{.*}}byval({ i8, i8 }) align 4{{.*}} [[ALLOCA]]) + natural_align_1(x); +} + +// CHECK-LABEL: @call_na2 +#[no_mangle] +pub unsafe fn call_na2(x: NaturalAlign2) { + // CHECK: start: + + // m68k-NEXT: call void @natural_align_2 + // wasm-NEXT: call void @natural_align_2 + // x86_64-linux-NEXT: call void @natural_align_2 + // x86_64-windows-NEXT: call void @natural_align_2 + + // i686-linux: [[ALLOCA:%[0-9]+]] = alloca %NaturalAlign2, align 4 + // i686-linux: call void @natural_align_2({{.*}}byval(%NaturalAlign2) align 4{{.*}} [[ALLOCA]]) + + // i686-windows: [[ALLOCA:%[0-9]+]] = alloca %NaturalAlign2, align 4 + // i686-windows: call void @natural_align_2({{.*}}byval(%NaturalAlign2) align 4{{.*}} [[ALLOCA]]) + natural_align_2(x); +} + +// CHECK-LABEL: @call_fa4 +#[no_mangle] +pub unsafe fn call_fa4(x: ForceAlign4) { + // CHECK: start: + // CHECK-NEXT: call void @force_align_4 + force_align_4(x); +} + +// CHECK-LABEL: @call_na8 +#[no_mangle] +pub unsafe fn call_na8(x: NaturalAlign8) { + // CHECK: start: + // CHECK-NEXT: call void @natural_align_8 + natural_align_8(x); +} + +// CHECK-LABEL: @call_fa8 +#[no_mangle] +pub unsafe fn call_fa8(x: ForceAlign8) { + // CHECK: start: + // CHECK-NEXT: call void @force_align_8 + force_align_8(x); +} + +// CHECK-LABEL: @call_lfa8 +#[no_mangle] +pub unsafe fn call_lfa8(x: LowerFA8) { + // CHECK: start: + // CHECK-NEXT: call void @lower_fa8 + lower_fa8(x); +} + +// CHECK-LABEL: @call_wfa8 +#[no_mangle] +pub unsafe fn call_wfa8(x: WrappedFA8) { + // CHECK: start: + // CHECK-NEXT: call void @wrapped_fa8 + wrapped_fa8(x); +} + +// CHECK-LABEL: @call_tfa8 +#[no_mangle] +pub unsafe fn call_tfa8(x: TransparentFA8) { + // CHECK: start: + // CHECK-NEXT: call void @transparent_fa8 + transparent_fa8(x); +} + +// CHECK-LABEL: @call_fa16 +#[no_mangle] +pub unsafe fn call_fa16(x: ForceAlign16) { + // CHECK: start: + // CHECK-NEXT: call void @force_align_16 + force_align_16(x); +} + +extern "C" { + // m68k: declare void @natural_align_1({{.*}}byval({ i8, i8 }) align 1{{.*}}) + + // wasm: declare void @natural_align_1({{.*}}byval({ i8, i8 }) align 1{{.*}}) + + // x86_64-linux: declare void @natural_align_1(i16) + + // x86_64-windows: declare void @natural_align_1(i16) + + // i686-linux: declare void @natural_align_1({{.*}}byval({ i8, i8 }) align 4{{.*}}) + + // i686-windows: declare void @natural_align_1({{.*}}byval({ i8, i8 }) align 4{{.*}}) + fn natural_align_1(x: NaturalAlign1); + + // m68k: declare void @natural_align_2({{.*}}byval(%NaturalAlign2) align 2{{.*}}) + + // wasm: declare void 
@natural_align_2({{.*}}byval(%NaturalAlign2) align 2{{.*}}) + + // x86_64-linux: declare void @natural_align_2({{.*}}byval(%NaturalAlign2) align 2{{.*}}) + + // x86_64-windows: declare void @natural_align_2( + // x86_64-windows-NOT: byval + // x86_64-windows-SAME: align 2{{.*}}) + + // i686-linux: declare void @natural_align_2({{.*}}byval(%NaturalAlign2) align 4{{.*}}) + + // i686-windows: declare void @natural_align_2({{.*}}byval(%NaturalAlign2) align 4{{.*}}) + fn natural_align_2(x: NaturalAlign2); + + // m68k: declare void @force_align_4({{.*}}byval(%ForceAlign4) align 4{{.*}}) + + // wasm: declare void @force_align_4({{.*}}byval(%ForceAlign4) align 4{{.*}}) + + // x86_64-linux: declare void @force_align_4({{.*}}byval(%ForceAlign4) align 4{{.*}}) + + // x86_64-windows: declare void @force_align_4( + // x86_64-windows-NOT: byval + // x86_64-windows-SAME: align 4{{.*}}) + + // i686-linux: declare void @force_align_4({{.*}}byval(%ForceAlign4) align 4{{.*}}) + + // i686-windows: declare void @force_align_4({{.*}}byval(%ForceAlign4) align 4{{.*}}) + fn force_align_4(x: ForceAlign4); + + // m68k: declare void @natural_align_8({{.*}}byval(%NaturalAlign8) align 4{{.*}}) + + // wasm: declare void @natural_align_8({{.*}}byval(%NaturalAlign8) align 8{{.*}}) + + // x86_64-linux: declare void @natural_align_8({{.*}}byval(%NaturalAlign8) align 8{{.*}}) + + // x86_64-windows: declare void @natural_align_8( + // x86_64-windows-NOT: byval + // x86_64-windows-SAME: align 8{{.*}}) + + // i686-linux: declare void @natural_align_8({{.*}}byval(%NaturalAlign8) align 4{{.*}}) + + // i686-windows: declare void @natural_align_8({{.*}}byval(%NaturalAlign8) align 4{{.*}}) + fn natural_align_8(x: NaturalAlign8); + + // m68k: declare void @force_align_8({{.*}}byval(%ForceAlign8) align 8{{.*}}) + + // wasm: declare void @force_align_8({{.*}}byval(%ForceAlign8) align 8{{.*}}) + + // x86_64-linux: declare void @force_align_8({{.*}}byval(%ForceAlign8) align 8{{.*}}) + + // x86_64-windows: declare void @force_align_8( + // x86_64-windows-NOT: byval + // x86_64-windows-SAME: align 8{{.*}}) + + // i686-linux: declare void @force_align_8({{.*}}byval(%ForceAlign8) align 4{{.*}}) + + // i686-windows: declare void @force_align_8( + // i686-windows-NOT: byval + // i686-windows-SAME: align 8{{.*}}) + fn force_align_8(x: ForceAlign8); + + // m68k: declare void @lower_fa8({{.*}}byval(%LowerFA8) align 4{{.*}}) + + // wasm: declare void @lower_fa8({{.*}}byval(%LowerFA8) align 8{{.*}}) + + // x86_64-linux: declare void @lower_fa8({{.*}}byval(%LowerFA8) align 8{{.*}}) + + // x86_64-windows: declare void @lower_fa8( + // x86_64-windows-NOT: byval + // x86_64-windows-SAME: align 8{{.*}}) + + // i686-linux: declare void @lower_fa8({{.*}}byval(%LowerFA8) align 4{{.*}}) + + // i686-windows: declare void @lower_fa8({{.*}}byval(%LowerFA8) align 4{{.*}}) + fn lower_fa8(x: LowerFA8); + + // m68k: declare void @wrapped_fa8({{.*}}byval(%WrappedFA8) align 8{{.*}}) + + // wasm: declare void @wrapped_fa8({{.*}}byval(%WrappedFA8) align 8{{.*}}) + + // x86_64-linux: declare void @wrapped_fa8({{.*}}byval(%WrappedFA8) align 8{{.*}}) + + // x86_64-windows: declare void @wrapped_fa8( + // x86_64-windows-NOT: byval + // x86_64-windows-SAME: align 8{{.*}}) + + // i686-linux: declare void @wrapped_fa8({{.*}}byval(%WrappedFA8) align 4{{.*}}) + + // i686-windows: declare void @wrapped_fa8( + // i686-windows-NOT: byval + // i686-windows-SAME: align 8{{.*}}) + fn wrapped_fa8(x: WrappedFA8); + + // m68k: declare void @transparent_fa8({{.*}}byval(%TransparentFA8) 
align 8{{.*}}) + + // wasm: declare void @transparent_fa8({{.*}}byval(%TransparentFA8) align 8{{.*}}) + + // x86_64-linux: declare void @transparent_fa8({{.*}}byval(%TransparentFA8) align 8{{.*}}) + + // x86_64-windows: declare void @transparent_fa8( + // x86_64-windows-NOT: byval + // x86_64-windows-SAME: align 8{{.*}}) + + // i686-linux: declare void @transparent_fa8({{.*}}byval(%TransparentFA8) align 4{{.*}}) + + // i686-windows: declare void @transparent_fa8( + // i686-windows-NOT: byval + // i686-windows-SAME: align 8{{.*}}) + fn transparent_fa8(x: TransparentFA8); + + // m68k: declare void @force_align_16({{.*}}byval(%ForceAlign16) align 16{{.*}}) + + // wasm: declare void @force_align_16({{.*}}byval(%ForceAlign16) align 16{{.*}}) + + // x86_64-linux: declare void @force_align_16({{.*}}byval(%ForceAlign16) align 16{{.*}}) + + // x86_64-windows: declare void @force_align_16( + // x86_64-windows-NOT: byval + // x86_64-windows-SAME: align 16{{.*}}) + + // i686-linux: declare void @force_align_16({{.*}}byval(%ForceAlign16) align 4{{.*}}) + + // i686-windows: declare void @force_align_16( + // i686-windows-NOT: byval + // i686-windows-SAME: align 16{{.*}}) + fn force_align_16(x: ForceAlign16); +} diff --git a/tests/codegen/array-map.rs b/tests/codegen/array-map.rs index 24f3f43d078..4d218e6a951 100644 --- a/tests/codegen/array-map.rs +++ b/tests/codegen/array-map.rs @@ -30,7 +30,6 @@ pub fn short_integer_map(x: [u32; 8]) -> [u32; 8] { pub fn long_integer_map(x: [u32; 512]) -> [u32; 512] { // CHECK: start: // CHECK-NEXT: alloca [512 x i32] - // CHECK-NEXT: alloca %"core::mem::manually_drop::ManuallyDrop<[u32; 512]>" // CHECK-NOT: alloca // CHECK: mul <{{[0-9]+}} x i32> // CHECK: add <{{[0-9]+}} x i32> diff --git a/tests/codegen/function-arguments-noopt.rs b/tests/codegen/function-arguments-noopt.rs index 35f31eba3b1..f99cc8fb415 100644 --- a/tests/codegen/function-arguments-noopt.rs +++ b/tests/codegen/function-arguments-noopt.rs @@ -42,7 +42,7 @@ pub fn borrow_call(x: &i32, f: fn(&i32) -> &i32) -> &i32 { f(x) } -// CHECK: void @struct_({{%S\*|ptr}} sret(%S){{( %_0)?}}, {{%S\*|ptr}} %x) +// CHECK: void @struct_({{%S\*|ptr}} sret(%S) align 4{{( %_0)?}}, {{%S\*|ptr}} align 4 %x) #[no_mangle] pub fn struct_(x: S) -> S { x @@ -51,7 +51,7 @@ pub fn struct_(x: S) -> S { // CHECK-LABEL: @struct_call #[no_mangle] pub fn struct_call(x: S, f: fn(S) -> S) -> S { - // CHECK: call void %f({{%S\*|ptr}} sret(%S){{( %_0)?}}, {{%S\*|ptr}} %{{.+}}) + // CHECK: call void %f({{%S\*|ptr}} sret(%S) align 4{{( %_0)?}}, {{%S\*|ptr}} align 4 %{{.+}}) f(x) } diff --git a/tests/codegen/function-arguments.rs b/tests/codegen/function-arguments.rs index ccf4a5de327..2f047f10311 100644 --- a/tests/codegen/function-arguments.rs +++ b/tests/codegen/function-arguments.rs @@ -142,7 +142,7 @@ pub fn mutable_notunpin_borrow(_: &mut NotUnpin) { pub fn notunpin_borrow(_: &NotUnpin) { } -// CHECK: @indirect_struct({{%S\*|ptr}} noalias nocapture noundef readonly dereferenceable(32) %_1) +// CHECK: @indirect_struct({{%S\*|ptr}} noalias nocapture noundef readonly align 4 dereferenceable(32) %_1) #[no_mangle] pub fn indirect_struct(_: S) { } @@ -188,7 +188,7 @@ pub fn notunpin_box(x: Box<NotUnpin>) -> Box<NotUnpin> { x } -// CHECK: @struct_return({{%S\*|ptr}} noalias nocapture noundef sret(%S) dereferenceable(32){{( %_0)?}}) +// CHECK: @struct_return({{%S\*|ptr}} noalias nocapture noundef sret(%S) align 4 dereferenceable(32){{( %_0)?}}) #[no_mangle] pub fn struct_return() -> S { S { diff --git a/tests/debuginfo/rc_arc.rs 
b/tests/debuginfo/rc_arc.rs index 5d5492d7217..1d07bd76eab 100644 --- a/tests/debuginfo/rc_arc.rs +++ b/tests/debuginfo/rc_arc.rs @@ -27,37 +27,37 @@ // cdb-command:g // cdb-command:dx rc,d -// cdb-check:rc,d : 111 [Type: alloc::rc::Rc<i32>] +// cdb-check:rc,d : 111 [Type: alloc::rc::Rc<i32,alloc::alloc::Global>] // cdb-check: [Reference count] : 11 [Type: core::cell::Cell<usize>] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell<usize>] // cdb-command:dx weak_rc,d -// cdb-check:weak_rc,d : 111 [Type: alloc::rc::Weak<i32>] +// cdb-check:weak_rc,d : 111 [Type: alloc::rc::Weak<i32,alloc::alloc::Global>] // cdb-check: [Reference count] : 11 [Type: core::cell::Cell<usize>] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell<usize>] // cdb-command:dx arc,d -// cdb-check:arc,d : 222 [Type: alloc::sync::Arc<i32>] +// cdb-check:arc,d : 222 [Type: alloc::sync::Arc<i32,alloc::alloc::Global>] // cdb-check: [Reference count] : 21 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] // cdb-command:dx weak_arc,d -// cdb-check:weak_arc,d : 222 [Type: alloc::sync::Weak<i32>] +// cdb-check:weak_arc,d : 222 [Type: alloc::sync::Weak<i32,alloc::alloc::Global>] // cdb-check: [Reference count] : 21 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] // cdb-command:dx dyn_rc,d -// cdb-check:dyn_rc,d [Type: alloc::rc::Rc<dyn$<core::fmt::Debug> >] +// cdb-check:dyn_rc,d [Type: alloc::rc::Rc<dyn$<core::fmt::Debug>,alloc::alloc::Global>] // cdb-check: [Reference count] : 31 [Type: core::cell::Cell<usize>] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell<usize>] // cdb-command:dx dyn_rc_weak,d -// cdb-check:dyn_rc_weak,d [Type: alloc::rc::Weak<dyn$<core::fmt::Debug> >] +// cdb-check:dyn_rc_weak,d [Type: alloc::rc::Weak<dyn$<core::fmt::Debug>,alloc::alloc::Global>] // cdb-check: [Reference count] : 31 [Type: core::cell::Cell<usize>] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell<usize>] // cdb-command:dx slice_rc,d -// cdb-check:slice_rc,d : { len=3 } [Type: alloc::rc::Rc<slice2$<u32> >] +// cdb-check:slice_rc,d : { len=3 } [Type: alloc::rc::Rc<slice2$<u32>,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] // cdb-check: [Reference count] : 41 [Type: core::cell::Cell<usize>] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell<usize>] @@ -66,7 +66,7 @@ // cdb-check: [2] : 3 [Type: u32] // cdb-command:dx slice_rc_weak,d -// cdb-check:slice_rc_weak,d : { len=3 } [Type: alloc::rc::Weak<slice2$<u32> >] +// cdb-check:slice_rc_weak,d : { len=3 } [Type: alloc::rc::Weak<slice2$<u32>,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] // cdb-check: [Reference count] : 41 [Type: core::cell::Cell<usize>] // cdb-check: [Weak reference count] : 2 [Type: core::cell::Cell<usize>] @@ -75,17 +75,17 @@ // cdb-check: [2] : 3 [Type: u32] // cdb-command:dx dyn_arc,d -// cdb-check:dyn_arc,d [Type: alloc::sync::Arc<dyn$<core::fmt::Debug> >] +// cdb-check:dyn_arc,d [Type: alloc::sync::Arc<dyn$<core::fmt::Debug>,alloc::alloc::Global>] // cdb-check: [Reference count] : 51 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] // cdb-command:dx dyn_arc_weak,d -// cdb-check:dyn_arc_weak,d [Type: alloc::sync::Weak<dyn$<core::fmt::Debug> >] +// cdb-check:dyn_arc_weak,d [Type: alloc::sync::Weak<dyn$<core::fmt::Debug>,alloc::alloc::Global>] // 
cdb-check: [Reference count] : 51 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] // cdb-command:dx slice_arc,d -// cdb-check:slice_arc,d : { len=3 } [Type: alloc::sync::Arc<slice2$<u32> >] +// cdb-check:slice_arc,d : { len=3 } [Type: alloc::sync::Arc<slice2$<u32>,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] // cdb-check: [Reference count] : 61 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] @@ -94,7 +94,7 @@ // cdb-check: [2] : 6 [Type: u32] // cdb-command:dx slice_arc_weak,d -// cdb-check:slice_arc_weak,d : { len=3 } [Type: alloc::sync::Weak<slice2$<u32> >] +// cdb-check:slice_arc_weak,d : { len=3 } [Type: alloc::sync::Weak<slice2$<u32>,alloc::alloc::Global>] // cdb-check: [Length] : 3 [Type: [...]] // cdb-check: [Reference count] : 61 [Type: core::sync::atomic::AtomicUsize] // cdb-check: [Weak reference count] : 2 [Type: core::sync::atomic::AtomicUsize] diff --git a/tests/debuginfo/thread.rs b/tests/debuginfo/thread.rs index e7e83c7aacd..5516f4fec3e 100644 --- a/tests/debuginfo/thread.rs +++ b/tests/debuginfo/thread.rs @@ -14,7 +14,7 @@ // // cdb-command:dx t,d // cdb-check:t,d : [...] [Type: std::thread::Thread *] -// cdb-check:[...] inner [...][Type: core::pin::Pin<alloc::sync::Arc<std::thread::Inner> >] +// cdb-check:[...] inner [...][Type: core::pin::Pin<alloc::sync::Arc<std::thread::Inner,alloc::alloc::Global> >] use std::thread; diff --git a/tests/run-make-fulldeps/issue-19371/foo.rs b/tests/run-make-fulldeps/issue-19371/foo.rs index 9cca6200050..d4959247d1c 100644 --- a/tests/run-make-fulldeps/issue-19371/foo.rs +++ b/tests/run-make-fulldeps/issue-19371/foo.rs @@ -68,6 +68,6 @@ fn compile(code: String, output: PathBuf, sysroot: PathBuf) { let ongoing_codegen = queries.ongoing_codegen()?; queries.linker(ongoing_codegen) }); - linker.unwrap().link(); + linker.unwrap().link().unwrap(); }); } diff --git a/tests/run-make/extern-fn-explicit-align/Makefile b/tests/run-make/extern-fn-explicit-align/Makefile new file mode 100644 index 00000000000..3cbbf383996 --- /dev/null +++ b/tests/run-make/extern-fn-explicit-align/Makefile @@ -0,0 +1,6 @@ +# ignore-cross-compile +include ../tools.mk + +all: $(call NATIVE_STATICLIB,test) + $(RUSTC) test.rs + $(call RUN,test) || exit 1 diff --git a/tests/run-make/extern-fn-explicit-align/test.c b/tests/run-make/extern-fn-explicit-align/test.c new file mode 100644 index 00000000000..a3db3442aaf --- /dev/null +++ b/tests/run-make/extern-fn-explicit-align/test.c @@ -0,0 +1,93 @@ +#include <assert.h> +#include <stdbool.h> +#include <stdint.h> +#include <string.h> + +struct BoolAndU32 +{ + bool a; + uint32_t b; +}; + +#ifdef _MSC_VER +__declspec(align(16)) +struct TwoU64s +{ + uint64_t a; + uint64_t b; +}; +#else +struct __attribute__((aligned(16))) TwoU64s +{ + uint64_t a; + uint64_t b; +}; +#endif + +struct WrappedU64s +{ + struct TwoU64s a; +}; + +#ifdef _MSC_VER +__declspec(align(1)) +struct LowerAlign +{ + uint64_t a; + uint64_t b; +}; +#else +struct __attribute__((aligned(1))) LowerAlign +{ + uint64_t a; + uint64_t b; +}; +#endif + +#pragma pack(push, 1) +struct Packed +{ + uint64_t a; + uint64_t b; +}; +#pragma pack(pop) + +int32_t many_args( + void *a, + void *b, + const char *c, + uint64_t d, + bool e, + struct BoolAndU32 f, + void *g, + struct TwoU64s h, + void *i, + struct WrappedU64s j, + void *k, + struct LowerAlign l, + void *m, + struct Packed n, + const char *o) +{ + assert(!a); + 
assert(!b); + assert(!c); + assert(d == 42); + assert(e); + assert(f.a); + assert(f.b == 1337); + assert(!g); + assert(h.a == 1); + assert(h.b == 2); + assert(!i); + assert(j.a.a == 3); + assert(j.a.b == 4); + assert(!k); + assert(l.a == 5); + assert(l.b == 6); + assert(!m); + assert(n.a == 7); + assert(n.b == 8); + assert(strcmp(o, "Hello world") == 0); + return 0; +} diff --git a/tests/run-make/extern-fn-explicit-align/test.rs b/tests/run-make/extern-fn-explicit-align/test.rs new file mode 100644 index 00000000000..846622de3cd --- /dev/null +++ b/tests/run-make/extern-fn-explicit-align/test.rs @@ -0,0 +1,89 @@ +// Issue #80127: Passing structs via FFI should work with explicit alignment. + +use std::ffi::{CStr, c_char}; +use std::ptr::null_mut; + +#[repr(C)] +pub struct BoolAndU32 { + pub a: bool, + pub b: u32, +} + +#[repr(C)] +#[repr(align(16))] +pub struct TwoU64s { + pub a: u64, + pub b: u64, +} + +#[repr(C)] +pub struct WrappedU64s { + pub a: TwoU64s +} + +#[repr(C)] +// Even though requesting align 1 can never change the alignment, it still affects the ABI +// on some platforms like i686-windows. +#[repr(align(1))] +pub struct LowerAlign { + pub a: u64, + pub b: u64, +} + +#[repr(C)] +#[repr(packed)] +pub struct Packed { + pub a: u64, + pub b: u64, +} + +#[link(name = "test", kind = "static")] +extern "C" { + fn many_args( + a: *mut (), + b: *mut (), + c: *const c_char, + d: u64, + e: bool, + f: BoolAndU32, + g: *mut (), + h: TwoU64s, + i: *mut (), + j: WrappedU64s, + k: *mut (), + l: LowerAlign, + m: *mut (), + n: Packed, + o: *const c_char, + ) -> i32; +} + +const STRING: &CStr = unsafe { CStr::from_bytes_with_nul_unchecked(b"Hello world\0") }; + +fn main() { + let bool_and_u32 = BoolAndU32 { a: true, b: 1337 }; + let two_u64s = TwoU64s { a: 1, b: 2 }; + let wrapped = WrappedU64s { a: TwoU64s { a: 3, b: 4 } }; + let lower = LowerAlign { a: 5, b: 6 }; + let packed = Packed { a: 7, b: 8 }; + let string = STRING; + unsafe { + many_args( + null_mut(), + null_mut(), + null_mut(), + 42, + true, + bool_and_u32, + null_mut(), + two_u64s, + null_mut(), + wrapped, + null_mut(), + lower, + null_mut(), + packed, + string.as_ptr(), + ); + } +} diff --git a/tests/run-make/extern-fn-struct-passing-abi/test.c b/tests/run-make/extern-fn-struct-passing-abi/test.c index 136b07129e1..2cff776d86c 100644 --- a/tests/run-make/extern-fn-struct-passing-abi/test.c +++ b/tests/run-make/extern-fn-struct-passing-abi/test.c @@ -28,6 +28,14 @@ struct Huge { int32_t e; }; +struct Huge64 { + int64_t a; + int64_t b; + int64_t c; + int64_t d; + int64_t e; +}; + struct FloatPoint { double x; double y; @@ -152,6 +160,21 @@ void byval_rect_with_many_huge(struct Huge a, struct Huge b, struct Huge c, assert(g.d == 420); } +// System V x86_64 ABI: +// a, b, d, e, f should be byval pointer (on the stack) +// g passed via register (fixes #41375) +// +// i686-windows ABI: +// a, b, d, e, f, g should be byval pointer +void byval_rect_with_many_huge64(struct Huge64 a, struct Huge64 b, struct Huge64 c, + struct Huge64 d, struct Huge64 e, struct Huge64 f, + struct Rect g) { + assert(g.a == 1234); + assert(g.b == 4567); + assert(g.c == 7890); + assert(g.d == 4209); +} + // System V x86_64 & Win64 ABI: // a, b should be in registers // s should be split across 2 integer registers @@ -279,6 +302,19 @@ struct Huge huge_struct(struct Huge s) { return s; } +// System V x86_64 & i686-windows ABI: +// s should be byval pointer +// return should in a hidden sret pointer +struct Huge64 huge64_struct(struct Huge64 s) { + assert(s.a == 
1234); + assert(s.b == 1335); + assert(s.c == 1436); + assert(s.d == 1537); + assert(s.e == 1638); + + return s; +} + // System V x86_64 ABI: // p should be in registers // return should be in registers diff --git a/tests/run-make/extern-fn-struct-passing-abi/test.rs b/tests/run-make/extern-fn-struct-passing-abi/test.rs index afe0f52ef0b..99e079f98a8 100644 --- a/tests/run-make/extern-fn-struct-passing-abi/test.rs +++ b/tests/run-make/extern-fn-struct-passing-abi/test.rs @@ -38,6 +38,16 @@ struct Huge { #[derive(Clone, Copy, Debug, PartialEq)] #[repr(C)] +struct Huge64 { + a: i64, + b: i64, + c: i64, + d: i64, + e: i64, +} + +#[derive(Clone, Copy, Debug, PartialEq)] +#[repr(C)] struct FloatPoint { x: f64, y: f64, @@ -79,6 +89,12 @@ extern "C" { fn byval_rect_with_many_huge(a: Huge, b: Huge, c: Huge, d: Huge, e: Huge, f: Huge, g: Rect); + fn byval_rect_with_many_huge64( + a: Huge64, b: Huge64, c: Huge64, + d: Huge64, e: Huge64, f: Huge64, + g: Rect, + ); + fn split_rect(a: i32, b: i32, s: Rect); fn split_rect_floats(a: f32, b: f32, s: FloatRect); @@ -95,6 +111,8 @@ extern "C" { fn huge_struct(s: Huge) -> Huge; + fn huge64_struct(s: Huge64) -> Huge64; + fn float_point(p: FloatPoint) -> FloatPoint; fn float_one(f: FloatOne) -> FloatOne; @@ -107,6 +125,7 @@ fn main() { let t = BiggerRect { s: s, a: 27834, b: 7657 }; let u = FloatRect { a: 3489, b: 3490, c: 8. }; let v = Huge { a: 5647, b: 5648, c: 5649, d: 5650, e: 5651 }; + let w = Huge64 { a: 1234, b: 1335, c: 1436, d: 1537, e: 1638 }; let p = FloatPoint { x: 5., y: -3. }; let f1 = FloatOne { x: 7. }; let i = IntOdd { a: 1, b: 2, c: 3 }; @@ -117,12 +136,14 @@ fn main() { byval_rect_floats(1., 2., 3., 4., 5., 6., 7., s, u); byval_rect_with_float(1, 2, 3.0, 4, 5, 6, s); byval_rect_with_many_huge(v, v, v, v, v, v, Rect { a: 123, b: 456, c: 789, d: 420 }); + byval_rect_with_many_huge64(w, w, w, w, w, w, Rect { a: 1234, b: 4567, c: 7890, d: 4209 }); split_rect(1, 2, s); split_rect_floats(1., 2., u); split_rect_with_floats(1, 2, 3.0, 4, 5.0, 6, s); split_and_byval_rect(1, 2, 3, s, s); split_rect(1, 2, s); assert_eq!(huge_struct(v), v); + assert_eq!(huge64_struct(w), w); assert_eq!(split_ret_byval_struct(1, 2, s), s); assert_eq!(sret_byval_struct(1, 2, 3, 4, s), t); assert_eq!(sret_split_struct(1, 2, s), t); diff --git a/tests/run-make/print-native-static-libs/Makefile b/tests/run-make/print-native-static-libs/Makefile index 98e72d7696f..a16c8b0f2a4 100644 --- a/tests/run-make/print-native-static-libs/Makefile +++ b/tests/run-make/print-native-static-libs/Makefile @@ -5,7 +5,7 @@ include ../tools.mk all: $(RUSTC) --crate-type rlib -lbar_cli bar.rs - $(RUSTC) foo.rs -lfoo_cli --crate-type staticlib --print native-static-libs 2>&1 \ + $(RUSTC) foo.rs -lfoo_cli -lfoo_cli --crate-type staticlib --print native-static-libs 2>&1 \ | grep 'note: native-static-libs: ' \ | sed 's/note: native-static-libs: \(.*\)/\1/' > $(TMPDIR)/libs.txt @@ -13,3 +13,7 @@ all: cat $(TMPDIR)/libs.txt | grep -F "systemd" # in foo.rs cat $(TMPDIR)/libs.txt | grep -F "bar_cli" cat $(TMPDIR)/libs.txt | grep -F "foo_cli" + + # make sure that foo_cli and glib-2.0 are not consecutively present + cat $(TMPDIR)/libs.txt | grep -Fv "foo_cli -lfoo_cli" + cat $(TMPDIR)/libs.txt | grep -Fv "glib-2.0 -lglib-2.0" diff --git a/tests/run-make/print-native-static-libs/bar.rs b/tests/run-make/print-native-static-libs/bar.rs index a563bbc2a22..cd9c1c453e5 100644 --- a/tests/run-make/print-native-static-libs/bar.rs +++ b/tests/run-make/print-native-static-libs/bar.rs @@ -3,6 +3,7 @@ pub extern 
"C" fn my_bar_add(left: i32, right: i32) -> i32 { // Obviously makes no sense but... unsafe { g_free(std::ptr::null_mut()); + g_free2(std::ptr::null_mut()); } left + right } @@ -11,3 +12,8 @@ pub extern "C" fn my_bar_add(left: i32, right: i32) -> i32 { extern "C" { fn g_free(p: *mut ()); } + +#[link(name = "glib-2.0")] +extern "C" { + fn g_free2(p: *mut ()); +} diff --git a/tests/rustdoc-gui/anchors.goml b/tests/rustdoc-gui/anchors.goml index e9b77296917..30b83f0da38 100644 --- a/tests/rustdoc-gui/anchors.goml +++ b/tests/rustdoc-gui/anchors.goml @@ -17,34 +17,34 @@ define-function: ( assert-css: (".main-heading h1 a:nth-of-type(1)", {"color": |main_heading_color|}) assert-css: (".main-heading a:nth-of-type(2)", {"color": |main_heading_type_color|}) assert-css: ( - ".rightside .srclink", + ".rightside a.src", {"color": |src_link_color|, "text-decoration": "none solid " + |src_link_color|}, ALL, ) compare-elements-css: ( - ".rightside .srclink", - ".rightside.srclink", + ".rightside a.src", + "a.rightside.src", ["color", "text-decoration"], ) compare-elements-css: ( - ".main-heading .srclink", - ".rightside.srclink", + ".main-heading a.src", + "a.rightside.src", ["color", "text-decoration"], ) - move-cursor-to: ".main-heading .srclink" + move-cursor-to: ".main-heading a.src" assert-css: ( - ".main-heading .srclink", + ".main-heading a.src", {"color": |src_link_color|, "text-decoration": "underline solid " + |src_link_color|}, ) - move-cursor-to: ".impl-items .rightside .srclink" + move-cursor-to: ".impl-items .rightside a.src" assert-css: ( - ".impl-items .rightside .srclink", + ".impl-items .rightside a.src", {"color": |src_link_color|, "text-decoration": "none solid " + |src_link_color|}, ) - move-cursor-to: ".impl-items .rightside.srclink" + move-cursor-to: ".impl-items a.rightside.src" assert-css: ( - ".impl-items .rightside.srclink", + ".impl-items a.rightside.src", {"color": |src_link_color|, "text-decoration": "none solid " + |src_link_color|}, ) diff --git a/tests/rustdoc-gui/basic-code.goml b/tests/rustdoc-gui/basic-code.goml index e372f711974..22ac5316184 100644 --- a/tests/rustdoc-gui/basic-code.goml +++ b/tests/rustdoc-gui/basic-code.goml @@ -1,6 +1,6 @@ // Small test to ensure the "src-line-numbers" element is only present once on // the page. go-to: "file://" + |DOC_PATH| + "/test_docs/index.html" -click: ".srclink" +click: "a.src" wait-for: ".src-line-numbers" assert-count: (".src-line-numbers", 1) diff --git a/tests/rustdoc-gui/code-sidebar-toggle.goml b/tests/rustdoc-gui/code-sidebar-toggle.goml index 28c3712f307..d1efa45dc1d 100644 --- a/tests/rustdoc-gui/code-sidebar-toggle.goml +++ b/tests/rustdoc-gui/code-sidebar-toggle.goml @@ -1,7 +1,7 @@ // This test checks that the source code pages sidebar toggle is working as expected. 
go-to: "file://" + |DOC_PATH| + "/test_docs/index.html" -click: ".srclink" +click: "a.src" wait-for: "#src-sidebar-toggle" click: "#src-sidebar-toggle" expect-failure: true -assert-css: ("#source-sidebar", { "left": "-300px" }) +assert-css: ("#src-sidebar", { "left": "-300px" }) diff --git a/tests/rustdoc-gui/headings.goml b/tests/rustdoc-gui/headings.goml index 089e2203a04..102b699b1dd 100644 --- a/tests/rustdoc-gui/headings.goml +++ b/tests/rustdoc-gui/headings.goml @@ -194,27 +194,27 @@ call-function: ( "check-colors", { "theme": "ayu", - "heading_color": "rgb(255, 255, 255)", - "small_heading_color": "rgb(197, 197, 197)", - "heading_border_color": "rgb(92, 103, 115)", + "heading_color": "#fff", + "small_heading_color": "#c5c5c5", + "heading_border_color": "#5c6773", }, ) call-function: ( "check-colors", { "theme": "dark", - "heading_color": "rgb(221, 221, 221)", - "small_heading_color": "rgb(221, 221, 221)", - "heading_border_color": "rgb(210, 210, 210)", + "heading_color": "#ddd", + "small_heading_color": "#ddd", + "heading_border_color": "#d2d2d2", }, ) call-function: ( "check-colors", { "theme": "light", - "heading_color": "rgb(0, 0, 0)", - "small_heading_color": "rgb(0, 0, 0)", - "heading_border_color": "rgb(221, 221, 221)", + "heading_color": "black", + "small_heading_color": "black", + "heading_border_color": "#ddd", }, ) @@ -224,7 +224,7 @@ define-function: ( block { set-local-storage: {"rustdoc-theme": |theme|} reload: - assert-css: (".since", {"color": "rgb(128, 128, 128)"}, ALL) + assert-css: (".since", {"color": "#808080"}, ALL) }, ) diff --git a/tests/rustdoc-gui/jump-to-def-background.goml b/tests/rustdoc-gui/jump-to-def-background.goml index 6adc36b0edb..fa7ed3586dd 100644 --- a/tests/rustdoc-gui/jump-to-def-background.goml +++ b/tests/rustdoc-gui/jump-to-def-background.goml @@ -1,4 +1,4 @@ -// We check the background color on the jump to definition links in the source code page. +// We check the background color on the jump to definition links in the src code page. go-to: "file://" + |DOC_PATH| + "/src/link_to_definition/lib.rs.html" define-function: ( @@ -10,7 +10,7 @@ define-function: ( // We reload the page so the local storage settings are being used. reload: assert-css: ( - "body.source .example-wrap pre.rust a", + "body.src .example-wrap pre.rust a", {"background-color": |background_color|}, ALL, ) diff --git a/tests/rustdoc-gui/sidebar-source-code-display.goml b/tests/rustdoc-gui/sidebar-source-code-display.goml index 0c680bcc9fb..33210c9fdc9 100644 --- a/tests/rustdoc-gui/sidebar-source-code-display.goml +++ b/tests/rustdoc-gui/sidebar-source-code-display.goml @@ -40,7 +40,7 @@ define-function: ( reload: wait-for-css: ("#src-sidebar-toggle", {"visibility": "visible"}) assert-css: ( - "#source-sidebar details[open] > .files a.selected", + "#src-sidebar details[open] > .files a.selected", {"color": |color_hover|, "background-color": |background|}, ) @@ -62,58 +62,58 @@ define-function: ( // Without hover or focus. assert-css: ( - "#source-sidebar details[open] > .files a:not(.selected)", + "#src-sidebar details[open] > .files a:not(.selected)", {"color": |color|, "background-color": |background_toggle|}, ) // With focus. 
- focus: "#source-sidebar details[open] > .files a:not(.selected)" + focus: "#src-sidebar details[open] > .files a:not(.selected)" wait-for-css: ( - "#source-sidebar details[open] > .files a:not(.selected):focus", + "#src-sidebar details[open] > .files a:not(.selected):focus", {"color": |color_hover|, "background-color": |background_hover|}, ) focus: ".search-input" // With hover. - move-cursor-to: "#source-sidebar details[open] > .files a:not(.selected)" + move-cursor-to: "#src-sidebar details[open] > .files a:not(.selected)" assert-css: ( - "#source-sidebar details[open] > .files a:not(.selected):hover", + "#src-sidebar details[open] > .files a:not(.selected):hover", {"color": |color_hover|, "background-color": |background_hover|}, ) // Without hover or focus. assert-css: ( - "#source-sidebar .dir-entry summary", + "#src-sidebar .dir-entry summary", {"color": |color|, "background-color": |background_toggle|}, ) // With focus. - focus: "#source-sidebar .dir-entry summary" + focus: "#src-sidebar .dir-entry summary" wait-for-css: ( - "#source-sidebar .dir-entry summary:focus", + "#src-sidebar .dir-entry summary:focus", {"color": |color_hover|, "background-color": |background_hover|}, ) focus: ".search-input" // With hover. - move-cursor-to: "#source-sidebar .dir-entry summary" + move-cursor-to: "#src-sidebar .dir-entry summary" assert-css: ( - "#source-sidebar .dir-entry summary:hover", + "#src-sidebar .dir-entry summary:hover", {"color": |color_hover|, "background-color": |background_hover|}, ) // Without hover or focus. assert-css: ( - "#source-sidebar details[open] > .folders > details > summary", + "#src-sidebar details[open] > .folders > details > summary", {"color": |color|, "background-color": |background_toggle|}, ) // With focus. - focus: "#source-sidebar details[open] > .folders > details > summary" + focus: "#src-sidebar details[open] > .folders > details > summary" wait-for-css: ( - "#source-sidebar details[open] > .folders > details > summary:focus", + "#src-sidebar details[open] > .folders > details > summary:focus", {"color": |color_hover|, "background-color": |background_hover|}, ) focus: ".search-input" // With hover. - move-cursor-to: "#source-sidebar details[open] > .folders > details > summary" + move-cursor-to: "#src-sidebar details[open] > .folders > details > summary" assert-css: ( - "#source-sidebar details[open] > .folders > details > summary:hover", + "#src-sidebar details[open] > .folders > details > summary:hover", {"color": |color_hover|, "background-color": |background_hover|}, ) }, @@ -190,16 +190,16 @@ assert-window-property: {"pageYOffset": "2542"} // make it the current selection. set-window-size: (500, 700) click: "#src-sidebar-toggle" -wait-for-css: ("#source-sidebar", {"visibility": "visible"}) +wait-for-css: ("#src-sidebar", {"visibility": "visible"}) assert-local-storage: {"rustdoc-source-sidebar-show": "true"} click: ".sidebar a.selected" go-to: "file://" + |DOC_PATH| + "/src/test_docs/lib.rs.html" -wait-for-css: ("#source-sidebar", {"visibility": "hidden"}) +wait-for-css: ("#src-sidebar", {"visibility": "hidden"}) assert-local-storage: {"rustdoc-source-sidebar-show": "false"} // Resize back to desktop size, to check that the sidebar doesn't spontaneously open. 
set-window-size: (1000, 1000) -wait-for-css: ("#source-sidebar", {"visibility": "hidden"}) +wait-for-css: ("#src-sidebar", {"visibility": "hidden"}) assert-local-storage: {"rustdoc-source-sidebar-show": "false"} click: "#src-sidebar-toggle" -wait-for-css: ("#source-sidebar", {"visibility": "visible"}) +wait-for-css: ("#src-sidebar", {"visibility": "visible"}) assert-local-storage: {"rustdoc-source-sidebar-show": "true"} diff --git a/tests/rustdoc-gui/sidebar-source-code.goml b/tests/rustdoc-gui/sidebar-source-code.goml index 2cb88817884..69c589741cb 100644 --- a/tests/rustdoc-gui/sidebar-source-code.goml +++ b/tests/rustdoc-gui/sidebar-source-code.goml @@ -14,7 +14,7 @@ define-function: ( } reload: // Checking results colors. - assert-css: (".source .sidebar", { + assert-css: (".src .sidebar", { "color": |color|, "background-color": |background_color| }, ALL) @@ -53,8 +53,8 @@ assert-css: ("nav.sidebar", {"width": "50px"}) // We now click on the button to expand the sidebar. click: (10, 10) // We wait for the sidebar to be expanded. -wait-for-css: (".source-sidebar-expanded nav.sidebar", {"width": "300px"}) -assert-css: (".source-sidebar-expanded nav.sidebar a", {"font-size": "14px"}) +wait-for-css: (".src-sidebar-expanded nav.sidebar", {"width": "300px"}) +assert-css: (".src-sidebar-expanded nav.sidebar a", {"font-size": "14px"}) // We collapse the sidebar. click: (10, 10) // We ensure that the class has been removed. @@ -66,24 +66,24 @@ go-to: "file://" + |DOC_PATH| + "/src/lib2/another_folder/sub_mod/mod.rs.html" // First we expand the sidebar again. click: (10, 10) // We wait for the sidebar to be expanded. -wait-for-css: (".source-sidebar-expanded nav.sidebar", {"width": "300px"}) +wait-for-css: (".src-sidebar-expanded nav.sidebar", {"width": "300px"}) assert: "//*[@class='dir-entry' and @open]/*[text()='lib2']" assert: "//*[@class='dir-entry' and @open]/*[text()='another_folder']" assert: "//*[@class='dir-entry' and @open]/*[text()='sub_mod']" // Only "another_folder" should be "open" in "lib2". assert: "//*[@class='dir-entry' and not(@open)]/*[text()='another_mod']" // All other trees should be collapsed. -assert-count: ("//*[@id='source-sidebar']/details[not(text()='lib2') and not(@open)]", 9) +assert-count: ("//*[@id='src-sidebar']/details[not(text()='lib2') and not(@open)]", 9) // We now switch to mobile mode. set-window-size: (600, 600) -wait-for-css: (".source-sidebar-expanded nav.sidebar", {"left": "0px"}) +wait-for-css: (".src-sidebar-expanded nav.sidebar", {"left": "0px"}) // We collapse the sidebar. click: (10, 10) // We check that the sidebar has been moved off-screen. assert-css: ("nav.sidebar", {"left": "-1000px"}) // We ensure that the class has been removed. -assert-false: ".source-sidebar-expanded" +assert-false: ".src-sidebar-expanded" assert: "nav.sidebar" // Check that the topbar is not visible diff --git a/tests/rustdoc-gui/source-code-page.goml b/tests/rustdoc-gui/source-code-page.goml index f8f73398d9b..f19e3ce80e1 100644 --- a/tests/rustdoc-gui/source-code-page.goml +++ b/tests/rustdoc-gui/source-code-page.goml @@ -98,26 +98,26 @@ assert-document-property: ({"URL": "/lib.rs.html"}, ENDS_WITH) // First we "open" it. 
click: "#src-sidebar-toggle" -assert: ".source-sidebar-expanded" +assert: ".src-sidebar-expanded" // We check that the first entry of the sidebar is collapsed -assert-property: ("#source-sidebar details:first-of-type", {"open": "false"}) -assert-text: ("#source-sidebar details:first-of-type > summary", "extend_css") +assert-property: ("#src-sidebar details:first-of-type", {"open": "false"}) +assert-text: ("#src-sidebar details:first-of-type > summary", "extend_css") // We now click on it. -click: "#source-sidebar details:first-of-type > summary" -assert-property: ("#source-sidebar details:first-of-type", {"open": "true"}) +click: "#src-sidebar details:first-of-type > summary" +assert-property: ("#src-sidebar details:first-of-type", {"open": "true"}) // And now we collapse it again. -click: "#source-sidebar details:first-of-type > summary" -assert-property: ("#source-sidebar details:first-of-type", {"open": "false"}) +click: "#src-sidebar details:first-of-type > summary" +assert-property: ("#src-sidebar details:first-of-type", {"open": "false"}) // And open it again, since it'll be the reference we use to check positions. -click: "#source-sidebar details:first-of-type > summary" -assert-property: ("#source-sidebar details:first-of-type", {"open": "true"}) +click: "#src-sidebar details:first-of-type > summary" +assert-property: ("#src-sidebar details:first-of-type", {"open": "true"}) // Check the sidebar directory entries have a marker and spacing (desktop). store-property: ( - "#source-sidebar > details:first-of-type.dir-entry[open] > .files > a", + "#src-sidebar > details:first-of-type.dir-entry[open] > .files > a", {"offsetHeight": link_height}, ) define-function: ( @@ -125,28 +125,28 @@ define-function: ( (x, y), block { assert: "details:first-of-type.dir-entry[open] > summary::marker" - assert-css: ("#source-sidebar > details:first-of-type.dir-entry", {"padding-left": "4px"}) + assert-css: ("#src-sidebar > details:first-of-type.dir-entry", {"padding-left": "4px"}) // This check ensures that the summary is only one line. assert-property: ( - "#source-sidebar > details:first-of-type.dir-entry[open] > summary", + "#src-sidebar > details:first-of-type.dir-entry[open] > summary", {"offsetHeight": |link_height|} ) assert-position: ( - "#source-sidebar > details:first-of-type.dir-entry[open] > summary", + "#src-sidebar > details:first-of-type.dir-entry[open] > summary", {"x": |x|, "y": |y|} ) assert-property: ( - "#source-sidebar > details:first-of-type.dir-entry[open] > .files > a", + "#src-sidebar > details:first-of-type.dir-entry[open] > .files > a", {"offsetHeight": |link_height|} ) assert-position: ( - "#source-sidebar > details:first-of-type.dir-entry[open] > .files > a", + "#src-sidebar > details:first-of-type.dir-entry[open] > .files > a", // left margin {"x": |x| + 27, "y": |y| + |link_height|} ) } ) -store-property: ("#source-sidebar > .title", { +store-property: ("#src-sidebar > .title", { "offsetHeight": source_sidebar_title_height, "offsetTop": source_sidebar_title_y, }) @@ -175,7 +175,7 @@ assert-property: ("#main-content", {"offsetTop": 76}) // 21 = 76 - 34 - 21 // Check the sidebar directory entries have a marker and spacing (tablet). 
-store-property: ("#source-sidebar > .title", { +store-property: ("#src-sidebar > .title", { "offsetHeight": source_sidebar_title_height, "offsetTop": source_sidebar_title_y, }) @@ -189,7 +189,7 @@ set-window-size: (450, 700) assert-css: ("nav.sub", {"flex-direction": "column"}) // Check the sidebar directory entries have a marker and spacing (phone). -store-property: ("#source-sidebar > .title", { +store-property: ("#src-sidebar > .title", { "offsetHeight": source_sidebar_title_height, "offsetTop": source_sidebar_title_y, }) diff --git a/tests/rustdoc-gui/src-font-size.goml b/tests/rustdoc-gui/src-font-size.goml index ff30bcdf2a2..f23387d73d3 100644 --- a/tests/rustdoc-gui/src-font-size.goml +++ b/tests/rustdoc-gui/src-font-size.goml @@ -4,13 +4,13 @@ go-to: "file://" + |DOC_PATH| + "/test_docs/struct.Foo.html" show-text: true // Check the impl headers. -assert-css: (".impl .srclink", {"font-size": "16px", "font-weight": 400}, ALL) +assert-css: (".impl a.src", {"font-size": "16px", "font-weight": 400}, ALL) assert-css: (".impl .code-header", {"font-size": "18px", "font-weight": 600}, ALL) // Check the impl items. -assert-css: (".impl-items .srclink", {"font-size": "16px", "font-weight": 400}, ALL) +assert-css: (".impl-items a.src", {"font-size": "16px", "font-weight": 400}, ALL) assert-css: (".impl-items .code-header", {"font-size": "16px", "font-weight": 600}, ALL) // Check that we can click on source link store-document-property: {"URL": url} -click: ".impl-items .srclink" +click: ".impl-items a.src" assert-document-property-false: {"URL": |url|} diff --git a/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id-2.rs b/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id-2.rs new file mode 100644 index 00000000000..d2ac316d47d --- /dev/null +++ b/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id-2.rs @@ -0,0 +1,15 @@ +#![feature(no_core)] +#![no_core] + +// @count "$.index[*][?(@.inner.impl)]" 1 +// @!has "$.index[*][?(@.name == 'HiddenPubStruct')]" +// @has "$.index[*][?(@.name == 'NotHiddenPubStruct')]" +// @has "$.index[*][?(@.name=='PubTrait')]" +pub trait PubTrait {} + +#[doc(hidden)] +pub struct HiddenPubStruct; +pub struct NotHiddenPubStruct; + +impl PubTrait for HiddenPubStruct {} +impl PubTrait for NotHiddenPubStruct {} diff --git a/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id-3.rs b/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id-3.rs new file mode 100644 index 00000000000..fcd27ca4b7c --- /dev/null +++ b/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id-3.rs @@ -0,0 +1,14 @@ +// compile-flags: --document-hidden-items + +#![feature(no_core)] +#![no_core] + +// @has "$.index[*][?(@.name == 'HiddenPubStruct')]" +// @has "$.index[*][?(@.inner.impl)]" +// @has "$.index[*][?(@.name=='PubTrait')]" +pub trait PubTrait {} + +#[doc(hidden)] +pub struct HiddenPubStruct; + +impl PubTrait for HiddenPubStruct {} diff --git a/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id.rs b/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id.rs new file mode 100644 index 00000000000..141c54a57dd --- /dev/null +++ b/tests/rustdoc-json/impls/issue-112852-dangling-trait-impl-id.rs @@ -0,0 +1,21 @@ +#![feature(no_core)] +#![no_core] + +// @count "$.index[*][?(@.inner.impl)]" 1 +// @!has "$.index[*][?(@.name == 'HiddenPubStruct')]" +// @has "$.index[*][?(@.name == 'NotHiddenPubStruct')]" +// @has "$.index[*][?(@.name=='PubTrait')]" +pub trait PubTrait {} + +#[doc(hidden)] +pub mod hidden { + pub struct HiddenPubStruct; + 
+ impl crate::PubTrait for HiddenPubStruct {} +} + +pub mod not_hidden { + pub struct NotHiddenPubStruct; + + impl crate::PubTrait for NotHiddenPubStruct {} +} diff --git a/tests/rustdoc-json/traits/private_supertrait.rs b/tests/rustdoc-json/traits/private_supertrait.rs new file mode 100644 index 00000000000..49238e5e88b --- /dev/null +++ b/tests/rustdoc-json/traits/private_supertrait.rs @@ -0,0 +1,15 @@ +// ignore-tidy-linelength + +#![feature(no_core)] +#![no_core] + + +// @!has "$.index[*][?(@.name == 'sealed')]" +mod sealed { + // @set sealed_id = "$.index[*][?(@.name=='Sealed')].id" + pub trait Sealed {} +} + +// @count "$.index[*][?(@.name=='Trait')].inner.trait.bounds[*]" 1 +// @is "$.index[*][?(@.name=='Trait')].inner.trait.bounds[0].trait_bound.trait.id" $sealed_id +pub trait Trait: sealed::Sealed {} diff --git a/tests/rustdoc/anchors.no_const_anchor.html b/tests/rustdoc/anchors.no_const_anchor.html index a8587829d3e..06673d87406 100644 --- a/tests/rustdoc/anchors.no_const_anchor.html +++ b/tests/rustdoc/anchors.no_const_anchor.html @@ -1 +1 @@ -<section id="associatedconstant.YOLO" class="method"><a class="srclink rightside" href="../src/foo/anchors.rs.html#16">source</a><h4 class="code-header">const <a href="#associatedconstant.YOLO" class="constant">YOLO</a>: <a class="primitive" href="{{channel}}/std/primitive.u32.html">u32</a></h4></section> \ No newline at end of file +<section id="associatedconstant.YOLO" class="method"><a class="src rightside" href="../src/foo/anchors.rs.html#16">source</a><h4 class="code-header">const <a href="#associatedconstant.YOLO" class="constant">YOLO</a>: <a class="primitive" href="{{channel}}/std/primitive.u32.html">u32</a></h4></section> \ No newline at end of file diff --git a/tests/rustdoc/anchors.no_const_anchor2.html b/tests/rustdoc/anchors.no_const_anchor2.html index 4c5e45fea2d..73c3d0a807b 100644 --- a/tests/rustdoc/anchors.no_const_anchor2.html +++ b/tests/rustdoc/anchors.no_const_anchor2.html @@ -1 +1 @@ -<section id="associatedconstant.X" class="associatedconstant"><a class="srclink rightside" href="../src/foo/anchors.rs.html#42">source</a><h4 class="code-header">pub const <a href="#associatedconstant.X" class="constant">X</a>: <a class="primitive" href="{{channel}}/std/primitive.i32.html">i32</a> = 0i32</h4></section> \ No newline at end of file +<section id="associatedconstant.X" class="associatedconstant"><a class="src rightside" href="../src/foo/anchors.rs.html#42">source</a><h4 class="code-header">pub const <a href="#associatedconstant.X" class="constant">X</a>: <a class="primitive" href="{{channel}}/std/primitive.i32.html">i32</a> = 0i32</h4></section> \ No newline at end of file diff --git a/tests/rustdoc/anchors.no_method_anchor.html b/tests/rustdoc/anchors.no_method_anchor.html index 44957a5b71a..e8b61caa1c1 100644 --- a/tests/rustdoc/anchors.no_method_anchor.html +++ b/tests/rustdoc/anchors.no_method_anchor.html @@ -1 +1 @@ -<section id="method.new" class="method"><a class="srclink rightside" href="../src/foo/anchors.rs.html#48">source</a><h4 class="code-header">pub fn <a href="#method.new" class="fn">new</a>() -> Self</h4></section> \ No newline at end of file +<section id="method.new" class="method"><a class="src rightside" href="../src/foo/anchors.rs.html#48">source</a><h4 class="code-header">pub fn <a href="#method.new" class="fn">new</a>() -> Self</h4></section> \ No newline at end of file diff --git a/tests/rustdoc/anchors.no_trait_method_anchor.html b/tests/rustdoc/anchors.no_trait_method_anchor.html index 
75c2caf87a8..abdb17c27dc 100644 --- a/tests/rustdoc/anchors.no_trait_method_anchor.html +++ b/tests/rustdoc/anchors.no_trait_method_anchor.html @@ -1 +1 @@ -<section id="method.bar" class="method"><a class="srclink rightside" href="../src/foo/anchors.rs.html#23">source</a><h4 class="code-header">fn <a href="#method.bar" class="fn">bar</a>()</h4></section> \ No newline at end of file +<section id="method.bar" class="method"><a class="src rightside" href="../src/foo/anchors.rs.html#23">source</a><h4 class="code-header">fn <a href="#method.bar" class="fn">bar</a>()</h4></section> \ No newline at end of file diff --git a/tests/rustdoc/anchors.no_tymethod_anchor.html b/tests/rustdoc/anchors.no_tymethod_anchor.html index 38575eadfa9..23f4277c5b5 100644 --- a/tests/rustdoc/anchors.no_tymethod_anchor.html +++ b/tests/rustdoc/anchors.no_tymethod_anchor.html @@ -1 +1 @@ -<section id="tymethod.foo" class="method"><a class="srclink rightside" href="../src/foo/anchors.rs.html#20">source</a><h4 class="code-header">fn <a href="#tymethod.foo" class="fn">foo</a>()</h4></section> \ No newline at end of file +<section id="tymethod.foo" class="method"><a class="src rightside" href="../src/foo/anchors.rs.html#20">source</a><h4 class="code-header">fn <a href="#tymethod.foo" class="fn">foo</a>()</h4></section> \ No newline at end of file diff --git a/tests/rustdoc/anchors.no_type_anchor.html b/tests/rustdoc/anchors.no_type_anchor.html index dd65d98fee6..62b9295508f 100644 --- a/tests/rustdoc/anchors.no_type_anchor.html +++ b/tests/rustdoc/anchors.no_type_anchor.html @@ -1 +1 @@ -<section id="associatedtype.T" class="method"><a class="srclink rightside" href="../src/foo/anchors.rs.html#13">source</a><h4 class="code-header">type <a href="#associatedtype.T" class="associatedtype">T</a></h4></section> \ No newline at end of file +<section id="associatedtype.T" class="method"><a class="src rightside" href="../src/foo/anchors.rs.html#13">source</a><h4 class="code-header">type <a href="#associatedtype.T" class="associatedtype">T</a></h4></section> \ No newline at end of file diff --git a/tests/rustdoc/ensure-src-link.rs b/tests/rustdoc/ensure-src-link.rs index c65387080f1..f99c4c4d988 100644 --- a/tests/rustdoc/ensure-src-link.rs +++ b/tests/rustdoc/ensure-src-link.rs @@ -2,5 +2,5 @@ // This test ensures that the [src] link is present on traits items. 
-// @has foo/trait.Iterator.html '//*[@id="method.zip"]//a[@class="srclink rightside"]' "source" +// @has foo/trait.Iterator.html '//*[@id="method.zip"]//a[@class="src rightside"]' "source" pub use std::iter::Iterator; diff --git a/tests/rustdoc/source-file.rs b/tests/rustdoc/source-file.rs index 4e166479063..16d4cbe3a34 100644 --- a/tests/rustdoc/source-file.rs +++ b/tests/rustdoc/source-file.rs @@ -1,5 +1,5 @@ #![crate_name = "foo"] -// @hasraw source-files.js source-file.rs +// @hasraw src-files.js source-file.rs pub struct Foo; diff --git a/tests/rustdoc/src-links-auto-impls.rs b/tests/rustdoc/src-links-auto-impls.rs index 953563833c9..1c8d1573192 100644 --- a/tests/rustdoc/src-links-auto-impls.rs +++ b/tests/rustdoc/src-links-auto-impls.rs @@ -2,11 +2,11 @@ // @has foo/struct.Unsized.html // @has - '//*[@id="impl-Sized-for-Unsized"]/h3[@class="code-header"]' 'impl !Sized for Unsized' -// @!has - '//*[@id="impl-Sized-for-Unsized"]//a[@class="srclink"]' 'source' +// @!has - '//*[@id="impl-Sized-for-Unsized"]//a[@class="src"]' 'source' // @has - '//*[@id="impl-Sync-for-Unsized"]/h3[@class="code-header"]' 'impl Sync for Unsized' -// @!has - '//*[@id="impl-Sync-for-Unsized"]//a[@class="srclink"]' 'source' +// @!has - '//*[@id="impl-Sync-for-Unsized"]//a[@class="src"]' 'source' // @has - '//*[@id="impl-Any-for-Unsized"]/h3[@class="code-header"]' 'impl<T> Any for T' -// @has - '//*[@id="impl-Any-for-Unsized"]//a[@class="srclink rightside"]' 'source' +// @has - '//*[@id="impl-Any-for-Unsized"]//a[@class="src rightside"]' 'source' pub struct Unsized { data: [u8], } diff --git a/tests/rustdoc/static-root-path.rs b/tests/rustdoc/static-root-path.rs index 86928b0fb0a..3148ea047b7 100644 --- a/tests/rustdoc/static-root-path.rs +++ b/tests/rustdoc/static-root-path.rs @@ -8,10 +8,10 @@ pub struct SomeStruct; // @has src/static_root_path/static-root-path.rs.html -// @matchesraw - '"/cache/source-script-' -// @!matchesraw - '"\.\./\.\./source-script' -// @matchesraw - '"\.\./\.\./source-files.js"' -// @!matchesraw - '"/cache/source-files\.js"' +// @matchesraw - '"/cache/src-script-' +// @!matchesraw - '"\.\./\.\./src-script' +// @matchesraw - '"\.\./\.\./src-files.js"' +// @!matchesraw - '"/cache/src-files\.js"' // @has settings.html // @matchesraw - '/cache/settings-' diff --git a/tests/ui/associated-consts/issue-105330.stderr b/tests/ui/associated-consts/issue-105330.stderr index bbafc55dac3..e9fe3a5e514 100644 --- a/tests/ui/associated-consts/issue-105330.stderr +++ b/tests/ui/associated-consts/issue-105330.stderr @@ -39,6 +39,12 @@ error[E0562]: `impl Trait` only allowed in function and inherent method return t LL | impl TraitWAssocConst for impl Demo { | ^^^^^^^^^ +error[E0131]: `main` function is not allowed to have generic parameters + --> $DIR/issue-105330.rs:17:8 + | +LL | fn main<A: TraitWAssocConst<A=32>>() { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ `main` cannot have generic parameters + error[E0277]: the trait bound `Demo: TraitWAssocConst` is not satisfied --> $DIR/issue-105330.rs:12:11 | @@ -101,12 +107,6 @@ note: required by a bound in `foo` LL | fn foo<A: TraitWAssocConst<A=32>>() { | ^^^^ required by this bound in `foo` -error[E0131]: `main` function is not allowed to have generic parameters - --> $DIR/issue-105330.rs:17:8 - | -LL | fn main<A: TraitWAssocConst<A=32>>() { - | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ `main` cannot have generic parameters - error: aborting due to 11 previous errors Some errors have detailed explanations: E0131, E0271, E0277, E0404, E0562, E0618, E0658. 
diff --git a/tests/ui/associated-inherent-types/inference.rs b/tests/ui/associated-inherent-types/inference.rs index ebd8e1d5594..66f879c5a71 100644 --- a/tests/ui/associated-inherent-types/inference.rs +++ b/tests/ui/associated-inherent-types/inference.rs @@ -1,5 +1,7 @@ // Testing inference capabilities. // check-pass +// revisions: current next +//[next] compile-flags: -Ztrait-solver=next #![feature(inherent_associated_types)] #![allow(incomplete_features)] diff --git a/tests/ui/attributes/auxiliary/rustc_confusables_across_crate.rs b/tests/ui/attributes/auxiliary/rustc_confusables_across_crate.rs new file mode 100644 index 00000000000..2fb2d3ad4c4 --- /dev/null +++ b/tests/ui/attributes/auxiliary/rustc_confusables_across_crate.rs @@ -0,0 +1,11 @@ +#![feature(rustc_attrs)] + +pub struct BTreeSet; + +impl BTreeSet { + #[rustc_confusables("push", "test_b")] + pub fn insert(&self) {} + + #[rustc_confusables("pulled")] + pub fn pull(&self) {} +} diff --git a/tests/ui/attributes/rustc_confusables.rs b/tests/ui/attributes/rustc_confusables.rs new file mode 100644 index 00000000000..352e91d065f --- /dev/null +++ b/tests/ui/attributes/rustc_confusables.rs @@ -0,0 +1,47 @@ +// aux-build: rustc_confusables_across_crate.rs + +#![feature(rustc_attrs)] + +extern crate rustc_confusables_across_crate; + +use rustc_confusables_across_crate::BTreeSet; + +fn main() { + // Misspellings (similarly named methods) take precedence over `rustc_confusables`. + let x = BTreeSet {}; + x.inser(); + //~^ ERROR no method named + //~| HELP there is a method with a similar name + x.foo(); + //~^ ERROR no method named + x.push(); + //~^ ERROR no method named + //~| HELP you might have meant to use `insert` + x.test(); + //~^ ERROR no method named + x.pulled(); + //~^ ERROR no method named + //~| HELP there is a method with a similar name +} + +struct Bar; + +impl Bar { + #[rustc_confusables()] + //~^ ERROR expected at least one confusable name + fn baz() {} + + #[rustc_confusables] + //~^ ERROR malformed `rustc_confusables` attribute input + //~| HELP must be of the form + fn qux() {} + + #[rustc_confusables(invalid_meta_item)] + //~^ ERROR expected a quoted string literal + //~| HELP consider surrounding this with quotes + fn quux() {} +} + +#[rustc_confusables("blah")] +//~^ ERROR attribute should be applied to an inherent method +fn not_inherent_impl_method() {} diff --git a/tests/ui/attributes/rustc_confusables.stderr b/tests/ui/attributes/rustc_confusables.stderr new file mode 100644 index 00000000000..9fd4470cdbb --- /dev/null +++ b/tests/ui/attributes/rustc_confusables.stderr @@ -0,0 +1,68 @@ +error: malformed `rustc_confusables` attribute input + --> $DIR/rustc_confusables.rs:34:5 + | +LL | #[rustc_confusables] + | ^^^^^^^^^^^^^^^^^^^^ help: must be of the form: `#[rustc_confusables("name1", "name2", ...)]` + +error: attribute should be applied to an inherent method + --> $DIR/rustc_confusables.rs:45:1 + | +LL | #[rustc_confusables("blah")] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +error: expected at least one confusable name + --> $DIR/rustc_confusables.rs:30:5 + | +LL | #[rustc_confusables()] + | ^^^^^^^^^^^^^^^^^^^^^^ + +error[E0539]: expected a quoted string literal + --> $DIR/rustc_confusables.rs:39:25 + | +LL | #[rustc_confusables(invalid_meta_item)] + | ^^^^^^^^^^^^^^^^^ + | +help: consider surrounding this with quotes + | +LL | #[rustc_confusables("invalid_meta_item")] + | + + + +error[E0599]: no method named `inser` found for struct `rustc_confusables_across_crate::BTreeSet` in the current scope + --> 
$DIR/rustc_confusables.rs:12:7 + | +LL | x.inser(); + | ^^^^^ help: there is a method with a similar name: `insert` + +error[E0599]: no method named `foo` found for struct `rustc_confusables_across_crate::BTreeSet` in the current scope + --> $DIR/rustc_confusables.rs:15:7 + | +LL | x.foo(); + | ^^^ method not found in `BTreeSet` + +error[E0599]: no method named `push` found for struct `rustc_confusables_across_crate::BTreeSet` in the current scope + --> $DIR/rustc_confusables.rs:17:7 + | +LL | x.push(); + | ^^^^ method not found in `BTreeSet` + | +help: you might have meant to use `insert` + | +LL | x.insert(); + | ~~~~~~ + +error[E0599]: no method named `test` found for struct `rustc_confusables_across_crate::BTreeSet` in the current scope + --> $DIR/rustc_confusables.rs:20:7 + | +LL | x.test(); + | ^^^^ method not found in `BTreeSet` + +error[E0599]: no method named `pulled` found for struct `rustc_confusables_across_crate::BTreeSet` in the current scope + --> $DIR/rustc_confusables.rs:22:7 + | +LL | x.pulled(); + | ^^^^^^ help: there is a method with a similar name: `pull` + +error: aborting due to 9 previous errors + +Some errors have detailed explanations: E0539, E0599. +For more information about an error, try `rustc --explain E0539`. diff --git a/tests/ui/borrowck/copy-suggestion-region-vid.rs b/tests/ui/borrowck/copy-suggestion-region-vid.rs index dff95283459..3c5b887ce17 100644 --- a/tests/ui/borrowck/copy-suggestion-region-vid.rs +++ b/tests/ui/borrowck/copy-suggestion-region-vid.rs @@ -1,3 +1,4 @@ +//@run-rustfix pub struct DataStruct(); pub struct HelperStruct<'n> { diff --git a/tests/ui/borrowck/copy-suggestion-region-vid.stderr b/tests/ui/borrowck/copy-suggestion-region-vid.stderr index 40b8ab182f3..b344aa66405 100644 --- a/tests/ui/borrowck/copy-suggestion-region-vid.stderr +++ b/tests/ui/borrowck/copy-suggestion-region-vid.stderr @@ -1,5 +1,5 @@ error[E0382]: borrow of moved value: `helpers` - --> $DIR/copy-suggestion-region-vid.rs:12:43 + --> $DIR/copy-suggestion-region-vid.rs:13:43 | LL | let helpers = [vec![], vec![]]; | ------- move occurs because `helpers` has type `[Vec<&i64>; 2]`, which does not implement the `Copy` trait @@ -8,6 +8,11 @@ LL | HelperStruct { helpers, is_empty: helpers[0].is_empty() } | ------- ^^^^^^^^^^ value borrowed here after move | | | value moved here + | +help: consider cloning the value if the performance cost is acceptable + | +LL | HelperStruct { helpers: helpers.clone(), is_empty: helpers[0].is_empty() } + | +++++++++++++++++ error: aborting due to previous error diff --git a/tests/ui/borrowck/issue-85765-closure.rs b/tests/ui/borrowck/issue-85765-closure.rs new file mode 100644 index 00000000000..f2d1dd0fbc3 --- /dev/null +++ b/tests/ui/borrowck/issue-85765-closure.rs @@ -0,0 +1,31 @@ +fn main() { + let _ = || { + let mut test = Vec::new(); + let rofl: &Vec<Vec<i32>> = &mut test; + //~^ HELP consider changing this binding's type + rofl.push(Vec::new()); + //~^ ERROR cannot borrow `*rofl` as mutable, as it is behind a `&` reference + //~| NOTE `rofl` is a `&` reference, so the data it refers to cannot be borrowed as mutable + + let mut mutvar = 42; + let r = &mutvar; + //~^ HELP consider changing this to be a mutable reference + *r = 0; + //~^ ERROR cannot assign to `*r`, which is behind a `&` reference + //~| NOTE `r` is a `&` reference, so the data it refers to cannot be written + + #[rustfmt::skip] + let x: &usize = &mut{0}; + //~^ HELP consider changing this binding's type + *x = 1; + //~^ ERROR cannot assign to `*x`, which is behind a 
`&` reference + //~| NOTE `x` is a `&` reference, so the data it refers to cannot be written + + #[rustfmt::skip] + let y: &usize = &mut(0); + //~^ HELP consider changing this binding's type + *y = 1; + //~^ ERROR cannot assign to `*y`, which is behind a `&` reference + //~| NOTE `y` is a `&` reference, so the data it refers to cannot be written + }; +} diff --git a/tests/ui/borrowck/issue-85765-closure.stderr b/tests/ui/borrowck/issue-85765-closure.stderr new file mode 100644 index 00000000000..936ddd67bcd --- /dev/null +++ b/tests/ui/borrowck/issue-85765-closure.stderr @@ -0,0 +1,48 @@ +error[E0596]: cannot borrow `*rofl` as mutable, as it is behind a `&` reference + --> $DIR/issue-85765-closure.rs:6:9 + | +LL | rofl.push(Vec::new()); + | ^^^^ `rofl` is a `&` reference, so the data it refers to cannot be borrowed as mutable + | +help: consider changing this binding's type + | +LL | let rofl: &mut Vec<Vec<i32>> = &mut test; + | ~~~~~~~~~~~~~~~~~~ + +error[E0594]: cannot assign to `*r`, which is behind a `&` reference + --> $DIR/issue-85765-closure.rs:13:9 + | +LL | *r = 0; + | ^^^^^^ `r` is a `&` reference, so the data it refers to cannot be written + | +help: consider changing this to be a mutable reference + | +LL | let r = &mut mutvar; + | +++ + +error[E0594]: cannot assign to `*x`, which is behind a `&` reference + --> $DIR/issue-85765-closure.rs:20:9 + | +LL | *x = 1; + | ^^^^^^ `x` is a `&` reference, so the data it refers to cannot be written + | +help: consider changing this binding's type + | +LL | let x: &mut usize = &mut{0}; + | ~~~~~~~~~~ + +error[E0594]: cannot assign to `*y`, which is behind a `&` reference + --> $DIR/issue-85765-closure.rs:27:9 + | +LL | *y = 1; + | ^^^^^^ `y` is a `&` reference, so the data it refers to cannot be written + | +help: consider changing this binding's type + | +LL | let y: &mut usize = &mut(0); + | ~~~~~~~~~~ + +error: aborting due to 4 previous errors + +Some errors have detailed explanations: E0594, E0596. +For more information about an error, try `rustc --explain E0594`. diff --git a/tests/ui/btreemap/btreemap-index-mut-2.rs b/tests/ui/btreemap/btreemap-index-mut-2.rs new file mode 100644 index 00000000000..fe676210a1b --- /dev/null +++ b/tests/ui/btreemap/btreemap-index-mut-2.rs @@ -0,0 +1,8 @@ +use std::collections::BTreeMap; + +fn main() { + let _ = || { + let mut map = BTreeMap::<u32, u32>::new(); + map[&0] = 1; //~ ERROR cannot assign + }; +} diff --git a/tests/ui/btreemap/btreemap-index-mut-2.stderr b/tests/ui/btreemap/btreemap-index-mut-2.stderr new file mode 100644 index 00000000000..c8d4fd59550 --- /dev/null +++ b/tests/ui/btreemap/btreemap-index-mut-2.stderr @@ -0,0 +1,19 @@ +error[E0594]: cannot assign to data in an index of `BTreeMap<u32, u32>` + --> $DIR/btreemap-index-mut-2.rs:6:9 + | +LL | map[&0] = 1; + | ^^^^^^^^^^^ cannot assign + | + = help: trait `IndexMut` is required to modify indexed content, but it is not implemented for `BTreeMap<u32, u32>` +help: to modify a `BTreeMap<u32, u32>`, use `.get_mut()`, `.insert()` or the entry API + | +LL | map.insert(&0, 1); + | ~~~~~~~~ ~ + +LL | map.get_mut(&0).map(|val| { *val = 1; }); + | ~~~~~~~~~ ~~~~~~~~~~~~~~~~~~ ++++ +LL | let val = map.entry(&0).or_insert(1); + | +++++++++ ~~~~~~~ ~~~~~~~~~~~~ + + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0594`. 
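The btreemap-index-mut-2 test above pins down the E0594 structured suggestions for indexing a `BTreeMap` inside a closure. As a quick illustration of what those suggested rewrites look like once applied (a minimal standalone sketch, not part of the test suite), the three working alternatives are:

    use std::collections::BTreeMap;

    fn main() {
        let mut map = BTreeMap::<u32, u32>::new();
        // `map[&0] = 1;` is rejected because BTreeMap does not implement IndexMut.
        map.insert(0, 1);                         // plain insert
        if let Some(val) = map.get_mut(&0) {      // in-place update of an existing key
            *val = 2;
        }
        *map.entry(0).or_insert(0) += 1;          // insert-or-update via the entry API
    }
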
diff --git a/tests/ui/coherence/coherence-doesnt-use-infcx-evaluate.rs b/tests/ui/coherence/coherence-doesnt-use-infcx-evaluate.rs new file mode 100644 index 00000000000..063826f1d54 --- /dev/null +++ b/tests/ui/coherence/coherence-doesnt-use-infcx-evaluate.rs @@ -0,0 +1,24 @@ +// check-pass +// issue: 113415 + +// Makes sure that coherence doesn't call any of the `predicate_may_hold`-esque fns, +// since they are using a different infcx which doesn't preserve the intercrate flag. + +#![feature(specialization)] +//~^ WARN the feature `specialization` is incomplete + +trait Assoc { + type Output; +} + +default impl<T> Assoc for T { + type Output = bool; +} + +impl Assoc for u8 {} + +trait Foo {} +impl Foo for u32 {} +impl Foo for <u8 as Assoc>::Output {} + +fn main() {} diff --git a/tests/ui/coherence/coherence-doesnt-use-infcx-evaluate.stderr b/tests/ui/coherence/coherence-doesnt-use-infcx-evaluate.stderr new file mode 100644 index 00000000000..56eb21cd20f --- /dev/null +++ b/tests/ui/coherence/coherence-doesnt-use-infcx-evaluate.stderr @@ -0,0 +1,12 @@ +warning: the feature `specialization` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/coherence-doesnt-use-infcx-evaluate.rs:7:12 + | +LL | #![feature(specialization)] + | ^^^^^^^^^^^^^^ + | + = note: see issue #31844 <https://github.com/rust-lang/rust/issues/31844> for more information + = help: consider using `min_specialization` instead, which is more stable and complete + = note: `#[warn(incomplete_features)]` on by default + +warning: 1 warning emitted + diff --git a/tests/ui/consts/const-eval/const_fn_target_feature.rs b/tests/ui/consts/const-eval/const_fn_target_feature.rs new file mode 100644 index 00000000000..5d02ce3f21b --- /dev/null +++ b/tests/ui/consts/const-eval/const_fn_target_feature.rs @@ -0,0 +1,17 @@ +// only-x86_64 +// compile-flags:-C target-feature=+ssse3 + +#![crate_type = "lib"] + +// ok (ssse3 enabled at compile time) +const A: () = unsafe { ssse3_fn() }; + +// error (avx2 not enabled at compile time) +const B: () = unsafe { avx2_fn() }; +//~^ ERROR evaluation of constant value failed + +#[target_feature(enable = "ssse3")] +const unsafe fn ssse3_fn() {} + +#[target_feature(enable = "avx2")] +const unsafe fn avx2_fn() {} diff --git a/tests/ui/consts/const-eval/const_fn_target_feature.stderr b/tests/ui/consts/const-eval/const_fn_target_feature.stderr new file mode 100644 index 00000000000..36918b52cd3 --- /dev/null +++ b/tests/ui/consts/const-eval/const_fn_target_feature.stderr @@ -0,0 +1,9 @@ +error[E0080]: evaluation of constant value failed + --> $DIR/const_fn_target_feature.rs:10:24 + | +LL | const B: () = unsafe { avx2_fn() }; + | ^^^^^^^^^ calling a function that requires unavailable target features: avx2 + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0080`. 
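The const_fn_target_feature test above checks that const evaluation only accepts `#[target_feature]` functions whose features were enabled at compile time (`-C target-feature=+ssse3` in that test). Outside const evaluation the usual pattern is different; a minimal sketch, assuming an x86_64 target and not taken from the test suite, gates the call on a runtime CPU check instead:

    #[cfg(target_arch = "x86_64")]
    #[target_feature(enable = "avx2")]
    unsafe fn avx2_fn() {}

    #[cfg(target_arch = "x86_64")]
    fn call_if_supported() {
        if is_x86_feature_detected!("avx2") {
            // Only reached when the running CPU actually reports AVX2 support.
            unsafe { avx2_fn() }
        }
    }

    fn main() {
        #[cfg(target_arch = "x86_64")]
        call_if_supported();
    }
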
diff --git a/tests/ui/deriving/deriving-all-codegen.stdout b/tests/ui/deriving/deriving-all-codegen.stdout index d6a2c80cc06..6bfc859bfe8 100644 --- a/tests/ui/deriving/deriving-all-codegen.stdout +++ b/tests/ui/deriving/deriving-all-codegen.stdout @@ -798,14 +798,14 @@ impl ::core::marker::Copy for Enum0 { } #[automatically_derived] impl ::core::fmt::Debug for Enum0 { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { - unsafe { ::core::intrinsics::unreachable() } + match *self {} } } #[automatically_derived] impl ::core::hash::Hash for Enum0 { #[inline] fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { - unsafe { ::core::intrinsics::unreachable() } + match *self {} } } #[automatically_derived] @@ -813,9 +813,7 @@ impl ::core::marker::StructuralPartialEq for Enum0 { } #[automatically_derived] impl ::core::cmp::PartialEq for Enum0 { #[inline] - fn eq(&self, other: &Enum0) -> bool { - unsafe { ::core::intrinsics::unreachable() } - } + fn eq(&self, other: &Enum0) -> bool { match *self {} } } #[automatically_derived] impl ::core::marker::StructuralEq for Enum0 { } @@ -831,15 +829,13 @@ impl ::core::cmp::PartialOrd for Enum0 { #[inline] fn partial_cmp(&self, other: &Enum0) -> ::core::option::Option<::core::cmp::Ordering> { - unsafe { ::core::intrinsics::unreachable() } + match *self {} } } #[automatically_derived] impl ::core::cmp::Ord for Enum0 { #[inline] - fn cmp(&self, other: &Enum0) -> ::core::cmp::Ordering { - unsafe { ::core::intrinsics::unreachable() } - } + fn cmp(&self, other: &Enum0) -> ::core::cmp::Ordering { match *self {} } } // A single-variant enum. diff --git a/tests/ui/dyn-star/llvm-old-style-ptrs.rs b/tests/ui/dyn-star/llvm-old-style-ptrs.rs index d35519632be..4c042a53979 100644 --- a/tests/ui/dyn-star/llvm-old-style-ptrs.rs +++ b/tests/ui/dyn-star/llvm-old-style-ptrs.rs @@ -3,6 +3,8 @@ // (opaque-pointers flag is called force-opaque-pointers in LLVM 13...) // min-llvm-version: 14.0 +// (the ability to disable opaque pointers has been removed in LLVM 17) +// ignore-llvm-version: 17 - 99 // This test can be removed once non-opaque pointers are gone from LLVM, maybe. 
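The deriving-all-codegen.stdout change above swaps the derived impls for the empty `Enum0` from `unsafe { ::core::intrinsics::unreachable() }` to a plain `match *self {}`. An exhaustive match on an uninhabited type needs no arms and is itself a safe proof of unreachability; a minimal standalone sketch (not part of the test) of the same idea:

    enum Never {}

    // An empty match on an uninhabited place has type `!`, so it coerces to any
    // return type and needs no `unsafe`.
    fn from_never(x: &Never) -> u32 {
        match *x {}
    }

    fn main() {}
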
diff --git a/tests/ui/error-codes/E0283.stderr b/tests/ui/error-codes/E0283.stderr index 89e634a7064..fa8d4b6e015 100644 --- a/tests/ui/error-codes/E0283.stderr +++ b/tests/ui/error-codes/E0283.stderr @@ -7,7 +7,7 @@ LL | fn create() -> u32; LL | let cont: u32 = Generator::create(); | ^^^^^^^^^^^^^^^^^ cannot call associated function of trait | -help: use a fully-qualified path to a specific available implementation (2 found) +help: use a fully-qualified path to a specific available implementation | LL | let cont: u32 = </* self type */ as Generator>::create(); | +++++++++++++++++++ + diff --git a/tests/ui/error-codes/E0790.stderr b/tests/ui/error-codes/E0790.stderr index 7248766285d..f559abae397 100644 --- a/tests/ui/error-codes/E0790.stderr +++ b/tests/ui/error-codes/E0790.stderr @@ -63,7 +63,7 @@ LL | fn my_fn(); LL | MyTrait2::my_fn(); | ^^^^^^^^^^^^^^^ cannot call associated function of trait | -help: use a fully-qualified path to a specific available implementation (2 found) +help: use a fully-qualified path to a specific available implementation | LL | </* self type */ as MyTrait2>::my_fn(); | +++++++++++++++++++ + diff --git a/tests/ui/impl-trait/issue-102605.stderr b/tests/ui/impl-trait/issue-102605.stderr index dfe18e43eee..dcb22797173 100644 --- a/tests/ui/impl-trait/issue-102605.stderr +++ b/tests/ui/impl-trait/issue-102605.stderr @@ -1,3 +1,11 @@ +error[E0277]: `main` has invalid return type `Option<()>` + --> $DIR/issue-102605.rs:11:14 + | +LL | fn main() -> Option<()> { + | ^^^^^^^^^^ `main` can only return types that implement `Termination` + | + = help: consider using `()`, or a `Result` + error[E0308]: mismatched types --> $DIR/issue-102605.rs:13:20 | @@ -25,14 +33,6 @@ help: try wrapping the expression in `Err` LL | convert_result(Err(foo())) | ++++ + -error[E0277]: `main` has invalid return type `Option<()>` - --> $DIR/issue-102605.rs:11:14 - | -LL | fn main() -> Option<()> { - | ^^^^^^^^^^ `main` can only return types that implement `Termination` - | - = help: consider using `()`, or a `Result` - error: aborting due to 2 previous errors Some errors have detailed explanations: E0277, E0308. diff --git a/tests/ui/inference/need_type_info/infer-var-for-self-param.rs b/tests/ui/inference/need_type_info/infer-var-for-self-param.rs new file mode 100644 index 00000000000..51ac7943f24 --- /dev/null +++ b/tests/ui/inference/need_type_info/infer-var-for-self-param.rs @@ -0,0 +1,7 @@ +// Regression test for #113610 where we ICEd when trying to print +// inference variables created by instantiating the self type parameter. + +fn main() { + let _ = (Default::default(),); + //~^ ERROR cannot call associated function on trait +} diff --git a/tests/ui/inference/need_type_info/infer-var-for-self-param.stderr b/tests/ui/inference/need_type_info/infer-var-for-self-param.stderr new file mode 100644 index 00000000000..36d75469392 --- /dev/null +++ b/tests/ui/inference/need_type_info/infer-var-for-self-param.stderr @@ -0,0 +1,14 @@ +error[E0790]: cannot call associated function on trait without specifying the corresponding `impl` type + --> $DIR/infer-var-for-self-param.rs:5:14 + | +LL | let _ = (Default::default(),); + | ^^^^^^^^^^^^^^^^ cannot call associated function of trait + | +help: use a fully-qualified path to a specific available implementation + | +LL | let _ = (</* self type */ as Default>::default(),); + | +++++++++++++++++++ + + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0790`. 
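The infer-var-for-self-param regression test above ends with the fully-qualified-path suggestion whose wording the E0283/E0790 hunks also adjust. For reference, a minimal sketch of what that suggestion looks like once a concrete Self type is filled in (`u32` is used here purely as an example, not something the diagnostic prescribes):

    fn main() {
        // `(Default::default(),)` alone gives inference nothing to resolve the
        // Self type against; naming one via a fully-qualified path fixes E0790.
        let _ = (<u32 as Default>::default(),);
    }
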
diff --git a/tests/ui/layout/debug.stderr b/tests/ui/layout/debug.stderr index b9fa1b299e9..eeffb3c5f64 100644 --- a/tests/ui/layout/debug.stderr +++ b/tests/ui/layout/debug.stderr @@ -53,6 +53,8 @@ error: layout_of(E) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(12 bytes), @@ -77,9 +79,13 @@ error: layout_of(E) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/debug.rs:7:1 | @@ -124,6 +130,8 @@ error: layout_of(S) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/debug.rs:10:1 | @@ -146,6 +154,8 @@ error: layout_of(U) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/debug.rs:13:1 | @@ -237,6 +247,8 @@ error: layout_of(std::result::Result<i32, i32>) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, Layout { size: Size(8 bytes), @@ -272,9 +284,13 @@ error: layout_of(std::result::Result<i32, i32>) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/debug.rs:16:1 | @@ -301,6 +317,8 @@ error: layout_of(i32) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/debug.rs:19:1 | @@ -323,6 +341,8 @@ error: layout_of(V) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(2 bytes), } --> $DIR/debug.rs:22:1 | @@ -345,6 +365,8 @@ error: layout_of(W) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(2 bytes), } --> $DIR/debug.rs:28:1 | @@ -367,6 +389,8 @@ error: layout_of(Y) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(2 bytes), } --> $DIR/debug.rs:34:1 | @@ -389,6 +413,8 @@ error: layout_of(P1) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/debug.rs:41:1 | @@ -411,6 +437,8 @@ error: layout_of(P2) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/debug.rs:45:1 | @@ -433,6 +461,8 @@ error: layout_of(P3) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/debug.rs:53:1 | @@ -455,6 +485,8 @@ error: layout_of(P4) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/debug.rs:57:1 | @@ -482,6 +514,8 @@ error: layout_of(P5) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/debug.rs:61:1 | @@ -509,6 +543,8 @@ error: layout_of(std::mem::MaybeUninit<u8>) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/debug.rs:64:1 | diff --git a/tests/ui/layout/hexagon-enum.stderr b/tests/ui/layout/hexagon-enum.stderr index d850dd69c96..a2ad4a1ab58 100644 --- a/tests/ui/layout/hexagon-enum.stderr +++ b/tests/ui/layout/hexagon-enum.stderr @@ -59,9 +59,13 @@ error: layout_of(A) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 
bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/hexagon-enum.rs:16:1 | @@ -129,9 +133,13 @@ error: layout_of(B) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/hexagon-enum.rs:20:1 | @@ -199,9 +207,13 @@ error: layout_of(C) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(2 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(2 bytes), } --> $DIR/hexagon-enum.rs:24:1 | @@ -269,9 +281,13 @@ error: layout_of(P) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/hexagon-enum.rs:28:1 | @@ -339,9 +355,13 @@ error: layout_of(T) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/hexagon-enum.rs:34:1 | diff --git a/tests/ui/layout/issue-96158-scalarpair-payload-might-be-uninit.stderr b/tests/ui/layout/issue-96158-scalarpair-payload-might-be-uninit.stderr index 8c7c915350f..d3ba1a295b1 100644 --- a/tests/ui/layout/issue-96158-scalarpair-payload-might-be-uninit.stderr +++ b/tests/ui/layout/issue-96158-scalarpair-payload-might-be-uninit.stderr @@ -81,6 +81,8 @@ error: layout_of(MissingPayloadField) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(1 bytes), @@ -99,9 +101,13 @@ error: layout_of(MissingPayloadField) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/issue-96158-scalarpair-payload-might-be-uninit.rs:16:1 | @@ -193,6 +199,8 @@ error: layout_of(CommonPayloadField) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(2 bytes), @@ -228,9 +236,13 @@ error: layout_of(CommonPayloadField) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/issue-96158-scalarpair-payload-might-be-uninit.rs:25:1 | @@ -320,6 +332,8 @@ error: layout_of(CommonPayloadFieldIsMaybeUninit) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(2 bytes), @@ -354,9 +368,13 @@ error: layout_of(CommonPayloadFieldIsMaybeUninit) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/issue-96158-scalarpair-payload-might-be-uninit.rs:33:1 | @@ -462,6 +480,8 @@ error: layout_of(NicheFirst) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(0 bytes), @@ -480,6 +500,8 @@ error: layout_of(NicheFirst) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(0 bytes), @@ -498,9 +520,13 @@ error: layout_of(NicheFirst) = Layout { variants: Single { index: 2, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], 
}, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/issue-96158-scalarpair-payload-might-be-uninit.rs:41:1 | @@ -606,6 +632,8 @@ error: layout_of(NicheSecond) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(0 bytes), @@ -624,6 +652,8 @@ error: layout_of(NicheSecond) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(0 bytes), @@ -642,9 +672,13 @@ error: layout_of(NicheSecond) = Layout { variants: Single { index: 2, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/issue-96158-scalarpair-payload-might-be-uninit.rs:50:1 | diff --git a/tests/ui/layout/issue-96185-overaligned-enum.stderr b/tests/ui/layout/issue-96185-overaligned-enum.stderr index de6177c8dfc..c539eb453d9 100644 --- a/tests/ui/layout/issue-96185-overaligned-enum.stderr +++ b/tests/ui/layout/issue-96185-overaligned-enum.stderr @@ -53,6 +53,10 @@ error: layout_of(Aligned1) = Layout { variants: Single { index: 0, }, + max_repr_align: Some( + Align(8 bytes), + ), + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(8 bytes), @@ -71,9 +75,17 @@ error: layout_of(Aligned1) = Layout { variants: Single { index: 1, }, + max_repr_align: Some( + Align(8 bytes), + ), + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: Some( + Align(8 bytes), + ), + unadjusted_abi_align: Align(1 bytes), } --> $DIR/issue-96185-overaligned-enum.rs:8:1 | @@ -141,6 +153,10 @@ error: layout_of(Aligned2) = Layout { variants: Single { index: 0, }, + max_repr_align: Some( + Align(1 bytes), + ), + unadjusted_abi_align: Align(1 bytes), }, Layout { size: Size(1 bytes), @@ -159,9 +175,17 @@ error: layout_of(Aligned2) = Layout { variants: Single { index: 1, }, + max_repr_align: Some( + Align(1 bytes), + ), + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: Some( + Align(1 bytes), + ), + unadjusted_abi_align: Align(1 bytes), } --> $DIR/issue-96185-overaligned-enum.rs:16:1 | diff --git a/tests/ui/layout/thumb-enum.stderr b/tests/ui/layout/thumb-enum.stderr index 227bd950b66..6f6ab498206 100644 --- a/tests/ui/layout/thumb-enum.stderr +++ b/tests/ui/layout/thumb-enum.stderr @@ -59,9 +59,13 @@ error: layout_of(A) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/thumb-enum.rs:16:1 | @@ -129,9 +133,13 @@ error: layout_of(B) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), } --> $DIR/thumb-enum.rs:20:1 | @@ -199,9 +207,13 @@ error: layout_of(C) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(2 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(2 bytes), } --> $DIR/thumb-enum.rs:24:1 | @@ -269,9 +281,13 @@ error: layout_of(P) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/thumb-enum.rs:28:1 | @@ -339,9 +355,13 @@ error: layout_of(T) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, ], }, + max_repr_align: None, + 
unadjusted_abi_align: Align(4 bytes), } --> $DIR/thumb-enum.rs:34:1 | diff --git a/tests/ui/layout/zero-sized-array-enum-niche.stderr b/tests/ui/layout/zero-sized-array-enum-niche.stderr index a3e82070e0f..df9f1cc8d10 100644 --- a/tests/ui/layout/zero-sized-array-enum-niche.stderr +++ b/tests/ui/layout/zero-sized-array-enum-niche.stderr @@ -57,6 +57,8 @@ error: layout_of(std::result::Result<[u32; 0], bool>) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, Layout { size: Size(2 bytes), @@ -88,9 +90,13 @@ error: layout_of(std::result::Result<[u32; 0], bool>) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/zero-sized-array-enum-niche.rs:13:1 | @@ -156,6 +162,8 @@ error: layout_of(MultipleAlignments) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(2 bytes), }, Layout { size: Size(4 bytes), @@ -178,6 +186,8 @@ error: layout_of(MultipleAlignments) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, Layout { size: Size(2 bytes), @@ -209,9 +219,13 @@ error: layout_of(MultipleAlignments) = Layout { variants: Single { index: 2, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/zero-sized-array-enum-niche.rs:21:1 | @@ -277,6 +291,8 @@ error: layout_of(std::result::Result<[u32; 0], Packed<std::num::NonZeroU16>>) = variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, Layout { size: Size(3 bytes), @@ -308,9 +324,13 @@ error: layout_of(std::result::Result<[u32; 0], Packed<std::num::NonZeroU16>>) = variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/zero-sized-array-enum-niche.rs:37:1 | @@ -380,6 +400,8 @@ error: layout_of(std::result::Result<[u32; 0], Packed<U16IsZero>>) = Layout { variants: Single { index: 0, }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), }, Layout { size: Size(2 bytes), @@ -411,9 +433,13 @@ error: layout_of(std::result::Result<[u32; 0], Packed<U16IsZero>>) = Layout { variants: Single { index: 1, }, + max_repr_align: None, + unadjusted_abi_align: Align(1 bytes), }, ], }, + max_repr_align: None, + unadjusted_abi_align: Align(4 bytes), } --> $DIR/zero-sized-array-enum-niche.rs:44:1 | diff --git a/tests/ui/liveness/liveness-move-call-arg-2.rs b/tests/ui/liveness/liveness-move-call-arg-2.rs new file mode 100644 index 00000000000..b93535c89b1 --- /dev/null +++ b/tests/ui/liveness/liveness-move-call-arg-2.rs @@ -0,0 +1,12 @@ +fn take(_x: Box<isize>) {} + + +fn main() { + let _ = || { + let x: Box<isize> = Box::new(25); + + loop { + take(x); //~ ERROR use of moved value: `x` + } + }; +} diff --git a/tests/ui/liveness/liveness-move-call-arg-2.stderr b/tests/ui/liveness/liveness-move-call-arg-2.stderr new file mode 100644 index 00000000000..479a086a80c --- /dev/null +++ b/tests/ui/liveness/liveness-move-call-arg-2.stderr @@ -0,0 +1,26 @@ +error[E0382]: use of moved value: `x` + --> $DIR/liveness-move-call-arg-2.rs:9:18 + | +LL | let x: Box<isize> = Box::new(25); + | - move occurs because `x` has type `Box<isize>`, which does not implement the `Copy` trait +LL | +LL | loop { + | ---- inside of this 
loop +LL | take(x); + | ^ value moved here, in previous iteration of loop + | +note: consider changing this parameter type in function `take` to borrow instead if owning the value isn't necessary + --> $DIR/liveness-move-call-arg-2.rs:1:13 + | +LL | fn take(_x: Box<isize>) {} + | ---- ^^^^^^^^^^ this parameter takes ownership of the value + | | + | in this function +help: consider cloning the value if the performance cost is acceptable + | +LL | take(x.clone()); + | ++++++++ + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0382`. diff --git a/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout b/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout index 091862de30f..4f8730053ee 100644 --- a/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout +++ b/tests/ui/proc-macro/allowed-attr-stmt-expr.stdout @@ -19,17 +19,17 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:35:9: 35:10 (#11), + span: $DIR/allowed-attr-stmt-expr.rs:35:9: 35:10 (#10), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "rustc_dummy", - span: $DIR/allowed-attr-stmt-expr.rs:35:11: 35:22 (#11), + span: $DIR/allowed-attr-stmt-expr.rs:35:11: 35:22 (#10), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:35:10: 35:23 (#11), + span: $DIR/allowed-attr-stmt-expr.rs:35:10: 35:23 (#10), }, Ident { ident: "struct", @@ -206,17 +206,17 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:35:9: 35:10 (#32), + span: $DIR/allowed-attr-stmt-expr.rs:35:9: 35:10 (#31), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "rustc_dummy", - span: $DIR/allowed-attr-stmt-expr.rs:35:11: 35:22 (#32), + span: $DIR/allowed-attr-stmt-expr.rs:35:11: 35:22 (#31), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:35:10: 35:23 (#32), + span: $DIR/allowed-attr-stmt-expr.rs:35:10: 35:23 (#31), }, Punct { ch: '#', diff --git a/tests/ui/proc-macro/attr-stmt-expr.stdout b/tests/ui/proc-macro/attr-stmt-expr.stdout index f9b2305c735..c6d77e0ed0c 100644 --- a/tests/ui/proc-macro/attr-stmt-expr.stdout +++ b/tests/ui/proc-macro/attr-stmt-expr.stdout @@ -3,17 +3,17 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:33:9: 33:10 (#8), + span: $DIR/attr-stmt-expr.rs:33:9: 33:10 (#7), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "rustc_dummy", - span: $DIR/attr-stmt-expr.rs:33:11: 33:22 (#8), + span: $DIR/attr-stmt-expr.rs:33:11: 33:22 (#7), }, ], - span: $DIR/attr-stmt-expr.rs:33:10: 33:23 (#8), + span: $DIR/attr-stmt-expr.rs:33:10: 33:23 (#7), }, Ident { ident: "struct", @@ -190,17 +190,17 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:33:9: 33:10 (#29), + span: $DIR/attr-stmt-expr.rs:33:9: 33:10 (#28), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "rustc_dummy", - span: $DIR/attr-stmt-expr.rs:33:11: 33:22 (#29), + span: $DIR/attr-stmt-expr.rs:33:11: 33:22 (#28), }, ], - span: $DIR/attr-stmt-expr.rs:33:10: 33:23 (#29), + span: $DIR/attr-stmt-expr.rs:33:10: 33:23 (#28), }, Punct { ch: '#', diff --git a/tests/ui/proc-macro/capture-macro-rules-invoke.stdout b/tests/ui/proc-macro/capture-macro-rules-invoke.stdout index b88fbd3e897..01d71ff989b 100644 --- a/tests/ui/proc-macro/capture-macro-rules-invoke.stdout +++ b/tests/ui/proc-macro/capture-macro-rules-invoke.stdout @@ -8,7 +8,7 @@ PRINT-BANG INPUT (DEBUG): 
TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:36:24: 36:28 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:21:21: 21:26 (#4), + span: $DIR/capture-macro-rules-invoke.rs:21:21: 21:26 (#3), }, ] PRINT-BANG INPUT (DISPLAY): 1 + 1, { "a" }, let a = 1;, String, my_name, 'a, my_val = 30, @@ -37,12 +37,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:38:17: 38:18 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:14:29: 14:34 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:29: 14:34 (#7), }, Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:14:34: 14:35 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:34: 14:35 (#7), }, Group { delimiter: None, @@ -60,12 +60,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:39:13: 39:20 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:14:36: 14:42 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:36: 14:42 (#7), }, Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:14:42: 14:43 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:42: 14:43 (#7), }, Group { delimiter: None, @@ -90,12 +90,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:40:21: 40:22 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:14:44: 14:49 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:44: 14:49 (#7), }, Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:14:49: 14:50 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:49: 14:50 (#7), }, Group { delimiter: None, @@ -105,12 +105,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:41:13: 41:19 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:14:51: 14:54 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:51: 14:54 (#7), }, Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:14:54: 14:55 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:54: 14:55 (#7), }, Ident { ident: "my_name", @@ -119,7 +119,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:14:62: 14:63 (#8), + span: $DIR/capture-macro-rules-invoke.rs:14:62: 14:63 (#7), }, Group { delimiter: None, @@ -134,12 +134,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:43:13: 43:15 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:15:29: 15:38 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:29: 15:38 (#7), }, Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:15:38: 15:39 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:38: 15:39 (#7), }, Group { delimiter: None, @@ -160,12 +160,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:44:22: 44:24 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:15:40: 15:45 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:40: 15:45 (#7), }, Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:15:45: 15:46 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:45: 15:46 (#7), }, Group { delimiter: None, @@ -203,12 +203,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:45:26: 45:32 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:15:47: 15:52 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:47: 15:52 (#7), }, Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:15:52: 15:53 (#8), + span: 
$DIR/capture-macro-rules-invoke.rs:15:52: 15:53 (#7), }, Group { delimiter: None, @@ -246,12 +246,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:46:16: 46:31 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:15:54: 15:58 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:54: 15:58 (#7), }, Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:15:58: 15:59 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:58: 15:59 (#7), }, Group { delimiter: Bracket, @@ -274,7 +274,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ Punct { ch: ',', spacing: Alone, - span: $DIR/capture-macro-rules-invoke.rs:15:63: 15:64 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:63: 15:64 (#7), }, Group { delimiter: None, @@ -291,7 +291,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:48:14: 48:16 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:15:65: 15:69 (#8), + span: $DIR/capture-macro-rules-invoke.rs:15:65: 15:69 (#7), }, ] PRINT-BANG INPUT (DISPLAY): (a, b) @@ -319,6 +319,6 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-macro-rules-invoke.rs:52:26: 52:32 (#0), }, ], - span: $DIR/capture-macro-rules-invoke.rs:27:21: 27:25 (#12), + span: $DIR/capture-macro-rules-invoke.rs:27:21: 27:25 (#11), }, ] diff --git a/tests/ui/proc-macro/capture-unglued-token.stdout b/tests/ui/proc-macro/capture-unglued-token.stdout index 7e6b540332c..a0d2178f000 100644 --- a/tests/ui/proc-macro/capture-unglued-token.stdout +++ b/tests/ui/proc-macro/capture-unglued-token.stdout @@ -23,6 +23,6 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/capture-unglued-token.rs:19:30: 19:31 (#0), }, ], - span: $DIR/capture-unglued-token.rs:15:42: 15:48 (#4), + span: $DIR/capture-unglued-token.rs:15:42: 15:48 (#3), }, ] diff --git a/tests/ui/proc-macro/dollar-crate-issue-57089.stdout b/tests/ui/proc-macro/dollar-crate-issue-57089.stdout index 2622c005d93..de4f0c000b6 100644 --- a/tests/ui/proc-macro/dollar-crate-issue-57089.stdout +++ b/tests/ui/proc-macro/dollar-crate-issue-57089.stdout @@ -2,79 +2,79 @@ PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ; PRINT-BANG INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/dollar-crate-issue-57089.rs:17:13: 17:19 (#4), + span: $DIR/dollar-crate-issue-57089.rs:17:13: 17:19 (#3), }, Ident { ident: "M", - span: $DIR/dollar-crate-issue-57089.rs:17:20: 17:21 (#4), + span: $DIR/dollar-crate-issue-57089.rs:17:20: 17:21 (#3), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/dollar-crate-issue-57089.rs:17:22: 17:28 (#4), + span: $DIR/dollar-crate-issue-57089.rs:17:22: 17:28 (#3), }, Punct { ch: ':', spacing: Joint, - span: $DIR/dollar-crate-issue-57089.rs:17:28: 17:29 (#4), + span: $DIR/dollar-crate-issue-57089.rs:17:28: 17:29 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/dollar-crate-issue-57089.rs:17:29: 17:30 (#4), + span: $DIR/dollar-crate-issue-57089.rs:17:29: 17:30 (#3), }, Ident { ident: "S", - span: $DIR/dollar-crate-issue-57089.rs:17:30: 17:31 (#4), + span: $DIR/dollar-crate-issue-57089.rs:17:30: 17:31 (#3), }, ], - span: $DIR/dollar-crate-issue-57089.rs:17:21: 17:32 (#4), + span: $DIR/dollar-crate-issue-57089.rs:17:21: 17:32 (#3), }, Punct { ch: ';', spacing: Alone, - span: $DIR/dollar-crate-issue-57089.rs:17:32: 17:33 (#4), + span: $DIR/dollar-crate-issue-57089.rs:17:32: 17:33 (#3), }, ] PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - 
span: $DIR/dollar-crate-issue-57089.rs:21:9: 21:15 (#4), + span: $DIR/dollar-crate-issue-57089.rs:21:9: 21:15 (#3), }, Ident { ident: "A", - span: $DIR/dollar-crate-issue-57089.rs:21:16: 21:17 (#4), + span: $DIR/dollar-crate-issue-57089.rs:21:16: 21:17 (#3), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/dollar-crate-issue-57089.rs:21:18: 21:24 (#4), + span: $DIR/dollar-crate-issue-57089.rs:21:18: 21:24 (#3), }, Punct { ch: ':', spacing: Joint, - span: $DIR/dollar-crate-issue-57089.rs:21:24: 21:25 (#4), + span: $DIR/dollar-crate-issue-57089.rs:21:24: 21:25 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/dollar-crate-issue-57089.rs:21:25: 21:26 (#4), + span: $DIR/dollar-crate-issue-57089.rs:21:25: 21:26 (#3), }, Ident { ident: "S", - span: $DIR/dollar-crate-issue-57089.rs:21:26: 21:27 (#4), + span: $DIR/dollar-crate-issue-57089.rs:21:26: 21:27 (#3), }, ], - span: $DIR/dollar-crate-issue-57089.rs:21:17: 21:28 (#4), + span: $DIR/dollar-crate-issue-57089.rs:21:17: 21:28 (#3), }, Punct { ch: ';', spacing: Alone, - span: $DIR/dollar-crate-issue-57089.rs:21:28: 21:29 (#4), + span: $DIR/dollar-crate-issue-57089.rs:21:28: 21:29 (#3), }, ] diff --git a/tests/ui/proc-macro/dollar-crate-issue-62325.stdout b/tests/ui/proc-macro/dollar-crate-issue-62325.stdout index a91908239c3..c7e72bf4ff5 100644 --- a/tests/ui/proc-macro/dollar-crate-issue-62325.stdout +++ b/tests/ui/proc-macro/dollar-crate-issue-62325.stdout @@ -2,109 +2,109 @@ PRINT-ATTR INPUT (DISPLAY): struct A(identity! ($crate :: S)) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/dollar-crate-issue-62325.rs:19:5: 19:11 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:5: 19:11 (#3), }, Ident { ident: "A", - span: $DIR/dollar-crate-issue-62325.rs:19:12: 19:13 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:12: 19:13 (#3), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "identity", - span: $DIR/dollar-crate-issue-62325.rs:19:14: 19:22 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:14: 19:22 (#3), }, Punct { ch: '!', spacing: Alone, - span: $DIR/dollar-crate-issue-62325.rs:19:22: 19:23 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:22: 19:23 (#3), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/dollar-crate-issue-62325.rs:19:24: 19:30 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:24: 19:30 (#3), }, Punct { ch: ':', spacing: Joint, - span: $DIR/dollar-crate-issue-62325.rs:19:30: 19:31 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:30: 19:31 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/dollar-crate-issue-62325.rs:19:31: 19:32 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:31: 19:32 (#3), }, Ident { ident: "S", - span: $DIR/dollar-crate-issue-62325.rs:19:32: 19:33 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:32: 19:33 (#3), }, ], - span: $DIR/dollar-crate-issue-62325.rs:19:23: 19:34 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:23: 19:34 (#3), }, ], - span: $DIR/dollar-crate-issue-62325.rs:19:13: 19:35 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:13: 19:35 (#3), }, Punct { ch: ';', spacing: Alone, - span: $DIR/dollar-crate-issue-62325.rs:19:35: 19:36 (#4), + span: $DIR/dollar-crate-issue-62325.rs:19:35: 19:36 (#3), }, ] PRINT-ATTR INPUT (DISPLAY): struct B(identity! 
($crate :: S)) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/auxiliary/dollar-crate-external.rs:21:5: 21:11 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:5: 21:11 (#11), }, Ident { ident: "B", - span: $DIR/auxiliary/dollar-crate-external.rs:21:12: 21:13 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:12: 21:13 (#11), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "identity", - span: $DIR/auxiliary/dollar-crate-external.rs:21:14: 21:22 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:14: 21:22 (#11), }, Punct { ch: '!', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:21:22: 21:23 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:22: 21:23 (#11), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/auxiliary/dollar-crate-external.rs:21:24: 21:30 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:24: 21:30 (#11), }, Punct { ch: ':', spacing: Joint, - span: $DIR/auxiliary/dollar-crate-external.rs:21:30: 21:31 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:30: 21:31 (#11), }, Punct { ch: ':', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:21:31: 21:32 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:31: 21:32 (#11), }, Ident { ident: "S", - span: $DIR/auxiliary/dollar-crate-external.rs:21:32: 21:33 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:32: 21:33 (#11), }, ], - span: $DIR/auxiliary/dollar-crate-external.rs:21:23: 21:34 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:23: 21:34 (#11), }, ], - span: $DIR/auxiliary/dollar-crate-external.rs:21:13: 21:35 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:13: 21:35 (#11), }, Punct { ch: ';', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:21:35: 21:36 (#12), + span: $DIR/auxiliary/dollar-crate-external.rs:21:35: 21:36 (#11), }, ] diff --git a/tests/ui/proc-macro/dollar-crate.stdout b/tests/ui/proc-macro/dollar-crate.stdout index 4e169d47e1a..0f5f87ceca2 100644 --- a/tests/ui/proc-macro/dollar-crate.stdout +++ b/tests/ui/proc-macro/dollar-crate.stdout @@ -2,239 +2,239 @@ PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ; PRINT-BANG INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/dollar-crate.rs:20:17: 20:23 (#4), + span: $DIR/dollar-crate.rs:20:17: 20:23 (#3), }, Ident { ident: "M", - span: $DIR/dollar-crate.rs:20:24: 20:25 (#4), + span: $DIR/dollar-crate.rs:20:24: 20:25 (#3), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/dollar-crate.rs:20:26: 20:32 (#4), + span: $DIR/dollar-crate.rs:20:26: 20:32 (#3), }, Punct { ch: ':', spacing: Joint, - span: $DIR/dollar-crate.rs:20:32: 20:33 (#4), + span: $DIR/dollar-crate.rs:20:32: 20:33 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/dollar-crate.rs:20:33: 20:34 (#4), + span: $DIR/dollar-crate.rs:20:33: 20:34 (#3), }, Ident { ident: "S", - span: $DIR/dollar-crate.rs:20:34: 20:35 (#4), + span: $DIR/dollar-crate.rs:20:34: 20:35 (#3), }, ], - span: $DIR/dollar-crate.rs:20:25: 20:36 (#4), + span: $DIR/dollar-crate.rs:20:25: 20:36 (#3), }, Punct { ch: ';', spacing: Alone, - span: $DIR/dollar-crate.rs:20:36: 20:37 (#4), + span: $DIR/dollar-crate.rs:20:36: 20:37 (#3), }, ] PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/dollar-crate.rs:24:13: 24:19 (#4), + span: $DIR/dollar-crate.rs:24:13: 24:19 (#3), }, Ident { 
ident: "A", - span: $DIR/dollar-crate.rs:24:20: 24:21 (#4), + span: $DIR/dollar-crate.rs:24:20: 24:21 (#3), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/dollar-crate.rs:24:22: 24:28 (#4), + span: $DIR/dollar-crate.rs:24:22: 24:28 (#3), }, Punct { ch: ':', spacing: Joint, - span: $DIR/dollar-crate.rs:24:28: 24:29 (#4), + span: $DIR/dollar-crate.rs:24:28: 24:29 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/dollar-crate.rs:24:29: 24:30 (#4), + span: $DIR/dollar-crate.rs:24:29: 24:30 (#3), }, Ident { ident: "S", - span: $DIR/dollar-crate.rs:24:30: 24:31 (#4), + span: $DIR/dollar-crate.rs:24:30: 24:31 (#3), }, ], - span: $DIR/dollar-crate.rs:24:21: 24:32 (#4), + span: $DIR/dollar-crate.rs:24:21: 24:32 (#3), }, Punct { ch: ';', spacing: Alone, - span: $DIR/dollar-crate.rs:24:32: 24:33 (#4), + span: $DIR/dollar-crate.rs:24:32: 24:33 (#3), }, ] PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ; PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/dollar-crate.rs:27:13: 27:19 (#4), + span: $DIR/dollar-crate.rs:27:13: 27:19 (#3), }, Ident { ident: "D", - span: $DIR/dollar-crate.rs:27:20: 27:21 (#4), + span: $DIR/dollar-crate.rs:27:20: 27:21 (#3), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/dollar-crate.rs:27:22: 27:28 (#4), + span: $DIR/dollar-crate.rs:27:22: 27:28 (#3), }, Punct { ch: ':', spacing: Joint, - span: $DIR/dollar-crate.rs:27:28: 27:29 (#4), + span: $DIR/dollar-crate.rs:27:28: 27:29 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/dollar-crate.rs:27:29: 27:30 (#4), + span: $DIR/dollar-crate.rs:27:29: 27:30 (#3), }, Ident { ident: "S", - span: $DIR/dollar-crate.rs:27:30: 27:31 (#4), + span: $DIR/dollar-crate.rs:27:30: 27:31 (#3), }, ], - span: $DIR/dollar-crate.rs:27:21: 27:32 (#4), + span: $DIR/dollar-crate.rs:27:21: 27:32 (#3), }, Punct { ch: ';', spacing: Alone, - span: $DIR/dollar-crate.rs:27:32: 27:33 (#4), + span: $DIR/dollar-crate.rs:27:32: 27:33 (#3), }, ] PRINT-BANG INPUT (DISPLAY): struct M($crate :: S) ; PRINT-BANG INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/auxiliary/dollar-crate-external.rs:7:13: 7:19 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:7:13: 7:19 (#14), }, Ident { ident: "M", - span: $DIR/auxiliary/dollar-crate-external.rs:7:20: 7:21 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:7:20: 7:21 (#14), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/auxiliary/dollar-crate-external.rs:7:22: 7:28 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:7:22: 7:28 (#14), }, Punct { ch: ':', spacing: Joint, - span: $DIR/auxiliary/dollar-crate-external.rs:7:28: 7:29 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:7:28: 7:29 (#14), }, Punct { ch: ':', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:7:29: 7:30 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:7:29: 7:30 (#14), }, Ident { ident: "S", - span: $DIR/auxiliary/dollar-crate-external.rs:7:30: 7:31 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:7:30: 7:31 (#14), }, ], - span: $DIR/auxiliary/dollar-crate-external.rs:7:21: 7:32 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:7:21: 7:32 (#14), }, Punct { ch: ';', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:7:32: 7:33 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:7:32: 7:33 (#14), }, ] PRINT-ATTR INPUT (DISPLAY): struct A($crate :: S) ; PRINT-ATTR INPUT (DEBUG): TokenStream 
[ Ident { ident: "struct", - span: $DIR/auxiliary/dollar-crate-external.rs:11:9: 11:15 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:11:9: 11:15 (#14), }, Ident { ident: "A", - span: $DIR/auxiliary/dollar-crate-external.rs:11:16: 11:17 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:11:16: 11:17 (#14), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/auxiliary/dollar-crate-external.rs:11:18: 11:24 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:11:18: 11:24 (#14), }, Punct { ch: ':', spacing: Joint, - span: $DIR/auxiliary/dollar-crate-external.rs:11:24: 11:25 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:11:24: 11:25 (#14), }, Punct { ch: ':', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:11:25: 11:26 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:11:25: 11:26 (#14), }, Ident { ident: "S", - span: $DIR/auxiliary/dollar-crate-external.rs:11:26: 11:27 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:11:26: 11:27 (#14), }, ], - span: $DIR/auxiliary/dollar-crate-external.rs:11:17: 11:28 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:11:17: 11:28 (#14), }, Punct { ch: ';', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:11:28: 11:29 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:11:28: 11:29 (#14), }, ] PRINT-DERIVE INPUT (DISPLAY): struct D($crate :: S) ; PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/auxiliary/dollar-crate-external.rs:14:9: 14:15 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:14:9: 14:15 (#14), }, Ident { ident: "D", - span: $DIR/auxiliary/dollar-crate-external.rs:14:16: 14:17 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:14:16: 14:17 (#14), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "$crate", - span: $DIR/auxiliary/dollar-crate-external.rs:14:18: 14:24 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:14:18: 14:24 (#14), }, Punct { ch: ':', spacing: Joint, - span: $DIR/auxiliary/dollar-crate-external.rs:14:24: 14:25 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:14:24: 14:25 (#14), }, Punct { ch: ':', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:14:25: 14:26 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:14:25: 14:26 (#14), }, Ident { ident: "S", - span: $DIR/auxiliary/dollar-crate-external.rs:14:26: 14:27 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:14:26: 14:27 (#14), }, ], - span: $DIR/auxiliary/dollar-crate-external.rs:14:17: 14:28 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:14:17: 14:28 (#14), }, Punct { ch: ';', spacing: Alone, - span: $DIR/auxiliary/dollar-crate-external.rs:14:28: 14:29 (#15), + span: $DIR/auxiliary/dollar-crate-external.rs:14:28: 14:29 (#14), }, ] diff --git a/tests/ui/proc-macro/expand-to-derive.stdout b/tests/ui/proc-macro/expand-to-derive.stdout index a6437982a37..39f00918329 100644 --- a/tests/ui/proc-macro/expand-to-derive.stdout +++ b/tests/ui/proc-macro/expand-to-derive.stdout @@ -6,35 +6,35 @@ PRINT-DERIVE INPUT (DISPLAY): struct Foo PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/expand-to-derive.rs:16:9: 16:15 (#4), + span: $DIR/expand-to-derive.rs:16:9: 16:15 (#3), }, Ident { ident: "Foo", - span: $DIR/expand-to-derive.rs:16:16: 16:19 (#4), + span: $DIR/expand-to-derive.rs:16:16: 16:19 (#3), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "field", - span: $DIR/expand-to-derive.rs:18:13: 18:18 (#4), + span: 
$DIR/expand-to-derive.rs:18:13: 18:18 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/expand-to-derive.rs:18:18: 18:19 (#4), + span: $DIR/expand-to-derive.rs:18:18: 18:19 (#3), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "bool", - span: $DIR/expand-to-derive.rs:18:21: 18:25 (#4), + span: $DIR/expand-to-derive.rs:18:21: 18:25 (#3), }, Punct { ch: ';', spacing: Alone, - span: $DIR/expand-to-derive.rs:18:25: 18:26 (#4), + span: $DIR/expand-to-derive.rs:18:25: 18:26 (#3), }, Group { delimiter: Brace, @@ -90,15 +90,15 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ kind: Integer, symbol: "0", suffix: None, - span: $DIR/expand-to-derive.rs:20:17: 20:18 (#4), + span: $DIR/expand-to-derive.rs:20:17: 20:18 (#3), }, ], - span: $DIR/expand-to-derive.rs:18:27: 21:14 (#4), + span: $DIR/expand-to-derive.rs:18:27: 21:14 (#3), }, ], - span: $DIR/expand-to-derive.rs:18:20: 21:15 (#4), + span: $DIR/expand-to-derive.rs:18:20: 21:15 (#3), }, ], - span: $DIR/expand-to-derive.rs:16:20: 22:10 (#4), + span: $DIR/expand-to-derive.rs:16:20: 22:10 (#3), }, ] diff --git a/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout b/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout index 686d53e8876..40181efc0b8 100644 --- a/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout +++ b/tests/ui/proc-macro/expr-stmt-nonterminal-tokens.stdout @@ -3,39 +3,39 @@ PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = #[allow(warning PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", - span: #4 bytes(299..303), + span: #3 bytes(299..303), }, Ident { ident: "E", - span: #4 bytes(304..305), + span: #3 bytes(304..305), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "V", - span: #4 bytes(320..321), + span: #3 bytes(320..321), }, Punct { ch: '=', spacing: Alone, - span: #4 bytes(322..323), + span: #3 bytes(322..323), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "let", - span: #4 bytes(326..329), + span: #3 bytes(326..329), }, Ident { ident: "_", - span: #4 bytes(330..331), + span: #3 bytes(330..331), }, Punct { ch: '=', spacing: Alone, - span: #4 bytes(332..333), + span: #3 bytes(332..333), }, Group { delimiter: None, @@ -97,29 +97,29 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #0 bytes(560..561), }, ], - span: #4 bytes(334..339), + span: #3 bytes(334..339), }, Punct { ch: ';', spacing: Alone, - span: #4 bytes(339..340), + span: #3 bytes(339..340), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: #4 bytes(341..342), + span: #3 bytes(341..342), }, ], - span: #4 bytes(324..344), + span: #3 bytes(324..344), }, Punct { ch: ',', spacing: Alone, - span: #4 bytes(344..345), + span: #3 bytes(344..345), }, ], - span: #4 bytes(306..355), + span: #3 bytes(306..355), }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0; } ; 0 }, } @@ -127,39 +127,39 @@ PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", - span: #8 bytes(423..427), + span: #7 bytes(423..427), }, Ident { ident: "E", - span: #8 bytes(428..429), + span: #7 bytes(428..429), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "V", - span: #8 bytes(444..445), + span: #7 bytes(444..445), }, Punct { ch: '=', spacing: Alone, - span: #8 bytes(446..447), + span: #7 bytes(446..447), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "let", - span: #8 bytes(450..453), + span: #7 bytes(450..453), }, Ident { ident: "_", - span: #8 bytes(454..455), 
+ span: #7 bytes(454..455), }, Punct { ch: '=', spacing: Alone, - span: #8 bytes(456..457), + span: #7 bytes(456..457), }, Group { delimiter: Brace, @@ -174,71 +174,71 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #0 bytes(578..579), }, ], - span: #8 bytes(460..465), + span: #7 bytes(460..465), }, ], - span: #8 bytes(458..467), + span: #7 bytes(458..467), }, Punct { ch: ';', spacing: Alone, - span: #8 bytes(467..468), + span: #7 bytes(467..468), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: #8 bytes(469..470), + span: #7 bytes(469..470), }, ], - span: #8 bytes(448..472), + span: #7 bytes(448..472), }, Punct { ch: ',', spacing: Alone, - span: #8 bytes(472..473), + span: #7 bytes(472..473), }, ], - span: #8 bytes(430..483), + span: #7 bytes(430..483), }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { {} } ; 0 }, } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", - span: #12 bytes(423..427), + span: #11 bytes(423..427), }, Ident { ident: "E", - span: #12 bytes(428..429), + span: #11 bytes(428..429), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "V", - span: #12 bytes(444..445), + span: #11 bytes(444..445), }, Punct { ch: '=', spacing: Alone, - span: #12 bytes(446..447), + span: #11 bytes(446..447), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "let", - span: #12 bytes(450..453), + span: #11 bytes(450..453), }, Ident { ident: "_", - span: #12 bytes(454..455), + span: #11 bytes(454..455), }, Punct { ch: '=', spacing: Alone, - span: #12 bytes(456..457), + span: #11 bytes(456..457), }, Group { delimiter: Brace, @@ -252,32 +252,32 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #0 bytes(596..598), }, ], - span: #12 bytes(460..465), + span: #11 bytes(460..465), }, ], - span: #12 bytes(458..467), + span: #11 bytes(458..467), }, Punct { ch: ';', spacing: Alone, - span: #12 bytes(467..468), + span: #11 bytes(467..468), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: #12 bytes(469..470), + span: #11 bytes(469..470), }, ], - span: #12 bytes(448..472), + span: #11 bytes(448..472), }, Punct { ch: ',', spacing: Alone, - span: #12 bytes(472..473), + span: #11 bytes(472..473), }, ], - span: #12 bytes(430..483), + span: #11 bytes(430..483), }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH; } ; 0 }, } @@ -285,39 +285,39 @@ PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH } ; 0 }, PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", - span: #16 bytes(423..427), + span: #15 bytes(423..427), }, Ident { ident: "E", - span: #16 bytes(428..429), + span: #15 bytes(428..429), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "V", - span: #16 bytes(444..445), + span: #15 bytes(444..445), }, Punct { ch: '=', spacing: Alone, - span: #16 bytes(446..447), + span: #15 bytes(446..447), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "let", - span: #16 bytes(450..453), + span: #15 bytes(450..453), }, Ident { ident: "_", - span: #16 bytes(454..455), + span: #15 bytes(454..455), }, Punct { ch: '=', spacing: Alone, - span: #16 bytes(456..457), + span: #15 bytes(456..457), }, Group { delimiter: Brace, @@ -330,32 +330,32 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #0 bytes(615..619), }, ], - span: #16 bytes(460..465), + span: #15 bytes(460..465), }, ], - span: #16 bytes(458..467), + span: #15 bytes(458..467), }, Punct { ch: ';', spacing: Alone, - span: #16 bytes(467..468), + span: #15 bytes(467..468), }, Literal { kind: Integer, symbol: 
"0", suffix: None, - span: #16 bytes(469..470), + span: #15 bytes(469..470), }, ], - span: #16 bytes(448..472), + span: #15 bytes(448..472), }, Punct { ch: ',', spacing: Alone, - span: #16 bytes(472..473), + span: #15 bytes(472..473), }, ], - span: #16 bytes(430..483), + span: #15 bytes(430..483), }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { 0 + 1; } ; 0 }, } @@ -363,39 +363,39 @@ PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { 0 + 1 } ; 0 } PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", - span: #20 bytes(423..427), + span: #19 bytes(423..427), }, Ident { ident: "E", - span: #20 bytes(428..429), + span: #19 bytes(428..429), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "V", - span: #20 bytes(444..445), + span: #19 bytes(444..445), }, Punct { ch: '=', spacing: Alone, - span: #20 bytes(446..447), + span: #19 bytes(446..447), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "let", - span: #20 bytes(450..453), + span: #19 bytes(450..453), }, Ident { ident: "_", - span: #20 bytes(454..455), + span: #19 bytes(454..455), }, Punct { ch: '=', spacing: Alone, - span: #20 bytes(456..457), + span: #19 bytes(456..457), }, Group { delimiter: Brace, @@ -421,32 +421,32 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #0 bytes(640..641), }, ], - span: #20 bytes(460..465), + span: #19 bytes(460..465), }, ], - span: #20 bytes(458..467), + span: #19 bytes(458..467), }, Punct { ch: ';', spacing: Alone, - span: #20 bytes(467..468), + span: #19 bytes(467..468), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: #20 bytes(469..470), + span: #19 bytes(469..470), }, ], - span: #20 bytes(448..472), + span: #19 bytes(448..472), }, Punct { ch: ',', spacing: Alone, - span: #20 bytes(472..473), + span: #19 bytes(472..473), }, ], - span: #20 bytes(430..483), + span: #19 bytes(430..483), }, ] PRINT-DERIVE INPUT (DISPLAY): enum E { V = { let _ = { PATH + 1; } ; 0 }, } @@ -454,39 +454,39 @@ PRINT-DERIVE DEEP-RE-COLLECTED (DISPLAY): enum E { V = { let _ = { PATH + 1 } ; PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "enum", - span: #24 bytes(423..427), + span: #23 bytes(423..427), }, Ident { ident: "E", - span: #24 bytes(428..429), + span: #23 bytes(428..429), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "V", - span: #24 bytes(444..445), + span: #23 bytes(444..445), }, Punct { ch: '=', spacing: Alone, - span: #24 bytes(446..447), + span: #23 bytes(446..447), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "let", - span: #24 bytes(450..453), + span: #23 bytes(450..453), }, Ident { ident: "_", - span: #24 bytes(454..455), + span: #23 bytes(454..455), }, Punct { ch: '=', spacing: Alone, - span: #24 bytes(456..457), + span: #23 bytes(456..457), }, Group { delimiter: Brace, @@ -510,31 +510,31 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ span: #0 bytes(665..666), }, ], - span: #24 bytes(460..465), + span: #23 bytes(460..465), }, ], - span: #24 bytes(458..467), + span: #23 bytes(458..467), }, Punct { ch: ';', spacing: Alone, - span: #24 bytes(467..468), + span: #23 bytes(467..468), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: #24 bytes(469..470), + span: #23 bytes(469..470), }, ], - span: #24 bytes(448..472), + span: #23 bytes(448..472), }, Punct { ch: ',', spacing: Alone, - span: #24 bytes(472..473), + span: #23 bytes(472..473), }, ], - span: #24 bytes(430..483), + span: #23 bytes(430..483), }, ] diff --git a/tests/ui/proc-macro/input-interpolated.stdout 
b/tests/ui/proc-macro/input-interpolated.stdout index 34566c78019..6a8789b2c41 100644 --- a/tests/ui/proc-macro/input-interpolated.stdout +++ b/tests/ui/proc-macro/input-interpolated.stdout @@ -9,7 +9,7 @@ PRINT-ATTR INPUT (DISPLAY): const A : u8 = 0 ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "const", - span: #4 bytes(416..421), + span: #3 bytes(416..421), }, Ident { ident: "A", @@ -18,34 +18,34 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: ':', spacing: Alone, - span: #4 bytes(424..425), + span: #3 bytes(424..425), }, Ident { ident: "u8", - span: #4 bytes(426..428), + span: #3 bytes(426..428), }, Punct { ch: '=', spacing: Alone, - span: #4 bytes(429..430), + span: #3 bytes(429..430), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: #4 bytes(431..432), + span: #3 bytes(431..432), }, Punct { ch: ';', spacing: Alone, - span: #4 bytes(432..433), + span: #3 bytes(432..433), }, ] PRINT-DERIVE INPUT (DISPLAY): struct A {} PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: #4 bytes(468..474), + span: #3 bytes(468..474), }, Ident { ident: "A", @@ -54,6 +54,6 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Group { delimiter: Brace, stream: TokenStream [], - span: #4 bytes(478..480), + span: #3 bytes(478..480), }, ] diff --git a/tests/ui/proc-macro/issue-75734-pp-paren.stdout b/tests/ui/proc-macro/issue-75734-pp-paren.stdout index 0fda6654ff3..2f7c013e958 100644 --- a/tests/ui/proc-macro/issue-75734-pp-paren.stdout +++ b/tests/ui/proc-macro/issue-75734-pp-paren.stdout @@ -118,17 +118,17 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/issue-75734-pp-paren.rs:25:16: 25:17 (#0), }, ], - span: $DIR/issue-75734-pp-paren.rs:17:21: 17:25 (#7), + span: $DIR/issue-75734-pp-paren.rs:17:21: 17:25 (#6), }, Punct { ch: '*', spacing: Alone, - span: $DIR/issue-75734-pp-paren.rs:17:26: 17:27 (#7), + span: $DIR/issue-75734-pp-paren.rs:17:26: 17:27 (#6), }, Literal { kind: Integer, symbol: "2", suffix: None, - span: $DIR/issue-75734-pp-paren.rs:17:28: 17:29 (#7), + span: $DIR/issue-75734-pp-paren.rs:17:28: 17:29 (#6), }, ] diff --git a/tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout b/tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout index 60a400a5dea..ae5e9400809 100644 --- a/tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout +++ b/tests/ui/proc-macro/issue-78675-captured-inner-attrs.stdout @@ -5,12 +5,12 @@ PRINT-BANG DEEP-RE-COLLECTED (DISPLAY): foo! { #[fake_attr] mod bar { #! 
[doc = PRINT-BANG INPUT (DEBUG): TokenStream [ Ident { ident: "foo", - span: $DIR/issue-78675-captured-inner-attrs.rs:20:9: 20:12 (#4), + span: $DIR/issue-78675-captured-inner-attrs.rs:20:9: 20:12 (#3), }, Punct { ch: '!', spacing: Alone, - span: $DIR/issue-78675-captured-inner-attrs.rs:20:12: 20:13 (#4), + span: $DIR/issue-78675-captured-inner-attrs.rs:20:12: 20:13 (#3), }, Group { delimiter: Brace, @@ -18,17 +18,17 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-78675-captured-inner-attrs.rs:21:13: 21:14 (#4), + span: $DIR/issue-78675-captured-inner-attrs.rs:21:13: 21:14 (#3), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "fake_attr", - span: $DIR/issue-78675-captured-inner-attrs.rs:21:15: 21:24 (#4), + span: $DIR/issue-78675-captured-inner-attrs.rs:21:15: 21:24 (#3), }, ], - span: $DIR/issue-78675-captured-inner-attrs.rs:21:14: 21:25 (#4), + span: $DIR/issue-78675-captured-inner-attrs.rs:21:14: 21:25 (#3), }, Group { delimiter: None, @@ -79,9 +79,9 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/issue-78675-captured-inner-attrs.rs:27:13: 29:6 (#0), }, ], - span: $DIR/issue-78675-captured-inner-attrs.rs:22:13: 22:18 (#4), + span: $DIR/issue-78675-captured-inner-attrs.rs:22:13: 22:18 (#3), }, ], - span: $DIR/issue-78675-captured-inner-attrs.rs:20:14: 23:10 (#4), + span: $DIR/issue-78675-captured-inner-attrs.rs:20:14: 23:10 (#3), }, ] diff --git a/tests/ui/proc-macro/issue-80760-empty-stmt.stdout b/tests/ui/proc-macro/issue-80760-empty-stmt.stdout index 4b7ed874307..82f52e4bc48 100644 --- a/tests/ui/proc-macro/issue-80760-empty-stmt.stdout +++ b/tests/ui/proc-macro/issue-80760-empty-stmt.stdout @@ -9,6 +9,6 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/issue-80760-empty-stmt.rs:25:17: 25:18 (#0), }, ], - span: $DIR/issue-80760-empty-stmt.rs:13:21: 13:23 (#4), + span: $DIR/issue-80760-empty-stmt.rs:13:21: 13:23 (#3), }, ] diff --git a/tests/ui/proc-macro/macro-rules-derive-cfg.stdout b/tests/ui/proc-macro/macro-rules-derive-cfg.stdout index 74641058ef3..aee0f966d0f 100644 --- a/tests/ui/proc-macro/macro-rules-derive-cfg.stdout +++ b/tests/ui/proc-macro/macro-rules-derive-cfg.stdout @@ -10,76 +10,76 @@ PRINT-DERIVE INPUT (DISPLAY): struct Foo PRINT-DERIVE INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/macro-rules-derive-cfg.rs:17:9: 17:15 (#4), + span: $DIR/macro-rules-derive-cfg.rs:17:9: 17:15 (#3), }, Ident { ident: "Foo", - span: $DIR/macro-rules-derive-cfg.rs:17:16: 17:19 (#4), + span: $DIR/macro-rules-derive-cfg.rs:17:16: 17:19 (#3), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "val", - span: $DIR/macro-rules-derive-cfg.rs:18:13: 18:16 (#4), + span: $DIR/macro-rules-derive-cfg.rs:18:13: 18:16 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/macro-rules-derive-cfg.rs:18:16: 18:17 (#4), + span: $DIR/macro-rules-derive-cfg.rs:18:16: 18:17 (#3), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "bool", - span: $DIR/macro-rules-derive-cfg.rs:18:19: 18:23 (#4), + span: $DIR/macro-rules-derive-cfg.rs:18:19: 18:23 (#3), }, Punct { ch: ';', spacing: Alone, - span: $DIR/macro-rules-derive-cfg.rs:18:23: 18:24 (#4), + span: $DIR/macro-rules-derive-cfg.rs:18:23: 18:24 (#3), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "let", - span: $DIR/macro-rules-derive-cfg.rs:19:17: 19:20 (#4), + span: $DIR/macro-rules-derive-cfg.rs:19:17: 19:20 (#3), }, Ident { ident: "a", - span: $DIR/macro-rules-derive-cfg.rs:19:21: 19:22 (#4), + span: 
$DIR/macro-rules-derive-cfg.rs:19:21: 19:22 (#3), }, Punct { ch: '=', spacing: Alone, - span: $DIR/macro-rules-derive-cfg.rs:19:23: 19:24 (#4), + span: $DIR/macro-rules-derive-cfg.rs:19:23: 19:24 (#3), }, Punct { ch: '#', spacing: Alone, - span: $DIR/macro-rules-derive-cfg.rs:19:25: 19:26 (#4), + span: $DIR/macro-rules-derive-cfg.rs:19:25: 19:26 (#3), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "rustc_dummy", - span: $DIR/macro-rules-derive-cfg.rs:19:48: 19:59 (#4), + span: $DIR/macro-rules-derive-cfg.rs:19:48: 19:59 (#3), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "first", - span: $DIR/macro-rules-derive-cfg.rs:19:60: 19:65 (#4), + span: $DIR/macro-rules-derive-cfg.rs:19:60: 19:65 (#3), }, ], - span: $DIR/macro-rules-derive-cfg.rs:19:59: 19:66 (#4), + span: $DIR/macro-rules-derive-cfg.rs:19:59: 19:66 (#3), }, ], - span: $DIR/macro-rules-derive-cfg.rs:19:25: 19:26 (#4), + span: $DIR/macro-rules-derive-cfg.rs:19:25: 19:26 (#3), }, Punct { ch: '#', @@ -151,21 +151,21 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [ Punct { ch: ';', spacing: Alone, - span: $DIR/macro-rules-derive-cfg.rs:19:74: 19:75 (#4), + span: $DIR/macro-rules-derive-cfg.rs:19:74: 19:75 (#3), }, Literal { kind: Integer, symbol: "0", suffix: None, - span: $DIR/macro-rules-derive-cfg.rs:20:17: 20:18 (#4), + span: $DIR/macro-rules-derive-cfg.rs:20:17: 20:18 (#3), }, ], - span: $DIR/macro-rules-derive-cfg.rs:18:25: 21:14 (#4), + span: $DIR/macro-rules-derive-cfg.rs:18:25: 21:14 (#3), }, ], - span: $DIR/macro-rules-derive-cfg.rs:18:18: 21:15 (#4), + span: $DIR/macro-rules-derive-cfg.rs:18:18: 21:15 (#3), }, ], - span: $DIR/macro-rules-derive-cfg.rs:17:20: 22:10 (#4), + span: $DIR/macro-rules-derive-cfg.rs:17:20: 22:10 (#3), }, ] diff --git a/tests/ui/proc-macro/meta-macro-hygiene.stdout b/tests/ui/proc-macro/meta-macro-hygiene.stdout index 6b7b0c819cc..17b69daa4f0 100644 --- a/tests/ui/proc-macro/meta-macro-hygiene.stdout +++ b/tests/ui/proc-macro/meta-macro-hygiene.stdout @@ -1,6 +1,6 @@ -Def site: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#5) -Input: TokenStream [Ident { ident: "$crate", span: $DIR/meta-macro-hygiene.rs:24:37: 24:43 (#4) }, Punct { ch: ':', spacing: Joint, span: $DIR/meta-macro-hygiene.rs:24:43: 24:44 (#4) }, Punct { ch: ':', spacing: Alone, span: $DIR/meta-macro-hygiene.rs:24:44: 24:45 (#4) }, Ident { ident: "dummy", span: $DIR/meta-macro-hygiene.rs:24:45: 24:50 (#4) }, Punct { ch: '!', spacing: Alone, span: $DIR/meta-macro-hygiene.rs:24:50: 24:51 (#4) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: $DIR/meta-macro-hygiene.rs:24:51: 24:53 (#4) }] -Respanned: TokenStream [Ident { ident: "$crate", span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#5) }, Punct { ch: ':', spacing: Joint, span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#5) }, Punct { ch: ':', spacing: Alone, span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#5) }, Ident { ident: "dummy", span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#5) }, Punct { ch: '!', spacing: Alone, span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#5) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#5) }] +Def site: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#4) +Input: TokenStream [Ident { ident: "$crate", span: $DIR/meta-macro-hygiene.rs:24:37: 24:43 (#3) }, Punct { ch: ':', spacing: Joint, span: $DIR/meta-macro-hygiene.rs:24:43: 24:44 (#3) }, Punct { ch: ':', spacing: Alone, span: $DIR/meta-macro-hygiene.rs:24:44: 24:45 (#3) }, Ident { ident: "dummy", span: 
$DIR/meta-macro-hygiene.rs:24:45: 24:50 (#3) }, Punct { ch: '!', spacing: Alone, span: $DIR/meta-macro-hygiene.rs:24:50: 24:51 (#3) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: $DIR/meta-macro-hygiene.rs:24:51: 24:53 (#3) }] +Respanned: TokenStream [Ident { ident: "$crate", span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#4) }, Punct { ch: ':', spacing: Joint, span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#4) }, Punct { ch: ':', spacing: Alone, span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#4) }, Ident { ident: "dummy", span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#4) }, Punct { ch: '!', spacing: Alone, span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#4) }, Group { delimiter: Parenthesis, stream: TokenStream [], span: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#4) }] #![feature /* 0#0 */(prelude_import)] // aux-build:make-macro.rs // aux-build:meta-macro.rs @@ -18,8 +18,7 @@ Respanned: TokenStream [Ident { ident: "$crate", span: $DIR/auxiliary/make-macro use core /* 0#1 */::prelude /* 0#1 */::rust_2018 /* 0#1 */::*; #[macro_use /* 0#1 */] extern crate core /* 0#1 */; -#[macro_use /* 0#1 */] -extern crate compiler_builtins /* 0#1 */; +extern crate compiler_builtins /* 442 */ as _ /* 0#1 */; // Don't load unnecessary hygiene information from std extern crate std /* 0#0 */; @@ -47,23 +46,21 @@ Expansions: crate0::{{expn0}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Root crate0::{{expn1}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: AstPass(StdImports) crate0::{{expn2}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "produce_it") -crate0::{{expn3}}: parent: crate0::{{expn2}}, call_site_ctxt: #4, def_site_ctxt: #0, kind: Macro(Bang, "meta_macro::print_def_site") -crate0::{{expn4}}: parent: crate0::{{expn3}}, call_site_ctxt: #5, def_site_ctxt: #0, kind: Macro(Bang, "$crate::dummy") +crate0::{{expn3}}: parent: crate0::{{expn2}}, call_site_ctxt: #3, def_site_ctxt: #0, kind: Macro(Bang, "meta_macro::print_def_site") +crate0::{{expn4}}: parent: crate0::{{expn3}}, call_site_ctxt: #4, def_site_ctxt: #0, kind: Macro(Bang, "$crate::dummy") crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "derive") crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "derive") crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "include") -crate2::{{expn1}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: AstPass(StdImports) SyntaxContexts: #0: parent: #0, outer_mark: (crate0::{{expn0}}, Opaque) #1: parent: #0, outer_mark: (crate0::{{expn1}}, Opaque) #2: parent: #0, outer_mark: (crate0::{{expn1}}, Transparent) -#3: parent: #0, outer_mark: (crate2::{{expn1}}, Opaque) -#4: parent: #0, outer_mark: (crate0::{{expn2}}, SemiTransparent) -#5: parent: #0, outer_mark: (crate0::{{expn3}}, Opaque) -#6: parent: #4, outer_mark: (crate0::{{expn3}}, Transparent) -#7: parent: #0, outer_mark: (crate0::{{expn3}}, SemiTransparent) -#8: parent: #0, outer_mark: (crate0::{{expn4}}, Opaque) -#9: parent: #5, outer_mark: (crate0::{{expn4}}, Transparent) -#10: parent: #5, outer_mark: (crate0::{{expn4}}, SemiTransparent) +#3: parent: #0, outer_mark: (crate0::{{expn2}}, SemiTransparent) +#4: parent: #0, outer_mark: (crate0::{{expn3}}, Opaque) +#5: parent: #3, outer_mark: (crate0::{{expn3}}, Transparent) +#6: parent: #0, outer_mark: (crate0::{{expn3}}, 
SemiTransparent) +#7: parent: #0, outer_mark: (crate0::{{expn4}}, Opaque) +#8: parent: #4, outer_mark: (crate0::{{expn4}}, Transparent) +#9: parent: #4, outer_mark: (crate0::{{expn4}}, SemiTransparent) */ diff --git a/tests/ui/proc-macro/meta-macro.stdout b/tests/ui/proc-macro/meta-macro.stdout index 662682d40b2..b2d20c23722 100644 --- a/tests/ui/proc-macro/meta-macro.stdout +++ b/tests/ui/proc-macro/meta-macro.stdout @@ -1,3 +1,3 @@ -Def site: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#4) +Def site: $DIR/auxiliary/make-macro.rs:7:9: 7:56 (#3) Input: TokenStream [] Respanned: TokenStream [] diff --git a/tests/ui/proc-macro/nested-macro-rules.stdout b/tests/ui/proc-macro/nested-macro-rules.stdout index 31113904041..829cfdc0c33 100644 --- a/tests/ui/proc-macro/nested-macro-rules.stdout +++ b/tests/ui/proc-macro/nested-macro-rules.stdout @@ -2,45 +2,45 @@ PRINT-BANG INPUT (DISPLAY): FirstStruct PRINT-BANG INPUT (DEBUG): TokenStream [ Ident { ident: "FirstStruct", - span: $DIR/auxiliary/nested-macro-rules.rs:16:14: 16:25 (#7), + span: $DIR/auxiliary/nested-macro-rules.rs:16:14: 16:25 (#6), }, ] PRINT-ATTR INPUT (DISPLAY): struct FirstAttrStruct {} PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#6), + span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#5), }, Ident { ident: "FirstAttrStruct", - span: $DIR/auxiliary/nested-macro-rules.rs:16:27: 16:42 (#7), + span: $DIR/auxiliary/nested-macro-rules.rs:16:27: 16:42 (#6), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#6), + span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#5), }, ] PRINT-BANG INPUT (DISPLAY): SecondStruct PRINT-BANG INPUT (DEBUG): TokenStream [ Ident { ident: "SecondStruct", - span: $DIR/nested-macro-rules.rs:21:38: 21:50 (#16), + span: $DIR/nested-macro-rules.rs:21:38: 21:50 (#15), }, ] PRINT-ATTR INPUT (DISPLAY): struct SecondAttrStruct {} PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "struct", - span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#15), + span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#14), }, Ident { ident: "SecondAttrStruct", - span: $DIR/nested-macro-rules.rs:21:52: 21:68 (#16), + span: $DIR/nested-macro-rules.rs:21:52: 21:68 (#15), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#15), + span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#14), }, ] diff --git a/tests/ui/proc-macro/nested-nonterminal-tokens.stdout b/tests/ui/proc-macro/nested-nonterminal-tokens.stdout index a3d24dd26fe..4c5550bb077 100644 --- a/tests/ui/proc-macro/nested-nonterminal-tokens.stdout +++ b/tests/ui/proc-macro/nested-nonterminal-tokens.stdout @@ -16,45 +16,45 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/nested-nonterminal-tokens.rs:25:26: 25:27 (#0), }, ], - span: $DIR/nested-nonterminal-tokens.rs:17:41: 17:43 (#4), + span: $DIR/nested-nonterminal-tokens.rs:17:41: 17:43 (#3), }, Punct { ch: '+', spacing: Alone, - span: $DIR/nested-nonterminal-tokens.rs:17:44: 17:45 (#4), + span: $DIR/nested-nonterminal-tokens.rs:17:44: 17:45 (#3), }, Literal { kind: Integer, symbol: "1", suffix: None, - span: $DIR/nested-nonterminal-tokens.rs:17:46: 17:47 (#4), + span: $DIR/nested-nonterminal-tokens.rs:17:46: 17:47 (#3), }, ], - span: $DIR/nested-nonterminal-tokens.rs:18:41: 18:43 (#5), + span: $DIR/nested-nonterminal-tokens.rs:18:41: 18:43 (#4), }, Punct { ch: '+', spacing: Alone, - span: 
$DIR/nested-nonterminal-tokens.rs:18:44: 18:45 (#5), + span: $DIR/nested-nonterminal-tokens.rs:18:44: 18:45 (#4), }, Literal { kind: Integer, symbol: "2", suffix: None, - span: $DIR/nested-nonterminal-tokens.rs:18:46: 18:47 (#5), + span: $DIR/nested-nonterminal-tokens.rs:18:46: 18:47 (#4), }, ], - span: $DIR/nested-nonterminal-tokens.rs:20:21: 20:23 (#6), + span: $DIR/nested-nonterminal-tokens.rs:20:21: 20:23 (#5), }, Punct { ch: '+', spacing: Alone, - span: $DIR/nested-nonterminal-tokens.rs:20:24: 20:25 (#6), + span: $DIR/nested-nonterminal-tokens.rs:20:24: 20:25 (#5), }, Literal { kind: Integer, symbol: "3", suffix: None, - span: $DIR/nested-nonterminal-tokens.rs:20:26: 20:27 (#6), + span: $DIR/nested-nonterminal-tokens.rs:20:26: 20:27 (#5), }, ] diff --git a/tests/ui/proc-macro/nodelim-groups.stdout b/tests/ui/proc-macro/nodelim-groups.stdout index 6b410f0bfb7..cdf851b535a 100644 --- a/tests/ui/proc-macro/nodelim-groups.stdout +++ b/tests/ui/proc-macro/nodelim-groups.stdout @@ -4,7 +4,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ kind: Str, symbol: "hi", suffix: None, - span: $DIR/nodelim-groups.rs:16:42: 16:46 (#4), + span: $DIR/nodelim-groups.rs:16:42: 16:46 (#3), }, Group { delimiter: None, @@ -44,7 +44,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/nodelim-groups.rs:20:27: 20:28 (#0), }, ], - span: $DIR/nodelim-groups.rs:16:47: 16:51 (#4), + span: $DIR/nodelim-groups.rs:16:47: 16:51 (#3), }, Group { delimiter: Parenthesis, @@ -53,21 +53,21 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ kind: Integer, symbol: "1", suffix: None, - span: $DIR/nodelim-groups.rs:16:53: 16:54 (#4), + span: $DIR/nodelim-groups.rs:16:53: 16:54 (#3), }, Punct { ch: '+', spacing: Alone, - span: $DIR/nodelim-groups.rs:16:55: 16:56 (#4), + span: $DIR/nodelim-groups.rs:16:55: 16:56 (#3), }, Literal { kind: Integer, symbol: "1", suffix: None, - span: $DIR/nodelim-groups.rs:16:57: 16:58 (#4), + span: $DIR/nodelim-groups.rs:16:57: 16:58 (#3), }, ], - span: $DIR/nodelim-groups.rs:16:52: 16:59 (#4), + span: $DIR/nodelim-groups.rs:16:52: 16:59 (#3), }, ] PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1) @@ -76,7 +76,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ kind: Str, symbol: "hi", suffix: None, - span: $DIR/nodelim-groups.rs:16:42: 16:46 (#9), + span: $DIR/nodelim-groups.rs:16:42: 16:46 (#8), }, Group { delimiter: None, @@ -105,12 +105,12 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/nodelim-groups.rs:21:28: 21:30 (#0), }, ], - span: $DIR/nodelim-groups.rs:15:49: 15:54 (#8), + span: $DIR/nodelim-groups.rs:15:49: 15:54 (#7), }, Punct { ch: '+', spacing: Alone, - span: $DIR/nodelim-groups.rs:15:55: 15:56 (#8), + span: $DIR/nodelim-groups.rs:15:55: 15:56 (#7), }, Group { delimiter: None, @@ -136,10 +136,10 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ span: $DIR/nodelim-groups.rs:21:44: 21:46 (#0), }, ], - span: $DIR/nodelim-groups.rs:15:57: 15:62 (#8), + span: $DIR/nodelim-groups.rs:15:57: 15:62 (#7), }, ], - span: $DIR/nodelim-groups.rs:16:47: 16:51 (#9), + span: $DIR/nodelim-groups.rs:16:47: 16:51 (#8), }, Group { delimiter: Parenthesis, @@ -148,20 +148,20 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ kind: Integer, symbol: "1", suffix: None, - span: $DIR/nodelim-groups.rs:16:53: 16:54 (#9), + span: $DIR/nodelim-groups.rs:16:53: 16:54 (#8), }, Punct { ch: '+', spacing: Alone, - span: $DIR/nodelim-groups.rs:16:55: 16:56 (#9), + span: $DIR/nodelim-groups.rs:16:55: 16:56 (#8), }, Literal { kind: Integer, symbol: "1", suffix: None, - span: $DIR/nodelim-groups.rs:16:57: 16:58 (#9), + span: 
$DIR/nodelim-groups.rs:16:57: 16:58 (#8), }, ], - span: $DIR/nodelim-groups.rs:16:52: 16:59 (#9), + span: $DIR/nodelim-groups.rs:16:52: 16:59 (#8), }, ] diff --git a/tests/ui/proc-macro/nonterminal-expansion.stdout b/tests/ui/proc-macro/nonterminal-expansion.stdout index 4d884348f2c..b2557af18ca 100644 --- a/tests/ui/proc-macro/nonterminal-expansion.stdout +++ b/tests/ui/proc-macro/nonterminal-expansion.stdout @@ -3,12 +3,12 @@ PRINT-ATTR_ARGS RE-COLLECTED (DISPLAY): a, line! (), b PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ Ident { ident: "a", - span: $DIR/nonterminal-expansion.rs:13:27: 13:28 (#4), + span: $DIR/nonterminal-expansion.rs:13:27: 13:28 (#3), }, Punct { ch: ',', spacing: Alone, - span: $DIR/nonterminal-expansion.rs:13:28: 13:29 (#4), + span: $DIR/nonterminal-expansion.rs:13:28: 13:29 (#3), }, Group { delimiter: None, @@ -28,15 +28,15 @@ PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [ span: $DIR/nonterminal-expansion.rs:19:24: 19:26 (#0), }, ], - span: $DIR/nonterminal-expansion.rs:13:30: 13:35 (#4), + span: $DIR/nonterminal-expansion.rs:13:30: 13:35 (#3), }, Punct { ch: ',', spacing: Alone, - span: $DIR/nonterminal-expansion.rs:13:35: 13:36 (#4), + span: $DIR/nonterminal-expansion.rs:13:35: 13:36 (#3), }, Ident { ident: "b", - span: $DIR/nonterminal-expansion.rs:13:37: 13:38 (#4), + span: $DIR/nonterminal-expansion.rs:13:37: 13:38 (#3), }, ] diff --git a/tests/ui/proc-macro/nonterminal-recollect-attr.stdout b/tests/ui/proc-macro/nonterminal-recollect-attr.stdout index 6824395ae40..e722ee97d4c 100644 --- a/tests/ui/proc-macro/nonterminal-recollect-attr.stdout +++ b/tests/ui/proc-macro/nonterminal-recollect-attr.stdout @@ -5,30 +5,30 @@ First recollected: TokenStream [ }, Ident { ident: "struct", - span: $DIR/nonterminal-recollect-attr.rs:14:12: 14:18 (#4), + span: $DIR/nonterminal-recollect-attr.rs:14:12: 14:18 (#3), }, Ident { ident: "Foo", - span: $DIR/nonterminal-recollect-attr.rs:14:19: 14:22 (#4), + span: $DIR/nonterminal-recollect-attr.rs:14:19: 14:22 (#3), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "field", - span: $DIR/nonterminal-recollect-attr.rs:15:13: 15:18 (#4), + span: $DIR/nonterminal-recollect-attr.rs:15:13: 15:18 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/nonterminal-recollect-attr.rs:15:18: 15:19 (#4), + span: $DIR/nonterminal-recollect-attr.rs:15:18: 15:19 (#3), }, Ident { ident: "u8", - span: $DIR/nonterminal-recollect-attr.rs:15:20: 15:22 (#4), + span: $DIR/nonterminal-recollect-attr.rs:15:20: 15:22 (#3), }, ], - span: $DIR/nonterminal-recollect-attr.rs:14:23: 16:10 (#4), + span: $DIR/nonterminal-recollect-attr.rs:14:23: 16:10 (#3), }, ] Second recollected: TokenStream [ @@ -38,29 +38,29 @@ Second recollected: TokenStream [ }, Ident { ident: "struct", - span: $DIR/nonterminal-recollect-attr.rs:14:12: 14:18 (#4), + span: $DIR/nonterminal-recollect-attr.rs:14:12: 14:18 (#3), }, Ident { ident: "Foo", - span: $DIR/nonterminal-recollect-attr.rs:14:19: 14:22 (#4), + span: $DIR/nonterminal-recollect-attr.rs:14:19: 14:22 (#3), }, Group { delimiter: Brace, stream: TokenStream [ Ident { ident: "field", - span: $DIR/nonterminal-recollect-attr.rs:15:13: 15:18 (#4), + span: $DIR/nonterminal-recollect-attr.rs:15:13: 15:18 (#3), }, Punct { ch: ':', spacing: Alone, - span: $DIR/nonterminal-recollect-attr.rs:15:18: 15:19 (#4), + span: $DIR/nonterminal-recollect-attr.rs:15:18: 15:19 (#3), }, Ident { ident: "u8", - span: $DIR/nonterminal-recollect-attr.rs:15:20: 15:22 (#4), + span: $DIR/nonterminal-recollect-attr.rs:15:20: 15:22 (#3), }, ], - 
span: $DIR/nonterminal-recollect-attr.rs:14:23: 16:10 (#4), + span: $DIR/nonterminal-recollect-attr.rs:14:23: 16:10 (#3), }, ] diff --git a/tests/ui/proc-macro/nonterminal-token-hygiene.stdout b/tests/ui/proc-macro/nonterminal-token-hygiene.stdout index c08e5308138..76d54ab2f13 100644 --- a/tests/ui/proc-macro/nonterminal-token-hygiene.stdout +++ b/tests/ui/proc-macro/nonterminal-token-hygiene.stdout @@ -6,19 +6,19 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ stream: TokenStream [ Ident { ident: "struct", - span: $DIR/nonterminal-token-hygiene.rs:31:5: 31:11 (#5), + span: $DIR/nonterminal-token-hygiene.rs:31:5: 31:11 (#4), }, Ident { ident: "S", - span: $DIR/nonterminal-token-hygiene.rs:31:12: 31:13 (#5), + span: $DIR/nonterminal-token-hygiene.rs:31:12: 31:13 (#4), }, Punct { ch: ';', spacing: Alone, - span: $DIR/nonterminal-token-hygiene.rs:31:13: 31:14 (#5), + span: $DIR/nonterminal-token-hygiene.rs:31:13: 31:14 (#4), }, ], - span: $DIR/nonterminal-token-hygiene.rs:21:27: 21:32 (#6), + span: $DIR/nonterminal-token-hygiene.rs:21:27: 21:32 (#5), }, ] #![feature /* 0#0 */(prelude_import)] @@ -39,8 +39,7 @@ PRINT-BANG INPUT (DEBUG): TokenStream [ use ::core /* 0#1 */::prelude /* 0#1 */::rust_2015 /* 0#1 */::*; #[macro_use /* 0#1 */] extern crate core /* 0#2 */; -#[macro_use /* 0#1 */] -extern crate compiler_builtins /* 0#2 */; +extern crate compiler_builtins /* 442 */ as _ /* 0#2 */; // Don't load unnecessary hygiene information from std extern crate std /* 0#0 */; @@ -59,9 +58,9 @@ macro_rules! outer } struct S /* 0#0 */; -macro inner /* 0#4 */ { () => { print_bang! { struct S; } } } +macro inner /* 0#3 */ { () => { print_bang! { struct S; } } } -struct S /* 0#5 */; +struct S /* 0#4 */; // OK, not a duplicate definition of `S` fn main /* 0#0 */() {} @@ -71,22 +70,20 @@ Expansions: crate0::{{expn0}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Root crate0::{{expn1}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: AstPass(StdImports) crate0::{{expn2}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "outer") -crate0::{{expn3}}: parent: crate0::{{expn2}}, call_site_ctxt: #4, def_site_ctxt: #4, kind: Macro(Bang, "inner") -crate0::{{expn4}}: parent: crate0::{{expn3}}, call_site_ctxt: #6, def_site_ctxt: #0, kind: Macro(Bang, "print_bang") +crate0::{{expn3}}: parent: crate0::{{expn2}}, call_site_ctxt: #3, def_site_ctxt: #3, kind: Macro(Bang, "inner") +crate0::{{expn4}}: parent: crate0::{{expn3}}, call_site_ctxt: #5, def_site_ctxt: #0, kind: Macro(Bang, "print_bang") crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "derive") crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Attr, "derive") crate1::{{expnNNN}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: Macro(Bang, "include") -crate2::{{expn1}}: parent: crate0::{{expn0}}, call_site_ctxt: #0, def_site_ctxt: #0, kind: AstPass(StdImports) SyntaxContexts: #0: parent: #0, outer_mark: (crate0::{{expn0}}, Opaque) #1: parent: #0, outer_mark: (crate0::{{expn1}}, Opaque) #2: parent: #0, outer_mark: (crate0::{{expn1}}, Transparent) -#3: parent: #0, outer_mark: (crate2::{{expn1}}, Opaque) -#4: parent: #0, outer_mark: (crate0::{{expn2}}, SemiTransparent) -#5: parent: #0, outer_mark: (crate0::{{expn3}}, Opaque) -#6: parent: #4, outer_mark: (crate0::{{expn3}}, Opaque) -#7: parent: #0, outer_mark: (crate0::{{expn4}}, Opaque) -#8: parent: #6, 
outer_mark: (crate0::{{expn4}}, Transparent) -#9: parent: #5, outer_mark: (crate0::{{expn4}}, SemiTransparent) +#3: parent: #0, outer_mark: (crate0::{{expn2}}, SemiTransparent) +#4: parent: #0, outer_mark: (crate0::{{expn3}}, Opaque) +#5: parent: #3, outer_mark: (crate0::{{expn3}}, Opaque) +#6: parent: #0, outer_mark: (crate0::{{expn4}}, Opaque) +#7: parent: #5, outer_mark: (crate0::{{expn4}}, Transparent) +#8: parent: #4, outer_mark: (crate0::{{expn4}}, SemiTransparent) */ diff --git a/tests/ui/resolve/bad-expr-path.stderr b/tests/ui/resolve/bad-expr-path.stderr index 8261e8e53b0..411130913c8 100644 --- a/tests/ui/resolve/bad-expr-path.stderr +++ b/tests/ui/resolve/bad-expr-path.stderr @@ -10,12 +10,6 @@ error[E0425]: cannot find value `arguments` in module `m1` LL | log(debug, m1::arguments); | ^^^^^^^^^ not found in `m1` -error[E0425]: cannot find function `log` in this scope - --> $DIR/bad-expr-path.rs:4:5 - | -LL | log(debug, m1::arguments); - | ^^^ not found in this scope - error[E0580]: `main` function has wrong type --> $DIR/bad-expr-path.rs:3:1 | @@ -25,6 +19,12 @@ LL | fn main(arguments: Vec<String>) { = note: expected fn pointer `fn()` found fn pointer `fn(Vec<String>)` +error[E0425]: cannot find function `log` in this scope + --> $DIR/bad-expr-path.rs:4:5 + | +LL | log(debug, m1::arguments); + | ^^^ not found in this scope + error: aborting due to 4 previous errors Some errors have detailed explanations: E0425, E0580. diff --git a/tests/ui/resolve/bad-expr-path2.stderr b/tests/ui/resolve/bad-expr-path2.stderr index 6e11296d9fc..af3ca99c510 100644 --- a/tests/ui/resolve/bad-expr-path2.stderr +++ b/tests/ui/resolve/bad-expr-path2.stderr @@ -10,12 +10,6 @@ error[E0423]: expected value, found module `m1::arguments` LL | log(debug, m1::arguments); | ^^^^^^^^^^^^^ not a value -error[E0425]: cannot find function `log` in this scope - --> $DIR/bad-expr-path2.rs:6:5 - | -LL | log(debug, m1::arguments); - | ^^^ not found in this scope - error[E0580]: `main` function has wrong type --> $DIR/bad-expr-path2.rs:5:1 | @@ -25,6 +19,12 @@ LL | fn main(arguments: Vec<String>) { = note: expected fn pointer `fn()` found fn pointer `fn(Vec<String>)` +error[E0425]: cannot find function `log` in this scope + --> $DIR/bad-expr-path2.rs:6:5 + | +LL | log(debug, m1::arguments); + | ^^^ not found in this scope + error: aborting due to 4 previous errors Some errors have detailed explanations: E0423, E0425, E0580. 
diff --git a/tests/ui/suggestions/suggest-mut-method-for-loop-closure.rs b/tests/ui/suggestions/suggest-mut-method-for-loop-closure.rs new file mode 100644 index 00000000000..e4721ba0193 --- /dev/null +++ b/tests/ui/suggestions/suggest-mut-method-for-loop-closure.rs @@ -0,0 +1,19 @@ +use std::collections::HashMap; +struct X(usize); +struct Y { + v: u32, +} + +fn main() { + let _ = || { + let mut buzz = HashMap::new(); + buzz.insert("a", Y { v: 0 }); + + for mut t in buzz.values() { + //~^ HELP + //~| SUGGESTION values_mut() + t.v += 1; + //~^ ERROR cannot assign + } + }; +} diff --git a/tests/ui/suggestions/suggest-mut-method-for-loop-closure.stderr b/tests/ui/suggestions/suggest-mut-method-for-loop-closure.stderr new file mode 100644 index 00000000000..8a2df8d7cc1 --- /dev/null +++ b/tests/ui/suggestions/suggest-mut-method-for-loop-closure.stderr @@ -0,0 +1,15 @@ +error[E0594]: cannot assign to `t.v`, which is behind a `&` reference + --> $DIR/suggest-mut-method-for-loop-closure.rs:15:13 + | +LL | for mut t in buzz.values() { + | ------------- + | | | + | | help: use mutable method: `values_mut()` + | this iterator yields `&` references +... +LL | t.v += 1; + | ^^^^^^^^ `t` is a `&` reference, so the data it refers to cannot be written + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0594`. diff --git a/tests/ui/traits/new-solver/alias-bound-unsound.rs b/tests/ui/traits/new-solver/alias-bound-unsound.rs index 959f1afa04e..38d83d289f1 100644 --- a/tests/ui/traits/new-solver/alias-bound-unsound.rs +++ b/tests/ui/traits/new-solver/alias-bound-unsound.rs @@ -16,12 +16,17 @@ trait Foo { impl Foo for () { type Item = String where String: Copy; + //~^ ERROR overflow evaluating the requirement `<() as Foo>::Item: Copy` } fn main() { let x = String::from("hello, world"); drop(<() as Foo>::copy_me(&x)); - //~^ ERROR the type `&<() as Foo>::Item` is not well-formed - //~| ERROR `<() as Foo>::Item` is not well-formed + //~^ ERROR overflow evaluating the requirement `<() as Foo>::Item: Sized` + //~| ERROR overflow evaluating the requirement `<() as Foo>::Item == _` + //~| ERROR overflow evaluating the requirement `<() as Foo>::Item well-formed` + //~| ERROR overflow evaluating the requirement `String <: <() as Foo>::Item` + //~| ERROR overflow evaluating the requirement `&<() as Foo>::Item well-formed` + //~| ERROR type annotations needed println!("{x}"); } diff --git a/tests/ui/traits/new-solver/alias-bound-unsound.stderr b/tests/ui/traits/new-solver/alias-bound-unsound.stderr index 5800e2c4340..abc6677c132 100644 --- a/tests/ui/traits/new-solver/alias-bound-unsound.stderr +++ b/tests/ui/traits/new-solver/alias-bound-unsound.stderr @@ -1,14 +1,69 @@ -error: the type `&<() as Foo>::Item` is not well-formed - --> $DIR/alias-bound-unsound.rs:23:31 +error[E0275]: overflow evaluating the requirement `<() as Foo>::Item: Copy` + --> $DIR/alias-bound-unsound.rs:18:17 + | +LL | type Item = String where String: Copy; + | ^^^^^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`alias_bound_unsound`) +note: required by a bound in `Foo::Item` + --> $DIR/alias-bound-unsound.rs:8:16 + | +LL | type Item: Copy + | ^^^^ required by this bound in `Foo::Item` + +error[E0282]: type annotations needed + --> $DIR/alias-bound-unsound.rs:24:5 + | +LL | drop(<() as Foo>::copy_me(&x)); + | ^^^^ cannot infer type of the type parameter `T` declared on the function `drop` + | +help: consider specifying 
the generic argument + | +LL | drop::<T>(<() as Foo>::copy_me(&x)); + | +++++ + +error[E0275]: overflow evaluating the requirement `&<() as Foo>::Item well-formed` + --> $DIR/alias-bound-unsound.rs:24:31 | LL | drop(<() as Foo>::copy_me(&x)); | ^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`alias_bound_unsound`) -error: the type `<() as Foo>::Item` is not well-formed - --> $DIR/alias-bound-unsound.rs:23:10 +error[E0275]: overflow evaluating the requirement `String <: <() as Foo>::Item` + --> $DIR/alias-bound-unsound.rs:24:31 + | +LL | drop(<() as Foo>::copy_me(&x)); + | ^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`alias_bound_unsound`) + +error[E0275]: overflow evaluating the requirement `<() as Foo>::Item well-formed` + --> $DIR/alias-bound-unsound.rs:24:10 + | +LL | drop(<() as Foo>::copy_me(&x)); + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`alias_bound_unsound`) + +error[E0275]: overflow evaluating the requirement `<() as Foo>::Item == _` + --> $DIR/alias-bound-unsound.rs:24:10 | LL | drop(<() as Foo>::copy_me(&x)); | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`alias_bound_unsound`) + +error[E0275]: overflow evaluating the requirement `<() as Foo>::Item: Sized` + --> $DIR/alias-bound-unsound.rs:24:10 + | +LL | drop(<() as Foo>::copy_me(&x)); + | ^^^^^^^^^^^^^^^^^^^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`alias_bound_unsound`) + = note: the return type of a function must have a statically known size -error: aborting due to 2 previous errors +error: aborting due to 7 previous errors +Some errors have detailed explanations: E0275, E0282. +For more information about an error, try `rustc --explain E0275`. diff --git a/tests/ui/traits/new-solver/escaping-bound-vars-in-writeback-normalization.rs b/tests/ui/traits/new-solver/escaping-bound-vars-in-writeback-normalization.rs new file mode 100644 index 00000000000..29784c32a1b --- /dev/null +++ b/tests/ui/traits/new-solver/escaping-bound-vars-in-writeback-normalization.rs @@ -0,0 +1,18 @@ +// compile-flags: -Ztrait-solver=next +// check-pass + +trait Trivial { + type Assoc; +} + +impl<T: ?Sized> Trivial for T { + type Assoc = (); +} + +fn main() { + // During writeback, we call `normalize_erasing_regions`, which will walk past + // the `for<'a>` binder and try to normalize `<&'a () as Trivial>::Assoc` directly. + // We need to handle this case in the new deep normalizer similarly to how it + // is handled in the old solver. 
+ let x: Option<for<'a> fn(<&'a () as Trivial>::Assoc)> = None; +} diff --git a/tests/ui/traits/new-solver/trait-upcast-lhs-needs-normalization.rs b/tests/ui/traits/new-solver/trait-upcast-lhs-needs-normalization.rs new file mode 100644 index 00000000000..79114b93b78 --- /dev/null +++ b/tests/ui/traits/new-solver/trait-upcast-lhs-needs-normalization.rs @@ -0,0 +1,18 @@ +// check-pass +// compile-flags: -Ztrait-solver=next + +pub trait A {} +pub trait B: A {} + +pub trait Mirror { + type Assoc: ?Sized; +} +impl<T: ?Sized> Mirror for T { + type Assoc = T; +} + +pub fn foo<'a>(x: &'a <dyn B + 'static as Mirror>::Assoc) -> &'a (dyn A + 'static) { + x +} + +fn main() {} diff --git a/tests/ui/transmutability/issue-110892.rs b/tests/ui/transmutability/issue-110892.rs new file mode 100644 index 00000000000..ce926b39996 --- /dev/null +++ b/tests/ui/transmutability/issue-110892.rs @@ -0,0 +1,40 @@ +// check-fail +#![feature(generic_const_exprs, transmutability)] +#![allow(incomplete_features)] + +mod assert { + use std::mem::{Assume, BikeshedIntrinsicFrom}; + + pub fn is_transmutable< + Src, + Dst, + Context, + const ASSUME_ALIGNMENT: bool, + const ASSUME_LIFETIMES: bool, + const ASSUME_SAFETY: bool, + const ASSUME_VALIDITY: bool, + >() + where + Dst: BikeshedIntrinsicFrom< + Src, + Context, + { from_options(ASSUME_ALIGNMENT, ASSUME_LIFETIMES, ASSUME_SAFETY, ASSUME_VALIDITY) } + >, + {} + + // This should not cause an ICE + const fn from_options( + , //~ ERROR expected parameter name, found `,` + , //~ ERROR expected parameter name, found `,` + , //~ ERROR expected parameter name, found `,` + , //~ ERROR expected parameter name, found `,` + ) -> Assume {} //~ ERROR mismatched types +} + +fn main() { + struct Context; + #[repr(C)] struct Src; + #[repr(C)] struct Dst; + + assert::is_transmutable::<Src, Dst, Context, false, false, { true }, false>(); +} diff --git a/tests/ui/transmutability/issue-110892.stderr b/tests/ui/transmutability/issue-110892.stderr new file mode 100644 index 00000000000..13654307aee --- /dev/null +++ b/tests/ui/transmutability/issue-110892.stderr @@ -0,0 +1,36 @@ +error: expected parameter name, found `,` + --> $DIR/issue-110892.rs:27:9 + | +LL | , + | ^ expected parameter name + +error: expected parameter name, found `,` + --> $DIR/issue-110892.rs:28:9 + | +LL | , + | ^ expected parameter name + +error: expected parameter name, found `,` + --> $DIR/issue-110892.rs:29:9 + | +LL | , + | ^ expected parameter name + +error: expected parameter name, found `,` + --> $DIR/issue-110892.rs:30:9 + | +LL | , + | ^ expected parameter name + +error[E0308]: mismatched types + --> $DIR/issue-110892.rs:31:10 + | +LL | const fn from_options( + | ------------ implicitly returns `()` as its body has no tail or `return` expression +... +LL | ) -> Assume {} + | ^^^^^^ expected `Assume`, found `()` + +error: aborting due to 5 previous errors + +For more information about this error, try `rustc --explain E0308`. diff --git a/tests/ui/type-alias/lazy-type-alias-enum-variant.rs b/tests/ui/type-alias/lazy-type-alias-enum-variant.rs new file mode 100644 index 00000000000..78c3159d1c2 --- /dev/null +++ b/tests/ui/type-alias/lazy-type-alias-enum-variant.rs @@ -0,0 +1,17 @@ +// Regression test for issue #113736. 
+// check-pass + +#![feature(lazy_type_alias)] + +enum Enum { + Unit, + Tuple(), + Struct {}, +} + +fn main() { + type Alias = Enum; + let _ = Alias::Unit; + let _ = Alias::Tuple(); + let _ = Alias::Struct {}; +} diff --git a/tests/ui/unsafe/unsafe-not-inherited.stderr b/tests/ui/unsafe/unsafe-not-inherited.mirunsafeck.stderr index 3bc5ca5c9d1..5536efbc6f4 100644 --- a/tests/ui/unsafe/unsafe-not-inherited.stderr +++ b/tests/ui/unsafe/unsafe-not-inherited.mirunsafeck.stderr @@ -1,5 +1,5 @@ error[E0133]: use of mutable static is unsafe and requires unsafe function or block - --> $DIR/unsafe-not-inherited.rs:6:31 + --> $DIR/unsafe-not-inherited.rs:8:31 | LL | unsafe {static BAR: u64 = FOO;} | ------ ^^^ use of mutable static @@ -9,7 +9,7 @@ LL | unsafe {static BAR: u64 = FOO;} = note: mutable statics can be mutated by multiple threads: aliasing violations or data races will cause undefined behavior error[E0133]: call to unsafe function is unsafe and requires unsafe function or block - --> $DIR/unsafe-not-inherited.rs:18:13 + --> $DIR/unsafe-not-inherited.rs:20:13 | LL | unsafe { | ------ items do not inherit unsafety from separate enclosing items diff --git a/tests/ui/unsafe/unsafe-not-inherited.rs b/tests/ui/unsafe/unsafe-not-inherited.rs index 6d797caa0f9..f9d9a595714 100644 --- a/tests/ui/unsafe/unsafe-not-inherited.rs +++ b/tests/ui/unsafe/unsafe-not-inherited.rs @@ -1,3 +1,5 @@ +// revisions: mirunsafeck thirunsafeck +// [thirunsafeck]compile-flags: -Z thir-unsafeck #![allow(unused, dead_code)] static mut FOO: u64 = 0; diff --git a/tests/ui/unsafe/unsafe-not-inherited.thirunsafeck.stderr b/tests/ui/unsafe/unsafe-not-inherited.thirunsafeck.stderr new file mode 100644 index 00000000000..88ea2e6d1fe --- /dev/null +++ b/tests/ui/unsafe/unsafe-not-inherited.thirunsafeck.stderr @@ -0,0 +1,24 @@ +error[E0133]: use of mutable static is unsafe and requires unsafe function or block + --> $DIR/unsafe-not-inherited.rs:8:31 + | +LL | unsafe {static BAR: u64 = FOO;} + | ------ ^^^ use of mutable static + | | + | items do not inherit unsafety from separate enclosing items + | + = note: mutable statics can be mutated by multiple threads: aliasing violations or data races will cause undefined behavior + +error[E0133]: call to unsafe function `unsafe_call` is unsafe and requires unsafe function or block + --> $DIR/unsafe-not-inherited.rs:20:13 + | +LL | unsafe { + | ------ items do not inherit unsafety from separate enclosing items +... +LL | unsafe_call(); + | ^^^^^^^^^^^^^ call to unsafe function + | + = note: consult the function's documentation for information on how to avoid undefined behavior + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0133`. diff --git a/triagebot.toml b/triagebot.toml index da9d167bd3e..1a435ff074e 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -264,7 +264,7 @@ exclude_labels = [ [autolabel."WG-trait-system-refactor"] trigger_files = [ "compiler/rustc_trait_selection/src/solve", - "compiler/rustc_middle/src/traits/solve.rs" + "compiler/rustc_middle/src/traits/solve" ] [notify-zulip."I-prioritize"] |