205 files changed, 3805 insertions, 2575 deletions
diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 328086af183..9d6ee65049a 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -901,10 +901,39 @@ pub struct Stmt { pub id: NodeId, pub kind: StmtKind, pub span: Span, - pub tokens: Option<LazyTokenStream>, } impl Stmt { + pub fn tokens(&self) -> Option<&LazyTokenStream> { + match self.kind { + StmtKind::Local(ref local) => local.tokens.as_ref(), + StmtKind::Item(ref item) => item.tokens.as_ref(), + StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => expr.tokens.as_ref(), + StmtKind::Empty => None, + StmtKind::MacCall(ref mac) => mac.tokens.as_ref(), + } + } + + pub fn tokens_mut(&mut self) -> Option<&mut LazyTokenStream> { + match self.kind { + StmtKind::Local(ref mut local) => local.tokens.as_mut(), + StmtKind::Item(ref mut item) => item.tokens.as_mut(), + StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens.as_mut(), + StmtKind::Empty => None, + StmtKind::MacCall(ref mut mac) => mac.tokens.as_mut(), + } + } + + pub fn set_tokens(&mut self, tokens: Option<LazyTokenStream>) { + match self.kind { + StmtKind::Local(ref mut local) => local.tokens = tokens, + StmtKind::Item(ref mut item) => item.tokens = tokens, + StmtKind::Expr(ref mut expr) | StmtKind::Semi(ref mut expr) => expr.tokens = tokens, + StmtKind::Empty => {} + StmtKind::MacCall(ref mut mac) => mac.tokens = tokens, + } + } + pub fn has_trailing_semicolon(&self) -> bool { match &self.kind { StmtKind::Semi(_) => true, @@ -912,18 +941,25 @@ impl Stmt { _ => false, } } + + /// Converts a parsed `Stmt` to a `Stmt` with + /// a trailing semicolon. + /// + /// This only modifies the parsed AST struct, not the attached + /// `LazyTokenStream`. The parser is responsible for calling + /// `CreateTokenStream::add_trailing_semi` when there is actually + /// a semicolon in the tokenstream. pub fn add_trailing_semicolon(mut self) -> Self { self.kind = match self.kind { StmtKind::Expr(expr) => StmtKind::Semi(expr), StmtKind::MacCall(mac) => { - StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs }| MacCallStmt { - mac, - style: MacStmtStyle::Semicolon, - attrs, + StmtKind::MacCall(mac.map(|MacCallStmt { mac, style: _, attrs, tokens }| { + MacCallStmt { mac, style: MacStmtStyle::Semicolon, attrs, tokens } })) } kind => kind, }; + self } @@ -963,6 +999,7 @@ pub struct MacCallStmt { pub mac: MacCall, pub style: MacStmtStyle, pub attrs: AttrVec, + pub tokens: Option<LazyTokenStream>, } #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)] @@ -988,6 +1025,7 @@ pub struct Local { pub init: Option<P<Expr>>, pub span: Span, pub attrs: AttrVec, + pub tokens: Option<LazyTokenStream>, } /// An arm of a 'match'. 
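The hunk above moves the captured `LazyTokenStream` off `Stmt` itself and onto whichever payload the statement wraps (`Local`, `Item`, `Expr`, `MacCallStmt`), with `Stmt::tokens`, `tokens_mut`, and `set_tokens` dispatching on the kind. Below is a minimal standalone sketch of that "store on the payload, dispatch from the wrapper" shape; `Tokens`, `Local`, `StmtKind`, and `Stmt` here are illustrative stand-ins, not the real `rustc_ast` types.

```rust
// Standalone sketch of the accessor pattern above; all types are stand-ins.
#[derive(Clone, Debug)]
struct Tokens(String);

#[derive(Debug)]
struct Local {
    tokens: Option<Tokens>,
}

#[derive(Debug)]
enum StmtKind {
    Local(Local),
    Empty,
}

#[derive(Debug)]
struct Stmt {
    kind: StmtKind,
}

impl Stmt {
    // The wrapper no longer stores tokens; it forwards to whichever
    // payload actually owns them.
    fn tokens(&self) -> Option<&Tokens> {
        match &self.kind {
            StmtKind::Local(local) => local.tokens.as_ref(),
            StmtKind::Empty => None,
        }
    }

    fn set_tokens(&mut self, tokens: Option<Tokens>) {
        match &mut self.kind {
            StmtKind::Local(local) => local.tokens = tokens,
            // An empty statement has nowhere to store tokens, so the
            // assignment is dropped, mirroring the patch above.
            StmtKind::Empty => {}
        }
    }
}

fn main() {
    let mut stmt = Stmt { kind: StmtKind::Local(Local { tokens: None }) };
    stmt.set_tokens(Some(Tokens("let x = 1;".to_string())));
    println!("{:?}", stmt.tokens());
}
```

Keeping the tokens on the payload means nodes such as `Local` and `MacCallStmt` carry their own captured tokens, which is what the `mut_visit` and builtin-macro hunks further down adjust for.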
@@ -1845,6 +1883,7 @@ impl UintTy { pub struct AssocTyConstraint { pub id: NodeId, pub ident: Ident, + pub gen_args: Option<GenericArgs>, pub kind: AssocTyConstraintKind, pub span: Span, } diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index ddae0ab03e4..c4e92a9f6d1 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -441,11 +441,14 @@ pub fn noop_flat_map_arm<T: MutVisitor>(mut arm: Arm, vis: &mut T) -> SmallVec<[ } pub fn noop_visit_ty_constraint<T: MutVisitor>( - AssocTyConstraint { id, ident, kind, span }: &mut AssocTyConstraint, + AssocTyConstraint { id, ident, gen_args, kind, span }: &mut AssocTyConstraint, vis: &mut T, ) { vis.visit_id(id); vis.visit_ident(ident); + if let Some(ref mut gen_args) = gen_args { + vis.visit_generic_args(gen_args); + } match kind { AssocTyConstraintKind::Equality { ref mut ty } => { vis.visit_ty(ty); @@ -576,13 +579,14 @@ pub fn noop_visit_parenthesized_parameter_data<T: MutVisitor>( } pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) { - let Local { id, pat, ty, init, span, attrs } = local.deref_mut(); + let Local { id, pat, ty, init, span, attrs, tokens } = local.deref_mut(); vis.visit_id(id); vis.visit_pat(pat); visit_opt(ty, |ty| vis.visit_ty(ty)); visit_opt(init, |init| vis.visit_expr(init)); vis.visit_span(span); visit_thin_attrs(attrs, vis); + visit_lazy_tts(tokens, vis); } pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) { @@ -1325,16 +1329,12 @@ pub fn noop_filter_map_expr<T: MutVisitor>(mut e: P<Expr>, vis: &mut T) -> Optio } pub fn noop_flat_map_stmt<T: MutVisitor>( - Stmt { kind, mut span, mut id, mut tokens }: Stmt, + Stmt { kind, mut span, mut id }: Stmt, vis: &mut T, ) -> SmallVec<[Stmt; 1]> { vis.visit_id(&mut id); vis.visit_span(&mut span); - visit_lazy_tts(&mut tokens, vis); - noop_flat_map_stmt_kind(kind, vis) - .into_iter() - .map(|kind| Stmt { id, kind, span, tokens: tokens.clone() }) - .collect() + noop_flat_map_stmt_kind(kind, vis).into_iter().map(|kind| Stmt { id, kind, span }).collect() } pub fn noop_flat_map_stmt_kind<T: MutVisitor>( @@ -1351,9 +1351,10 @@ pub fn noop_flat_map_stmt_kind<T: MutVisitor>( StmtKind::Semi(expr) => vis.filter_map_expr(expr).into_iter().map(StmtKind::Semi).collect(), StmtKind::Empty => smallvec![StmtKind::Empty], StmtKind::MacCall(mut mac) => { - let MacCallStmt { mac: mac_, style: _, attrs } = mac.deref_mut(); + let MacCallStmt { mac: mac_, style: _, attrs, tokens } = mac.deref_mut(); vis.visit_mac_call(mac_); visit_thin_attrs(attrs, vis); + visit_lazy_tts(tokens, vis); smallvec![StmtKind::MacCall(mac)] } } diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index fe67b905bf3..b2207f22816 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -121,10 +121,14 @@ where } pub trait CreateTokenStream: sync::Send + sync::Sync { + fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream>; fn create_token_stream(&self) -> TokenStream; } impl CreateTokenStream for TokenStream { + fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> { + panic!("Cannot call `add_trailing_semi` on a `TokenStream`!"); + } fn create_token_stream(&self) -> TokenStream { self.clone() } @@ -141,6 +145,13 @@ impl LazyTokenStream { LazyTokenStream(Lrc::new(Box::new(inner))) } + /// Extends the captured stream by one token, + /// which must be a trailing semicolon. 
This + /// affects the `TokenStream` created by `make_tokenstream`. + pub fn add_trailing_semi(&self) -> LazyTokenStream { + LazyTokenStream(Lrc::new(self.0.add_trailing_semi())) + } + pub fn create_token_stream(&self) -> TokenStream { self.0.create_token_stream() } diff --git a/compiler/rustc_ast/src/visit.rs b/compiler/rustc_ast/src/visit.rs index 560064182e1..61426a838de 100644 --- a/compiler/rustc_ast/src/visit.rs +++ b/compiler/rustc_ast/src/visit.rs @@ -485,6 +485,9 @@ pub fn walk_assoc_ty_constraint<'a, V: Visitor<'a>>( constraint: &'a AssocTyConstraint, ) { visitor.visit_ident(constraint.ident); + if let Some(ref gen_args) = constraint.gen_args { + visitor.visit_generic_args(gen_args.span(), gen_args); + } match constraint.kind { AssocTyConstraintKind::Equality { ref ty } => { visitor.visit_ty(ty); @@ -686,7 +689,7 @@ pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) { StmtKind::Expr(ref expr) | StmtKind::Semi(ref expr) => visitor.visit_expr(expr), StmtKind::Empty => {} StmtKind::MacCall(ref mac) => { - let MacCallStmt { ref mac, style: _, ref attrs } = **mac; + let MacCallStmt { ref mac, style: _, ref attrs, tokens: _ } = **mac; visitor.visit_mac_call(mac); for attr in attrs.iter() { visitor.visit_attribute(attr); diff --git a/compiler/rustc_ast_lowering/src/item.rs b/compiler/rustc_ast_lowering/src/item.rs index d353bc19f7a..eef6d38aa05 100644 --- a/compiler/rustc_ast_lowering/src/item.rs +++ b/compiler/rustc_ast_lowering/src/item.rs @@ -43,6 +43,7 @@ impl<'a> Visitor<'a> for ItemLowerer<'a, '_, '_> { items: BTreeSet::new(), trait_items: BTreeSet::new(), impl_items: BTreeSet::new(), + foreign_items: BTreeSet::new(), }, ); @@ -105,6 +106,18 @@ impl<'a> Visitor<'a> for ItemLowerer<'a, '_, '_> { visit::walk_assoc_item(self, item, ctxt); } + + fn visit_foreign_item(&mut self, item: &'a ForeignItem) { + self.lctx.allocate_hir_id_counter(item.id); + self.lctx.with_hir_id_owner(item.id, |lctx| { + let hir_item = lctx.lower_foreign_item(item); + let id = hir::ForeignItemId { hir_id: hir_item.hir_id }; + lctx.foreign_items.insert(id, hir_item); + lctx.modules.get_mut(&lctx.current_module).unwrap().foreign_items.insert(id); + }); + + visit::walk_foreign_item(self, item); + } } impl<'hir> LoweringContext<'_, 'hir> { @@ -304,7 +317,12 @@ impl<'hir> LoweringContext<'_, 'hir> { }) } ItemKind::Mod(ref m) => hir::ItemKind::Mod(self.lower_mod(m)), - ItemKind::ForeignMod(ref nm) => hir::ItemKind::ForeignMod(self.lower_foreign_mod(nm)), + ItemKind::ForeignMod(ref fm) => hir::ItemKind::ForeignMod { + abi: fm.abi.map_or(abi::Abi::C, |abi| self.lower_abi(abi)), + items: self + .arena + .alloc_from_iter(fm.items.iter().map(|x| self.lower_foreign_item_ref(x))), + }, ItemKind::GlobalAsm(ref ga) => hir::ItemKind::GlobalAsm(self.lower_global_asm(ga)), ItemKind::TyAlias(_, ref gen, _, Some(ref ty)) => { // We lower @@ -704,10 +722,12 @@ impl<'hir> LoweringContext<'_, 'hir> { } } - fn lower_foreign_mod(&mut self, fm: &ForeignMod) -> hir::ForeignMod<'hir> { - hir::ForeignMod { - abi: fm.abi.map_or(abi::Abi::C, |abi| self.lower_abi(abi)), - items: self.arena.alloc_from_iter(fm.items.iter().map(|x| self.lower_foreign_item(x))), + fn lower_foreign_item_ref(&mut self, i: &ForeignItem) -> hir::ForeignItemRef<'hir> { + hir::ForeignItemRef { + id: hir::ForeignItemId { hir_id: self.lower_node_id(i.id) }, + ident: i.ident, + span: i.span, + vis: self.lower_visibility(&i.vis, Some(i.id)), } } diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index 
f7c693cc94d..2e1b5a74a7b 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -101,6 +101,7 @@ struct LoweringContext<'a, 'hir: 'a> { trait_items: BTreeMap<hir::TraitItemId, hir::TraitItem<'hir>>, impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem<'hir>>, + foreign_items: BTreeMap<hir::ForeignItemId, hir::ForeignItem<'hir>>, bodies: BTreeMap<hir::BodyId, hir::Body<'hir>>, exported_macros: Vec<hir::MacroDef<'hir>>, non_exported_macro_attrs: Vec<ast::Attribute>, @@ -298,6 +299,7 @@ pub fn lower_crate<'a, 'hir>( items: BTreeMap::new(), trait_items: BTreeMap::new(), impl_items: BTreeMap::new(), + foreign_items: BTreeMap::new(), bodies: BTreeMap::new(), trait_impls: BTreeMap::new(), modules: BTreeMap::new(), @@ -485,6 +487,11 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { visit::walk_assoc_item(self, item, ctxt); } + fn visit_foreign_item(&mut self, item: &'tcx ForeignItem) { + self.lctx.allocate_hir_id_counter(item.id); + visit::walk_foreign_item(self, item); + } + fn visit_ty(&mut self, t: &'tcx Ty) { match t.kind { // Mirrors the case in visit::walk_ty @@ -548,6 +555,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { items: self.items, trait_items: self.trait_items, impl_items: self.impl_items, + foreign_items: self.foreign_items, bodies: self.bodies, body_ids, trait_impls: self.trait_impls, @@ -1000,6 +1008,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { ) -> hir::TypeBinding<'hir> { debug!("lower_assoc_ty_constraint(constraint={:?}, itctx={:?})", constraint, itctx); + if let Some(ref gen_args) = constraint.gen_args { + self.sess.span_fatal( + gen_args.span(), + "generic associated types in trait paths are currently not implemented", + ); + } + let kind = match constraint.kind { AssocTyConstraintKind::Equality { ref ty } => { hir::TypeBindingKind::Equality { ty: self.lower_ty(ty, itctx) } diff --git a/compiler/rustc_ast_passes/src/ast_validation.rs b/compiler/rustc_ast_passes/src/ast_validation.rs index 08ebcbf381a..4ec3e39facc 100644 --- a/compiler/rustc_ast_passes/src/ast_validation.rs +++ b/compiler/rustc_ast_passes/src/ast_validation.rs @@ -1372,16 +1372,18 @@ fn deny_equality_constraints( if param.ident == *ident { let param = ident; match &full_path.segments[qself.position..] { - [PathSegment { ident, .. }] => { + [PathSegment { ident, args, .. }] => { // Make a new `Path` from `foo::Bar` to `Foo<Bar = RhsTy>`. let mut assoc_path = full_path.clone(); // Remove `Bar` from `Foo::Bar`. assoc_path.segments.pop(); let len = assoc_path.segments.len() - 1; + let gen_args = args.as_ref().map(|p| (**p).clone()); // Build `<Bar = RhsTy>`. 
let arg = AngleBracketedArg::Constraint(AssocTyConstraint { id: rustc_ast::node_id::DUMMY_NODE_ID, ident: *ident, + gen_args, kind: AssocTyConstraintKind::Equality { ty: predicate.rhs_ty.clone(), }, diff --git a/compiler/rustc_ast_passes/src/lib.rs b/compiler/rustc_ast_passes/src/lib.rs index bfe30441980..7487421e709 100644 --- a/compiler/rustc_ast_passes/src/lib.rs +++ b/compiler/rustc_ast_passes/src/lib.rs @@ -6,6 +6,7 @@ #![feature(bindings_after_at)] #![feature(iter_is_partitioned)] +#![recursion_limit = "256"] pub mod ast_validation; pub mod feature_gate; diff --git a/compiler/rustc_builtin_macros/src/deriving/debug.rs b/compiler/rustc_builtin_macros/src/deriving/debug.rs index 9381264f498..5c21329069b 100644 --- a/compiler/rustc_builtin_macros/src/deriving/debug.rs +++ b/compiler/rustc_builtin_macros/src/deriving/debug.rs @@ -132,6 +132,7 @@ fn stmt_let_underscore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P<ast::Expr>) -> as id: ast::DUMMY_NODE_ID, span: sp, attrs: ast::AttrVec::new(), + tokens: None, }); - ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp, tokens: None } + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp } } diff --git a/compiler/rustc_builtin_macros/src/deriving/mod.rs b/compiler/rustc_builtin_macros/src/deriving/mod.rs index 8c7e85f1eeb..1651180817b 100644 --- a/compiler/rustc_builtin_macros/src/deriving/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/mod.rs @@ -64,7 +64,6 @@ impl MultiItemModifier for BuiltinDerive { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Item(a.expect_item()), span, - tokens: None, }))); }); } else { diff --git a/compiler/rustc_codegen_cranelift/.gitattributes b/compiler/rustc_codegen_cranelift/.gitattributes index 6313b56c578..0ceb3fe646c 100644 --- a/compiler/rustc_codegen_cranelift/.gitattributes +++ b/compiler/rustc_codegen_cranelift/.gitattributes @@ -1 +1,2 @@ * text=auto eol=lf +*.rs diff=rust diff --git a/compiler/rustc_codegen_cranelift/Cargo.lock b/compiler/rustc_codegen_cranelift/Cargo.lock index 2889fac77f6..67ed41e7652 100644 --- a/compiler/rustc_codegen_cranelift/Cargo.lock +++ b/compiler/rustc_codegen_cranelift/Cargo.lock @@ -2,9 +2,9 @@ # It is not intended for manual editing. 
[[package]] name = "anyhow" -version = "1.0.33" +version = "1.0.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1fd36ffbb1fb7c834eac128ea8d0e310c5aeb635548f9d58861e1308d46e71c" +checksum = "bf8dcb5b4bbaa28653b647d8c77bd4ed40183b48882e130c1f1ffb73de069fd7" [[package]] name = "ar" @@ -31,9 +31,9 @@ checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de" [[package]] name = "cc" -version = "1.0.61" +version = "1.0.62" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed67cbde08356238e75fc4656be4749481eeffb09e19f320a25237d5221c985d" +checksum = "f1770ced377336a88a67c473594ccc14eca6f4559217c34f64aac8f83d641b40" [[package]] name = "cfg-if" @@ -42,17 +42,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" [[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] name = "cranelift-bforest" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-codegen" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" dependencies = [ "byteorder", "cranelift-bforest", @@ -69,8 +75,8 @@ dependencies = [ [[package]] name = "cranelift-codegen-meta" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" dependencies = [ "cranelift-codegen-shared", "cranelift-entity", @@ -78,18 +84,18 @@ dependencies = [ [[package]] name = "cranelift-codegen-shared" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" [[package]] name = "cranelift-entity" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" [[package]] name = "cranelift-frontend" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" dependencies = [ "cranelift-codegen", "log", @@ -99,8 +105,8 @@ dependencies = [ [[package]] name = "cranelift-module" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = 
"git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" dependencies = [ "anyhow", "cranelift-codegen", @@ -111,8 +117,8 @@ dependencies = [ [[package]] name = "cranelift-native" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" dependencies = [ "cranelift-codegen", "raw-cpuid", @@ -121,8 +127,8 @@ dependencies = [ [[package]] name = "cranelift-object" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" dependencies = [ "anyhow", "cranelift-codegen", @@ -134,8 +140,8 @@ dependencies = [ [[package]] name = "cranelift-simplejit" -version = "0.67.0" -source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#44cbdecea03c360ea82e6482f0cf6c614effef21" +version = "0.68.0" +source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#19640367dbf0da7093e61add3306c8d092644fb3" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -151,18 +157,18 @@ dependencies = [ [[package]] name = "crc32fast" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1" +checksum = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] name = "errno" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eab5ee3df98a279d9b316b1af6ac95422127b1290317e6d18c1743c99418b01" +checksum = "fa68f2fb9cae9d37c9b2b3584aba698a2e97f72d7aef7b9f7aa71d8b54ce46fe" dependencies = [ "errno-dragonfly", "libc", @@ -187,9 +193,9 @@ checksum = "8f5f3913fa0bfe7ee1fd8248b6b9f42a5af4b9d65ec2dd2c3c26132b950ecfc2" [[package]] name = "gimli" -version = "0.22.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaf91faf136cb47367fa430cd46e37a788775e7fa104f8b4bcb3861dc389b724" +checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce" dependencies = [ "indexmap", ] @@ -212,17 +218,17 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2448f6066e80e3bfc792e9c98bf705b4b0fc6e8ef5b43e5889aff0eaa9c58743" +checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614" [[package]] name = "libloading" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3557c9384f7f757f6d139cd3a4c62ef4e850696c16bf27924a5538c8a09717a1" +checksum = "1090080fe06ec2648d0da3881d9453d97e71a45f00eb179af7fdd7e3f686fdb0" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "winapi", ] @@ -232,7 +238,7 @@ version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b" dependencies = [ - "cfg-if", + "cfg-if 0.1.10", ] [[package]] @@ -246,9 +252,9 @@ dependencies = [ [[package]] name = "object" -version = "0.21.1" +version = "0.22.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "37fd5004feb2ce328a52b0b3d01dbf4ffff72583493900ed15f22d4111c51693" +checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397" dependencies = [ "crc32fast", "indexmap", @@ -274,9 +280,9 @@ dependencies = [ [[package]] name = "raw-cpuid" -version = "7.0.3" +version = "8.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf" +checksum = "1fdf7d9dbd43f3d81d94a49c1c3df73cc2b3827995147e6cf7f89d4ec5483e73" dependencies = [ "bitflags", "cc", @@ -361,9 +367,9 @@ checksum = "fbee7696b84bbf3d89a1c2eccff0850e3047ed46bfcd2e92c29a2d074d57e252" [[package]] name = "syn" -version = "1.0.44" +version = "1.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e03e57e4fcbfe7749842d53e24ccb9aa12b7252dbe5e91d2acad31834c8b8fdd" +checksum = "cc371affeffc477f42a221a1e4297aedcea33d47d19b61455588bd9d8f6b19ac" dependencies = [ "proc-macro2", "quote", @@ -372,24 +378,24 @@ dependencies = [ [[package]] name = "target-lexicon" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe2635952a442a01fd4cb53d98858b5e4bb461b02c0d111f22f31772e3e7a8b2" +checksum = "4ee5a98e506fb7231a304c3a1bd7c132a55016cf65001e0282480665870dfcb9" [[package]] name = "thiserror" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "318234ffa22e0920fe9a40d7b8369b5f649d490980cf7aadcf1eb91594869b42" +checksum = "0e9ae34b84616eedaaf1e9dd6026dbe00dcafa92aa0c8077cb69df1fcfe5e53e" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cae2447b6282786c3493999f40a9be2a6ad20cb8bd268b0a0dbf5a065535c0ab" +checksum = "9ba20f23e85b10754cd195504aebf6a27e2e6cbe28c17778a0c930724628dd56" dependencies = [ "proc-macro2", "quote", diff --git a/compiler/rustc_codegen_cranelift/Cargo.toml b/compiler/rustc_codegen_cranelift/Cargo.toml index 1c8e350d242..cbff06749d3 100644 --- a/compiler/rustc_codegen_cranelift/Cargo.toml +++ b/compiler/rustc_codegen_cranelift/Cargo.toml @@ -15,8 +15,8 @@ cranelift-module = { git = "https://github.com/bytecodealliance/wasmtime/", bran cranelift-simplejit = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", optional = true } cranelift-object = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" } target-lexicon = "0.11.0" -gimli = { version = "0.22.0", default-features = false, features = ["write"]} -object = { version = "0.21.1", default-features = false, features = ["std", "read_core", "write", "coff", "elf", "macho", "pe"] } +gimli = { version = "0.23.0", default-features = false, features = ["write"]} +object = { version = "0.22.0", default-features = false, features = ["std", "read_core", "write", "coff", "elf", "macho", "pe"] } ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "do_not_remove_cg_clif_ranlib" } indexmap = "1.0.2" diff --git a/compiler/rustc_codegen_cranelift/Readme.md b/compiler/rustc_codegen_cranelift/Readme.md index f8a5e13ed54..de54bf67f4a 100644 --- a/compiler/rustc_codegen_cranelift/Readme.md +++ b/compiler/rustc_codegen_cranelift/Readme.md @@ -51,7 +51,7 @@ This should build and run your project with rustc_codegen_cranelift instead of t > You should prefer using the Cargo method. 
```bash -$ $cg_clif_dir/build/cg_clif my_crate.rs +$ $cg_clif_dir/build/bin/cg_clif my_crate.rs ``` ### Jit mode @@ -68,7 +68,7 @@ $ $cg_clif_dir/build/cargo.sh jit or ```bash -$ $cg_clif_dir/build/cg_clif --jit my_crate.rs +$ $cg_clif_dir/build/bin/cg_clif --jit my_crate.rs ``` ### Shell @@ -77,7 +77,7 @@ These are a few functions that allow you to easily run rust code from the shell ```bash function jit_naked() { - echo "$@" | $cg_clif_dir/build/cg_clif - --jit + echo "$@" | $cg_clif_dir/build/bin/cg_clif - --jit } function jit() { diff --git a/compiler/rustc_codegen_cranelift/build.sh b/compiler/rustc_codegen_cranelift/build.sh index f9a87e68a04..26041b59cca 100755 --- a/compiler/rustc_codegen_cranelift/build.sh +++ b/compiler/rustc_codegen_cranelift/build.sh @@ -26,22 +26,35 @@ while [[ $# != 0 ]]; do done # Build cg_clif +unset CARGO_TARGET_DIR export RUSTFLAGS="-Zrun_dsymutil=no" +unamestr=$(uname) +if [[ "$unamestr" == 'Linux' ]]; then + export RUSTFLAGS='-Clink-arg=-Wl,-rpath=$ORIGIN/../lib '$RUSTFLAGS +elif [[ "$unamestr" == 'Darwin' ]]; then + export RUSTFLAGS='-Clink-arg=-Wl,-rpath,@loader_path/../lib -Zosx-rpath-install-name '$RUSTFLAGS + dylib_ext='dylib' +else + echo "Unsupported os" + exit 1 +fi if [[ "$CHANNEL" == "release" ]]; then cargo build --release else cargo build fi -rm -rf $target_dir -mkdir $target_dir -cp -a target/$CHANNEL/cg_clif{,_build_sysroot} target/$CHANNEL/*rustc_codegen_cranelift* $target_dir/ -cp -a rust-toolchain scripts/config.sh scripts/cargo.sh $target_dir +rm -rf "$target_dir" +mkdir "$target_dir" +mkdir "$target_dir"/bin "$target_dir"/lib +ln target/$CHANNEL/cg_clif{,_build_sysroot} "$target_dir"/bin +ln target/$CHANNEL/*rustc_codegen_cranelift* "$target_dir"/lib +ln rust-toolchain scripts/config.sh scripts/cargo.sh "$target_dir" if [[ "$build_sysroot" == "1" ]]; then echo "[BUILD] sysroot" export CG_CLIF_INCR_CACHE_DISABLED=1 dir=$(pwd) - cd $target_dir - time $dir/build_sysroot/build_sysroot.sh + cd "$target_dir" + time "$dir/build_sysroot/build_sysroot.sh" fi diff --git a/compiler/rustc_codegen_cranelift/build_sysroot/Cargo.lock b/compiler/rustc_codegen_cranelift/build_sysroot/Cargo.lock index 03ba5b53d2e..a2b8f449f00 100644 --- a/compiler/rustc_codegen_cranelift/build_sysroot/Cargo.lock +++ b/compiler/rustc_codegen_cranelift/build_sysroot/Cargo.lock @@ -2,9 +2,9 @@ # It is not intended for manual editing. 
[[package]] name = "addr2line" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6a2d3371669ab3ca9797670853d61402b03d0b4b9ebf33d677dfa720203072" +checksum = "7c0929d69e78dd9bf5408269919fcbcaeb2e35e5d43e5815517cdc6a8e11a423" dependencies = [ "compiler_builtins", "gimli", @@ -47,9 +47,9 @@ checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" [[package]] name = "cc" -version = "1.0.61" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed67cbde08356238e75fc4656be4749481eeffb09e19f320a25237d5221c985d" +checksum = "95752358c8f7552394baf48cd82695b345628ad3f170d607de3ca03b8dacca15" [[package]] name = "cfg-if" @@ -76,9 +76,9 @@ version = "0.0.0" [[package]] name = "dlmalloc" -version = "0.1.4" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35055b1021724f4eb5262eb49130eebff23fc59fc5a14160e05faad8eeb36673" +checksum = "332570860c2edf2d57914987bf9e24835425f75825086b6ba7d1e6a3e4f1f254" dependencies = [ "compiler_builtins", "libc", @@ -108,9 +108,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.22.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaf91faf136cb47367fa430cd46e37a788775e7fa104f8b4bcb3861dc389b724" +checksum = "f6503fe142514ca4799d4c26297c4248239fe8838d827db6bd6065c6ed29a6ce" dependencies = [ "compiler_builtins", "rustc-std-workspace-alloc", @@ -163,9 +163,9 @@ dependencies = [ [[package]] name = "object" -version = "0.20.0" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ab52be62400ca80aa00285d25253d7f7c437b7375c4de678f5405d3afe82ca5" +checksum = "8d3b63360ec3cb337817c2dbd47ab4a0f170d285d8e5a2064600f3def1402397" dependencies = [ "compiler_builtins", "rustc-std-workspace-alloc", diff --git a/compiler/rustc_codegen_cranelift/build_sysroot/build_sysroot.sh b/compiler/rustc_codegen_cranelift/build_sysroot/build_sysroot.sh index eba15c0dd43..d7a72df2eb2 100755 --- a/compiler/rustc_codegen_cranelift/build_sysroot/build_sysroot.sh +++ b/compiler/rustc_codegen_cranelift/build_sysroot/build_sysroot.sh @@ -10,10 +10,10 @@ dir=$(pwd) # Use rustc with cg_clif as hotpluggable backend instead of the custom cg_clif driver so that # build scripts are still compiled using cg_llvm. 
-export RUSTC=$dir"/cg_clif_build_sysroot" +export RUSTC=$dir"/bin/cg_clif_build_sysroot" export RUSTFLAGS=$RUSTFLAGS" --clif" -cd $(dirname "$0") +cd "$(dirname "$0")" # Cleanup for previous run # v Clean target dir except for build scripts and incremental cache @@ -28,12 +28,13 @@ if [[ "$1" != "--debug" ]]; then sysroot_channel='release' # FIXME Enable incremental again once rust-lang/rust#74946 is fixed # FIXME Enable -Zmir-opt-level=2 again once it doesn't ice anymore - CARGO_INCREMENTAL=0 RUSTFLAGS="$RUSTFLAGS" cargo build --target $TARGET_TRIPLE --release + CARGO_INCREMENTAL=0 RUSTFLAGS="$RUSTFLAGS" cargo build --target "$TARGET_TRIPLE" --release else sysroot_channel='debug' - cargo build --target $TARGET_TRIPLE + cargo build --target "$TARGET_TRIPLE" fi # Copy files to sysroot -mkdir -p $dir/sysroot/lib/rustlib/$TARGET_TRIPLE/lib/ -cp -a target/$TARGET_TRIPLE/$sysroot_channel/deps/* $dir/sysroot/lib/rustlib/$TARGET_TRIPLE/lib/ +mkdir -p "$dir/lib/rustlib/$TARGET_TRIPLE/lib/" +ln "target/$TARGET_TRIPLE/$sysroot_channel/deps/"* "$dir/lib/rustlib/$TARGET_TRIPLE/lib/" +rm "$dir/lib/rustlib/$TARGET_TRIPLE/lib/"*.{rmeta,d} diff --git a/compiler/rustc_codegen_cranelift/build_sysroot/prepare_sysroot_src.sh b/compiler/rustc_codegen_cranelift/build_sysroot/prepare_sysroot_src.sh index d0fb09ce745..40fbaf646a2 100755 --- a/compiler/rustc_codegen_cranelift/build_sysroot/prepare_sysroot_src.sh +++ b/compiler/rustc_codegen_cranelift/build_sysroot/prepare_sysroot_src.sh @@ -1,18 +1,18 @@ #!/bin/bash set -e -cd $(dirname "$0") +cd "$(dirname "$0")" -SRC_DIR=$(dirname $(rustup which rustc))"/../lib/rustlib/src/rust/" +SRC_DIR="$(dirname "$(rustup which rustc)")/../lib/rustlib/src/rust/" DST_DIR="sysroot_src" -if [ ! -e $SRC_DIR ]; then +if [ ! -e "$SRC_DIR" ]; then echo "Please install rust-src component" exit 1 fi rm -rf $DST_DIR mkdir -p $DST_DIR/library -cp -a $SRC_DIR/library $DST_DIR/ +cp -a "$SRC_DIR/library" $DST_DIR/ pushd $DST_DIR echo "[GIT] init" @@ -22,8 +22,8 @@ git add . 
echo "[GIT] commit" git commit -m "Initial commit" -q for file in $(ls ../../patches/ | grep -v patcha); do -echo "[GIT] apply" $file -git apply ../../patches/$file +echo "[GIT] apply" "$file" +git apply ../../patches/"$file" git add -A git commit --no-gpg-sign -m "Patch $file" done diff --git a/compiler/rustc_codegen_cranelift/example/std_example.rs b/compiler/rustc_codegen_cranelift/example/std_example.rs index cb512a4aa33..b38e25328a4 100644 --- a/compiler/rustc_codegen_cranelift/example/std_example.rs +++ b/compiler/rustc_codegen_cranelift/example/std_example.rs @@ -53,6 +53,7 @@ fn main() { assert_eq!(0b0000000000000000000000000010000010000000000000000000000000000000_0000000000100000000000000000000000001000000000000100000000000000u128.leading_zeros(), 26); assert_eq!(0b0000000000000000000000000010000000000000000000000000000000000000_0000000000000000000000000000000000001000000000000000000010000000u128.trailing_zeros(), 7); + assert_eq!(core::intrinsics::saturating_sub(0, -170141183460469231731687303715884105728i128), 170141183460469231731687303715884105727i128); let _d = 0i128.checked_div(2i128); let _d = 0u128.checked_div(2u128); diff --git a/compiler/rustc_codegen_cranelift/patches/0022-core-Disable-not-compiling-tests.patch b/compiler/rustc_codegen_cranelift/patches/0022-core-Disable-not-compiling-tests.patch index ee8548783de..8cfffe580a1 100644 --- a/compiler/rustc_codegen_cranelift/patches/0022-core-Disable-not-compiling-tests.patch +++ b/compiler/rustc_codegen_cranelift/patches/0022-core-Disable-not-compiling-tests.patch @@ -52,8 +52,8 @@ index 0475aeb..9558198 100644 fn test_rotate() { assert_eq!(A.rotate_left(6).rotate_right(2).rotate_right(4), A); @@ -112,6 +113,7 @@ mod tests { - assert_eq!(B.rotate_left(64), B); - assert_eq!(C.rotate_left(64), C); + assert_eq!(B.rotate_left(128), B); + assert_eq!(C.rotate_left(128), C); } + */ @@ -72,8 +72,8 @@ index 04ed14f..a6e372e 100644 fn test_rotate() { assert_eq!(A.rotate_left(6).rotate_right(2).rotate_right(4), A); @@ -76,6 +77,7 @@ mod tests { - assert_eq!(B.rotate_left(64), B); - assert_eq!(C.rotate_left(64), C); + assert_eq!(B.rotate_left(128), B); + assert_eq!(C.rotate_left(128), C); } + */ diff --git a/compiler/rustc_codegen_cranelift/prepare.sh b/compiler/rustc_codegen_cranelift/prepare.sh index 87f96f5dcf4..08e7cb18029 100755 --- a/compiler/rustc_codegen_cranelift/prepare.sh +++ b/compiler/rustc_codegen_cranelift/prepare.sh @@ -24,6 +24,7 @@ git checkout -- . 
git checkout 804a7a21b9e673a482797aa289a18ed480e4d813 # build with cg_llvm for perf comparison +unset CARGO_TARGET_DIR cargo build mv target/debug/main raytracer_cg_llvm popd diff --git a/compiler/rustc_codegen_cranelift/rust-toolchain b/compiler/rustc_codegen_cranelift/rust-toolchain index 0ca96be9ae7..ed1e64f45db 100644 --- a/compiler/rustc_codegen_cranelift/rust-toolchain +++ b/compiler/rustc_codegen_cranelift/rust-toolchain @@ -1 +1 @@ -nightly-2020-10-31 +nightly-2020-11-27 diff --git a/compiler/rustc_codegen_cranelift/scripts/cargo.sh b/compiler/rustc_codegen_cranelift/scripts/cargo.sh index 947b4a28798..dcd40acc02a 100755 --- a/compiler/rustc_codegen_cranelift/scripts/cargo.sh +++ b/compiler/rustc_codegen_cranelift/scripts/cargo.sh @@ -1,16 +1,16 @@ #!/bin/bash dir=$(dirname "$0") -source $dir/config.sh +source "$dir/config.sh" # read nightly compiler from rust-toolchain file -TOOLCHAIN=$(cat $dir/rust-toolchain) +TOOLCHAIN=$(cat "$dir/rust-toolchain") cmd=$1 shift || true if [[ "$cmd" = "jit" ]]; then -cargo +${TOOLCHAIN} rustc "$@" -- --jit +cargo "+${TOOLCHAIN}" rustc "$@" -- --jit else -cargo +${TOOLCHAIN} $cmd "$@" +cargo "+${TOOLCHAIN}" "$cmd" "$@" fi diff --git a/compiler/rustc_codegen_cranelift/scripts/config.sh b/compiler/rustc_codegen_cranelift/scripts/config.sh index 6120a550a27..dea037e2bc0 100644 --- a/compiler/rustc_codegen_cranelift/scripts/config.sh +++ b/compiler/rustc_codegen_cranelift/scripts/config.sh @@ -1,7 +1,8 @@ -#!/usr/bin/env bash +# Note to people running shellcheck: this file should only be sourced, not executed directly. + set -e -unamestr=`uname` +unamestr=$(uname) if [[ "$unamestr" == 'Linux' ]]; then dylib_ext='so' elif [[ "$unamestr" == 'Darwin' ]]; then @@ -40,19 +41,19 @@ echo export RUSTC_WRAPPER= fi -dir=$(cd $(dirname "$BASH_SOURCE"); pwd) +dir=$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd) -export RUSTC=$dir"/cg_clif" -export RUSTFLAGS=$linker -export RUSTDOCFLAGS=$linker' -Ztrim-diagnostic-paths=no -Cpanic=abort -Zpanic-abort-tests '\ -'-Zcodegen-backend='$dir'/librustc_codegen_cranelift.'$dylib_ext' --sysroot '$dir'/sysroot' +export RUSTC=$dir"/bin/cg_clif" +export RUSTFLAGS=$linker" "$RUSTFLAGS +export RUSTDOCFLAGS=$linker' -Cpanic=abort -Zpanic-abort-tests '\ +'-Zcodegen-backend='$dir'/lib/librustc_codegen_cranelift.'$dylib_ext' --sysroot '$dir # FIXME remove once the atomic shim is gone -if [[ `uname` == 'Darwin' ]]; then +if [[ $(uname) == 'Darwin' ]]; then export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup" fi -export LD_LIBRARY_PATH="$dir:$(rustc --print sysroot)/lib:$dir/target/out:$dir/sysroot/lib/rustlib/"$TARGET_TRIPLE"/lib" +export LD_LIBRARY_PATH="$(rustc --print sysroot)/lib" export DYLD_LIBRARY_PATH=$LD_LIBRARY_PATH export CG_CLIF_DISPLAY_CG_TIME=1 diff --git a/compiler/rustc_codegen_cranelift/scripts/rustup.sh b/compiler/rustc_codegen_cranelift/scripts/rustup.sh index 541b3c6563b..430f5c469b4 100755 --- a/compiler/rustc_codegen_cranelift/scripts/rustup.sh +++ b/compiler/rustc_codegen_cranelift/scripts/rustup.sh @@ -7,13 +7,13 @@ case $1 in TOOLCHAIN=$(date +%Y-%m-%d) echo "=> Installing new nightly" - rustup toolchain install --profile minimal nightly-${TOOLCHAIN} # Sanity check to see if the nightly exists - echo nightly-${TOOLCHAIN} > rust-toolchain + rustup toolchain install --profile minimal "nightly-${TOOLCHAIN}" # Sanity check to see if the nightly exists + echo "nightly-${TOOLCHAIN}" > rust-toolchain rustup component add rustfmt || true echo "=> Uninstalling all old nighlies" - for nightly in 
$(rustup toolchain list | grep nightly | grep -v $TOOLCHAIN | grep -v nightly-x86_64); do - rustup toolchain uninstall $nightly + for nightly in $(rustup toolchain list | grep nightly | grep -v "$TOOLCHAIN" | grep -v nightly-x86_64); do + rustup toolchain uninstall "$nightly" done ./clean_all.sh @@ -27,14 +27,30 @@ case $1 in git commit -m "Rustup to $(rustc -V)" ;; "push") - cg_clif=$(pwd) - pushd ../rust - branch=update_cg_clif-$(date +%Y-%m-%d) - git checkout -b $branch - git subtree pull --prefix=compiler/rustc_codegen_cranelift/ https://github.com/bjorn3/rustc_codegen_cranelift.git master - git push -u my $branch - popd + cg_clif=$(pwd) + pushd ../rust + git pull origin master + branch=sync_cg_clif-$(date +%Y-%m-%d) + git checkout -b "$branch" + git subtree pull --prefix=compiler/rustc_codegen_cranelift/ https://github.com/bjorn3/rustc_codegen_cranelift.git master + git push -u my "$branch" + + # immediately merge the merge commit into cg_clif to prevent merge conflicts when syncing + # from rust-lang/rust later + git subtree push --prefix=compiler/rustc_codegen_cranelift/ "$cg_clif" sync_from_rust + popd + git merge sync_from_rust ;; + "pull") + cg_clif=$(pwd) + pushd ../rust + git pull origin master + rust_vers="$(git rev-parse HEAD)" + git subtree push --prefix=compiler/rustc_codegen_cranelift/ "$cg_clif" sync_from_rust + popd + git merge sync_from_rust -m "Sync from rust $rust_vers" + git branch -d sync_from_rust + ;; *) echo "Unknown command '$1'" echo "Usage: ./rustup.sh prepare|commit" diff --git a/compiler/rustc_codegen_cranelift/scripts/test_bootstrap.sh b/compiler/rustc_codegen_cranelift/scripts/test_bootstrap.sh index 7f43f81a6cd..db69541b226 100755 --- a/compiler/rustc_codegen_cranelift/scripts/test_bootstrap.sh +++ b/compiler/rustc_codegen_cranelift/scripts/test_bootstrap.sh @@ -1,7 +1,7 @@ #!/bin/bash set -e -cd $(dirname "$0")/../ +cd "$(dirname "$0")/../" ./build.sh source build/config.sh @@ -11,7 +11,7 @@ git clone https://github.com/rust-lang/rust.git || true pushd rust git fetch git checkout -- . 
-git checkout $(rustc -V | cut -d' ' -f3 | tr -d '(') +git checkout "$(rustc -V | cut -d' ' -f3 | tr -d '(')" git apply - <<EOF diff --git a/.gitmodules b/.gitmodules @@ -48,7 +48,7 @@ cat > config.toml <<EOF ninja = false [build] -rustc = "$(pwd)/../build/cg_clif" +rustc = "$(pwd)/../build/bin/cg_clif" cargo = "$(rustup which cargo)" full-bootstrap = true local-rebuild = true diff --git a/compiler/rustc_codegen_cranelift/scripts/tests.sh b/compiler/rustc_codegen_cranelift/scripts/tests.sh index d941b73c81b..114b6f30a4a 100755 --- a/compiler/rustc_codegen_cranelift/scripts/tests.sh +++ b/compiler/rustc_codegen_cranelift/scripts/tests.sh @@ -4,63 +4,63 @@ set -e source build/config.sh export CG_CLIF_INCR_CACHE_DISABLED=1 -MY_RUSTC=$RUSTC" "$RUSTFLAGS" -L crate=target/out --out-dir target/out -Cdebuginfo=2" +MY_RUSTC="$RUSTC $RUSTFLAGS -L crate=target/out --out-dir target/out -Cdebuginfo=2" function no_sysroot_tests() { echo "[BUILD] mini_core" - $MY_RUSTC example/mini_core.rs --crate-name mini_core --crate-type lib,dylib --target $TARGET_TRIPLE + $MY_RUSTC example/mini_core.rs --crate-name mini_core --crate-type lib,dylib --target "$TARGET_TRIPLE" echo "[BUILD] example" - $MY_RUSTC example/example.rs --crate-type lib --target $TARGET_TRIPLE + $MY_RUSTC example/example.rs --crate-type lib --target "$TARGET_TRIPLE" if [[ "$JIT_SUPPORTED" = "1" ]]; then echo "[JIT] mini_core_hello_world" - CG_CLIF_JIT_ARGS="abc bcd" $MY_RUSTC --jit example/mini_core_hello_world.rs --cfg jit --target $HOST_TRIPLE + CG_CLIF_JIT_ARGS="abc bcd" $MY_RUSTC --jit example/mini_core_hello_world.rs --cfg jit --target "$HOST_TRIPLE" else echo "[JIT] mini_core_hello_world (skipped)" fi echo "[AOT] mini_core_hello_world" - $MY_RUSTC example/mini_core_hello_world.rs --crate-name mini_core_hello_world --crate-type bin -g --target $TARGET_TRIPLE + $MY_RUSTC example/mini_core_hello_world.rs --crate-name mini_core_hello_world --crate-type bin -g --target "$TARGET_TRIPLE" $RUN_WRAPPER ./target/out/mini_core_hello_world abc bcd # (echo "break set -n main"; echo "run"; sleep 1; echo "si -c 10"; sleep 1; echo "frame variable") | lldb -- ./target/out/mini_core_hello_world abc bcd echo "[AOT] arbitrary_self_types_pointers_and_wrappers" - $MY_RUSTC example/arbitrary_self_types_pointers_and_wrappers.rs --crate-name arbitrary_self_types_pointers_and_wrappers --crate-type bin --target $TARGET_TRIPLE + $MY_RUSTC example/arbitrary_self_types_pointers_and_wrappers.rs --crate-name arbitrary_self_types_pointers_and_wrappers --crate-type bin --target "$TARGET_TRIPLE" $RUN_WRAPPER ./target/out/arbitrary_self_types_pointers_and_wrappers } function base_sysroot_tests() { echo "[AOT] alloc_example" - $MY_RUSTC example/alloc_example.rs --crate-type bin --target $TARGET_TRIPLE + $MY_RUSTC example/alloc_example.rs --crate-type bin --target "$TARGET_TRIPLE" $RUN_WRAPPER ./target/out/alloc_example if [[ "$JIT_SUPPORTED" = "1" ]]; then echo "[JIT] std_example" - $MY_RUSTC --jit example/std_example.rs --target $HOST_TRIPLE + $MY_RUSTC --jit example/std_example.rs --target "$HOST_TRIPLE" else echo "[JIT] std_example (skipped)" fi echo "[AOT] dst_field_align" # FIXME Re-add -Zmir-opt-level=2 once rust-lang/rust#67529 is fixed. 
- $MY_RUSTC example/dst-field-align.rs --crate-name dst_field_align --crate-type bin --target $TARGET_TRIPLE + $MY_RUSTC example/dst-field-align.rs --crate-name dst_field_align --crate-type bin --target "$TARGET_TRIPLE" $RUN_WRAPPER ./target/out/dst_field_align || (echo $?; false) echo "[AOT] std_example" - $MY_RUSTC example/std_example.rs --crate-type bin --target $TARGET_TRIPLE + $MY_RUSTC example/std_example.rs --crate-type bin --target "$TARGET_TRIPLE" $RUN_WRAPPER ./target/out/std_example arg echo "[AOT] subslice-patterns-const-eval" - $MY_RUSTC example/subslice-patterns-const-eval.rs --crate-type bin -Cpanic=abort --target $TARGET_TRIPLE + $MY_RUSTC example/subslice-patterns-const-eval.rs --crate-type bin -Cpanic=abort --target "$TARGET_TRIPLE" $RUN_WRAPPER ./target/out/subslice-patterns-const-eval echo "[AOT] track-caller-attribute" - $MY_RUSTC example/track-caller-attribute.rs --crate-type bin -Cpanic=abort --target $TARGET_TRIPLE + $MY_RUSTC example/track-caller-attribute.rs --crate-type bin -Cpanic=abort --target "$TARGET_TRIPLE" $RUN_WRAPPER ./target/out/track-caller-attribute echo "[AOT] mod_bench" - $MY_RUSTC example/mod_bench.rs --crate-type bin --target $TARGET_TRIPLE + $MY_RUSTC example/mod_bench.rs --crate-type bin --target "$TARGET_TRIPLE" $RUN_WRAPPER ./target/out/mod_bench pushd rand @@ -73,13 +73,13 @@ function extended_sysroot_tests() { pushd simple-raytracer if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then echo "[BENCH COMPILE] ebobby/simple-raytracer" - hyperfine --runs ${RUN_RUNS:-10} --warmup 1 --prepare "cargo clean" \ + hyperfine --runs "${RUN_RUNS:-10}" --warmup 1 --prepare "cargo clean" \ "RUSTC=rustc RUSTFLAGS='' cargo build" \ "../build/cargo.sh build" echo "[BENCH RUN] ebobby/simple-raytracer" cp ./target/debug/main ./raytracer_cg_clif - hyperfine --runs ${RUN_RUNS:-10} ./raytracer_cg_llvm ./raytracer_cg_clif + hyperfine --runs "${RUN_RUNS:-10}" ./raytracer_cg_llvm ./raytracer_cg_clif else echo "[BENCH COMPILE] ebobby/simple-raytracer (skipped)" echo "[COMPILE] ebobby/simple-raytracer" diff --git a/compiler/rustc_codegen_cranelift/src/abi/mod.rs b/compiler/rustc_codegen_cranelift/src/abi/mod.rs index ac076789f2e..aee274ab4a8 100644 --- a/compiler/rustc_codegen_cranelift/src/abi/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/abi/mod.rs @@ -214,10 +214,8 @@ pub(crate) fn get_function_name_and_sig<'tcx>( support_vararg: bool, ) -> (String, Signature) { assert!(!inst.substs.needs_infer()); - let fn_sig = tcx.normalize_erasing_late_bound_regions( - ParamEnv::reveal_all(), - fn_sig_for_fn_abi(tcx, inst), - ); + let fn_sig = tcx + .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_sig_for_fn_abi(tcx, inst)); if fn_sig.c_variadic && !support_vararg { tcx.sess.span_fatal( tcx.def_span(inst.def_id()), diff --git a/compiler/rustc_codegen_cranelift/src/archive.rs b/compiler/rustc_codegen_cranelift/src/archive.rs index daf9fa6158f..96579054389 100644 --- a/compiler/rustc_codegen_cranelift/src/archive.rs +++ b/compiler/rustc_codegen_cranelift/src/archive.rs @@ -8,7 +8,7 @@ use rustc_codegen_ssa::back::archive::{find_library, ArchiveBuilder}; use rustc_codegen_ssa::METADATA_FILENAME; use rustc_session::Session; -use object::{Object, SymbolKind}; +use object::{Object, ObjectSymbol, SymbolKind}; #[derive(Debug)] enum ArchiveEntry { @@ -184,7 +184,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> { entry_name.as_bytes().to_vec(), object .symbols() - .filter_map(|(_index, symbol)| { + .filter_map(|symbol| { if symbol.is_undefined() || 
symbol.is_local() || symbol.kind() != SymbolKind::Data @@ -193,7 +193,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> { { None } else { - symbol.name().map(|name| name.as_bytes().to_vec()) + symbol.name().map(|name| name.as_bytes().to_vec()).ok() } }) .collect::<Vec<_>>(), diff --git a/compiler/rustc_codegen_cranelift/src/atomic_shim.rs b/compiler/rustc_codegen_cranelift/src/atomic_shim.rs index 2f0157c257b..674e6d90751 100644 --- a/compiler/rustc_codegen_cranelift/src/atomic_shim.rs +++ b/compiler/rustc_codegen_cranelift/src/atomic_shim.rs @@ -7,8 +7,7 @@ use crate::prelude::*; #[cfg(all(feature = "jit", unix))] #[no_mangle] -static mut __cg_clif_global_atomic_mutex: libc::pthread_mutex_t = - libc::PTHREAD_MUTEX_INITIALIZER; +static mut __cg_clif_global_atomic_mutex: libc::pthread_mutex_t = libc::PTHREAD_MUTEX_INITIALIZER; pub(crate) fn init_global_lock( module: &mut impl Module, diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index a4df371c88a..72073896a72 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -12,6 +12,10 @@ pub(crate) fn codegen_fn<'tcx>( ) { let tcx = cx.tcx; + let _inst_guard = + crate::PrintOnPanic(|| format!("{:?} {}", instance, tcx.symbol_name(instance).name)); + debug_assert!(!instance.substs.needs_infer()); + let mir = tcx.instance_mir(instance.def); // Declare function @@ -499,7 +503,8 @@ fn codegen_stmt<'tcx>( UnOp::Neg => match layout.ty.kind() { ty::Int(IntTy::I128) => { // FIXME remove this case once ineg.i128 works - let zero = CValue::const_val(fx, layout, ty::ScalarInt::null(layout.size)); + let zero = + CValue::const_val(fx, layout, ty::ScalarInt::null(layout.size)); crate::num::codegen_int_binop(fx, BinOp::Sub, zero, operand) } ty::Int(_) => CValue::by_val(fx.bcx.ins().ineg(val), layout), @@ -509,7 +514,11 @@ fn codegen_stmt<'tcx>( }; lval.write_cvalue(fx, res); } - Rvalue::Cast(CastKind::Pointer(PointerCast::ReifyFnPointer), ref operand, to_ty) => { + Rvalue::Cast( + CastKind::Pointer(PointerCast::ReifyFnPointer), + ref operand, + to_ty, + ) => { let from_ty = fx.monomorphize(operand.ty(&fx.mir.local_decls, fx.tcx)); let to_layout = fx.layout_of(fx.monomorphize(to_ty)); match *from_ty.kind() { @@ -530,9 +539,21 @@ fn codegen_stmt<'tcx>( _ => bug!("Trying to ReifyFnPointer on non FnDef {:?}", from_ty), } } - Rvalue::Cast(CastKind::Pointer(PointerCast::UnsafeFnPointer), ref operand, to_ty) - | Rvalue::Cast(CastKind::Pointer(PointerCast::MutToConstPointer), ref operand, to_ty) - | Rvalue::Cast(CastKind::Pointer(PointerCast::ArrayToPointer), ref operand, to_ty) => { + Rvalue::Cast( + CastKind::Pointer(PointerCast::UnsafeFnPointer), + ref operand, + to_ty, + ) + | Rvalue::Cast( + CastKind::Pointer(PointerCast::MutToConstPointer), + ref operand, + to_ty, + ) + | Rvalue::Cast( + CastKind::Pointer(PointerCast::ArrayToPointer), + ref operand, + to_ty, + ) => { let to_layout = fx.layout_of(fx.monomorphize(to_ty)); let operand = codegen_operand(fx, operand); lval.write_cvalue(fx, operand.cast_pointer_to(to_layout)); diff --git a/compiler/rustc_codegen_cranelift/src/bin/cg_clif.rs b/compiler/rustc_codegen_cranelift/src/bin/cg_clif.rs index cd01acc9a83..f4d23ebcf4e 100644 --- a/compiler/rustc_codegen_cranelift/src/bin/cg_clif.rs +++ b/compiler/rustc_codegen_cranelift/src/bin/cg_clif.rs @@ -26,15 +26,15 @@ impl rustc_driver::Callbacks for CraneliftPassesCallbacks { config.opts.cg.panic = Some(PanicStrategy::Abort); 
config.opts.debugging_opts.panic_abort_tests = true; - config.opts.maybe_sysroot = Some( - config.opts.maybe_sysroot.clone().unwrap_or_else( - || std::env::current_exe() - .unwrap() - .parent() - .unwrap() - .join("sysroot"), - ), - ); + config.opts.maybe_sysroot = Some(config.opts.maybe_sysroot.clone().unwrap_or_else(|| { + std::env::current_exe() + .unwrap() + .parent() + .unwrap() + .parent() + .unwrap() + .to_owned() + })); } } diff --git a/compiler/rustc_codegen_cranelift/src/common.rs b/compiler/rustc_codegen_cranelift/src/common.rs index d7d6c3e1677..1485d4451b8 100644 --- a/compiler/rustc_codegen_cranelift/src/common.rs +++ b/compiler/rustc_codegen_cranelift/src/common.rs @@ -233,7 +233,7 @@ pub(crate) fn type_min_max_value( let min_msb = bcx.ins().iconst(types::I64, (min >> 64) as u64 as i64); let min = bcx.ins().iconcat(min_lsb, min_msb); - let max = i128::MIN as u128; + let max = i128::MAX as u128; let max_lsb = bcx.ins().iconst(types::I64, max as u64 as i64); let max_msb = bcx.ins().iconst(types::I64, (max >> 64) as u64 as i64); let max = bcx.ins().iconcat(max_lsb, max_msb); @@ -364,7 +364,7 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> { self.instance.subst_mir_and_normalize_erasing_regions( self.tcx, ty::ParamEnv::reveal_all(), - value + value, ) } diff --git a/compiler/rustc_codegen_cranelift/src/constant.rs b/compiler/rustc_codegen_cranelift/src/constant.rs index 351bb6ecd23..544b020b711 100644 --- a/compiler/rustc_codegen_cranelift/src/constant.rs +++ b/compiler/rustc_codegen_cranelift/src/constant.rs @@ -163,10 +163,7 @@ pub(crate) fn codegen_const_value<'tcx>( assert!(!layout.is_unsized(), "sized const value"); if layout.is_zst() { - return CValue::by_ref( - crate::Pointer::dangling(layout.align.pref), - layout, - ); + return CValue::by_ref(crate::Pointer::dangling(layout.align.pref), layout); } match const_val { @@ -186,9 +183,7 @@ pub(crate) fn codegen_const_value<'tcx>( } match x { - Scalar::Int(int) => { - CValue::const_val(fx, layout, int) - } + Scalar::Int(int) => CValue::const_val(fx, layout, int), Scalar::Ptr(ptr) => { let alloc_kind = fx.tcx.get_global_alloc(ptr.alloc_id); let base_addr = match alloc_kind { diff --git a/compiler/rustc_codegen_cranelift/src/debuginfo/emit.rs b/compiler/rustc_codegen_cranelift/src/debuginfo/emit.rs index f6f795e4561..c21835b1fc3 100644 --- a/compiler/rustc_codegen_cranelift/src/debuginfo/emit.rs +++ b/compiler/rustc_codegen_cranelift/src/debuginfo/emit.rs @@ -76,7 +76,7 @@ impl WriterRelocate { #[cfg(feature = "jit")] pub(super) fn relocate_for_jit( mut self, - jit_product: &cranelift_simplejit::SimpleJITProduct, + jit_module: &cranelift_simplejit::SimpleJITModule, ) -> Vec<u8> { use std::convert::TryInto; @@ -84,8 +84,9 @@ impl WriterRelocate { match reloc.name { super::DebugRelocName::Section(_) => unreachable!(), super::DebugRelocName::Symbol(sym) => { - let addr = jit_product - .lookup_func(cranelift_module::FuncId::from_u32(sym.try_into().unwrap())); + let addr = jit_module.get_finalized_function( + cranelift_module::FuncId::from_u32(sym.try_into().unwrap()), + ); let val = (addr as u64 as i64 + reloc.addend) as u64; self.writer .write_udata_at(reloc.offset as usize, val, reloc.size) diff --git a/compiler/rustc_codegen_cranelift/src/debuginfo/unwind.rs b/compiler/rustc_codegen_cranelift/src/debuginfo/unwind.rs index 68138404c24..e0f62b64e6b 100644 --- a/compiler/rustc_codegen_cranelift/src/debuginfo/unwind.rs +++ b/compiler/rustc_codegen_cranelift/src/debuginfo/unwind.rs @@ -80,7 +80,7 @@ impl<'tcx> UnwindContext<'tcx> { 
#[cfg(feature = "jit")] pub(crate) unsafe fn register_jit( self, - jit_product: &cranelift_simplejit::SimpleJITProduct, + jit_module: &cranelift_simplejit::SimpleJITModule, ) -> Option<UnwindRegistry> { let mut eh_frame = EhFrame::from(super::emit::WriterRelocate::new(super::target_endian( self.tcx, @@ -91,7 +91,7 @@ impl<'tcx> UnwindContext<'tcx> { return None; } - let mut eh_frame = eh_frame.0.relocate_for_jit(jit_product); + let mut eh_frame = eh_frame.0.relocate_for_jit(jit_module); // GCC expects a terminating "empty" length, so write a 0 length at the end of the table. eh_frame.extend(&[0, 0, 0, 0]); diff --git a/compiler/rustc_codegen_cranelift/src/discriminant.rs b/compiler/rustc_codegen_cranelift/src/discriminant.rs index 1e8e86add1a..ad635016a91 100644 --- a/compiler/rustc_codegen_cranelift/src/discriminant.rs +++ b/compiler/rustc_codegen_cranelift/src/discriminant.rs @@ -30,8 +30,16 @@ pub(crate) fn codegen_set_discriminant<'tcx>( .ty .discriminant_for_variant(fx.tcx, variant_index) .unwrap() - .val - .into(); + .val; + let to = if ptr.layout().abi.is_signed() { + ty::ScalarInt::try_from_int( + ptr.layout().size.sign_extend(to) as i128, + ptr.layout().size, + ) + .unwrap() + } else { + ty::ScalarInt::try_from_uint(to, ptr.layout().size).unwrap() + }; let discr = CValue::const_val(fx, ptr.layout(), to); ptr.write_cvalue(fx, discr); } @@ -49,8 +57,12 @@ pub(crate) fn codegen_set_discriminant<'tcx>( if variant_index != dataful_variant { let niche = place.place_field(fx, mir::Field::new(tag_field)); let niche_value = variant_index.as_u32() - niche_variants.start().as_u32(); - let niche_value = u128::from(niche_value).wrapping_add(niche_start); - let niche_llval = CValue::const_val(fx, niche.layout(), niche_value.into()); + let niche_value = ty::ScalarInt::try_from_uint( + u128::from(niche_value).wrapping_add(niche_start), + niche.layout().size, + ) + .unwrap(); + let niche_llval = CValue::const_val(fx, niche.layout(), niche_value); niche.write_cvalue(fx, niche_llval); } } @@ -78,7 +90,16 @@ pub(crate) fn codegen_get_discriminant<'tcx>( .ty .discriminant_for_variant(fx.tcx, *index) .map_or(u128::from(index.as_u32()), |discr| discr.val); - return CValue::const_val(fx, dest_layout, discr_val.into()); + let discr_val = if dest_layout.abi.is_signed() { + ty::ScalarInt::try_from_int( + dest_layout.size.sign_extend(discr_val) as i128, + dest_layout.size, + ) + .unwrap() + } else { + ty::ScalarInt::try_from_uint(discr_val, dest_layout.size).unwrap() + }; + return CValue::const_val(fx, dest_layout, discr_val); } Variants::Multiple { tag, diff --git a/compiler/rustc_codegen_cranelift/src/driver/aot.rs b/compiler/rustc_codegen_cranelift/src/driver/aot.rs index c0245aa1e02..491d6cbbf79 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/aot.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/aot.rs @@ -145,7 +145,11 @@ fn module_codegen(tcx: TyCtxt<'_>, cgu_name: rustc_span::Symbol) -> ModuleCodege } let mut cx = crate::CodegenCx::new(tcx, module, tcx.sess.opts.debuginfo != DebugInfo::None); - super::codegen_mono_items(&mut cx, mono_items); + super::predefine_mono_items(&mut cx, &mono_items); + for (mono_item, (linkage, visibility)) in mono_items { + let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility); + super::codegen_mono_item(&mut cx, mono_item, linkage); + } let (mut module, global_asm, debug, mut unwind_context) = tcx.sess.time("finalize CodegenCx", || cx.finalize()); crate::main_shim::maybe_create_entry_wrapper(tcx, &mut module, &mut unwind_context, 
false); diff --git a/compiler/rustc_codegen_cranelift/src/driver/jit.rs b/compiler/rustc_codegen_cranelift/src/driver/jit.rs index 3f47df7d844..5a844841c2c 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/jit.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/jit.rs @@ -70,7 +70,11 @@ pub(super) fn run_jit(tcx: TyCtxt<'_>) -> ! { let (mut jit_module, global_asm, _debug, mut unwind_context) = super::time(tcx, "codegen mono items", || { - super::codegen_mono_items(&mut cx, mono_items); + super::predefine_mono_items(&mut cx, &mono_items); + for (mono_item, (linkage, visibility)) in mono_items { + let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility); + super::codegen_mono_item(&mut cx, mono_item, linkage); + } tcx.sess.time("finalize CodegenCx", || cx.finalize()) }); if !global_asm.is_empty() { @@ -81,11 +85,11 @@ pub(super) fn run_jit(tcx: TyCtxt<'_>) -> ! { tcx.sess.abort_if_errors(); - let jit_product = jit_module.finish(); + jit_module.finalize_definitions(); - let _unwind_register_guard = unsafe { unwind_context.register_jit(&jit_product) }; + let _unwind_register_guard = unsafe { unwind_context.register_jit(&jit_module) }; - let finalized_main: *const u8 = jit_product.lookup_func(main_func_id); + let finalized_main: *const u8 = jit_module.get_finalized_function(main_func_id); println!("Rustc codegen cranelift will JIT run the executable, because --jit was passed"); @@ -140,11 +144,11 @@ fn load_imported_symbols_for_jit(tcx: TyCtxt<'_>) -> Vec<(String, *const u8)> { let mut imported_symbols = Vec::new(); for path in dylib_paths { - use object::Object; + use object::{Object, ObjectSymbol}; let lib = libloading::Library::new(&path).unwrap(); let obj = std::fs::read(path).unwrap(); let obj = object::File::parse(&obj).unwrap(); - imported_symbols.extend(obj.dynamic_symbols().filter_map(|(_idx, symbol)| { + imported_symbols.extend(obj.dynamic_symbols().filter_map(|symbol| { let name = symbol.name().unwrap().to_string(); if name.is_empty() || !symbol.is_global() || symbol.is_undefined() { return None; diff --git a/compiler/rustc_codegen_cranelift/src/driver/mod.rs b/compiler/rustc_codegen_cranelift/src/driver/mod.rs index a11dc57ee64..7b8cc2ddd48 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/mod.rs @@ -1,4 +1,4 @@ -//! Drivers are responsible for calling [`codegen_mono_items`] and performing any further actions +//! Drivers are responsible for calling [`codegen_mono_item`] and performing any further actions //! like JIT executing or writing object files. 
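Both drivers now run mono-item codegen in two explicit phases: `predefine_mono_items` declares every item up front, and the per-item loop (now living in `aot.rs`/`jit.rs` instead of a shared `codegen_mono_items`) generates each definition. A toy sketch of that shape; `Backend` and its methods are illustrative stand-ins, not the real `CodegenCx`/`Module` API:

// Phase 1 declares names/signatures so that phase 2 can reference any item,
// regardless of the order in which items are codegened.
struct Backend {
    declared: Vec<String>,
    defined: Vec<String>,
}

impl Backend {
    fn predefine(&mut self, items: &[&str]) {
        for item in items {
            self.declared.push(item.to_string());
        }
    }

    fn define(&mut self, item: &str) {
        assert!(self.declared.iter().any(|d| d == item), "items must be predefined first");
        self.defined.push(item.to_string());
    }
}

fn main() {
    let items = ["main", "helper"];
    let mut backend = Backend { declared: Vec::new(), defined: Vec::new() };
    backend.predefine(&items);  // corresponds to predefine_mono_items
    for item in items {
        // The per-item loop now belongs to the driver (aot.rs / jit.rs).
        backend.define(item);
    }
    assert_eq!(backend.defined.len(), 2);
}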
use std::any::Any; @@ -40,12 +40,12 @@ pub(crate) fn codegen_crate( aot::run_aot(tcx, metadata, need_metadata_module) } -fn codegen_mono_items<'tcx>( +fn predefine_mono_items<'tcx>( cx: &mut crate::CodegenCx<'tcx, impl Module>, - mono_items: Vec<(MonoItem<'tcx>, (RLinkage, Visibility))>, + mono_items: &[(MonoItem<'tcx>, (RLinkage, Visibility))], ) { cx.tcx.sess.time("predefine functions", || { - for &(mono_item, (linkage, visibility)) in &mono_items { + for &(mono_item, (linkage, visibility)) in mono_items { match mono_item { MonoItem::Fn(instance) => { let (name, sig) = get_function_name_and_sig( @@ -61,11 +61,6 @@ fn codegen_mono_items<'tcx>( } } }); - - for (mono_item, (linkage, visibility)) in mono_items { - let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility); - codegen_mono_item(cx, mono_item, linkage); - } } fn codegen_mono_item<'tcx, M: Module>( @@ -73,20 +68,15 @@ fn codegen_mono_item<'tcx, M: Module>( mono_item: MonoItem<'tcx>, linkage: Linkage, ) { - let tcx = cx.tcx; match mono_item { MonoItem::Fn(inst) => { - let _inst_guard = - crate::PrintOnPanic(|| format!("{:?} {}", inst, tcx.symbol_name(inst).name)); - debug_assert!(!inst.substs.needs_infer()); - tcx.sess + cx.tcx + .sess .time("codegen fn", || crate::base::codegen_fn(cx, inst, linkage)); } - MonoItem::Static(def_id) => { - crate::constant::codegen_static(&mut cx.constants_cx, def_id); - } + MonoItem::Static(def_id) => crate::constant::codegen_static(&mut cx.constants_cx, def_id), MonoItem::GlobalAsm(hir_id) => { - let item = tcx.hir().expect_item(hir_id); + let item = cx.tcx.hir().expect_item(hir_id); if let rustc_hir::ItemKind::GlobalAsm(rustc_hir::GlobalAsm { asm }) = item.kind { cx.global_asm.push_str(&*asm.as_str()); cx.global_asm.push_str("\n\n"); diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs index ab16fabd348..3563aa250a9 100644 --- a/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs +++ b/compiler/rustc_codegen_cranelift/src/intrinsics/mod.rs @@ -263,6 +263,48 @@ fn simd_pair_for_each_lane<'tcx, M: Module>( } } +fn simd_reduce<'tcx, M: Module>( + fx: &mut FunctionCx<'_, 'tcx, M>, + val: CValue<'tcx>, + ret: CPlace<'tcx>, + f: impl Fn(&mut FunctionCx<'_, 'tcx, M>, TyAndLayout<'tcx>, Value, Value) -> Value, +) { + let (lane_layout, lane_count) = lane_type_and_count(fx.tcx, val.layout()); + assert_eq!(lane_layout, ret.layout()); + + let mut res_val = val.value_field(fx, mir::Field::new(0)).load_scalar(fx); + for lane_idx in 1..lane_count { + let lane = val + .value_field(fx, mir::Field::new(lane_idx.into())) + .load_scalar(fx); + res_val = f(fx, lane_layout, res_val, lane); + } + let res = CValue::by_val(res_val, lane_layout); + ret.write_cvalue(fx, res); +} + +fn simd_reduce_bool<'tcx, M: Module>( + fx: &mut FunctionCx<'_, 'tcx, M>, + val: CValue<'tcx>, + ret: CPlace<'tcx>, + f: impl Fn(&mut FunctionCx<'_, 'tcx, M>, Value, Value) -> Value, +) { + let (_lane_layout, lane_count) = lane_type_and_count(fx.tcx, val.layout()); + assert!(ret.layout().ty.is_bool()); + + let res_val = val.value_field(fx, mir::Field::new(0)).load_scalar(fx); + let mut res_val = fx.bcx.ins().band_imm(res_val, 1); // mask to boolean + for lane_idx in 1..lane_count { + let lane = val + .value_field(fx, mir::Field::new(lane_idx.into())) + .load_scalar(fx); + let lane = fx.bcx.ins().band_imm(lane, 1); // mask to boolean + res_val = f(fx, res_val, lane); + } + let res = CValue::by_val(res_val, ret.layout()); + ret.write_cvalue(fx, res); +} 
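The new `simd_reduce` helper seeds its accumulator from lane 0 and folds the remaining lanes with a caller-supplied binary operation; `simd_reduce_bool` does the same after masking each lane down to its low bit. The same folding shape over a plain slice (a simplification: the real helpers operate on Cranelift `Value`s, not Rust slices):

fn reduce<T: Copy>(lanes: &[T], f: impl Fn(T, T) -> T) -> T {
    // Seed with lane 0, then fold the remaining lanes left to right.
    let mut acc = lanes[0];
    for &lane in &lanes[1..] {
        acc = f(acc, lane);
    }
    acc
}

fn reduce_bool(lanes: &[u8], f: impl Fn(bool, bool) -> bool) -> bool {
    // Each lane is masked to its low bit before being combined.
    let mut acc = lanes[0] & 1 != 0;
    for &lane in &lanes[1..] {
        acc = f(acc, lane & 1 != 0);
    }
    acc
}

fn main() {
    assert_eq!(reduce(&[1u32, 2, 3, 4], |a, b| a + b), 10); // simd_reduce_add_*
    assert_eq!(reduce(&[1u32, 2, 3, 4], |a, b| a * b), 24); // simd_reduce_mul_*
    assert!(!reduce_bool(&[1, 1, 0, 1], |a, b| a & b));     // simd_reduce_all
    assert!(reduce_bool(&[0, 0, 1, 0], |a, b| a | b));      // simd_reduce_any
}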
+ fn bool_to_zero_or_max_uint<'tcx>( fx: &mut FunctionCx<'_, 'tcx, impl Module>, layout: TyAndLayout<'tcx>, @@ -287,7 +329,7 @@ fn bool_to_zero_or_max_uint<'tcx>( } macro simd_cmp { - ($fx:expr, $cc:ident($x:ident, $y:ident) -> $ret:ident) => { + ($fx:expr, $cc:ident|$cc_f:ident($x:ident, $y:ident) -> $ret:ident) => { let vector_ty = clif_vector_type($fx.tcx, $x.layout()); if let Some(vector_ty) = vector_ty { @@ -308,6 +350,7 @@ macro simd_cmp { |fx, lane_layout, res_lane_layout, x_lane, y_lane| { let res_lane = match lane_layout.ty.kind() { ty::Uint(_) | ty::Int(_) => fx.bcx.ins().icmp(IntCC::$cc, x_lane, y_lane), + ty::Float(_) => fx.bcx.ins().fcmp(FloatCC::$cc_f, x_lane, y_lane), _ => unreachable!("{:?}", lane_layout.ty), }; bool_to_zero_or_max_uint(fx, res_lane_layout, res_lane) @@ -315,7 +358,7 @@ macro simd_cmp { ); } }, - ($fx:expr, $cc_u:ident|$cc_s:ident($x:ident, $y:ident) -> $ret:ident) => { + ($fx:expr, $cc_u:ident|$cc_s:ident|$cc_f:ident($x:ident, $y:ident) -> $ret:ident) => { // FIXME use vector icmp when possible simd_pair_for_each_lane( $fx, @@ -326,6 +369,7 @@ macro simd_cmp { let res_lane = match lane_layout.ty.kind() { ty::Uint(_) => fx.bcx.ins().icmp(IntCC::$cc_u, x_lane, y_lane), ty::Int(_) => fx.bcx.ins().icmp(IntCC::$cc_s, x_lane, y_lane), + ty::Float(_) => fx.bcx.ins().fcmp(FloatCC::$cc_f, x_lane, y_lane), _ => unreachable!("{:?}", lane_layout.ty), }; bool_to_zero_or_max_uint(fx, res_lane_layout, res_lane) @@ -497,12 +541,12 @@ pub(crate) fn codegen_intrinsic_call<'tcx>( }; copy | copy_nonoverlapping, <elem_ty> (v src, v dst, v count) { let elem_size: u64 = fx.layout_of(elem_ty).size.bytes(); - let elem_size = fx - .bcx - .ins() - .iconst(fx.pointer_type, elem_size as i64); assert_eq!(args.len(), 3); - let byte_amount = fx.bcx.ins().imul(count, elem_size); + let byte_amount = if elem_size != 1 { + fx.bcx.ins().imul_imm(count, elem_size as i64) + } else { + count + }; if intrinsic.contains("nonoverlapping") { // FIXME emit_small_memcpy @@ -515,12 +559,12 @@ pub(crate) fn codegen_intrinsic_call<'tcx>( // NOTE: the volatile variants have src and dst swapped volatile_copy_memory | volatile_copy_nonoverlapping_memory, <elem_ty> (v dst, v src, v count) { let elem_size: u64 = fx.layout_of(elem_ty).size.bytes(); - let elem_size = fx - .bcx - .ins() - .iconst(fx.pointer_type, elem_size as i64); assert_eq!(args.len(), 3); - let byte_amount = fx.bcx.ins().imul(count, elem_size); + let byte_amount = if elem_size != 1 { + fx.bcx.ins().imul_imm(count, elem_size as i64) + } else { + count + }; // FIXME make the copy actually volatile when using emit_small_mem{cpy,move} if intrinsic.contains("nonoverlapping") { @@ -676,7 +720,11 @@ pub(crate) fn codegen_intrinsic_call<'tcx>( offset | arith_offset, (c base, v offset) { let pointee_ty = base.layout().ty.builtin_deref(true).unwrap().ty; let pointee_size = fx.layout_of(pointee_ty).size.bytes(); - let ptr_diff = fx.bcx.ins().imul_imm(offset, pointee_size as i64); + let ptr_diff = if pointee_size != 1 { + fx.bcx.ins().imul_imm(offset, pointee_size as i64) + } else { + offset + }; let base_val = base.load_scalar(fx); let res = fx.bcx.ins().iadd(base_val, ptr_diff); ret.write_cvalue(fx, CValue::by_val(res, base.layout())); @@ -688,7 +736,11 @@ pub(crate) fn codegen_intrinsic_call<'tcx>( write_bytes | volatile_set_memory, (c dst, v val, v count) { let pointee_ty = dst.layout().ty.builtin_deref(true).unwrap().ty; let pointee_size = fx.layout_of(pointee_ty).size.bytes(); - let count = fx.bcx.ins().imul_imm(count, pointee_size as i64); + let 
count = if pointee_size != 1 { + fx.bcx.ins().imul_imm(count, pointee_size as i64) + } else { + count + }; let dst_ptr = dst.load_scalar(fx); // FIXME make the memset actually volatile when switching to emit_small_memset // FIXME use emit_small_memset diff --git a/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs b/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs index 2e31c4669e2..2b32e866e5e 100644 --- a/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs +++ b/compiler/rustc_codegen_cranelift/src/intrinsics/simd.rs @@ -35,30 +35,33 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>( }); }; - // FIXME support float comparisons simd_eq, (c x, c y) { validate_simd_type!(fx, intrinsic, span, x.layout().ty); - simd_cmp!(fx, Equal(x, y) -> ret); + simd_cmp!(fx, Equal|Equal(x, y) -> ret); }; simd_ne, (c x, c y) { validate_simd_type!(fx, intrinsic, span, x.layout().ty); - simd_cmp!(fx, NotEqual(x, y) -> ret); + simd_cmp!(fx, NotEqual|NotEqual(x, y) -> ret); }; simd_lt, (c x, c y) { validate_simd_type!(fx, intrinsic, span, x.layout().ty); - simd_cmp!(fx, UnsignedLessThan|SignedLessThan(x, y) -> ret); + simd_cmp!(fx, UnsignedLessThan|SignedLessThan|LessThan(x, y) -> ret); }; simd_le, (c x, c y) { validate_simd_type!(fx, intrinsic, span, x.layout().ty); - simd_cmp!(fx, UnsignedLessThanOrEqual|SignedLessThanOrEqual(x, y) -> ret); + simd_cmp!(fx, UnsignedLessThanOrEqual|SignedLessThanOrEqual|LessThanOrEqual(x, y) -> ret); }; simd_gt, (c x, c y) { validate_simd_type!(fx, intrinsic, span, x.layout().ty); - simd_cmp!(fx, UnsignedGreaterThan|SignedGreaterThan(x, y) -> ret); + simd_cmp!(fx, UnsignedGreaterThan|SignedGreaterThan|GreaterThan(x, y) -> ret); }; simd_ge, (c x, c y) { validate_simd_type!(fx, intrinsic, span, x.layout().ty); - simd_cmp!(fx, UnsignedGreaterThanOrEqual|SignedGreaterThanOrEqual(x, y) -> ret); + simd_cmp!( + fx, + UnsignedGreaterThanOrEqual|SignedGreaterThanOrEqual|GreaterThanOrEqual + (x, y) -> ret + ); }; // simd_shuffle32<T, U>(x: T, y: T, idx: [u32; 32]) -> U @@ -107,9 +110,9 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>( for (out_idx, in_idx) in indexes.into_iter().enumerate() { let in_lane = if in_idx < lane_count { - x.value_field(fx, mir::Field::new(in_idx.try_into().unwrap())) + x.value_field(fx, mir::Field::new(in_idx.into())) } else { - y.value_field(fx, mir::Field::new((in_idx - lane_count).try_into().unwrap())) + y.value_field(fx, mir::Field::new((in_idx - lane_count).into())) }; let out_lane = ret.place_field(fx, mir::Field::new(out_idx)); out_lane.write_cvalue(fx, in_lane); @@ -143,10 +146,17 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>( let idx_const = if let Some(idx_const) = crate::constant::mir_operand_get_const_val(fx, idx) { idx_const } else { - fx.tcx.sess.span_fatal( + fx.tcx.sess.span_warn( span, "Index argument for `simd_extract` is not a constant", ); + let res = crate::trap::trap_unimplemented_ret_value( + fx, + ret.layout(), + "Index argument for `simd_extract` is not a constant", + ); + ret.write_cvalue(fx, res); + return; }; let idx = idx_const.val.try_to_bits(Size::from_bytes(4 /* u32*/)).unwrap_or_else(|| panic!("kind not scalar: {:?}", idx_const)); @@ -207,7 +217,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>( assert_eq!(lane_count, ret_lane_count); for lane in 0..lane_count { - let lane = mir::Field::new(lane.try_into().unwrap()); + let lane = mir::Field::new(lane.into()); let a_lane = a.value_field(fx, lane).load_scalar(fx); let b_lane = b.value_field(fx, lane).load_scalar(fx); let c_lane = c.value_field(fx, 
lane).load_scalar(fx); @@ -228,11 +238,42 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>( simd_flt_binop!(fx, fmax(x, y) -> ret); }; + simd_reduce_add_ordered | simd_reduce_add_unordered, (c v) { + validate_simd_type!(fx, intrinsic, span, v.layout().ty); + simd_reduce(fx, v, ret, |fx, lane_layout, a, b| { + if lane_layout.ty.is_floating_point() { + fx.bcx.ins().fadd(a, b) + } else { + fx.bcx.ins().iadd(a, b) + } + }); + }; + + simd_reduce_mul_ordered | simd_reduce_mul_unordered, (c v) { + validate_simd_type!(fx, intrinsic, span, v.layout().ty); + simd_reduce(fx, v, ret, |fx, lane_layout, a, b| { + if lane_layout.ty.is_floating_point() { + fx.bcx.ins().fmul(a, b) + } else { + fx.bcx.ins().imul(a, b) + } + }); + }; + + simd_reduce_all, (c v) { + validate_simd_type!(fx, intrinsic, span, v.layout().ty); + simd_reduce_bool(fx, v, ret, |fx, a, b| fx.bcx.ins().band(a, b)); + }; + + simd_reduce_any, (c v) { + validate_simd_type!(fx, intrinsic, span, v.layout().ty); + simd_reduce_bool(fx, v, ret, |fx, a, b| fx.bcx.ins().bor(a, b)); + }; + // simd_fabs // simd_saturating_add // simd_bitmask // simd_select - // simd_reduce_add_{,un}ordered // simd_rem } } diff --git a/compiler/rustc_codegen_cranelift/src/trap.rs b/compiler/rustc_codegen_cranelift/src/trap.rs index 690d96764a8..67495c74148 100644 --- a/compiler/rustc_codegen_cranelift/src/trap.rs +++ b/compiler/rustc_codegen_cranelift/src/trap.rs @@ -67,3 +67,15 @@ pub(crate) fn trap_unimplemented(fx: &mut FunctionCx<'_, '_, impl Module>, msg: let true_ = fx.bcx.ins().iconst(types::I32, 1); fx.bcx.ins().trapnz(true_, TrapCode::User(!0)); } + +/// Like `trap_unimplemented` but returns a fake value of the specified type. +/// +/// Trap code: user65535 +pub(crate) fn trap_unimplemented_ret_value<'tcx>( + fx: &mut FunctionCx<'_, 'tcx, impl Module>, + dest_layout: TyAndLayout<'tcx>, + msg: impl AsRef<str>, +) -> CValue<'tcx> { + trap_unimplemented(fx, msg); + CValue::by_ref(Pointer::const_addr(fx, 0), dest_layout) +} diff --git a/compiler/rustc_codegen_ssa/src/back/link.rs b/compiler/rustc_codegen_ssa/src/back/link.rs index 5a627a0efa3..22e2aa3b5c8 100644 --- a/compiler/rustc_codegen_ssa/src/back/link.rs +++ b/compiler/rustc_codegen_ssa/src/back/link.rs @@ -2090,9 +2090,10 @@ fn add_apple_sdk(cmd: &mut dyn Linker, sess: &Session, flavor: LinkerFlavor) { ("aarch64", "tvos") => "appletvos", ("x86_64", "tvos") => "appletvsimulator", ("arm", "ios") => "iphoneos", + ("aarch64", "ios") if llvm_target.contains("macabi") => "macosx", ("aarch64", "ios") => "iphoneos", ("x86", "ios") => "iphonesimulator", - ("x86_64", "ios") if llvm_target.contains("macabi") => "macosx10.15", + ("x86_64", "ios") if llvm_target.contains("macabi") => "macosx", ("x86_64", "ios") => "iphonesimulator", _ => { sess.err(&format!("unsupported arch `{}` for os `{}`", arch, os)); diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 9651d0505e6..e59832a8eed 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -451,7 +451,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { Inhabited, ZeroValid, UninitValid, - }; + } let panic_intrinsic = intrinsic.and_then(|i| match i { sym::assert_inhabited => Some(AssertIntrinsic::Inhabited), sym::assert_zero_valid => Some(AssertIntrinsic::ZeroValid), diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index 1b5c06a96bc..b1071bf4308 100644 --- a/compiler/rustc_expand/src/base.rs +++ 
b/compiler/rustc_expand/src/base.rs @@ -374,7 +374,6 @@ macro_rules! make_stmts_default { id: ast::DUMMY_NODE_ID, span: e.span, kind: ast::StmtKind::Expr(e), - tokens: None }] }) }; @@ -617,7 +616,6 @@ impl MacResult for DummyResult { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Expr(DummyResult::raw_expr(self.span, self.is_error)), span: self.span, - tokens: None }]) } diff --git a/compiler/rustc_expand/src/build.rs b/compiler/rustc_expand/src/build.rs index 30f0fc6cddf..fe67b401fcc 100644 --- a/compiler/rustc_expand/src/build.rs +++ b/compiler/rustc_expand/src/build.rs @@ -140,12 +140,7 @@ impl<'a> ExtCtxt<'a> { } pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt { - ast::Stmt { - id: ast::DUMMY_NODE_ID, - span: expr.span, - kind: ast::StmtKind::Expr(expr), - tokens: None, - } + ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) } } pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) -> ast::Stmt { @@ -162,13 +157,9 @@ impl<'a> ExtCtxt<'a> { id: ast::DUMMY_NODE_ID, span: sp, attrs: AttrVec::new(), - }); - ast::Stmt { - id: ast::DUMMY_NODE_ID, - kind: ast::StmtKind::Local(local), - span: sp, tokens: None, - } + }); + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp } } // Generates `let _: Type;`, which is usually used for type assertions. @@ -180,17 +171,13 @@ impl<'a> ExtCtxt<'a> { id: ast::DUMMY_NODE_ID, span, attrs: AttrVec::new(), + tokens: None, }); - ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span, tokens: None } + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span } } pub fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt { - ast::Stmt { - id: ast::DUMMY_NODE_ID, - kind: ast::StmtKind::Item(item), - span: sp, - tokens: None, - } + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Item(item), span: sp } } pub fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> { @@ -200,7 +187,6 @@ impl<'a> ExtCtxt<'a> { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr), - tokens: None, }], ) } diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index ce560c6c178..4ba75c21cf0 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -1274,12 +1274,6 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { // we'll expand attributes on expressions separately if !stmt.is_expr() { let attr = if stmt.is_item() { - // FIXME: Implement proper token collection for statements - if let StmtKind::Item(item) = &mut stmt.kind { - stmt.tokens = item.tokens.take() - } else { - unreachable!() - }; self.take_first_attr(&mut stmt) } else { // Ignore derives on non-item statements for backwards compatibility. @@ -1295,7 +1289,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { } if let StmtKind::MacCall(mac) = stmt.kind { - let MacCallStmt { mac, style, attrs } = mac.into_inner(); + let MacCallStmt { mac, style, attrs, tokens: _ } = mac.into_inner(); self.check_attributes(&attrs); let mut placeholder = self.collect_bang(mac, stmt.span, AstFragmentKind::Stmts).make_stmts(); @@ -1312,10 +1306,10 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { } // The placeholder expander gives ids to statements, so we avoid folding the id here. 
- let ast::Stmt { id, kind, span, tokens } = stmt; + let ast::Stmt { id, kind, span } = stmt; noop_flat_map_stmt_kind(kind, self) .into_iter() - .map(|kind| ast::Stmt { id, kind, span, tokens: tokens.clone() }) + .map(|kind| ast::Stmt { id, kind, span }) .collect() } diff --git a/compiler/rustc_expand/src/placeholders.rs b/compiler/rustc_expand/src/placeholders.rs index f0e5826f403..ce19e813bb3 100644 --- a/compiler/rustc_expand/src/placeholders.rs +++ b/compiler/rustc_expand/src/placeholders.rs @@ -104,8 +104,9 @@ pub fn placeholder( mac: mac_placeholder(), style: ast::MacStmtStyle::Braces, attrs: ast::AttrVec::new(), + tokens: None, }); - ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac), tokens: None } + ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac) } }]), AstFragmentKind::Arms => AstFragment::Arms(smallvec![ast::Arm { attrs: Default::default(), @@ -331,12 +332,8 @@ impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> { // FIXME: We will need to preserve the original semicolon token and // span as part of #15701 - let empty_stmt = ast::Stmt { - id: ast::DUMMY_NODE_ID, - kind: ast::StmtKind::Empty, - span: DUMMY_SP, - tokens: None, - }; + let empty_stmt = + ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Empty, span: DUMMY_SP }; if let Some(stmt) = stmts.pop() { if stmt.has_trailing_semicolon() { diff --git a/compiler/rustc_hir/src/arena.rs b/compiler/rustc_hir/src/arena.rs index 85ab7906d25..44dfdcfccab 100644 --- a/compiler/rustc_hir/src/arena.rs +++ b/compiler/rustc_hir/src/arena.rs @@ -29,6 +29,7 @@ macro_rules! arena_types { [] field_pat: rustc_hir::FieldPat<$tcx>, [] fn_decl: rustc_hir::FnDecl<$tcx>, [] foreign_item: rustc_hir::ForeignItem<$tcx>, + [few] foreign_item_ref: rustc_hir::ForeignItemRef<$tcx>, [] impl_item_ref: rustc_hir::ImplItemRef<$tcx>, [few] inline_asm: rustc_hir::InlineAsm<$tcx>, [few] llvm_inline_asm: rustc_hir::LlvmInlineAsm<$tcx>, diff --git a/compiler/rustc_hir/src/hir.rs b/compiler/rustc_hir/src/hir.rs index 4497c8c0eaa..f01d4417105 100644 --- a/compiler/rustc_hir/src/hir.rs +++ b/compiler/rustc_hir/src/hir.rs @@ -579,6 +579,7 @@ pub struct ModuleItems { pub items: BTreeSet<HirId>, pub trait_items: BTreeSet<TraitItemId>, pub impl_items: BTreeSet<ImplItemId>, + pub foreign_items: BTreeSet<ForeignItemId>, } /// A type representing only the top-level module. @@ -612,6 +613,7 @@ pub struct Crate<'hir> { pub trait_items: BTreeMap<TraitItemId, TraitItem<'hir>>, pub impl_items: BTreeMap<ImplItemId, ImplItem<'hir>>, + pub foreign_items: BTreeMap<ForeignItemId, ForeignItem<'hir>>, pub bodies: BTreeMap<BodyId, Body<'hir>>, pub trait_impls: BTreeMap<DefId, Vec<HirId>>, @@ -644,6 +646,10 @@ impl Crate<'hir> { &self.impl_items[&id] } + pub fn foreign_item(&self, id: ForeignItemId) -> &ForeignItem<'hir> { + &self.foreign_items[&id] + } + pub fn body(&self, id: BodyId) -> &Body<'hir> { &self.bodies[&id] } @@ -673,6 +679,10 @@ impl Crate<'_> { for impl_item in self.impl_items.values() { visitor.visit_impl_item(impl_item); } + + for foreign_item in self.foreign_items.values() { + visitor.visit_foreign_item(foreign_item); + } } /// A parallel version of `visit_all_item_likes`. 
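`Crate` now stores foreign items the same way it stores trait and impl items: the full `ForeignItem` lives out of line in a map keyed by `ForeignItemId`, and `visit_all_item_likes` walks that map. A toy sketch of the id-plus-side-table pattern; the type names here are illustrative, not the HIR definitions:

use std::collections::BTreeMap;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct ItemId(u32);

/// Full item, stored once in a side table on the crate.
struct FullItem {
    name: String,
    signature: String,
}

/// Lightweight reference kept inline in the parent node.
struct ItemRef {
    id: ItemId,
    name: String,
}

struct Krate {
    foreign_items: BTreeMap<ItemId, FullItem>, // out-of-line storage
    extern_block: Vec<ItemRef>,                // the parent only holds ids + names
}

impl Krate {
    fn foreign_item(&self, id: ItemId) -> &FullItem {
        &self.foreign_items[&id]
    }
}

fn main() {
    let id = ItemId(0);
    let krate = Krate {
        foreign_items: BTreeMap::from([(
            id,
            FullItem { name: "malloc".into(), signature: "fn(usize) -> *mut u8".into() },
        )]),
        extern_block: vec![ItemRef { id, name: "malloc".into() }],
    };
    // Passes that only need the name never load the full item;
    // everything else goes through the id.
    for item_ref in &krate.extern_block {
        let full = krate.foreign_item(item_ref.id);
        assert_eq!(full.name, item_ref.name);
        assert!(full.signature.contains("usize"));
    }
}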
@@ -695,6 +705,11 @@ impl Crate<'_> { par_for_each_in(&self.impl_items, |(_, impl_item)| { visitor.visit_impl_item(impl_item); }); + }, + { + par_for_each_in(&self.foreign_items, |(_, foreign_item)| { + visitor.visit_foreign_item(foreign_item); + }); } ); } @@ -1840,7 +1855,7 @@ pub struct FnSig<'hir> { } // The bodies for items are stored "out of line", in a separate -// hashmap in the `Crate`. Here we just record the node-id of the item +// hashmap in the `Crate`. Here we just record the hir-id of the item // so it can fetched later. #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Encodable, Debug)] pub struct TraitItemId { @@ -1884,7 +1899,7 @@ pub enum TraitItemKind<'hir> { } // The bodies for items are stored "out of line", in a separate -// hashmap in the `Crate`. Here we just record the node-id of the item +// hashmap in the `Crate`. Here we just record the hir-id of the item // so it can fetched later. #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Encodable, Debug)] pub struct ImplItemId { @@ -2269,12 +2284,6 @@ pub struct Mod<'hir> { pub item_ids: &'hir [ItemId], } -#[derive(Debug, HashStable_Generic)] -pub struct ForeignMod<'hir> { - pub abi: Abi, - pub items: &'hir [ForeignItem<'hir>], -} - #[derive(Encodable, Debug, HashStable_Generic)] pub struct GlobalAsm { pub asm: Symbol, @@ -2432,7 +2441,7 @@ impl VariantData<'hir> { } // The bodies for items are stored "out of line", in a separate -// hashmap in the `Crate`. Here we just record the node-id of the item +// hashmap in the `Crate`. Here we just record the hir-id of the item // so it can fetched later. #[derive(Copy, Clone, Encodable, Debug)] pub struct ItemId { @@ -2521,7 +2530,7 @@ pub enum ItemKind<'hir> { /// A module. Mod(Mod<'hir>), /// An external module, e.g. `extern { .. }`. - ForeignMod(ForeignMod<'hir>), + ForeignMod { abi: Abi, items: &'hir [ForeignItemRef<'hir>] }, /// Module-level inline assembly (from `global_asm!`). GlobalAsm(&'hir GlobalAsm), /// A type alias, e.g., `type Foo = Bar<u8>`. @@ -2614,6 +2623,29 @@ pub enum AssocItemKind { Type, } +// The bodies for items are stored "out of line", in a separate +// hashmap in the `Crate`. Here we just record the hir-id of the item +// so it can fetched later. +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Encodable, Debug)] +pub struct ForeignItemId { + pub hir_id: HirId, +} + +/// A reference from a foreign block to one of its items. This +/// contains the item's ID, naturally, but also the item's name and +/// some other high-level details (like whether it is an associated +/// type or method, and whether it is public). This allows other +/// passes to find the impl they want without loading the ID (which +/// means fewer edges in the incremental compilation graph). 
+#[derive(Debug, HashStable_Generic)] +pub struct ForeignItemRef<'hir> { + pub id: ForeignItemId, + #[stable_hasher(project(name))] + pub ident: Ident, + pub span: Span, + pub vis: Visibility<'hir>, +} + #[derive(Debug, HashStable_Generic)] pub struct ForeignItem<'hir> { #[stable_hasher(project(name))] diff --git a/compiler/rustc_hir/src/intravisit.rs b/compiler/rustc_hir/src/intravisit.rs index 35615af0fc7..3e8fc689acf 100644 --- a/compiler/rustc_hir/src/intravisit.rs +++ b/compiler/rustc_hir/src/intravisit.rs @@ -64,6 +64,10 @@ where fn visit_impl_item(&mut self, impl_item: &'hir ImplItem<'hir>) { self.visitor.visit_impl_item(impl_item); } + + fn visit_foreign_item(&mut self, foreign_item: &'hir ForeignItem<'hir>) { + self.visitor.visit_foreign_item(foreign_item); + } } pub trait IntoVisitor<'hir> { @@ -88,6 +92,10 @@ where fn visit_impl_item(&self, impl_item: &'hir ImplItem<'hir>) { self.0.into_visitor().visit_impl_item(impl_item); } + + fn visit_foreign_item(&self, foreign_item: &'hir ForeignItem<'hir>) { + self.0.into_visitor().visit_foreign_item(foreign_item); + } } #[derive(Copy, Clone)] @@ -128,6 +136,7 @@ pub trait Map<'hir> { fn item(&self, id: HirId) -> &'hir Item<'hir>; fn trait_item(&self, id: TraitItemId) -> &'hir TraitItem<'hir>; fn impl_item(&self, id: ImplItemId) -> &'hir ImplItem<'hir>; + fn foreign_item(&self, id: ForeignItemId) -> &'hir ForeignItem<'hir>; } /// An erased version of `Map<'hir>`, using dynamic dispatch. @@ -150,6 +159,9 @@ impl<'hir> Map<'hir> for ErasedMap<'hir> { fn impl_item(&self, id: ImplItemId) -> &'hir ImplItem<'hir> { self.0.impl_item(id) } + fn foreign_item(&self, id: ForeignItemId) -> &'hir ForeignItem<'hir> { + self.0.foreign_item(id) + } } /// Specifies what nested things a visitor wants to visit. The most @@ -277,6 +289,14 @@ pub trait Visitor<'v>: Sized { walk_list!(self, visit_impl_item, opt_item); } + /// Like `visit_nested_item()`, but for foreign items. See + /// `visit_nested_item()` for advice on when to override this + /// method. + fn visit_nested_foreign_item(&mut self, id: ForeignItemId) { + let opt_item = self.nested_visit_map().inter().map(|map| map.foreign_item(id)); + walk_list!(self, visit_foreign_item, opt_item); + } + /// Invoked to visit the body of a function, method or closure. Like /// visit_nested_item, does nothing by default unless you override /// `nested_visit_map` to return other than `None`, in which case it will walk @@ -378,6 +398,9 @@ pub trait Visitor<'v>: Sized { fn visit_impl_item(&mut self, ii: &'v ImplItem<'v>) { walk_impl_item(self, ii) } + fn visit_foreign_item_ref(&mut self, ii: &'v ForeignItemRef<'v>) { + walk_foreign_item_ref(self, ii) + } fn visit_impl_item_ref(&mut self, ii: &'v ImplItemRef<'v>) { walk_impl_item_ref(self, ii) } @@ -566,9 +589,9 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item<'v>) { // `visit_mod()` takes care of visiting the `Item`'s `HirId`. 
visitor.visit_mod(module, item.span, item.hir_id) } - ItemKind::ForeignMod(ref foreign_module) => { + ItemKind::ForeignMod { abi: _, items } => { visitor.visit_id(item.hir_id); - walk_list!(visitor, visit_foreign_item, foreign_module.items); + walk_list!(visitor, visit_foreign_item_ref, items); } ItemKind::GlobalAsm(_) => { visitor.visit_id(item.hir_id); @@ -1012,6 +1035,17 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt } } +pub fn walk_foreign_item_ref<'v, V: Visitor<'v>>( + visitor: &mut V, + foreign_item_ref: &'v ForeignItemRef<'v>, +) { + // N.B., deliberately force a compilation error if/when new fields are added. + let ForeignItemRef { id, ident, span: _, ref vis } = *foreign_item_ref; + visitor.visit_nested_foreign_item(id); + visitor.visit_ident(ident); + visitor.visit_vis(vis); +} + pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'v ImplItemRef<'v>) { // N.B., deliberately force a compilation error if/when new fields are added. let ImplItemRef { id, ident, ref kind, span: _, ref vis, ref defaultness } = *impl_item_ref; diff --git a/compiler/rustc_hir/src/itemlikevisit.rs b/compiler/rustc_hir/src/itemlikevisit.rs index 369cd49621b..0db562f91a6 100644 --- a/compiler/rustc_hir/src/itemlikevisit.rs +++ b/compiler/rustc_hir/src/itemlikevisit.rs @@ -1,4 +1,4 @@ -use super::{ImplItem, Item, TraitItem}; +use super::{ForeignItem, ImplItem, Item, TraitItem}; /// The "item-like visitor" defines only the top-level methods /// that can be invoked by `Crate::visit_all_item_likes()`. Whether @@ -47,6 +47,7 @@ pub trait ItemLikeVisitor<'hir> { fn visit_item(&mut self, item: &'hir Item<'hir>); fn visit_trait_item(&mut self, trait_item: &'hir TraitItem<'hir>); fn visit_impl_item(&mut self, impl_item: &'hir ImplItem<'hir>); + fn visit_foreign_item(&mut self, foreign_item: &'hir ForeignItem<'hir>); } /// A parallel variant of `ItemLikeVisitor`. 
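Adding `visit_foreign_item` to `ItemLikeVisitor` (and its parallel variant) is a breaking change for every implementer: each one touched by this patch either runs its usual per-item logic or supplies an empty body. A minimal sketch of what an implementer has to add, using stand-in types instead of the real `rustc_hir` nodes:

// Stand-ins for the HIR node types; only the trait shape matters here.
struct Item;
struct TraitItem;
struct ImplItem;
struct ForeignItem;

trait ItemLikeVisitor<'hir> {
    fn visit_item(&mut self, item: &'hir Item);
    fn visit_trait_item(&mut self, trait_item: &'hir TraitItem);
    fn visit_impl_item(&mut self, impl_item: &'hir ImplItem);
    // New required method: foreign items are now item-likes in their own right.
    fn visit_foreign_item(&mut self, foreign_item: &'hir ForeignItem);
}

struct Counter {
    items: usize,
}

impl<'hir> ItemLikeVisitor<'hir> for Counter {
    fn visit_item(&mut self, _: &'hir Item) {
        self.items += 1;
    }
    fn visit_trait_item(&mut self, _: &'hir TraitItem) {}
    fn visit_impl_item(&mut self, _: &'hir ImplItem) {}
    // Visitors that do not care about foreign items provide an empty body,
    // as most collectors in this patch do.
    fn visit_foreign_item(&mut self, _: &'hir ForeignItem) {}
}

fn main() {
    let mut counter = Counter { items: 0 };
    counter.visit_item(&Item);
    counter.visit_trait_item(&TraitItem);
    counter.visit_impl_item(&ImplItem);
    counter.visit_foreign_item(&ForeignItem);
    assert_eq!(counter.items, 1);
}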
@@ -54,4 +55,5 @@ pub trait ParItemLikeVisitor<'hir> { fn visit_item(&self, item: &'hir Item<'hir>); fn visit_trait_item(&self, trait_item: &'hir TraitItem<'hir>); fn visit_impl_item(&self, impl_item: &'hir ImplItem<'hir>); + fn visit_foreign_item(&self, foreign_item: &'hir ForeignItem<'hir>); } diff --git a/compiler/rustc_hir/src/stable_hash_impls.rs b/compiler/rustc_hir/src/stable_hash_impls.rs index 1d3f44a0899..439fb88039b 100644 --- a/compiler/rustc_hir/src/stable_hash_impls.rs +++ b/compiler/rustc_hir/src/stable_hash_impls.rs @@ -1,8 +1,8 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey}; use crate::hir::{ - BodyId, Expr, ImplItem, ImplItemId, Item, ItemId, Mod, TraitItem, TraitItemId, Ty, - VisibilityKind, + BodyId, Expr, ForeignItemId, ImplItem, ImplItemId, Item, ItemId, Mod, TraitItem, TraitItemId, + Ty, VisibilityKind, }; use crate::hir_id::{HirId, ItemLocalId}; use rustc_span::def_id::{DefPathHash, LocalDefId}; @@ -52,6 +52,15 @@ impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for ImplItemId { } } +impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for ForeignItemId { + type KeyType = (DefPathHash, ItemLocalId); + + #[inline] + fn to_stable_hash_key(&self, hcx: &HirCtx) -> (DefPathHash, ItemLocalId) { + self.hir_id.to_stable_hash_key(hcx) + } +} + impl<HirCtx: crate::HashStableContext> HashStable<HirCtx> for HirId { fn hash_stable(&self, hcx: &mut HirCtx, hasher: &mut StableHasher) { hcx.hash_hir_id(*self, hasher) @@ -77,6 +86,12 @@ impl<HirCtx: crate::HashStableContext> HashStable<HirCtx> for ItemId { } } +impl<HirCtx: crate::HashStableContext> HashStable<HirCtx> for ForeignItemId { + fn hash_stable(&self, hcx: &mut HirCtx, hasher: &mut StableHasher) { + hcx.hash_reference_to_item(self.hir_id, hasher) + } +} + impl<HirCtx: crate::HashStableContext> HashStable<HirCtx> for ImplItemId { fn hash_stable(&self, hcx: &mut HirCtx, hasher: &mut StableHasher) { hcx.hash_reference_to_item(self.hir_id, hasher) diff --git a/compiler/rustc_hir/src/target.rs b/compiler/rustc_hir/src/target.rs index fd6a312ef3a..b870e4c6ead 100644 --- a/compiler/rustc_hir/src/target.rs +++ b/compiler/rustc_hir/src/target.rs @@ -91,7 +91,7 @@ impl Target { ItemKind::Const(..) => Target::Const, ItemKind::Fn(..) => Target::Fn, ItemKind::Mod(..) => Target::Mod, - ItemKind::ForeignMod(..) => Target::ForeignMod, + ItemKind::ForeignMod { .. } => Target::ForeignMod, ItemKind::GlobalAsm(..) => Target::GlobalAsm, ItemKind::TyAlias(..) => Target::TyAlias, ItemKind::OpaqueTy(..) 
=> Target::OpaqueTy, diff --git a/compiler/rustc_hir_pretty/src/lib.rs b/compiler/rustc_hir_pretty/src/lib.rs index f7018ae62aa..25b09d76295 100644 --- a/compiler/rustc_hir_pretty/src/lib.rs +++ b/compiler/rustc_hir_pretty/src/lib.rs @@ -36,6 +36,7 @@ pub enum Nested { Item(hir::ItemId), TraitItem(hir::TraitItemId), ImplItem(hir::ImplItemId), + ForeignItem(hir::ForeignItemId), Body(hir::BodyId), BodyParamPat(hir::BodyId, usize), } @@ -56,6 +57,7 @@ impl PpAnn for hir::Crate<'_> { Nested::Item(id) => state.print_item(self.item(id.id)), Nested::TraitItem(id) => state.print_trait_item(self.trait_item(id)), Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)), + Nested::ForeignItem(id) => state.print_foreign_item(self.foreign_item(id)), Nested::Body(id) => state.print_expr(&self.body(id).value), Nested::BodyParamPat(id, i) => state.print_pat(&self.body(id).params[i].pat), } @@ -70,6 +72,7 @@ impl PpAnn for &dyn rustc_hir::intravisit::Map<'_> { Nested::Item(id) => state.print_item(self.item(id.id)), Nested::TraitItem(id) => state.print_trait_item(self.trait_item(id)), Nested::ImplItem(id) => state.print_impl_item(self.impl_item(id)), + Nested::ForeignItem(id) => state.print_foreign_item(self.foreign_item(id)), Nested::Body(id) => state.print_expr(&self.body(id).value), Nested::BodyParamPat(id, i) => state.print_pat(&self.body(id).params[i].pat), } @@ -349,13 +352,6 @@ impl<'a> State<'a> { } } - pub fn print_foreign_mod(&mut self, nmod: &hir::ForeignMod<'_>, attrs: &[ast::Attribute]) { - self.print_inner_attributes(attrs); - for item in nmod.items { - self.print_foreign_item(item); - } - } - pub fn print_opt_lifetime(&mut self, lifetime: &hir::Lifetime) { if !lifetime.is_elided() { self.print_lifetime(lifetime); @@ -644,11 +640,14 @@ impl<'a> State<'a> { self.print_mod(_mod, &item.attrs); self.bclose(item.span); } - hir::ItemKind::ForeignMod(ref nmod) => { + hir::ItemKind::ForeignMod { abi, items } => { self.head("extern"); - self.word_nbsp(nmod.abi.to_string()); + self.word_nbsp(abi.to_string()); self.bopen(); - self.print_foreign_mod(nmod, &item.attrs); + self.print_inner_attributes(item.attrs); + for item in items { + self.ann.nested(self, Nested::ForeignItem(item.id)); + } self.bclose(item.span); } hir::ItemKind::GlobalAsm(ref ga) => { diff --git a/compiler/rustc_incremental/src/persist/dirty_clean.rs b/compiler/rustc_incremental/src/persist/dirty_clean.rs index d55813f4cc5..e1c60050d94 100644 --- a/compiler/rustc_incremental/src/persist/dirty_clean.rs +++ b/compiler/rustc_incremental/src/persist/dirty_clean.rs @@ -280,7 +280,7 @@ impl DirtyCleanVisitor<'tcx> { HirItem::Mod(..) => ("ItemMod", LABELS_HIR_ONLY), // // An external module - HirItem::ForeignMod(..) => ("ItemForeignMod", LABELS_HIR_ONLY), + HirItem::ForeignMod { .. } => ("ItemForeignMod", LABELS_HIR_ONLY), // Module-level inline assembly (from global_asm!) HirItem::GlobalAsm(..) 
=> ("ItemGlobalAsm", LABELS_HIR_ONLY), @@ -460,6 +460,10 @@ impl ItemLikeVisitor<'tcx> for DirtyCleanVisitor<'tcx> { fn visit_impl_item(&mut self, item: &hir::ImplItem<'_>) { self.check_item(item.hir_id, item.span); } + + fn visit_foreign_item(&mut self, item: &hir::ForeignItem<'_>) { + self.check_item(item.hir_id, item.span); + } } /// Given a `#[rustc_dirty]` or `#[rustc_clean]` attribute, scan diff --git a/compiler/rustc_interface/src/proc_macro_decls.rs b/compiler/rustc_interface/src/proc_macro_decls.rs index d56115fd6ac..de08a4c8242 100644 --- a/compiler/rustc_interface/src/proc_macro_decls.rs +++ b/compiler/rustc_interface/src/proc_macro_decls.rs @@ -33,6 +33,8 @@ impl<'v> ItemLikeVisitor<'v> for Finder<'_> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} } pub(crate) fn provide(providers: &mut Providers) { diff --git a/compiler/rustc_interface/src/util.rs b/compiler/rustc_interface/src/util.rs index b43cbf46d61..f34990a1a10 100644 --- a/compiler/rustc_interface/src/util.rs +++ b/compiler/rustc_interface/src/util.rs @@ -810,7 +810,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> { id: resolver.next_node_id(), kind: ast::StmtKind::Expr(expr), span: rustc_span::DUMMY_SP, - tokens: None, } } @@ -827,7 +826,6 @@ impl<'a> MutVisitor for ReplaceBodyWithLoop<'a, '_> { id: self.resolver.next_node_id(), span: rustc_span::DUMMY_SP, kind: ast::StmtKind::Expr(loop_expr), - tokens: None, }; if self.within_static_or_const { diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index c65cf65b1c7..676c85e4afd 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -2345,7 +2345,7 @@ impl<'tcx> LateLintPass<'tcx> for InvalidValue { enum InitKind { Zeroed, Uninit, - }; + } /// Information about why a type cannot be initialized this way. /// Contains an error message and optionally a span to point at. 
diff --git a/compiler/rustc_lint/src/redundant_semicolon.rs b/compiler/rustc_lint/src/redundant_semicolon.rs index 84cc7b68d4c..428198cae89 100644 --- a/compiler/rustc_lint/src/redundant_semicolon.rs +++ b/compiler/rustc_lint/src/redundant_semicolon.rs @@ -28,25 +28,40 @@ declare_lint_pass!(RedundantSemicolons => [REDUNDANT_SEMICOLONS]); impl EarlyLintPass for RedundantSemicolons { fn check_block(&mut self, cx: &EarlyContext<'_>, block: &Block) { + let mut after_item_stmt = false; let mut seq = None; for stmt in block.stmts.iter() { match (&stmt.kind, &mut seq) { (StmtKind::Empty, None) => seq = Some((stmt.span, false)), (StmtKind::Empty, Some(seq)) => *seq = (seq.0.to(stmt.span), true), - (_, seq) => maybe_lint_redundant_semis(cx, seq), + (_, seq) => { + maybe_lint_redundant_semis(cx, seq, after_item_stmt); + after_item_stmt = matches!(stmt.kind, StmtKind::Item(_)); + } } } - maybe_lint_redundant_semis(cx, &mut seq); + maybe_lint_redundant_semis(cx, &mut seq, after_item_stmt); } } -fn maybe_lint_redundant_semis(cx: &EarlyContext<'_>, seq: &mut Option<(Span, bool)>) { +fn maybe_lint_redundant_semis( + cx: &EarlyContext<'_>, + seq: &mut Option<(Span, bool)>, + after_item_stmt: bool, +) { if let Some((span, multiple)) = seq.take() { // FIXME: Find a better way of ignoring the trailing // semicolon from macro expansion if span == rustc_span::DUMMY_SP { return; } + + // FIXME: Lint on semicolons after item statements + // once doing so doesn't break bootstrapping + if after_item_stmt { + return; + } + cx.struct_span_lint(REDUNDANT_SEMICOLONS, span, |lint| { let (msg, rem) = if multiple { ("unnecessary trailing semicolons", "remove these semicolons") diff --git a/compiler/rustc_lint/src/types.rs b/compiler/rustc_lint/src/types.rs index 38c71e6e925..9ad9d53cd0d 100644 --- a/compiler/rustc_lint/src/types.rs +++ b/compiler/rustc_lint/src/types.rs @@ -1131,7 +1131,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { fn check_for_opaque_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool { struct ProhibitOpaqueTypes<'a, 'tcx> { cx: &'a LateContext<'tcx>, - }; + } impl<'a, 'tcx> ty::fold::TypeVisitor<'tcx> for ProhibitOpaqueTypes<'a, 'tcx> { type BreakTy = Ty<'tcx>; diff --git a/compiler/rustc_metadata/src/foreign_modules.rs b/compiler/rustc_metadata/src/foreign_modules.rs index 8675197656a..4785b6c379c 100644 --- a/compiler/rustc_metadata/src/foreign_modules.rs +++ b/compiler/rustc_metadata/src/foreign_modules.rs @@ -16,13 +16,13 @@ struct Collector<'tcx> { impl ItemLikeVisitor<'tcx> for Collector<'tcx> { fn visit_item(&mut self, it: &'tcx hir::Item<'tcx>) { - let fm = match it.kind { - hir::ItemKind::ForeignMod(ref fm) => fm, + let items = match it.kind { + hir::ItemKind::ForeignMod { items, .. 
} => items, _ => return, }; let foreign_items = - fm.items.iter().map(|it| self.tcx.hir().local_def_id(it.hir_id).to_def_id()).collect(); + items.iter().map(|it| self.tcx.hir().local_def_id(it.id.hir_id).to_def_id()).collect(); self.modules.push(ForeignModule { foreign_items, def_id: self.tcx.hir().local_def_id(it.hir_id).to_def_id(), @@ -31,4 +31,5 @@ impl ItemLikeVisitor<'tcx> for Collector<'tcx> { fn visit_trait_item(&mut self, _it: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _it: &'tcx hir::ImplItem<'tcx>) {} + fn visit_foreign_item(&mut self, _it: &'tcx hir::ForeignItem<'tcx>) {} } diff --git a/compiler/rustc_metadata/src/link_args.rs b/compiler/rustc_metadata/src/link_args.rs index d8f16796083..d088288c507 100644 --- a/compiler/rustc_metadata/src/link_args.rs +++ b/compiler/rustc_metadata/src/link_args.rs @@ -26,11 +26,11 @@ struct Collector<'tcx> { impl<'tcx> ItemLikeVisitor<'tcx> for Collector<'tcx> { fn visit_item(&mut self, it: &'tcx hir::Item<'tcx>) { - let fm = match it.kind { - hir::ItemKind::ForeignMod(ref fm) => fm, + let abi = match it.kind { + hir::ItemKind::ForeignMod { abi, .. } => abi, _ => return, }; - if fm.abi == Abi::Rust || fm.abi == Abi::RustIntrinsic || fm.abi == Abi::PlatformIntrinsic { + if abi == Abi::Rust || abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic { return; } @@ -45,6 +45,7 @@ impl<'tcx> ItemLikeVisitor<'tcx> for Collector<'tcx> { fn visit_trait_item(&mut self, _it: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _it: &'tcx hir::ImplItem<'tcx>) {} + fn visit_foreign_item(&mut self, _it: &'tcx hir::ForeignItem<'tcx>) {} } impl<'tcx> Collector<'tcx> { diff --git a/compiler/rustc_metadata/src/native_libs.rs b/compiler/rustc_metadata/src/native_libs.rs index 2f7c2c2c405..fe29f9d177f 100644 --- a/compiler/rustc_metadata/src/native_libs.rs +++ b/compiler/rustc_metadata/src/native_libs.rs @@ -33,12 +33,12 @@ struct Collector<'tcx> { impl ItemLikeVisitor<'tcx> for Collector<'tcx> { fn visit_item(&mut self, it: &'tcx hir::Item<'tcx>) { - let fm = match it.kind { - hir::ItemKind::ForeignMod(ref fm) => fm, + let abi = match it.kind { + hir::ItemKind::ForeignMod { abi, .. } => abi, _ => return, }; - if fm.abi == Abi::Rust || fm.abi == Abi::RustIntrinsic || fm.abi == Abi::PlatformIntrinsic { + if abi == Abi::Rust || abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic { return; } @@ -127,6 +127,7 @@ impl ItemLikeVisitor<'tcx> for Collector<'tcx> { fn visit_trait_item(&mut self, _it: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _it: &'tcx hir::ImplItem<'tcx>) {} + fn visit_foreign_item(&mut self, _it: &'tcx hir::ForeignItem<'tcx>) {} } impl Collector<'tcx> { diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 19340dd51de..e82449f69fd 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -644,6 +644,7 @@ impl EntryKind { EntryKind::TraitAlias => DefKind::TraitAlias, EntryKind::Enum(..) 
=> DefKind::Enum, EntryKind::MacroDef(_) => DefKind::Macro(MacroKind::Bang), + EntryKind::ProcMacro(kind) => DefKind::Macro(kind), EntryKind::ForeignType => DefKind::ForeignTy, EntryKind::Impl(_) => DefKind::Impl, EntryKind::Closure => DefKind::Closure, @@ -685,20 +686,11 @@ impl CrateRoot<'_> { } impl<'a, 'tcx> CrateMetadataRef<'a> { - fn is_proc_macro(&self, id: DefIndex) -> bool { - self.root - .proc_macro_data - .as_ref() - .and_then(|data| data.macros.decode(self).find(|x| *x == id)) - .is_some() - } - fn maybe_kind(&self, item_id: DefIndex) -> Option<EntryKind> { self.root.tables.kind.get(self, item_id).map(|k| k.decode(self)) } fn kind(&self, item_id: DefIndex) -> EntryKind { - assert!(!self.is_proc_macro(item_id)); self.maybe_kind(item_id).unwrap_or_else(|| { bug!( "CrateMetadata::kind({:?}): id not found, in crate {:?} with number {}", @@ -725,35 +717,24 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { } fn item_ident(&self, item_index: DefIndex, sess: &Session) -> Ident { - if !self.is_proc_macro(item_index) { - let name = self - .def_key(item_index) - .disambiguated_data - .data - .get_opt_name() - .expect("no name in item_ident"); - let span = self - .root - .tables - .ident_span - .get(self, item_index) - .map(|data| data.decode((self, sess))) - .unwrap_or_else(|| panic!("Missing ident span for {:?} ({:?})", name, item_index)); - Ident::new(name, span) - } else { - Ident::new( - Symbol::intern(self.raw_proc_macro(item_index).name()), - self.get_span(item_index, sess), - ) - } + let name = self + .def_key(item_index) + .disambiguated_data + .data + .get_opt_name() + .expect("no name in item_ident"); + let span = self + .root + .tables + .ident_span + .get(self, item_index) + .map(|data| data.decode((self, sess))) + .unwrap_or_else(|| panic!("Missing ident span for {:?} ({:?})", name, item_index)); + Ident::new(name, span) } fn def_kind(&self, index: DefIndex) -> DefKind { - if !self.is_proc_macro(index) { - self.kind(index).def_kind() - } else { - DefKind::Macro(macro_kind(self.raw_proc_macro(index))) - } + self.kind(index).def_kind() } fn get_span(&self, index: DefIndex, sess: &Session) -> Span { @@ -956,10 +937,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { } fn get_stability(&self, id: DefIndex) -> Option<attr::Stability> { - match self.is_proc_macro(id) { - true => self.root.proc_macro_data.as_ref().unwrap().stability, - false => self.root.tables.stability.get(self, id).map(|stab| stab.decode(self)), - } + self.root.tables.stability.get(self, id).map(|stab| stab.decode(self)) } fn get_const_stability(&self, id: DefIndex) -> Option<attr::ConstStability> { @@ -967,19 +945,11 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { } fn get_deprecation(&self, id: DefIndex) -> Option<attr::Deprecation> { - self.root - .tables - .deprecation - .get(self, id) - .filter(|_| !self.is_proc_macro(id)) - .map(|depr| depr.decode(self)) + self.root.tables.deprecation.get(self, id).map(|depr| depr.decode(self)) } fn get_visibility(&self, id: DefIndex) -> ty::Visibility { - match self.is_proc_macro(id) { - true => ty::Visibility::Public, - false => self.root.tables.visibility.get(self, id).unwrap().decode(self), - } + self.root.tables.visibility.get(self, id).unwrap().decode(self) } fn get_impl_data(&self, id: DefIndex) -> ImplData { @@ -1191,7 +1161,7 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { } fn is_item_mir_available(&self, id: DefIndex) -> bool { - !self.is_proc_macro(id) && self.root.tables.mir.get(self, id).is_some() + self.root.tables.mir.get(self, id).is_some() } fn module_expansion(&self, id: DefIndex, sess: 
&Session) -> ExpnId { @@ -1207,7 +1177,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { .tables .mir .get(self, id) - .filter(|_| !self.is_proc_macro(id)) .unwrap_or_else(|| { bug!("get_optimized_mir: missing MIR for `{:?}`", self.local_def_id(id)) }) @@ -1223,7 +1192,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { .tables .mir_abstract_consts .get(self, id) - .filter(|_| !self.is_proc_macro(id)) .map_or(Ok(None), |v| Ok(Some(v.decode((self, tcx))))) } @@ -1232,7 +1200,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { .tables .unused_generic_params .get(self, id) - .filter(|_| !self.is_proc_macro(id)) .map(|params| params.decode(self)) .unwrap_or_default() } @@ -1242,7 +1209,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { .tables .promoted_mir .get(self, id) - .filter(|_| !self.is_proc_macro(id)) .unwrap_or_else(|| { bug!("get_promoted_mir: missing MIR for `{:?}`", self.local_def_id(id)) }) @@ -1546,14 +1512,11 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { #[inline] fn def_key(&self, index: DefIndex) -> DefKey { - *self.def_key_cache.lock().entry(index).or_insert_with(|| { - let mut key = self.root.tables.def_keys.get(self, index).unwrap().decode(self); - if self.is_proc_macro(index) { - let name = self.raw_proc_macro(index).name(); - key.disambiguated_data.data = DefPathData::MacroNs(Symbol::intern(name)); - } - key - }) + *self + .def_key_cache + .lock() + .entry(index) + .or_insert_with(|| self.root.tables.def_keys.get(self, index).unwrap().decode(self)) } // Returns the path leading to the thing with this `id`. diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index a7cf1079b8f..46dd0df65e0 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -9,6 +9,7 @@ use rustc_data_structures::sync::{join, Lrc}; use rustc_hir as hir; use rustc_hir::def::CtorKind; use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc_hir::definitions::DefPathData; use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor}; use rustc_hir::itemlikevisit::{ItemLikeVisitor, ParItemLikeVisitor}; use rustc_hir::lang_items; @@ -27,7 +28,7 @@ use rustc_middle::ty::codec::TyEncoder; use rustc_middle::ty::{self, SymbolName, Ty, TyCtxt}; use rustc_serialize::{opaque, Encodable, Encoder}; use rustc_session::config::CrateType; -use rustc_span::hygiene::{ExpnDataEncodeMode, HygieneEncodeContext}; +use rustc_span::hygiene::{ExpnDataEncodeMode, HygieneEncodeContext, MacroKind}; use rustc_span::symbol::{sym, Ident, Symbol}; use rustc_span::{self, ExternalSource, FileName, SourceFile, Span, SyntaxContext}; use rustc_target::abi::VariantIdx; @@ -1225,7 +1226,7 @@ impl EncodeContext<'a, 'tcx> { hir::ItemKind::Mod(ref m) => { return self.encode_info_for_mod(item.hir_id, m, &item.attrs); } - hir::ItemKind::ForeignMod(_) => EntryKind::ForeignMod, + hir::ItemKind::ForeignMod { .. } => EntryKind::ForeignMod, hir::ItemKind::GlobalAsm(..) => EntryKind::GlobalAsm, hir::ItemKind::TyAlias(..) => EntryKind::Type, hir::ItemKind::OpaqueTy(..) => { @@ -1320,11 +1321,11 @@ impl EncodeContext<'a, 'tcx> { record!(self.tables.expn_that_defined[def_id] <- self.tcx.expansion_that_defined(def_id)); // FIXME(eddyb) there should be a nicer way to do this. match item.kind { - hir::ItemKind::ForeignMod(ref fm) => record!(self.tables.children[def_id] <- - fm.items + hir::ItemKind::ForeignMod { items, .. 
} => record!(self.tables.children[def_id] <- + items .iter() .map(|foreign_item| tcx.hir().local_def_id( - foreign_item.hir_id).local_def_index) + foreign_item.id.hir_id).local_def_index) ), hir::ItemKind::Enum(..) => record!(self.tables.children[def_id] <- self.tcx.adt_def(def_id).variants.iter().map(|v| { @@ -1539,12 +1540,41 @@ impl EncodeContext<'a, 'tcx> { // so we manually encode just the information that we need for proc_macro in &hir.krate().proc_macros { let id = proc_macro.owner.local_def_index; - let span = self.lazy(hir.span(*proc_macro)); + let mut name = hir.name(*proc_macro); + let span = hir.span(*proc_macro); // Proc-macros may have attributes like `#[allow_internal_unstable]`, // so downstream crates need access to them. - let attrs = self.lazy(hir.attrs(*proc_macro)); - self.tables.span.set(id, span); - self.tables.attributes.set(id, attrs); + let attrs = hir.attrs(*proc_macro); + let macro_kind = if tcx.sess.contains_name(attrs, sym::proc_macro) { + MacroKind::Bang + } else if tcx.sess.contains_name(attrs, sym::proc_macro_attribute) { + MacroKind::Attr + } else if let Some(attr) = tcx.sess.find_by_name(attrs, sym::proc_macro_derive) { + // This unwrap chain should have been checked by the proc-macro harness. + name = attr.meta_item_list().unwrap()[0] + .meta_item() + .unwrap() + .ident() + .unwrap() + .name; + MacroKind::Derive + } else { + bug!("Unknown proc-macro type for item {:?}", id); + }; + + let mut def_key = self.tcx.hir().def_key(proc_macro.owner); + def_key.disambiguated_data.data = DefPathData::MacroNs(name); + + let def_id = DefId::local(id); + record!(self.tables.kind[def_id] <- EntryKind::ProcMacro(macro_kind)); + record!(self.tables.attributes[def_id] <- attrs); + record!(self.tables.def_keys[def_id] <- def_key); + record!(self.tables.ident_span[def_id] <- span); + record!(self.tables.span[def_id] <- span); + record!(self.tables.visibility[def_id] <- ty::Visibility::Public); + if let Some(stability) = stability { + record!(self.tables.stability[def_id] <- stability); + } } Some(ProcMacroData { proc_macro_decls_static, stability, macros }) @@ -1836,7 +1866,7 @@ impl EncodeContext<'a, 'tcx> { | hir::ItemKind::Const(..) | hir::ItemKind::Fn(..) | hir::ItemKind::Mod(..) - | hir::ItemKind::ForeignMod(..) + | hir::ItemKind::ForeignMod { .. } | hir::ItemKind::GlobalAsm(..) | hir::ItemKind::ExternCrate(..) | hir::ItemKind::Use(..) @@ -1913,6 +1943,8 @@ impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'tcx> { fn visit_impl_item(&mut self, _impl_item: &'v hir::ImplItem<'v>) { // handled in `visit_item` above } + + fn visit_foreign_item(&mut self, _foreign_item: &'v hir::ForeignItem<'v>) {} } /// Used to prefetch queries which will be needed later by metadata encoding. @@ -1977,6 +2009,11 @@ impl<'tcx, 'v> ParItemLikeVisitor<'v> for PrefetchVisitor<'tcx> { hir::ImplItemKind::TyAlias(..) => (), } } + + fn visit_foreign_item(&self, _foreign_item: &'v hir::ForeignItem<'v>) { + // This should be kept in sync with `encode_info_for_foreign_item`. + // Foreign items contain no MIR. 
+ } } // NOTE(eddyb) The following comment was preserved for posterity, even diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index 2bd2019d3cd..53606178909 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -20,6 +20,7 @@ use rustc_serialize::opaque::Encoder; use rustc_session::config::SymbolManglingVersion; use rustc_session::CrateDisambiguator; use rustc_span::edition::Edition; +use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::{self, ExpnData, ExpnId, Span}; use rustc_target::spec::{PanicStrategy, TargetTriple}; @@ -336,6 +337,7 @@ enum EntryKind { ForeignFn(Lazy<FnData>), Mod(Lazy<ModData>), MacroDef(Lazy<MacroDef>), + ProcMacro(MacroKind), Closure, Generator(hir::GeneratorKind), Trait(Lazy<TraitData>), diff --git a/compiler/rustc_middle/src/hir/map/collector.rs b/compiler/rustc_middle/src/hir/map/collector.rs index 516c9b6752b..912e9672c94 100644 --- a/compiler/rustc_middle/src/hir/map/collector.rs +++ b/compiler/rustc_middle/src/hir/map/collector.rs @@ -112,6 +112,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> { items: _, trait_items: _, impl_items: _, + foreign_items: _, bodies: _, trait_impls: _, body_ids: _, @@ -319,6 +320,10 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> { self.visit_impl_item(self.krate.impl_item(item_id)); } + fn visit_nested_foreign_item(&mut self, foreign_id: ForeignItemId) { + self.visit_foreign_item(self.krate.foreign_item(foreign_id)); + } + fn visit_nested_body(&mut self, id: BodyId) { self.visit_body(self.krate.body(id)); } @@ -351,11 +356,17 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> { }); } - fn visit_foreign_item(&mut self, foreign_item: &'hir ForeignItem<'hir>) { - self.insert(foreign_item.span, foreign_item.hir_id, Node::ForeignItem(foreign_item)); + fn visit_foreign_item(&mut self, fi: &'hir ForeignItem<'hir>) { + debug_assert_eq!( + fi.hir_id.owner, + self.definitions.opt_hir_id_to_local_def_id(fi.hir_id).unwrap() + ); + self.with_dep_node_owner(fi.hir_id.owner, fi, |this, hash| { + this.insert_with_hash(fi.span, fi.hir_id, Node::ForeignItem(fi), hash); - self.with_parent(foreign_item.hir_id, |this| { - intravisit::walk_foreign_item(this, foreign_item); + this.with_parent(fi.hir_id, |this| { + intravisit::walk_foreign_item(this, fi); + }); }); } diff --git a/compiler/rustc_middle/src/hir/map/mod.rs b/compiler/rustc_middle/src/hir/map/mod.rs index 37ec3d3d1ca..5e36362ec59 100644 --- a/compiler/rustc_middle/src/hir/map/mod.rs +++ b/compiler/rustc_middle/src/hir/map/mod.rs @@ -205,7 +205,7 @@ impl<'hir> Map<'hir> { ItemKind::TraitAlias(..) => DefKind::TraitAlias, ItemKind::ExternCrate(_) => DefKind::ExternCrate, ItemKind::Use(..) => DefKind::Use, - ItemKind::ForeignMod(..) => DefKind::ForeignMod, + ItemKind::ForeignMod { .. } => DefKind::ForeignMod, ItemKind::GlobalAsm(..) => DefKind::GlobalAsm, ItemKind::Impl { .. 
} => DefKind::Impl, }, @@ -309,6 +309,13 @@ impl<'hir> Map<'hir> { } } + pub fn foreign_item(&self, id: ForeignItemId) -> &'hir ForeignItem<'hir> { + match self.find(id.hir_id).unwrap() { + Node::ForeignItem(item) => item, + _ => bug!(), + } + } + pub fn body(&self, id: BodyId) -> &'hir Body<'hir> { self.tcx.hir_owner_nodes(id.hir_id.owner).unwrap().bodies.get(&id.hir_id.local_id).unwrap() } @@ -470,6 +477,10 @@ impl<'hir> Map<'hir> { for id in &module.impl_items { visitor.visit_impl_item(self.expect_impl_item(id.hir_id)); } + + for id in &module.foreign_items { + visitor.visit_foreign_item(self.expect_foreign_item(id.hir_id)); + } } /// Retrieves the `Node` corresponding to `id`, panicking if it cannot be found. @@ -718,10 +729,11 @@ impl<'hir> Map<'hir> { let parent = self.get_parent_item(hir_id); if let Some(entry) = self.find_entry(parent) { if let Entry { - node: Node::Item(Item { kind: ItemKind::ForeignMod(ref nm), .. }), .. + node: Node::Item(Item { kind: ItemKind::ForeignMod { abi, .. }, .. }), + .. } = entry { - return nm.abi; + return *abi; } } bug!("expected foreign mod or inlined parent, found {}", self.node_to_string(parent)) @@ -937,6 +949,10 @@ impl<'hir> intravisit::Map<'hir> for Map<'hir> { fn impl_item(&self, id: ImplItemId) -> &'hir ImplItem<'hir> { self.impl_item(id) } + + fn foreign_item(&self, id: ForeignItemId) -> &'hir ForeignItem<'hir> { + self.foreign_item(id) + } } trait Named { @@ -1030,7 +1046,7 @@ fn hir_id_to_string(map: &Map<'_>, id: HirId) -> String { ItemKind::Const(..) => "const", ItemKind::Fn(..) => "fn", ItemKind::Mod(..) => "mod", - ItemKind::ForeignMod(..) => "foreign mod", + ItemKind::ForeignMod { .. } => "foreign mod", ItemKind::GlobalAsm(..) => "global asm", ItemKind::TyAlias(..) => "ty", ItemKind::OpaqueTy(..) => "opaque type", diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index ad47c6e75d3..7538818b8af 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -254,7 +254,7 @@ macro_rules! make_mir_visitor { macro_rules! basic_blocks { (mut) => (body.basic_blocks_mut().iter_enumerated_mut()); () => (body.basic_blocks().iter_enumerated()); - }; + } for (bb, data) in basic_blocks!($($mutability)?) { self.visit_basic_block_data(bb, data); } @@ -275,7 +275,7 @@ macro_rules! make_mir_visitor { macro_rules! type_annotations { (mut) => (body.user_type_annotations.iter_enumerated_mut()); () => (body.user_type_annotations.iter_enumerated()); - }; + } for (index, annotation) in type_annotations!($($mutability)?) { self.visit_user_type_annotation( @@ -909,7 +909,7 @@ macro_rules! make_mir_visitor { macro_rules! basic_blocks { (mut) => (body.basic_blocks_mut()); () => (body.basic_blocks()); - }; + } let basic_block = & $($mutability)? basic_blocks!($($mutability)?)[location.block]; if basic_block.statements.len() == location.statement_index { if let Some(ref $($mutability)? 
terminator) = basic_block.terminator { diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index f52466d85f8..413c9cca589 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -540,7 +540,7 @@ fn polymorphize<'tcx>( struct PolymorphizationFolder<'tcx> { tcx: TyCtxt<'tcx>, - }; + } impl ty::TypeFolder<'tcx> for PolymorphizationFolder<'tcx> { fn tcx<'a>(&'a self) -> TyCtxt<'tcx> { diff --git a/compiler/rustc_mir/src/interpret/util.rs b/compiler/rustc_mir/src/interpret/util.rs index e49b1c9f64d..ec90f063a55 100644 --- a/compiler/rustc_mir/src/interpret/util.rs +++ b/compiler/rustc_mir/src/interpret/util.rs @@ -15,7 +15,7 @@ where struct UsedParamsNeedSubstVisitor<'tcx> { tcx: TyCtxt<'tcx>, - }; + } impl<'tcx> TypeVisitor<'tcx> for UsedParamsNeedSubstVisitor<'tcx> { type BreakTy = (); diff --git a/compiler/rustc_mir/src/monomorphize/collector.rs b/compiler/rustc_mir/src/monomorphize/collector.rs index a6f90992172..6370ead97e7 100644 --- a/compiler/rustc_mir/src/monomorphize/collector.rs +++ b/compiler/rustc_mir/src/monomorphize/collector.rs @@ -993,7 +993,7 @@ impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> { match item.kind { hir::ItemKind::ExternCrate(..) | hir::ItemKind::Use(..) - | hir::ItemKind::ForeignMod(..) + | hir::ItemKind::ForeignMod { .. } | hir::ItemKind::TyAlias(..) | hir::ItemKind::Trait(..) | hir::ItemKind::TraitAlias(..) @@ -1066,6 +1066,8 @@ impl ItemLikeVisitor<'v> for RootCollector<'_, 'v> { self.push_if_root(def_id); } } + + fn visit_foreign_item(&mut self, _foreign_item: &'v hir::ForeignItem<'v>) {} } impl RootCollector<'_, 'v> { diff --git a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs index f9fe261bcee..97edbd83b89 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/check_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/check_match.rs @@ -1,5 +1,5 @@ -use super::_match::Usefulness::*; -use super::_match::{ +use super::usefulness::Usefulness::*; +use super::usefulness::{ compute_match_usefulness, expand_pattern, MatchArm, MatchCheckCtxt, UsefulnessReport, }; use super::{PatCtxt, PatKind, PatternError}; diff --git a/compiler/rustc_mir_build/src/thir/pattern/_match.rs b/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs index f299663f679..62b4468eeb3 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/_match.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs @@ -1,322 +1,16 @@ -//! Note: tests specific to this file can be found in: -//! -//! - `ui/pattern/usefulness` -//! - `ui/or-patterns` -//! - `ui/consts/const_in_pattern` -//! - `ui/rfc-2008-non-exhaustive` -//! - `ui/half-open-range-patterns` -//! - probably many others -//! -//! I (Nadrieril) prefer to put new tests in `ui/pattern/usefulness` unless there's a specific -//! reason not to, for example if they depend on a particular feature like `or_patterns`. -//! -//! ----- -//! -//! This file includes the logic for exhaustiveness and usefulness checking for -//! pattern-matching. Specifically, given a list of patterns for a type, we can -//! tell whether: -//! (a) the patterns cover every possible constructor for the type (exhaustiveness) -//! (b) each pattern is necessary (usefulness) -//! -//! The algorithm implemented here is a modified version of the one described in -//! [this paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). -//! 
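As an editorial aside on the exhaustiveness/usefulness properties described above: they correspond to the familiar "non-exhaustive patterns" error and "unreachable pattern" warning. The following is a minimal, self-contained sketch of that user-facing behaviour; the function and enum choices are illustrative only and are not part of this diff.

```
// Minimal sketch of what exhaustiveness/usefulness checking reports in practice.
fn classify(x: Option<bool>) -> &'static str {
    match x {
        Some(true) => "some true",
        Some(false) => "some false",
        None => "none",
        // An extra arm such as `Some(_) => "redundant"` here would be flagged
        // as an unreachable pattern: `U(P, p)` is false for it because it
        // covers no values that the arms above do not already cover.
    }
}

fn main() {
    // Deleting the `None` arm above would instead produce a
    // "non-exhaustive patterns" error: `U(P, _)` would be true, i.e. a
    // wildcard arm would still be useful.
    println!("{}", classify(Some(true)));
}
```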
However, to save future implementors from reading the original paper, we -//! summarise the algorithm here to hopefully save time and be a little clearer -//! (without being so rigorous). -//! -//! # Premise -//! -//! The core of the algorithm revolves about a "usefulness" check. In particular, we -//! are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as -//! a matrix). `U(P, p)` represents whether, given an existing list of patterns -//! `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously- -//! uncovered values of the type). -//! -//! If we have this predicate, then we can easily compute both exhaustiveness of an -//! entire set of patterns and the individual usefulness of each one. -//! (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard -//! match doesn't increase the number of values we're matching) -//! (b) a pattern `P_i` is not useful if `U(P[0..=(i-1), P_i)` is false (i.e., adding a -//! pattern to those that have come before it doesn't increase the number of values -//! we're matching). -//! -//! # Core concept -//! -//! The idea that powers everything that is done in this file is the following: a value is made -//! from a constructor applied to some fields. Examples of constructors are `Some`, `None`, `(,)` -//! (the 2-tuple constructor), `Foo {..}` (the constructor for a struct `Foo`), and `2` (the -//! constructor for the number `2`). Fields are just a (possibly empty) list of values. -//! -//! Some of the constructors listed above might feel weird: `None` and `2` don't take any -//! arguments. This is part of what makes constructors so general: we will consider plain values -//! like numbers and string literals to be constructors that take no arguments, also called "0-ary -//! constructors"; they are the simplest case of constructors. This allows us to see any value as -//! made up from a tree of constructors, each having a given number of children. For example: -//! `(None, Ok(0))` is made from 4 different constructors. -//! -//! This idea can be extended to patterns: a pattern captures a set of possible values, and we can -//! describe this set using constructors. For example, `Err(_)` captures all values of the type -//! `Result<T, E>` that start with the `Err` constructor (for some choice of `T` and `E`). The -//! wildcard `_` captures all values of the given type starting with any of the constructors for -//! that type. -//! -//! We use this to compute whether different patterns might capture a same value. Do the patterns -//! `Ok("foo")` and `Err(_)` capture a common value? The answer is no, because the first pattern -//! captures only values starting with the `Ok` constructor and the second only values starting -//! with the `Err` constructor. Do the patterns `Some(42)` and `Some(1..10)` intersect? They might, -//! since they both capture values starting with `Some`. To be certain, we need to dig under the -//! `Some` constructor and continue asking the question. This is the main idea behind the -//! exhaustiveness algorithm: by looking at patterns constructor-by-constructor, we can efficiently -//! figure out if some new pattern might capture a value that hadn't been captured by previous -//! patterns. -//! -//! Constructors are represented by the `Constructor` enum, and its fields by the `Fields` enum. -//! Most of the complexity of this file resides in transforming between patterns and -//! 
(`Constructor`, `Fields`) pairs, handling all the special cases correctly. -//! -//! Caveat: this constructors/fields distinction doesn't quite cover every Rust value. For example -//! a value of type `Rc<u64>` doesn't fit this idea very well, nor do various other things. -//! However, this idea covers most of the cases that are relevant to exhaustiveness checking. -//! -//! -//! # Algorithm -//! -//! Recall that `U(P, p)` represents whether, given an existing list of patterns (aka matrix) `P`, -//! adding a new pattern `p` will cover previously-uncovered values of the type. -//! During the course of the algorithm, the rows of the matrix won't just be individual patterns, -//! but rather partially-deconstructed patterns in the form of a list of fields. The paper -//! calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the -//! new pattern `p`. -//! -//! For example, say we have the following: -//! -//! ``` -//! // x: (Option<bool>, Result<()>) -//! match x { -//! (Some(true), _) => {} -//! (None, Err(())) => {} -//! (None, Err(_)) => {} -//! } -//! ``` -//! -//! Here, the matrix `P` starts as: -//! -//! ``` -//! [ -//! [(Some(true), _)], -//! [(None, Err(()))], -//! [(None, Err(_))], -//! ] -//! ``` -//! -//! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering -//! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because -//! all the values it covers are already covered by row 2. -//! -//! A list of patterns can be thought of as a stack, because we are mainly interested in the top of -//! the stack at any given point, and we can pop or apply constructors to get new pattern-stacks. -//! To match the paper, the top of the stack is at the beginning / on the left. -//! -//! There are two important operations on pattern-stacks necessary to understand the algorithm: -//! -//! 1. We can pop a given constructor off the top of a stack. This operation is called -//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or -//! `None`) and `p` a pattern-stack. -//! If the pattern on top of the stack can cover `c`, this removes the constructor and -//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns. -//! Otherwise the pattern-stack is discarded. -//! This essentially filters those pattern-stacks whose top covers the constructor `c` and -//! discards the others. -//! -//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we -//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the -//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get -//! nothing back. -//! -//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1` -//! on top of the stack, and we have four cases: -//! -//! 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We -//! push onto the stack the arguments of this constructor, and return the result: -//! `r_1, .., r_a, p_2, .., p_n` -//! -//! 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠c'`. We discard the current stack and -//! return nothing. -//! -//! 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has -//! arguments (its arity), and return the resulting stack: -//! `_, .., _, p_2, .., p_n` -//! -//! 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting -//! stack: -//! - `S(c, (r_1, p_2, .., p_n))` -//! 
- `S(c, (r_2, p_2, .., p_n))` -//! -//! 2. We can pop a wildcard off the top of the stack. This is called `S(_, p)`, where `p` is -//! a pattern-stack. Note: the paper calls this `D(p)`. -//! This is used when we know there are missing constructor cases, but there might be -//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check -//! all its *other* components. -//! -//! It is computed as follows. We look at the pattern `p_1` on top of the stack, -//! and we have three cases: -//! 2.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing. -//! 2.2. `p_1 = _`. We return the rest of the stack: -//! p_2, .., p_n -//! 2.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting -//! stack. -//! - `S(_, (r_1, p_2, .., p_n))` -//! - `S(_, (r_2, p_2, .., p_n))` -//! -//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the -//! exhaustive integer matching rules, so they're written here for posterity. -//! -//! Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by -//! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with -//! the given constructor, and popping a wildcard keeps those rows that start with a wildcard. -//! -//! -//! The algorithm for computing `U` -//! ------------------------------- -//! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns). -//! That means we're going to check the components from left-to-right, so the algorithm -//! operates principally on the first component of the matrix and new pattern-stack `p`. -//! This algorithm is realised in the `is_useful` function. -//! -//! Base case. (`n = 0`, i.e., an empty tuple pattern) -//! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), -//! then `U(P, p)` is false. -//! - Otherwise, `P` must be empty, so `U(P, p)` is true. -//! -//! Inductive step. (`n > 0`, i.e., whether there's at least one column -//! [which may then be expanded into further columns later]) -//! We're going to match on the top of the new pattern-stack, `p_1`. -//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern. -//! Then, the usefulness of `p_1` can be reduced to whether it is useful when -//! we ignore all the patterns in the first column of `P` that involve other constructors. -//! This is where `S(c, P)` comes in: -//! `U(P, p) := U(S(c, P), S(c, p))` -//! -//! For example, if `P` is: -//! -//! ``` -//! [ -//! [Some(true), _], -//! [None, 0], -//! ] -//! ``` -//! -//! and `p` is `[Some(false), 0]`, then we don't care about row 2 since we know `p` only -//! matches values that row 2 doesn't. For row 1 however, we need to dig into the -//! arguments of `Some` to know whether some new value is covered. So we compute -//! `U([[true, _]], [false, 0])`. -//! -//! - If `p_1 == _`, then we look at the list of constructors that appear in the first -//! component of the rows of `P`: -//! + If there are some constructors that aren't present, then we might think that the -//! wildcard `_` is useful, since it covers those constructors that weren't covered -//! before. -//! That's almost correct, but only works if there were no wildcards in those first -//! components. So we need to check that `p` is useful with respect to the rows that -//! start with a wildcard, if there are any. This is where `S(_, x)` comes in: -//! `U(P, p) := U(S(_, P), S(_, p))` -//! -//! 
For example, if `P` is: -//! -//! ``` -//! [ -//! [_, true, _], -//! [None, false, 1], -//! ] -//! ``` -//! -//! and `p` is `[_, false, _]`, the `Some` constructor doesn't appear in `P`. So if we -//! only had row 2, we'd know that `p` is useful. However row 1 starts with a -//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`. -//! -//! + Otherwise, all possible constructors (for the relevant type) are present. In this -//! case we must check whether the wildcard pattern covers any unmatched value. For -//! that, we can think of the `_` pattern as a big OR-pattern that covers all -//! possible constructors. For `Option`, that would mean `_ = None | Some(_)` for -//! example. The wildcard pattern is useful in this case if it is useful when -//! specialized to one of the possible constructors. So we compute: -//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))` -//! -//! For example, if `P` is: -//! -//! ``` -//! [ -//! [Some(true), _], -//! [None, false], -//! ] -//! ``` -//! -//! and `p` is `[_, false]`, both `None` and `Some` constructors appear in the first -//! components of `P`. We will therefore try popping both constructors in turn: we -//! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]], -//! [false])` for the `None` constructor. The first case returns true, so we know that -//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched -//! before. -//! -//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately: -//! `U(P, p) := U(P, (r_1, p_2, .., p_n)) -//! || U(P, (r_2, p_2, .., p_n))` -//! -//! Modifications to the algorithm -//! ------------------------------ -//! The algorithm in the paper doesn't cover some of the special cases that arise in Rust, for -//! example uninhabited types and variable-length slice patterns. These are drawn attention to -//! throughout the code below. I'll make a quick note here about how exhaustive integer matching is -//! accounted for, though. -//! -//! Exhaustive integer matching -//! --------------------------- -//! An integer type can be thought of as a (huge) sum type: 1 | 2 | 3 | ... -//! So to support exhaustive integer matching, we can make use of the logic in the paper for -//! OR-patterns. However, we obviously can't just treat ranges x..=y as individual sums, because -//! they are likely gigantic. So we instead treat ranges as constructors of the integers. This means -//! that we have a constructor *of* constructors (the integers themselves). We then need to work -//! through all the inductive step rules above, deriving how the ranges would be treated as -//! OR-patterns, and making sure that they're treated in the same way even when they're ranges. -//! There are really only four special cases here: -//! - When we match on a constructor that's actually a range, we have to treat it as if we would -//! an OR-pattern. -//! + It turns out that we can simply extend the case for single-value patterns in -//! `specialize` to either be *equal* to a value constructor, or *contained within* a range -//! constructor. -//! + When the pattern itself is a range, you just want to tell whether any of the values in -//! the pattern range coincide with values in the constructor range, which is precisely -//! intersection. -//! Since when encountering a range pattern for a value constructor, we also use inclusion, it -//! means that whenever the constructor is a value/range and the pattern is also a value/range, -//! 
we can simply use intersection to test usefulness. -//! - When we're testing for usefulness of a pattern and the pattern's first component is a -//! wildcard. -//! + If all the constructors appear in the matrix, we have a slight complication. By default, -//! the behaviour (i.e., a disjunction over specialised matrices for each constructor) is -//! invalid, because we want a disjunction over every *integer* in each range, not just a -//! disjunction over every range. This is a bit more tricky to deal with: essentially we need -//! to form equivalence classes of subranges of the constructor range for which the behaviour -//! of the matrix `P` and new pattern `p` are the same. This is described in more -//! detail in `Constructor::split`. -//! + If some constructors are missing from the matrix, it turns out we don't need to do -//! anything special (because we know none of the integers are actually wildcards: i.e., we -//! can't span wildcards using ranges). - +//! This module provides functions to deconstruct and reconstruct patterns into a constructor +//! applied to some fields. This is used by the `_match` module to compute pattern +//! usefulness/exhaustiveness. use self::Constructor::*; use self::SliceKind::*; -use self::Usefulness::*; -use self::WitnessPreference::*; + +use super::compare_const_vals; +use super::usefulness::{MatchCheckCtxt, PatCtxt}; +use super::{FieldPat, Pat, PatKind, PatRange}; use rustc_data_structures::captures::Captures; -use rustc_data_structures::fx::FxHashSet; -use rustc_data_structures::sync::OnceCell; use rustc_index::vec::Idx; -use super::{compare_const_vals, PatternFoldable, PatternFolder}; -use super::{FieldPat, Pat, PatKind, PatRange}; - -use rustc_arena::TypedArena; use rustc_attr::{SignedInt, UnsignedInt}; use rustc_hir::def_id::DefId; use rustc_hir::{HirId, RangeEnd}; @@ -330,281 +24,344 @@ use rustc_target::abi::{Integer, Size, VariantIdx}; use smallvec::{smallvec, SmallVec}; use std::cmp::{self, max, min, Ordering}; -use std::fmt; -use std::iter::{FromIterator, IntoIterator}; +use std::iter::IntoIterator; use std::ops::RangeInclusive; -crate fn expand_pattern<'tcx>(pat: Pat<'tcx>) -> Pat<'tcx> { - LiteralExpander.fold_pattern(&pat) -} - -struct LiteralExpander; - -impl<'tcx> PatternFolder<'tcx> for LiteralExpander { - fn fold_pattern(&mut self, pat: &Pat<'tcx>) -> Pat<'tcx> { - debug!("fold_pattern {:?} {:?} {:?}", pat, pat.ty.kind(), pat.kind); - match (pat.ty.kind(), pat.kind.as_ref()) { - (_, PatKind::Binding { subpattern: Some(s), .. }) => s.fold_with(self), - (_, PatKind::AscribeUserType { subpattern: s, .. }) => s.fold_with(self), - (ty::Ref(_, t, _), PatKind::Constant { .. }) if t.is_str() => { - // Treat string literal patterns as deref patterns to a `str` constant, i.e. - // `&CONST`. This expands them like other const patterns. This could have been done - // in `const_to_pat`, but that causes issues with the rest of the matching code. - let mut new_pat = pat.super_fold_with(self); - // Make a fake const pattern of type `str` (instead of `&str`). That the carried - // constant value still knows it is of type `&str`. - new_pat.ty = t; - Pat { - kind: Box::new(PatKind::Deref { subpattern: new_pat }), - span: pat.span, - ty: pat.ty, - } - } - _ => pat.super_fold_with(self), - } - } -} - -impl<'tcx> Pat<'tcx> { - pub(super) fn is_wildcard(&self) -> bool { - matches!(*self.kind, PatKind::Binding { subpattern: None, .. } | PatKind::Wild) - } -} - -/// A row of a matrix. 
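To make the "constructor applied to fields" view used by this new module concrete, here is a hedged toy sketch on a hand-rolled pattern type. `ToyPat`, `ToyCtor`, and `deconstruct` are invented names for illustration; the real types in the module are `Constructor` and `Fields`.

```
// A toy model of deconstructing a pattern into a head constructor plus fields.
#[derive(Debug)]
enum ToyPat {
    Wild,                                // `_`
    Int(i64),                            // e.g. `2`
    Tuple(Vec<ToyPat>),                  // e.g. `(None, Ok(0))`
    Variant(&'static str, Vec<ToyPat>),  // e.g. `Some(42)`
}

#[derive(Debug)]
enum ToyCtor {
    Wildcard,
    Int(i64),
    Tuple(usize),            // arity
    Variant(&'static str),
}

// Split a pattern into its head constructor and the list of field patterns.
fn deconstruct(pat: &ToyPat) -> (ToyCtor, Vec<&ToyPat>) {
    match pat {
        ToyPat::Wild => (ToyCtor::Wildcard, vec![]),
        ToyPat::Int(n) => (ToyCtor::Int(*n), vec![]),
        ToyPat::Tuple(fields) => (ToyCtor::Tuple(fields.len()), fields.iter().collect()),
        ToyPat::Variant(name, fields) => (ToyCtor::Variant(*name), fields.iter().collect()),
    }
}

fn main() {
    // `(_, Some(42))` is the tuple constructor applied to two fields,
    // one of which is itself `Some` applied to the field `42`.
    let pat = ToyPat::Tuple(vec![
        ToyPat::Wild,
        ToyPat::Variant("Some", vec![ToyPat::Int(42)]),
    ]);
    let (ctor, fields) = deconstruct(&pat);
    println!("{:?} applied to {:?}", ctor, fields);
}
```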
Rows of len 1 are very common, which is why `SmallVec[_; 2]` -/// works well. -#[derive(Debug, Clone)] -struct PatStack<'p, 'tcx> { - pats: SmallVec<[&'p Pat<'tcx>; 2]>, - /// Cache for the constructor of the head - head_ctor: OnceCell<Constructor<'tcx>>, +/// An inclusive interval, used for precise integer exhaustiveness checking. +/// `IntRange`s always store a contiguous range. This means that values are +/// encoded such that `0` encodes the minimum value for the integer, +/// regardless of the signedness. +/// For example, the pattern `-128..=127i8` is encoded as `0..=255`. +/// This makes comparisons and arithmetic on interval endpoints much more +/// straightforward. See `signed_bias` for details. +/// +/// `IntRange` is never used to encode an empty range or a "range" that wraps +/// around the (offset) space: i.e., `range.lo <= range.hi`. +#[derive(Clone, Debug)] +pub(super) struct IntRange<'tcx> { + range: RangeInclusive<u128>, + ty: Ty<'tcx>, + span: Span, } -impl<'p, 'tcx> PatStack<'p, 'tcx> { - fn from_pattern(pat: &'p Pat<'tcx>) -> Self { - Self::from_vec(smallvec![pat]) - } - - fn from_vec(vec: SmallVec<[&'p Pat<'tcx>; 2]>) -> Self { - PatStack { pats: vec, head_ctor: OnceCell::new() } +impl<'tcx> IntRange<'tcx> { + #[inline] + fn is_integral(ty: Ty<'_>) -> bool { + matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_) | ty::Bool) } - fn is_empty(&self) -> bool { - self.pats.is_empty() + fn is_singleton(&self) -> bool { + self.range.start() == self.range.end() } - fn len(&self) -> usize { - self.pats.len() + fn boundaries(&self) -> (u128, u128) { + (*self.range.start(), *self.range.end()) } - fn head(&self) -> &'p Pat<'tcx> { - self.pats[0] + /// Don't treat `usize`/`isize` exhaustively unless the `precise_pointer_size_matching` feature + /// is enabled. + fn treat_exhaustively(&self, tcx: TyCtxt<'tcx>) -> bool { + !self.ty.is_ptr_sized_integral() || tcx.features().precise_pointer_size_matching } - fn head_ctor<'a>(&'a self, cx: &MatchCheckCtxt<'p, 'tcx>) -> &'a Constructor<'tcx> { - self.head_ctor.get_or_init(|| pat_constructor(cx, self.head())) + #[inline] + fn integral_size_and_signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'_>) -> Option<(Size, u128)> { + match *ty.kind() { + ty::Bool => Some((Size::from_bytes(1), 0)), + ty::Char => Some((Size::from_bytes(4), 0)), + ty::Int(ity) => { + let size = Integer::from_attr(&tcx, SignedInt(ity)).size(); + Some((size, 1u128 << (size.bits() as u128 - 1))) + } + ty::Uint(uty) => Some((Integer::from_attr(&tcx, UnsignedInt(uty)).size(), 0)), + _ => None, + } } - fn iter(&self) -> impl Iterator<Item = &Pat<'tcx>> { - self.pats.iter().copied() + #[inline] + fn from_const( + tcx: TyCtxt<'tcx>, + param_env: ty::ParamEnv<'tcx>, + value: &Const<'tcx>, + span: Span, + ) -> Option<IntRange<'tcx>> { + if let Some((target_size, bias)) = Self::integral_size_and_signed_bias(tcx, value.ty) { + let ty = value.ty; + let val = (|| { + if let ty::ConstKind::Value(ConstValue::Scalar(scalar)) = value.val { + // For this specific pattern we can skip a lot of effort and go + // straight to the result, after doing a bit of checking. (We + // could remove this branch and just fall through, which + // is more general but much slower.) + if let Ok(bits) = scalar.to_bits_or_ptr(target_size, &tcx) { + return Some(bits); + } + } + // This is a more general form of the previous case. 
+ value.try_eval_bits(tcx, param_env, ty) + })()?; + let val = val ^ bias; + Some(IntRange { range: val..=val, ty, span }) + } else { + None + } } - // If the first pattern is an or-pattern, expand this pattern. Otherwise, return `None`. - fn expand_or_pat(&self) -> Option<Vec<Self>> { - if self.is_empty() { - None - } else if let PatKind::Or { pats } = &*self.head().kind { - Some( - pats.iter() - .map(|pat| { - let mut new_patstack = PatStack::from_pattern(pat); - new_patstack.pats.extend_from_slice(&self.pats[1..]); - new_patstack - }) - .collect(), - ) + #[inline] + fn from_range( + tcx: TyCtxt<'tcx>, + lo: u128, + hi: u128, + ty: Ty<'tcx>, + end: &RangeEnd, + span: Span, + ) -> Option<IntRange<'tcx>> { + if Self::is_integral(ty) { + // Perform a shift if the underlying types are signed, + // which makes the interval arithmetic simpler. + let bias = IntRange::signed_bias(tcx, ty); + let (lo, hi) = (lo ^ bias, hi ^ bias); + let offset = (*end == RangeEnd::Excluded) as u128; + if lo > hi || (lo == hi && *end == RangeEnd::Excluded) { + // This should have been caught earlier by E0030. + bug!("malformed range pattern: {}..={}", lo, (hi - offset)); + } + Some(IntRange { range: lo..=(hi - offset), ty, span }) } else { None } } - /// This computes `S(self.head_ctor(), self)`. See top of the file for explanations. - /// - /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing - /// fields filled with wild patterns. - /// - /// This is roughly the inverse of `Constructor::apply`. - fn pop_head_constructor(&self, ctor_wild_subpatterns: &Fields<'p, 'tcx>) -> PatStack<'p, 'tcx> { - // We pop the head pattern and push the new fields extracted from the arguments of - // `self.head()`. - let new_fields = ctor_wild_subpatterns.replace_with_pattern_arguments(self.head()); - new_fields.push_on_patstack(&self.pats[1..]) + // The return value of `signed_bias` should be XORed with an endpoint to encode/decode it. + fn signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> u128 { + match *ty.kind() { + ty::Int(ity) => { + let bits = Integer::from_attr(&tcx, SignedInt(ity)).size().bits() as u128; + 1u128 << (bits - 1) + } + _ => 0, + } } -} -impl<'p, 'tcx> Default for PatStack<'p, 'tcx> { - fn default() -> Self { - Self::from_vec(smallvec![]) + fn is_subrange(&self, other: &Self) -> bool { + other.range.start() <= self.range.start() && self.range.end() <= other.range.end() } -} -impl<'p, 'tcx> PartialEq for PatStack<'p, 'tcx> { - fn eq(&self, other: &Self) -> bool { - self.pats == other.pats + fn intersection(&self, tcx: TyCtxt<'tcx>, other: &Self) -> Option<Self> { + let ty = self.ty; + let (lo, hi) = self.boundaries(); + let (other_lo, other_hi) = other.boundaries(); + if self.treat_exhaustively(tcx) { + if lo <= other_hi && other_lo <= hi { + let span = other.span; + Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), ty, span }) + } else { + None + } + } else { + // If the range should not be treated exhaustively, fallback to checking for inclusion. 
+ if self.is_subrange(other) { Some(self.clone()) } else { None } + } } -} -impl<'p, 'tcx> FromIterator<&'p Pat<'tcx>> for PatStack<'p, 'tcx> { - fn from_iter<T>(iter: T) -> Self - where - T: IntoIterator<Item = &'p Pat<'tcx>>, - { - Self::from_vec(iter.into_iter().collect()) + fn suspicious_intersection(&self, other: &Self) -> bool { + // `false` in the following cases: + // 1 ---- // 1 ---------- // 1 ---- // 1 ---- + // 2 ---------- // 2 ---- // 2 ---- // 2 ---- + // + // The following are currently `false`, but could be `true` in the future (#64007): + // 1 --------- // 1 --------- + // 2 ---------- // 2 ---------- + // + // `true` in the following cases: + // 1 ------- // 1 ------- + // 2 -------- // 2 ------- + let (lo, hi) = self.boundaries(); + let (other_lo, other_hi) = other.boundaries(); + lo == other_hi || hi == other_lo } -} -/// A 2D matrix. -#[derive(Clone, PartialEq)] -struct Matrix<'p, 'tcx> { - patterns: Vec<PatStack<'p, 'tcx>>, -} + fn to_pat(&self, tcx: TyCtxt<'tcx>) -> Pat<'tcx> { + let (lo, hi) = self.boundaries(); -impl<'p, 'tcx> Matrix<'p, 'tcx> { - fn empty() -> Self { - Matrix { patterns: vec![] } - } + let bias = IntRange::signed_bias(tcx, self.ty); + let (lo, hi) = (lo ^ bias, hi ^ bias); - /// Pushes a new row to the matrix. If the row starts with an or-pattern, this expands it. - fn push(&mut self, row: PatStack<'p, 'tcx>) { - if let Some(rows) = row.expand_or_pat() { - for row in rows { - // We recursively expand the or-patterns of the new rows. - // This is necessary as we might have `0 | (1 | 2)` or e.g., `x @ 0 | x @ (1 | 2)`. - self.push(row) - } - } else { - self.patterns.push(row); - } - } + let ty = ty::ParamEnv::empty().and(self.ty); + let lo_const = ty::Const::from_bits(tcx, lo, ty); + let hi_const = ty::Const::from_bits(tcx, hi, ty); - /// Iterate over the first component of each row - fn heads<'a>(&'a self) -> impl Iterator<Item = &'a Pat<'tcx>> + Captures<'p> { - self.patterns.iter().map(|r| r.head()) - } + let kind = if lo == hi { + PatKind::Constant { value: lo_const } + } else { + PatKind::Range(PatRange { lo: lo_const, hi: hi_const, end: RangeEnd::Included }) + }; - /// Iterate over the first constructor of each row - fn head_ctors<'a>( - &'a self, - cx: &'a MatchCheckCtxt<'p, 'tcx>, - ) -> impl Iterator<Item = &'a Constructor<'tcx>> + Captures<'a> + Captures<'p> { - self.patterns.iter().map(move |r| r.head_ctor(cx)) + // This is a brand new pattern, so we don't reuse `self.span`. + Pat { ty: self.ty, span: DUMMY_SP, kind: Box::new(kind) } } - /// This computes `S(constructor, self)`. See top of the file for explanations. - fn specialize_constructor( + /// For exhaustive integer matching, some constructors are grouped within other constructors + /// (namely integer typed values are grouped within ranges). However, when specialising these + /// constructors, we want to be specialising for the underlying constructors (the integers), not + /// the groups (the ranges). Thus we need to split the groups up. Splitting them up naïvely would + /// mean creating a separate constructor for every single value in the range, which is clearly + /// impractical. However, observe that for some ranges of integers, the specialisation will be + /// identical across all values in that range (i.e., there are equivalence classes of ranges of + /// constructors based on their `U(S(c, P), S(c, p))` outcome). These classes are grouped by + /// the patterns that apply to them (in the matrix `P`). 
We can split the range whenever the + /// patterns that apply to that range (specifically: the patterns that *intersect* with that range) + /// change. + /// Our solution, therefore, is to split the range constructor into subranges at every single point + /// the group of intersecting patterns changes (using the method described below). + /// And voilà ! We're testing precisely those ranges that we need to, without any exhaustive matching + /// on actual integers. The nice thing about this is that the number of subranges is linear in the + /// number of rows in the matrix (i.e., the number of cases in the `match` statement), so we don't + /// need to be worried about matching over gargantuan ranges. + /// + /// Essentially, given the first column of a matrix representing ranges, looking like the following: + /// + /// |------| |----------| |-------| || + /// |-------| |-------| |----| || + /// |---------| + /// + /// We split the ranges up into equivalence classes so the ranges are no longer overlapping: + /// + /// |--|--|||-||||--||---|||-------| |-|||| || + /// + /// The logic for determining how to split the ranges is fairly straightforward: we calculate + /// boundaries for each interval range, sort them, then create constructors for each new interval + /// between every pair of boundary points. (This essentially sums up to performing the intuitive + /// merging operation depicted above.) + fn split<'p>( &self, pcx: PatCtxt<'_, 'p, 'tcx>, - ctor: &Constructor<'tcx>, - ctor_wild_subpatterns: &Fields<'p, 'tcx>, - ) -> Matrix<'p, 'tcx> { - self.patterns - .iter() - .filter(|r| ctor.is_covered_by(pcx, r.head_ctor(pcx.cx))) - .map(|r| r.pop_head_constructor(ctor_wild_subpatterns)) - .collect() - } -} + hir_id: Option<HirId>, + ) -> SmallVec<[Constructor<'tcx>; 1]> { + let ty = pcx.ty; -/// Pretty-printer for matrices of patterns, example: -/// -/// ```text -/// +++++++++++++++++++++++++++++ -/// + _ + [] + -/// +++++++++++++++++++++++++++++ -/// + true + [First] + -/// +++++++++++++++++++++++++++++ -/// + true + [Second(true)] + -/// +++++++++++++++++++++++++++++ -/// + false + [_] + -/// +++++++++++++++++++++++++++++ -/// + _ + [_, _, tail @ ..] + -/// +++++++++++++++++++++++++++++ -/// ``` -impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "\n")?; - - let Matrix { patterns: m, .. } = self; - let pretty_printed_matrix: Vec<Vec<String>> = - m.iter().map(|row| row.iter().map(|pat| format!("{:?}", pat)).collect()).collect(); - - let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0); - assert!(m.iter().all(|row| row.len() == column_count)); - let column_widths: Vec<usize> = (0..column_count) - .map(|col| pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0)) - .collect(); - - let total_width = column_widths.iter().cloned().sum::<usize>() + column_count * 3 + 1; - let br = "+".repeat(total_width); - write!(f, "{}\n", br)?; - for row in pretty_printed_matrix { - write!(f, "+")?; - for (column, pat_str) in row.into_iter().enumerate() { - write!(f, " ")?; - write!(f, "{:1$}", pat_str, column_widths[column])?; - write!(f, " +")?; - } - write!(f, "\n")?; - write!(f, "{}\n", br)?; + /// Represents a border between 2 integers. Because the intervals spanning borders + /// must be able to cover every integer, we need to be able to represent + /// 2^128 + 1 such borders. 
+ #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] + enum Border { + JustBefore(u128), + AfterMax, } - Ok(()) - } -} -impl<'p, 'tcx> FromIterator<PatStack<'p, 'tcx>> for Matrix<'p, 'tcx> { - fn from_iter<T>(iter: T) -> Self - where - T: IntoIterator<Item = PatStack<'p, 'tcx>>, - { - let mut matrix = Matrix::empty(); - for x in iter { - // Using `push` ensures we correctly expand or-patterns. - matrix.push(x); + // A function for extracting the borders of an integer interval. + fn range_borders(r: IntRange<'_>) -> impl Iterator<Item = Border> { + let (lo, hi) = r.range.into_inner(); + let from = Border::JustBefore(lo); + let to = match hi.checked_add(1) { + Some(m) => Border::JustBefore(m), + None => Border::AfterMax, + }; + vec![from, to].into_iter() } - matrix + + // Collect the span and range of all the intersecting ranges to lint on likely + // incorrect range patterns. (#63987) + let mut overlaps = vec![]; + let row_len = pcx.matrix.column_count().unwrap_or(0); + // `borders` is the set of borders between equivalence classes: each equivalence + // class lies between 2 borders. + let row_borders = pcx + .matrix + .head_ctors(pcx.cx) + .filter_map(|ctor| ctor.as_int_range()) + .filter_map(|range| { + let intersection = self.intersection(pcx.cx.tcx, &range); + let should_lint = self.suspicious_intersection(&range); + if let (Some(range), 1, true) = (&intersection, row_len, should_lint) { + // FIXME: for now, only check for overlapping ranges on simple range + // patterns. Otherwise with the current logic the following is detected + // as overlapping: + // match (10u8, true) { + // (0 ..= 125, false) => {} + // (126 ..= 255, false) => {} + // (0 ..= 255, true) => {} + // } + overlaps.push(range.clone()); + } + intersection + }) + .flat_map(range_borders); + let self_borders = range_borders(self.clone()); + let mut borders: Vec<_> = row_borders.chain(self_borders).collect(); + borders.sort_unstable(); + + self.lint_overlapping_patterns(pcx.cx.tcx, hir_id, ty, overlaps); + + // We're going to iterate through every adjacent pair of borders, making sure that + // each represents an interval of nonnegative length, and convert each such + // interval into a constructor. + borders + .array_windows() + .filter_map(|&pair| match pair { + [Border::JustBefore(n), Border::JustBefore(m)] => { + if n < m { + Some(n..=(m - 1)) + } else { + None + } + } + [Border::JustBefore(n), Border::AfterMax] => Some(n..=u128::MAX), + [Border::AfterMax, _] => None, + }) + .map(|range| IntRange { range, ty, span: pcx.span }) + .map(IntRange) + .collect() } -} -crate struct MatchCheckCtxt<'a, 'tcx> { - crate tcx: TyCtxt<'tcx>, - /// The module in which the match occurs. This is necessary for - /// checking inhabited-ness of types because whether a type is (visibly) - /// inhabited can depend on whether it was defined in the current module or - /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty - /// outside it's module and should not be matchable with an empty match - /// statement. 
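The `suspicious_intersection` / `lint_overlapping_patterns` machinery above fires when two range patterns meet at exactly one endpoint (`lo == other_hi || hi == other_lo`). A minimal sketch of the kind of match it is meant to catch is shown below; this compiles, since `OVERLAPPING_PATTERNS` is a lint rather than a hard error, and the function is illustrative only.

```
// Sketch of a match flagged by the overlapping-patterns lint: the two range
// arms share the single value `10`, so their intersection is "suspicious".
fn bucket(n: u8) -> &'static str {
    match n {
        0..=10 => "low",   // overlaps with the next arm on `10`
        10..=20 => "mid",  // `10` here can never be reached in practice
        _ => "high",
    }
}

fn main() {
    assert_eq!(bucket(10), "low");
    assert_eq!(bucket(15), "mid");
}
```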
- crate module: DefId, - crate param_env: ty::ParamEnv<'tcx>, - crate pattern_arena: &'a TypedArena<Pat<'tcx>>, -} + fn lint_overlapping_patterns( + &self, + tcx: TyCtxt<'tcx>, + hir_id: Option<HirId>, + ty: Ty<'tcx>, + overlaps: Vec<IntRange<'tcx>>, + ) { + if let (true, Some(hir_id)) = (!overlaps.is_empty(), hir_id) { + tcx.struct_span_lint_hir( + lint::builtin::OVERLAPPING_PATTERNS, + hir_id, + self.span, + |lint| { + let mut err = lint.build("multiple patterns covering the same range"); + err.span_label(self.span, "overlapping patterns"); + for int_range in overlaps { + // Use the real type for user display of the ranges: + err.span_label( + int_range.span, + &format!( + "this range overlaps on `{}`", + IntRange { range: int_range.range, ty, span: DUMMY_SP }.to_pat(tcx), + ), + ); + } + err.emit(); + }, + ); + } + } -impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> { - fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool { - if self.tcx.features().exhaustive_patterns { - self.tcx.is_ty_uninhabited_from(self.module, ty, self.param_env) + /// See `Constructor::is_covered_by` + fn is_covered_by<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool { + if self.intersection(pcx.cx.tcx, other).is_some() { + // Constructor splitting should ensure that all intersections we encounter are actually + // inclusions. + assert!(self.is_subrange(other)); + true } else { false } } +} - /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`. - fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool { - match ty.kind() { - ty::Adt(def, ..) => { - def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did.is_local() - } - _ => false, - } +/// Ignore spans when comparing, they don't carry semantic information as they are only for lints. +impl<'tcx> std::cmp::PartialEq for IntRange<'tcx> { + fn eq(&self, other: &Self) -> bool { + self.range == other.range && self.ty == other.ty } } @@ -639,7 +396,7 @@ impl SliceKind { /// A constructor for array and slice patterns. #[derive(Copy, Clone, Debug, PartialEq, Eq)] -struct Slice { +pub(super) struct Slice { /// `None` if the matched value is a slice, `Some(n)` if it is an array of size `n`. array_len: Option<u64>, /// The kind of pattern it is: fixed-length `[x, y]` or variable length `[x, .., y]`. @@ -794,7 +551,7 @@ impl Slice { /// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and /// `Fields`. #[derive(Clone, Debug, PartialEq)] -enum Constructor<'tcx> { +pub(super) enum Constructor<'tcx> { /// The constructor for patterns that have a single constructor, like tuples, struct patterns /// and fixed-length arrays. Single, @@ -820,7 +577,7 @@ enum Constructor<'tcx> { } impl<'tcx> Constructor<'tcx> { - fn is_wildcard(&self) -> bool { + pub(super) fn is_wildcard(&self) -> bool { matches!(self, Wildcard) } @@ -849,6 +606,67 @@ impl<'tcx> Constructor<'tcx> { } } + /// Determines the constructor that the given pattern can be specialized to. + pub(super) fn from_pat<'p>(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &'p Pat<'tcx>) -> Self { + match pat.kind.as_ref() { + PatKind::AscribeUserType { .. } => bug!(), // Handled by `expand_pattern` + PatKind::Binding { .. } | PatKind::Wild => Wildcard, + PatKind::Leaf { .. } | PatKind::Deref { .. } => Single, + &PatKind::Variant { adt_def, variant_index, .. 
} => { + Variant(adt_def.variants[variant_index].def_id) + } + PatKind::Constant { value } => { + if let Some(int_range) = IntRange::from_const(cx.tcx, cx.param_env, value, pat.span) + { + IntRange(int_range) + } else { + match pat.ty.kind() { + ty::Float(_) => FloatRange(value, value, RangeEnd::Included), + // In `expand_pattern`, we convert string literals to `&CONST` patterns with + // `CONST` a pattern of type `str`. In truth this contains a constant of type + // `&str`. + ty::Str => Str(value), + // All constants that can be structurally matched have already been expanded + // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are + // opaque. + _ => Opaque, + } + } + } + &PatKind::Range(PatRange { lo, hi, end }) => { + let ty = lo.ty; + if let Some(int_range) = IntRange::from_range( + cx.tcx, + lo.eval_bits(cx.tcx, cx.param_env, lo.ty), + hi.eval_bits(cx.tcx, cx.param_env, hi.ty), + ty, + &end, + pat.span, + ) { + IntRange(int_range) + } else { + FloatRange(lo, hi, end) + } + } + PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => { + let array_len = match pat.ty.kind() { + ty::Array(_, length) => Some(length.eval_usize(cx.tcx, cx.param_env)), + ty::Slice(_) => None, + _ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty), + }; + let prefix = prefix.len() as u64; + let suffix = suffix.len() as u64; + let kind = if slice.is_some() { + VarLen(prefix, suffix) + } else { + FixedLen(prefix + suffix) + }; + Slice(Slice::new(array_len, kind)) + } + PatKind::Or { .. } => bug!("Or-pattern should have been expanded earlier on."), + } + } + /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) actually stand for a set of actual /// constructors (like variants, integers or fixed-sized slices). When specializing for these /// constructors, we want to be specialising for the actual underlying constructors. @@ -865,7 +683,11 @@ impl<'tcx> Constructor<'tcx> { /// /// `hir_id` is `None` when we're evaluating the wildcard pattern. In that case we do not want /// to lint for overlapping ranges. - fn split<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, hir_id: Option<HirId>) -> SmallVec<[Self; 1]> { + pub(super) fn split<'p>( + &self, + pcx: PatCtxt<'_, 'p, 'tcx>, + hir_id: Option<HirId>, + ) -> SmallVec<[Self; 1]> { debug!("Constructor::split({:#?}, {:#?})", self, pcx.matrix); match self { @@ -907,7 +729,7 @@ impl<'tcx> Constructor<'tcx> { /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`. /// For the simple cases, this is simply checking for equality. For the "grouped" constructors, /// this checks for inclusion. - fn is_covered_by<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool { + pub(super) fn is_covered_by<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool { // This must be kept in sync with `is_covered_by_any`. match (self, other) { // Wildcards cover anything @@ -993,92 +815,204 @@ impl<'tcx> Constructor<'tcx> { } } } +} - /// Apply a constructor to a list of patterns, yielding a new pattern. `pats` - /// must have as many elements as this constructor's arity. - /// - /// This is roughly the inverse of `specialize_constructor`. 
- /// - /// Examples: - /// `self`: `Constructor::Single` - /// `ty`: `(u32, u32, u32)` - /// `pats`: `[10, 20, _]` - /// returns `(10, 20, _)` - /// - /// `self`: `Constructor::Variant(Option::Some)` - /// `ty`: `Option<bool>` - /// `pats`: `[false]` - /// returns `Some(false)` - fn apply<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, fields: Fields<'p, 'tcx>) -> Pat<'tcx> { - let mut subpatterns = fields.all_patterns(); +/// This determines the set of all possible constructors of a pattern matching +/// values of type `left_ty`. For vectors, this would normally be an infinite set +/// but is instead bounded by the maximum fixed length of slice patterns in +/// the column of patterns being analyzed. +/// +/// We make sure to omit constructors that are statically impossible. E.g., for +/// `Option<!>`, we do not include `Some(_)` in the returned list of constructors. +/// Invariant: this returns an empty `Vec` if and only if the type is uninhabited (as determined by +/// `cx.is_uninhabited()`). +fn all_constructors<'p, 'tcx>(pcx: PatCtxt<'_, 'p, 'tcx>) -> Vec<Constructor<'tcx>> { + debug!("all_constructors({:?})", pcx.ty); + let cx = pcx.cx; + let make_range = |start, end| { + IntRange( + // `unwrap()` is ok because we know the type is an integer. + IntRange::from_range(cx.tcx, start, end, pcx.ty, &RangeEnd::Included, pcx.span) + .unwrap(), + ) + }; + match pcx.ty.kind() { + ty::Bool => vec![make_range(0, 1)], + ty::Array(sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => { + let len = len.eval_usize(cx.tcx, cx.param_env); + if len != 0 && cx.is_uninhabited(sub_ty) { + vec![] + } else { + vec![Slice(Slice::new(Some(len), VarLen(0, 0)))] + } + } + // Treat arrays of a constant but unknown length like slices. + ty::Array(sub_ty, _) | ty::Slice(sub_ty) => { + let kind = if cx.is_uninhabited(sub_ty) { FixedLen(0) } else { VarLen(0, 0) }; + vec![Slice(Slice::new(None, kind))] + } + ty::Adt(def, substs) if def.is_enum() => { + // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an + // additional "unknown" constructor. + // There is no point in enumerating all possible variants, because the user can't + // actually match against them all themselves. So we always return only the fictitious + // constructor. + // E.g., in an example like: + // + // ``` + // let err: io::ErrorKind = ...; + // match err { + // io::ErrorKind::NotFound => {}, + // } + // ``` + // + // we don't want to show every possible IO error, but instead have only `_` as the + // witness. + let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty); - let pat = match self { - Single | Variant(_) => match pcx.ty.kind() { - ty::Adt(..) | ty::Tuple(..) => { - let subpatterns = subpatterns - .enumerate() - .map(|(i, p)| FieldPat { field: Field::new(i), pattern: p }) - .collect(); + // If `exhaustive_patterns` is disabled and our scrutinee is an empty enum, we treat it + // as though it had an "unknown" constructor to avoid exposing its emptiness. The + // exception is if the pattern is at the top level, because we want empty matches to be + // considered exhaustive. 
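The `#[non_exhaustive]` handling described in the comments above is what forces downstream matches to keep a wildcard arm. A minimal sketch of that user-facing effect, using `std::io::ErrorKind` as in the comment; the match body is illustrative only.

```
use std::io;

// `io::ErrorKind` is a `#[non_exhaustive]` enum from another crate, so listing
// every currently-known variant is still not exhaustive; a `_` arm is required
// because the fictitious "unknown" constructor is never considered covered.
fn describe(kind: io::ErrorKind) -> &'static str {
    match kind {
        io::ErrorKind::NotFound => "not found",
        io::ErrorKind::PermissionDenied => "permission denied",
        // Without this arm the match would be rejected as non-exhaustive.
        _ => "other",
    }
}

fn main() {
    println!("{}", describe(io::ErrorKind::NotFound));
}
```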
+ let is_secretly_empty = def.variants.is_empty() + && !cx.tcx.features().exhaustive_patterns + && !pcx.is_top_level; - if let ty::Adt(adt, substs) = pcx.ty.kind() { - if adt.is_enum() { - PatKind::Variant { - adt_def: adt, - substs, - variant_index: self.variant_index_for_adt(adt), - subpatterns, - } - } else { - PatKind::Leaf { subpatterns } - } - } else { - PatKind::Leaf { subpatterns } - } - } - // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should - // be careful to reconstruct the correct constant pattern here. However a string - // literal pattern will never be reported as a non-exhaustiveness witness, so we - // can ignore this issue. - ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, - ty::Slice(_) | ty::Array(..) => bug!("bad slice pattern {:?} {:?}", self, pcx.ty), - _ => PatKind::Wild, - }, - Slice(slice) => match slice.kind { - FixedLen(_) => { - PatKind::Slice { prefix: subpatterns.collect(), slice: None, suffix: vec![] } - } - VarLen(prefix, _) => { - let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix as usize).collect(); - if slice.array_len.is_some() { - // Improves diagnostics a bit: if the type is a known-size array, instead - // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`. - // This is incorrect if the size is not known, since `[_, ..]` captures - // arrays of lengths `>= 1` whereas `[..]` captures any length. - while !prefix.is_empty() && prefix.last().unwrap().is_wildcard() { - prefix.pop(); - } - } - let suffix: Vec<_> = if slice.array_len.is_some() { - // Same as above. - subpatterns.skip_while(Pat::is_wildcard).collect() - } else { - subpatterns.collect() - }; - let wild = Pat::wildcard_from_ty(pcx.ty); - PatKind::Slice { prefix, slice: Some(wild), suffix } - } - }, - &Str(value) => PatKind::Constant { value }, - &FloatRange(lo, hi, end) => PatKind::Range(PatRange { lo, hi, end }), - IntRange(range) => return range.to_pat(pcx.cx.tcx), - NonExhaustive => PatKind::Wild, - Opaque => bug!("we should not try to apply an opaque constructor"), - Wildcard => bug!( - "trying to apply a wildcard constructor; this should have been done in `apply_constructors`" - ), - }; + if is_secretly_empty || is_declared_nonexhaustive { + vec![NonExhaustive] + } else if cx.tcx.features().exhaustive_patterns { + // If `exhaustive_patterns` is enabled, we exclude variants known to be + // uninhabited. + def.variants + .iter() + .filter(|v| { + !v.uninhabited_from(cx.tcx, substs, def.adt_kind(), cx.param_env) + .contains(cx.tcx, cx.module) + }) + .map(|v| Variant(v.def_id)) + .collect() + } else { + def.variants.iter().map(|v| Variant(v.def_id)).collect() + } + } + ty::Char => { + vec![ + // The valid Unicode Scalar Value ranges. + make_range('\u{0000}' as u128, '\u{D7FF}' as u128), + make_range('\u{E000}' as u128, '\u{10FFFF}' as u128), + ] + } + ty::Int(_) | ty::Uint(_) + if pcx.ty.is_ptr_sized_integral() + && !cx.tcx.features().precise_pointer_size_matching => + { + // `usize`/`isize` are not allowed to be matched exhaustively unless the + // `precise_pointer_size_matching` feature is enabled. So we treat those types like + // `#[non_exhaustive]` enums by returning a special unmatcheable constructor. 
+ vec![NonExhaustive] + } + &ty::Int(ity) => { + let bits = Integer::from_attr(&cx.tcx, SignedInt(ity)).size().bits() as u128; + let min = 1u128 << (bits - 1); + let max = min - 1; + vec![make_range(min, max)] + } + &ty::Uint(uty) => { + let size = Integer::from_attr(&cx.tcx, UnsignedInt(uty)).size(); + let max = size.truncate(u128::MAX); + vec![make_range(0, max)] + } + // If `exhaustive_patterns` is disabled and our scrutinee is the never type, we cannot + // expose its emptiness. The exception is if the pattern is at the top level, because we + // want empty matches to be considered exhaustive. + ty::Never if !cx.tcx.features().exhaustive_patterns && !pcx.is_top_level => { + vec![NonExhaustive] + } + ty::Never => vec![], + _ if cx.is_uninhabited(pcx.ty) => vec![], + ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => vec![Single], + // This type is one for which we cannot list constructors, like `str` or `f64`. + _ => vec![NonExhaustive], + } +} - Pat { ty: pcx.ty, span: DUMMY_SP, kind: Box::new(pat) } +// A struct to compute a set of constructors equivalent to `all_ctors \ used_ctors`. +#[derive(Debug)] +pub(super) struct MissingConstructors<'tcx> { + all_ctors: SmallVec<[Constructor<'tcx>; 1]>, + used_ctors: Vec<Constructor<'tcx>>, +} + +impl<'tcx> MissingConstructors<'tcx> { + pub(super) fn new<'p>(pcx: PatCtxt<'_, 'p, 'tcx>) -> Self { + let used_ctors: Vec<Constructor<'_>> = + pcx.matrix.head_ctors(pcx.cx).cloned().filter(|c| !c.is_wildcard()).collect(); + // Since `all_ctors` never contains wildcards, this won't recurse further. + let all_ctors = + all_constructors(pcx).into_iter().flat_map(|ctor| ctor.split(pcx, None)).collect(); + + MissingConstructors { all_ctors, used_ctors } + } + + fn is_empty<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>) -> bool { + self.iter(pcx).next().is_none() + } + + /// Iterate over all_ctors \ used_ctors + fn iter<'a, 'p>( + &'a self, + pcx: PatCtxt<'a, 'p, 'tcx>, + ) -> impl Iterator<Item = &'a Constructor<'tcx>> + Captures<'p> { + self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.used_ctors)) + } + + /// List the patterns corresponding to the missing constructors. In some cases, instead of + /// listing all constructors of a given type, we prefer to simply report a wildcard. + pub(super) fn report_patterns<'p>( + &self, + pcx: PatCtxt<'_, 'p, 'tcx>, + ) -> SmallVec<[Pat<'tcx>; 1]> { + // There are 2 ways we can report a witness here. + // Commonly, we can report all the "free" + // constructors as witnesses, e.g., if we have: + // + // ``` + // enum Direction { N, S, E, W } + // let Direction::N = ...; + // ``` + // + // we can report 3 witnesses: `S`, `E`, and `W`. + // + // However, there is a case where we don't want + // to do this and instead report a single `_` witness: + // if the user didn't actually specify a constructor + // in this arm, e.g., in + // + // ``` + // let x: (Direction, Direction, bool) = ...; + // let (_, _, false) = x; + // ``` + // + // we don't want to show all 16 possible witnesses + // `(<direction-1>, <direction-2>, true)` - we are + // satisfied with `(_, _, true)`. In this case, + // `used_ctors` is empty. + // The exception is: if we are at the top-level, for example in an empty match, we + // sometimes prefer reporting the list of constructors instead of just `_`. + let report_when_all_missing = pcx.is_top_level && !IntRange::is_integral(pcx.ty); + if self.used_ctors.is_empty() && !report_when_all_missing { + // All constructors are unused. 
Report only a wildcard + // rather than each individual constructor. + smallvec![Pat::wildcard_from_ty(pcx.ty)] + } else { + // Construct for each missing constructor a "wild" version of this + // constructor, that matches everything that can be built with + // it. For example, if `ctor` is a `Constructor::Variant` for + // `Option::Some`, we get the pattern `Some(_)`. + self.iter(pcx) + .map(|missing_ctor| Fields::wildcards(pcx, &missing_ctor).apply(pcx, missing_ctor)) + .collect() + } } } @@ -1086,7 +1020,7 @@ impl<'tcx> Constructor<'tcx> { /// `Fields` struct. This struct represents such a potentially-hidden field. When a field is hidden /// we still keep its type around. #[derive(Debug, Copy, Clone)] -enum FilteredField<'p, 'tcx> { +pub(super) enum FilteredField<'p, 'tcx> { Kept(&'p Pat<'tcx>), Hidden(Ty<'tcx>), } @@ -1119,7 +1053,7 @@ impl<'p, 'tcx> FilteredField<'p, 'tcx> { /// This filtering is uncommon in practice, because uninhabited fields are rarely used, so we avoid /// it when possible to preserve performance. #[derive(Debug, Clone)] -enum Fields<'p, 'tcx> { +pub(super) enum Fields<'p, 'tcx> { /// Lists of patterns that don't contain any filtered fields. /// `Slice` and `Vec` behave the same; the difference is only to avoid allocating and /// triple-dereferences when possible. Frankly this is premature optimization, I (Nadrieril) @@ -1156,7 +1090,7 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { } /// Creates a new list of wildcard fields for a given constructor. - fn wildcards(pcx: PatCtxt<'_, 'p, 'tcx>, constructor: &Constructor<'tcx>) -> Self { + pub(super) fn wildcards(pcx: PatCtxt<'_, 'p, 'tcx>, constructor: &Constructor<'tcx>) -> Self { let ty = pcx.ty; let cx = pcx.cx; let wildcard_from_ty = |ty| &*cx.pattern_arena.alloc(Pat::wildcard_from_ty(ty)); @@ -1228,10 +1162,97 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { ret } + /// Apply a constructor to a list of patterns, yielding a new pattern. `self` + /// must have as many elements as this constructor's arity. + /// + /// This is roughly the inverse of `specialize_constructor`. + /// + /// Examples: + /// `ctor`: `Constructor::Single` + /// `ty`: `Foo(u32, u32, u32)` + /// `self`: `[10, 20, _]` + /// returns `Foo(10, 20, _)` + /// + /// `ctor`: `Constructor::Variant(Option::Some)` + /// `ty`: `Option<bool>` + /// `self`: `[false]` + /// returns `Some(false)` + pub(super) fn apply(self, pcx: PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Pat<'tcx> { + let mut subpatterns = self.all_patterns(); + + let pat = match ctor { + Single | Variant(_) => match pcx.ty.kind() { + ty::Adt(..) | ty::Tuple(..) => { + let subpatterns = subpatterns + .enumerate() + .map(|(i, p)| FieldPat { field: Field::new(i), pattern: p }) + .collect(); + + if let ty::Adt(adt, substs) = pcx.ty.kind() { + if adt.is_enum() { + PatKind::Variant { + adt_def: adt, + substs, + variant_index: ctor.variant_index_for_adt(adt), + subpatterns, + } + } else { + PatKind::Leaf { subpatterns } + } + } else { + PatKind::Leaf { subpatterns } + } + } + // Note: given the expansion of `&str` patterns done in `expand_pattern`, we should + // be careful to reconstruct the correct constant pattern here. However a string + // literal pattern will never be reported as a non-exhaustiveness witness, so we + // can ignore this issue. + ty::Ref(..) => PatKind::Deref { subpattern: subpatterns.next().unwrap() }, + ty::Slice(_) | ty::Array(..) 
=> bug!("bad slice pattern {:?} {:?}", ctor, pcx.ty), + _ => PatKind::Wild, + }, + Slice(slice) => match slice.kind { + FixedLen(_) => { + PatKind::Slice { prefix: subpatterns.collect(), slice: None, suffix: vec![] } + } + VarLen(prefix, _) => { + let mut prefix: Vec<_> = subpatterns.by_ref().take(prefix as usize).collect(); + if slice.array_len.is_some() { + // Improves diagnostics a bit: if the type is a known-size array, instead + // of reporting `[x, _, .., _, y]`, we prefer to report `[x, .., y]`. + // This is incorrect if the size is not known, since `[_, ..]` captures + // arrays of lengths `>= 1` whereas `[..]` captures any length. + while !prefix.is_empty() && prefix.last().unwrap().is_wildcard() { + prefix.pop(); + } + } + let suffix: Vec<_> = if slice.array_len.is_some() { + // Same as above. + subpatterns.skip_while(Pat::is_wildcard).collect() + } else { + subpatterns.collect() + }; + let wild = Pat::wildcard_from_ty(pcx.ty); + PatKind::Slice { prefix, slice: Some(wild), suffix } + } + }, + &Str(value) => PatKind::Constant { value }, + &FloatRange(lo, hi, end) => PatKind::Range(PatRange { lo, hi, end }), + IntRange(range) => return range.to_pat(pcx.cx.tcx), + NonExhaustive => PatKind::Wild, + Opaque => bug!("we should not try to apply an opaque constructor"), + Wildcard => bug!( + "trying to apply a wildcard constructor; this should have been done in `apply_constructors`" + ), + }; + + Pat { ty: pcx.ty, span: DUMMY_SP, kind: Box::new(pat) } + } + /// Returns the number of patterns from the viewpoint of match-checking, i.e. excluding hidden /// fields. This is what we want in most cases in this file, the only exception being /// conversion to/from `Pat`. - fn len(&self) -> usize { + pub(super) fn len(&self) -> usize { match self { Fields::Slice(pats) => pats.len(), Fields::Vec(pats) => pats.len(), @@ -1252,6 +1273,18 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { pats.into_iter() } + /// Returns the filtered list of patterns, not including hidden fields. + pub(super) fn filtered_patterns(self) -> SmallVec<[&'p Pat<'tcx>; 2]> { + match self { + Fields::Slice(pats) => pats.iter().collect(), + Fields::Vec(pats) => pats, + Fields::Filtered { fields, .. } => { + // We skip hidden fields here + fields.into_iter().filter_map(|p| p.kept()).collect() + } + } + } + /// Overrides some of the fields with the provided patterns. Exactly like /// `replace_fields_indexed`, except that it takes `FieldPat`s as input. fn replace_with_fieldpats( @@ -1297,7 +1330,7 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { /// Replaces contained fields with the given filtered list of patterns, e.g. taken from the /// matrix. There must be `len()` patterns in `pats`. - fn replace_fields( + pub(super) fn replace_fields( &self, cx: &MatchCheckCtxt<'p, 'tcx>, pats: impl IntoIterator<Item = Pat<'tcx>>, @@ -1335,7 +1368,7 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { /// } /// ``` /// This is guaranteed to preserve the number of patterns in `self`. 
- fn replace_with_pattern_arguments(&self, pat: &'p Pat<'tcx>) -> Self { + pub(super) fn replace_with_pattern_arguments(&self, pat: &'p Pat<'tcx>) -> Self { match pat.kind.as_ref() { PatKind::Deref { subpattern } => { assert_eq!(self.len(), 1); @@ -1358,1005 +1391,4 @@ impl<'p, 'tcx> Fields<'p, 'tcx> { _ => self.clone(), } } - - fn push_on_patstack(self, stack: &[&'p Pat<'tcx>]) -> PatStack<'p, 'tcx> { - let pats: SmallVec<_> = match self { - Fields::Slice(pats) => pats.iter().chain(stack.iter().copied()).collect(), - Fields::Vec(mut pats) => { - pats.extend_from_slice(stack); - pats - } - Fields::Filtered { fields, .. } => { - // We skip hidden fields here - fields.into_iter().filter_map(|p| p.kept()).chain(stack.iter().copied()).collect() - } - }; - PatStack::from_vec(pats) - } -} - -#[derive(Clone, Debug)] -crate enum Usefulness<'tcx> { - /// Carries, for each column in the matrix, a set of sub-branches that have been found to be - /// unreachable. Used only in the presence of or-patterns, otherwise it stays empty. - Useful(Vec<FxHashSet<Span>>), - /// Carries a list of witnesses of non-exhaustiveness. - UsefulWithWitness(Vec<Witness<'tcx>>), - NotUseful, -} - -impl<'tcx> Usefulness<'tcx> { - fn new_useful(preference: WitnessPreference) -> Self { - match preference { - ConstructWitness => UsefulWithWitness(vec![Witness(vec![])]), - LeaveOutWitness => Useful(vec![]), - } - } - - fn is_useful(&self) -> bool { - !matches!(*self, NotUseful) - } - - fn apply_constructor<'p>( - self, - pcx: PatCtxt<'_, 'p, 'tcx>, - ctor: &Constructor<'tcx>, - ctor_wild_subpatterns: &Fields<'p, 'tcx>, - ) -> Self { - match self { - UsefulWithWitness(witnesses) => { - let new_witnesses = if ctor.is_wildcard() { - let missing_ctors = MissingConstructors::new(pcx); - let new_patterns = missing_ctors.report_patterns(pcx); - witnesses - .into_iter() - .flat_map(|witness| { - new_patterns.iter().map(move |pat| { - let mut witness = witness.clone(); - witness.0.push(pat.clone()); - witness - }) - }) - .collect() - } else { - witnesses - .into_iter() - .map(|witness| witness.apply_constructor(pcx, &ctor, ctor_wild_subpatterns)) - .collect() - }; - UsefulWithWitness(new_witnesses) - } - Useful(mut unreachables) => { - if !unreachables.is_empty() { - // When we apply a constructor, there are `arity` columns of the matrix that - // corresponded to its arguments. All the unreachables found in these columns - // will, after `apply`, come from the first column. So we take the union of all - // the corresponding sets and put them in the first column. - // Note that `arity` may be 0, in which case we just push a new empty set. - let len = unreachables.len(); - let arity = ctor_wild_subpatterns.len(); - let mut unioned = FxHashSet::default(); - for set in unreachables.drain((len - arity)..) { - unioned.extend(set) - } - unreachables.push(unioned); - } - Useful(unreachables) - } - x => x, - } - } -} - -#[derive(Copy, Clone, Debug)] -enum WitnessPreference { - ConstructWitness, - LeaveOutWitness, -} - -#[derive(Copy, Clone)] -struct PatCtxt<'a, 'p, 'tcx> { - cx: &'a MatchCheckCtxt<'p, 'tcx>, - /// Current state of the matrix. - matrix: &'a Matrix<'p, 'tcx>, - /// Type of the current column under investigation. - ty: Ty<'tcx>, - /// Span of the current pattern under investigation. - span: Span, - /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a - /// subpattern. 
- is_top_level: bool, -} - -/// A witness of non-exhaustiveness for error reporting, represented -/// as a list of patterns (in reverse order of construction) with -/// wildcards inside to represent elements that can take any inhabitant -/// of the type as a value. -/// -/// A witness against a list of patterns should have the same types -/// and length as the pattern matched against. Because Rust `match` -/// is always against a single pattern, at the end the witness will -/// have length 1, but in the middle of the algorithm, it can contain -/// multiple patterns. -/// -/// For example, if we are constructing a witness for the match against -/// -/// ``` -/// struct Pair(Option<(u32, u32)>, bool); -/// -/// match (p: Pair) { -/// Pair(None, _) => {} -/// Pair(_, false) => {} -/// } -/// ``` -/// -/// We'll perform the following steps: -/// 1. Start with an empty witness -/// `Witness(vec![])` -/// 2. Push a witness `Some(_)` against the `None` -/// `Witness(vec![Some(_)])` -/// 3. Push a witness `true` against the `false` -/// `Witness(vec![Some(_), true])` -/// 4. Apply the `Pair` constructor to the witnesses -/// `Witness(vec![Pair(Some(_), true)])` -/// -/// The final `Pair(Some(_), true)` is then the resulting witness. -#[derive(Clone, Debug)] -crate struct Witness<'tcx>(Vec<Pat<'tcx>>); - -impl<'tcx> Witness<'tcx> { - /// Asserts that the witness contains a single pattern, and returns it. - fn single_pattern(self) -> Pat<'tcx> { - assert_eq!(self.0.len(), 1); - self.0.into_iter().next().unwrap() - } - - /// Constructs a partial witness for a pattern given a list of - /// patterns expanded by the specialization step. - /// - /// When a pattern P is discovered to be useful, this function is used bottom-up - /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset - /// of values, V, where each value in that set is not covered by any previously - /// used patterns and is covered by the pattern P'. Examples: - /// - /// left_ty: tuple of 3 elements - /// pats: [10, 20, _] => (10, 20, _) - /// - /// left_ty: struct X { a: (bool, &'static str), b: usize} - /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 } - fn apply_constructor<'p>( - mut self, - pcx: PatCtxt<'_, 'p, 'tcx>, - ctor: &Constructor<'tcx>, - ctor_wild_subpatterns: &Fields<'p, 'tcx>, - ) -> Self { - let pat = { - let len = self.0.len(); - let arity = ctor_wild_subpatterns.len(); - let pats = self.0.drain((len - arity)..).rev(); - let fields = ctor_wild_subpatterns.replace_fields(pcx.cx, pats); - ctor.apply(pcx, fields) - }; - - self.0.push(pat); - - self - } -} - -/// This determines the set of all possible constructors of a pattern matching -/// values of type `left_ty`. For vectors, this would normally be an infinite set -/// but is instead bounded by the maximum fixed length of slice patterns in -/// the column of patterns being analyzed. -/// -/// We make sure to omit constructors that are statically impossible. E.g., for -/// `Option<!>`, we do not include `Some(_)` in the returned list of constructors. -/// Invariant: this returns an empty `Vec` if and only if the type is uninhabited (as determined by -/// `cx.is_uninhabited()`). -fn all_constructors<'p, 'tcx>(pcx: PatCtxt<'_, 'p, 'tcx>) -> Vec<Constructor<'tcx>> { - debug!("all_constructors({:?})", pcx.ty); - let cx = pcx.cx; - let make_range = |start, end| { - IntRange( - // `unwrap()` is ok because we know the type is an integer. 
- IntRange::from_range(cx.tcx, start, end, pcx.ty, &RangeEnd::Included, pcx.span) - .unwrap(), - ) - }; - match pcx.ty.kind() { - ty::Bool => vec![make_range(0, 1)], - ty::Array(sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => { - let len = len.eval_usize(cx.tcx, cx.param_env); - if len != 0 && cx.is_uninhabited(sub_ty) { - vec![] - } else { - vec![Slice(Slice::new(Some(len), VarLen(0, 0)))] - } - } - // Treat arrays of a constant but unknown length like slices. - ty::Array(sub_ty, _) | ty::Slice(sub_ty) => { - let kind = if cx.is_uninhabited(sub_ty) { FixedLen(0) } else { VarLen(0, 0) }; - vec![Slice(Slice::new(None, kind))] - } - ty::Adt(def, substs) if def.is_enum() => { - // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an - // additional "unknown" constructor. - // There is no point in enumerating all possible variants, because the user can't - // actually match against them all themselves. So we always return only the fictitious - // constructor. - // E.g., in an example like: - // - // ``` - // let err: io::ErrorKind = ...; - // match err { - // io::ErrorKind::NotFound => {}, - // } - // ``` - // - // we don't want to show every possible IO error, but instead have only `_` as the - // witness. - let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty); - - // If `exhaustive_patterns` is disabled and our scrutinee is an empty enum, we treat it - // as though it had an "unknown" constructor to avoid exposing its emptiness. The - // exception is if the pattern is at the top level, because we want empty matches to be - // considered exhaustive. - let is_secretly_empty = def.variants.is_empty() - && !cx.tcx.features().exhaustive_patterns - && !pcx.is_top_level; - - if is_secretly_empty || is_declared_nonexhaustive { - vec![NonExhaustive] - } else if cx.tcx.features().exhaustive_patterns { - // If `exhaustive_patterns` is enabled, we exclude variants known to be - // uninhabited. - def.variants - .iter() - .filter(|v| { - !v.uninhabited_from(cx.tcx, substs, def.adt_kind(), cx.param_env) - .contains(cx.tcx, cx.module) - }) - .map(|v| Variant(v.def_id)) - .collect() - } else { - def.variants.iter().map(|v| Variant(v.def_id)).collect() - } - } - ty::Char => { - vec![ - // The valid Unicode Scalar Value ranges. - make_range('\u{0000}' as u128, '\u{D7FF}' as u128), - make_range('\u{E000}' as u128, '\u{10FFFF}' as u128), - ] - } - ty::Int(_) | ty::Uint(_) - if pcx.ty.is_ptr_sized_integral() - && !cx.tcx.features().precise_pointer_size_matching => - { - // `usize`/`isize` are not allowed to be matched exhaustively unless the - // `precise_pointer_size_matching` feature is enabled. So we treat those types like - // `#[non_exhaustive]` enums by returning a special unmatcheable constructor. - vec![NonExhaustive] - } - &ty::Int(ity) => { - let bits = Integer::from_attr(&cx.tcx, SignedInt(ity)).size().bits() as u128; - let min = 1u128 << (bits - 1); - let max = min - 1; - vec![make_range(min, max)] - } - &ty::Uint(uty) => { - let size = Integer::from_attr(&cx.tcx, UnsignedInt(uty)).size(); - let max = size.truncate(u128::MAX); - vec![make_range(0, max)] - } - // If `exhaustive_patterns` is disabled and our scrutinee is the never type, we cannot - // expose its emptiness. The exception is if the pattern is at the top level, because we - // want empty matches to be considered exhaustive. 
- ty::Never if !cx.tcx.features().exhaustive_patterns && !pcx.is_top_level => { - vec![NonExhaustive] - } - ty::Never => vec![], - _ if cx.is_uninhabited(pcx.ty) => vec![], - ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => vec![Single], - // This type is one for which we cannot list constructors, like `str` or `f64`. - _ => vec![NonExhaustive], - } -} - -/// An inclusive interval, used for precise integer exhaustiveness checking. -/// `IntRange`s always store a contiguous range. This means that values are -/// encoded such that `0` encodes the minimum value for the integer, -/// regardless of the signedness. -/// For example, the pattern `-128..=127i8` is encoded as `0..=255`. -/// This makes comparisons and arithmetic on interval endpoints much more -/// straightforward. See `signed_bias` for details. -/// -/// `IntRange` is never used to encode an empty range or a "range" that wraps -/// around the (offset) space: i.e., `range.lo <= range.hi`. -#[derive(Clone, Debug)] -struct IntRange<'tcx> { - range: RangeInclusive<u128>, - ty: Ty<'tcx>, - span: Span, -} - -impl<'tcx> IntRange<'tcx> { - #[inline] - fn is_integral(ty: Ty<'_>) -> bool { - matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_) | ty::Bool) - } - - fn is_singleton(&self) -> bool { - self.range.start() == self.range.end() - } - - fn boundaries(&self) -> (u128, u128) { - (*self.range.start(), *self.range.end()) - } - - /// Don't treat `usize`/`isize` exhaustively unless the `precise_pointer_size_matching` feature - /// is enabled. - fn treat_exhaustively(&self, tcx: TyCtxt<'tcx>) -> bool { - !self.ty.is_ptr_sized_integral() || tcx.features().precise_pointer_size_matching - } - - #[inline] - fn integral_size_and_signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'_>) -> Option<(Size, u128)> { - match *ty.kind() { - ty::Bool => Some((Size::from_bytes(1), 0)), - ty::Char => Some((Size::from_bytes(4), 0)), - ty::Int(ity) => { - let size = Integer::from_attr(&tcx, SignedInt(ity)).size(); - Some((size, 1u128 << (size.bits() as u128 - 1))) - } - ty::Uint(uty) => Some((Integer::from_attr(&tcx, UnsignedInt(uty)).size(), 0)), - _ => None, - } - } - - #[inline] - fn from_const( - tcx: TyCtxt<'tcx>, - param_env: ty::ParamEnv<'tcx>, - value: &Const<'tcx>, - span: Span, - ) -> Option<IntRange<'tcx>> { - if let Some((target_size, bias)) = Self::integral_size_and_signed_bias(tcx, value.ty) { - let ty = value.ty; - let val = (|| { - if let ty::ConstKind::Value(ConstValue::Scalar(scalar)) = value.val { - // For this specific pattern we can skip a lot of effort and go - // straight to the result, after doing a bit of checking. (We - // could remove this branch and just fall through, which - // is more general but much slower.) - if let Ok(bits) = scalar.to_bits_or_ptr(target_size, &tcx) { - return Some(bits); - } - } - // This is a more general form of the previous case. - value.try_eval_bits(tcx, param_env, ty) - })()?; - let val = val ^ bias; - Some(IntRange { range: val..=val, ty, span }) - } else { - None - } - } - - #[inline] - fn from_range( - tcx: TyCtxt<'tcx>, - lo: u128, - hi: u128, - ty: Ty<'tcx>, - end: &RangeEnd, - span: Span, - ) -> Option<IntRange<'tcx>> { - if Self::is_integral(ty) { - // Perform a shift if the underlying types are signed, - // which makes the interval arithmetic simpler. - let bias = IntRange::signed_bias(tcx, ty); - let (lo, hi) = (lo ^ bias, hi ^ bias); - let offset = (*end == RangeEnd::Excluded) as u128; - if lo > hi || (lo == hi && *end == RangeEnd::Excluded) { - // This should have been caught earlier by E0030. 
- bug!("malformed range pattern: {}..={}", lo, (hi - offset)); - } - Some(IntRange { range: lo..=(hi - offset), ty, span }) - } else { - None - } - } - - // The return value of `signed_bias` should be XORed with an endpoint to encode/decode it. - fn signed_bias(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> u128 { - match *ty.kind() { - ty::Int(ity) => { - let bits = Integer::from_attr(&tcx, SignedInt(ity)).size().bits() as u128; - 1u128 << (bits - 1) - } - _ => 0, - } - } - - fn is_subrange(&self, other: &Self) -> bool { - other.range.start() <= self.range.start() && self.range.end() <= other.range.end() - } - - fn intersection(&self, tcx: TyCtxt<'tcx>, other: &Self) -> Option<Self> { - let ty = self.ty; - let (lo, hi) = self.boundaries(); - let (other_lo, other_hi) = other.boundaries(); - if self.treat_exhaustively(tcx) { - if lo <= other_hi && other_lo <= hi { - let span = other.span; - Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), ty, span }) - } else { - None - } - } else { - // If the range should not be treated exhaustively, fallback to checking for inclusion. - if self.is_subrange(other) { Some(self.clone()) } else { None } - } - } - - fn suspicious_intersection(&self, other: &Self) -> bool { - // `false` in the following cases: - // 1 ---- // 1 ---------- // 1 ---- // 1 ---- - // 2 ---------- // 2 ---- // 2 ---- // 2 ---- - // - // The following are currently `false`, but could be `true` in the future (#64007): - // 1 --------- // 1 --------- - // 2 ---------- // 2 ---------- - // - // `true` in the following cases: - // 1 ------- // 1 ------- - // 2 -------- // 2 ------- - let (lo, hi) = self.boundaries(); - let (other_lo, other_hi) = other.boundaries(); - lo == other_hi || hi == other_lo - } - - fn to_pat(&self, tcx: TyCtxt<'tcx>) -> Pat<'tcx> { - let (lo, hi) = self.boundaries(); - - let bias = IntRange::signed_bias(tcx, self.ty); - let (lo, hi) = (lo ^ bias, hi ^ bias); - - let ty = ty::ParamEnv::empty().and(self.ty); - let lo_const = ty::Const::from_bits(tcx, lo, ty); - let hi_const = ty::Const::from_bits(tcx, hi, ty); - - let kind = if lo == hi { - PatKind::Constant { value: lo_const } - } else { - PatKind::Range(PatRange { lo: lo_const, hi: hi_const, end: RangeEnd::Included }) - }; - - // This is a brand new pattern, so we don't reuse `self.span`. - Pat { ty: self.ty, span: DUMMY_SP, kind: Box::new(kind) } - } - - /// For exhaustive integer matching, some constructors are grouped within other constructors - /// (namely integer typed values are grouped within ranges). However, when specialising these - /// constructors, we want to be specialising for the underlying constructors (the integers), not - /// the groups (the ranges). Thus we need to split the groups up. Splitting them up naïvely would - /// mean creating a separate constructor for every single value in the range, which is clearly - /// impractical. However, observe that for some ranges of integers, the specialisation will be - /// identical across all values in that range (i.e., there are equivalence classes of ranges of - /// constructors based on their `U(S(c, P), S(c, p))` outcome). These classes are grouped by - /// the patterns that apply to them (in the matrix `P`). We can split the range whenever the - /// patterns that apply to that range (specifically: the patterns that *intersect* with that range) - /// change. - /// Our solution, therefore, is to split the range constructor into subranges at every single point - /// the group of intersecting patterns changes (using the method described below). 
- /// And voilà ! We're testing precisely those ranges that we need to, without any exhaustive matching - /// on actual integers. The nice thing about this is that the number of subranges is linear in the - /// number of rows in the matrix (i.e., the number of cases in the `match` statement), so we don't - /// need to be worried about matching over gargantuan ranges. - /// - /// Essentially, given the first column of a matrix representing ranges, looking like the following: - /// - /// |------| |----------| |-------| || - /// |-------| |-------| |----| || - /// |---------| - /// - /// We split the ranges up into equivalence classes so the ranges are no longer overlapping: - /// - /// |--|--|||-||||--||---|||-------| |-|||| || - /// - /// The logic for determining how to split the ranges is fairly straightforward: we calculate - /// boundaries for each interval range, sort them, then create constructors for each new interval - /// between every pair of boundary points. (This essentially sums up to performing the intuitive - /// merging operation depicted above.) - fn split<'p>( - &self, - pcx: PatCtxt<'_, 'p, 'tcx>, - hir_id: Option<HirId>, - ) -> SmallVec<[Constructor<'tcx>; 1]> { - let ty = pcx.ty; - - /// Represents a border between 2 integers. Because the intervals spanning borders - /// must be able to cover every integer, we need to be able to represent - /// 2^128 + 1 such borders. - #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] - enum Border { - JustBefore(u128), - AfterMax, - } - - // A function for extracting the borders of an integer interval. - fn range_borders(r: IntRange<'_>) -> impl Iterator<Item = Border> { - let (lo, hi) = r.range.into_inner(); - let from = Border::JustBefore(lo); - let to = match hi.checked_add(1) { - Some(m) => Border::JustBefore(m), - None => Border::AfterMax, - }; - vec![from, to].into_iter() - } - - // Collect the span and range of all the intersecting ranges to lint on likely - // incorrect range patterns. (#63987) - let mut overlaps = vec![]; - let row_len = pcx.matrix.patterns.get(0).map(|r| r.len()).unwrap_or(0); - // `borders` is the set of borders between equivalence classes: each equivalence - // class lies between 2 borders. - let row_borders = pcx - .matrix - .head_ctors(pcx.cx) - .filter_map(|ctor| ctor.as_int_range()) - .filter_map(|range| { - let intersection = self.intersection(pcx.cx.tcx, &range); - let should_lint = self.suspicious_intersection(&range); - if let (Some(range), 1, true) = (&intersection, row_len, should_lint) { - // FIXME: for now, only check for overlapping ranges on simple range - // patterns. Otherwise with the current logic the following is detected - // as overlapping: - // match (10u8, true) { - // (0 ..= 125, false) => {} - // (126 ..= 255, false) => {} - // (0 ..= 255, true) => {} - // } - overlaps.push(range.clone()); - } - intersection - }) - .flat_map(range_borders); - let self_borders = range_borders(self.clone()); - let mut borders: Vec<_> = row_borders.chain(self_borders).collect(); - borders.sort_unstable(); - - self.lint_overlapping_patterns(pcx.cx.tcx, hir_id, ty, overlaps); - - // We're going to iterate through every adjacent pair of borders, making sure that - // each represents an interval of nonnegative length, and convert each such - // interval into a constructor. 
- borders - .array_windows() - .filter_map(|&pair| match pair { - [Border::JustBefore(n), Border::JustBefore(m)] => { - if n < m { - Some(n..=(m - 1)) - } else { - None - } - } - [Border::JustBefore(n), Border::AfterMax] => Some(n..=u128::MAX), - [Border::AfterMax, _] => None, - }) - .map(|range| IntRange { range, ty, span: pcx.span }) - .map(IntRange) - .collect() - } - - fn lint_overlapping_patterns( - &self, - tcx: TyCtxt<'tcx>, - hir_id: Option<HirId>, - ty: Ty<'tcx>, - overlaps: Vec<IntRange<'tcx>>, - ) { - if let (true, Some(hir_id)) = (!overlaps.is_empty(), hir_id) { - tcx.struct_span_lint_hir( - lint::builtin::OVERLAPPING_PATTERNS, - hir_id, - self.span, - |lint| { - let mut err = lint.build("multiple patterns covering the same range"); - err.span_label(self.span, "overlapping patterns"); - for int_range in overlaps { - // Use the real type for user display of the ranges: - err.span_label( - int_range.span, - &format!( - "this range overlaps on `{}`", - IntRange { range: int_range.range, ty, span: DUMMY_SP }.to_pat(tcx), - ), - ); - } - err.emit(); - }, - ); - } - } - - /// See `Constructor::is_covered_by` - fn is_covered_by<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool { - if self.intersection(pcx.cx.tcx, other).is_some() { - // Constructor splitting should ensure that all intersections we encounter are actually - // inclusions. - assert!(self.is_subrange(other)); - true - } else { - false - } - } -} - -/// Ignore spans when comparing, they don't carry semantic information as they are only for lints. -impl<'tcx> std::cmp::PartialEq for IntRange<'tcx> { - fn eq(&self, other: &Self) -> bool { - self.range == other.range && self.ty == other.ty - } -} - -// A struct to compute a set of constructors equivalent to `all_ctors \ used_ctors`. -#[derive(Debug)] -struct MissingConstructors<'tcx> { - all_ctors: SmallVec<[Constructor<'tcx>; 1]>, - used_ctors: Vec<Constructor<'tcx>>, -} - -impl<'tcx> MissingConstructors<'tcx> { - fn new<'p>(pcx: PatCtxt<'_, 'p, 'tcx>) -> Self { - let used_ctors: Vec<Constructor<'_>> = - pcx.matrix.head_ctors(pcx.cx).cloned().filter(|c| !c.is_wildcard()).collect(); - // Since `all_ctors` never contains wildcards, this won't recurse further. - let all_ctors = - all_constructors(pcx).into_iter().flat_map(|ctor| ctor.split(pcx, None)).collect(); - - MissingConstructors { all_ctors, used_ctors } - } - - fn is_empty<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>) -> bool { - self.iter(pcx).next().is_none() - } - - /// Iterate over all_ctors \ used_ctors - fn iter<'a, 'p>( - &'a self, - pcx: PatCtxt<'a, 'p, 'tcx>, - ) -> impl Iterator<Item = &'a Constructor<'tcx>> + Captures<'p> { - self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.used_ctors)) - } - - /// List the patterns corresponding to the missing constructors. In some cases, instead of - /// listing all constructors of a given type, we prefer to simply report a wildcard. - fn report_patterns<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>) -> SmallVec<[Pat<'tcx>; 1]> { - // There are 2 ways we can report a witness here. - // Commonly, we can report all the "free" - // constructors as witnesses, e.g., if we have: - // - // ``` - // enum Direction { N, S, E, W } - // let Direction::N = ...; - // ``` - // - // we can report 3 witnesses: `S`, `E`, and `W`. 
- // - // However, there is a case where we don't want - // to do this and instead report a single `_` witness: - // if the user didn't actually specify a constructor - // in this arm, e.g., in - // - // ``` - // let x: (Direction, Direction, bool) = ...; - // let (_, _, false) = x; - // ``` - // - // we don't want to show all 16 possible witnesses - // `(<direction-1>, <direction-2>, true)` - we are - // satisfied with `(_, _, true)`. In this case, - // `used_ctors` is empty. - // The exception is: if we are at the top-level, for example in an empty match, we - // sometimes prefer reporting the list of constructors instead of just `_`. - let report_when_all_missing = pcx.is_top_level && !IntRange::is_integral(pcx.ty); - if self.used_ctors.is_empty() && !report_when_all_missing { - // All constructors are unused. Report only a wildcard - // rather than each individual constructor. - smallvec![Pat::wildcard_from_ty(pcx.ty)] - } else { - // Construct for each missing constructor a "wild" version of this - // constructor, that matches everything that can be built with - // it. For example, if `ctor` is a `Constructor::Variant` for - // `Option::Some`, we get the pattern `Some(_)`. - self.iter(pcx) - .map(|missing_ctor| { - let fields = Fields::wildcards(pcx, &missing_ctor); - missing_ctor.apply(pcx, fields) - }) - .collect() - } - } -} - -/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>. -/// The algorithm from the paper has been modified to correctly handle empty -/// types. The changes are: -/// (0) We don't exit early if the pattern matrix has zero rows. We just -/// continue to recurse over columns. -/// (1) all_constructors will only return constructors that are statically -/// possible. E.g., it will only return `Ok` for `Result<T, !>`. -/// -/// This finds whether a (row) vector `v` of patterns is 'useful' in relation -/// to a set of such vectors `m` - this is defined as there being a set of -/// inputs that will match `v` but not any of the sets in `m`. -/// -/// All the patterns at each column of the `matrix ++ v` matrix must have the same type. -/// -/// This is used both for reachability checking (if a pattern isn't useful in -/// relation to preceding patterns, it is not reachable) and exhaustiveness -/// checking (if a wildcard pattern is useful in relation to a matrix, the -/// matrix isn't exhaustive). -/// -/// `is_under_guard` is used to inform if the pattern has a guard. If it -/// has one it must not be inserted into the matrix. This shouldn't be -/// relied on for soundness. -fn is_useful<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - matrix: &Matrix<'p, 'tcx>, - v: &PatStack<'p, 'tcx>, - witness_preference: WitnessPreference, - hir_id: HirId, - is_under_guard: bool, - is_top_level: bool, -) -> Usefulness<'tcx> { - let Matrix { patterns: rows, .. } = matrix; - debug!("is_useful({:#?}, {:#?})", matrix, v); - - // The base case. We are pattern-matching on () and the return value is - // based on whether our matrix has a row or not. - // NOTE: This could potentially be optimized by checking rows.is_empty() - // first and then, if v is non-empty, the return value is based on whether - // the type of the tuple we're checking is inhabited or not. - if v.is_empty() { - return if rows.is_empty() { - Usefulness::new_useful(witness_preference) - } else { - NotUseful - }; - }; - - assert!(rows.iter().all(|r| r.len() == v.len())); - - // If the first pattern is an or-pattern, expand it. 
- if let Some(vs) = v.expand_or_pat() { - // We expand the or pattern, trying each of its branches in turn and keeping careful track - // of possible unreachable sub-branches. - // - // If two branches have detected some unreachable sub-branches, we need to be careful. If - // they were detected in columns that are not the current one, we want to keep only the - // sub-branches that were unreachable in _all_ branches. Eg. in the following, the last - // `true` is unreachable in the second branch of the first or-pattern, but not otherwise. - // Therefore we don't want to lint that it is unreachable. - // - // ``` - // match (true, true) { - // (true, true) => {} - // (false | true, false | true) => {} - // } - // ``` - // If however the sub-branches come from the current column, they come from the inside of - // the current or-pattern, and we want to keep them all. Eg. in the following, we _do_ want - // to lint that the last `false` is unreachable. - // ``` - // match None { - // Some(false) => {} - // None | Some(true | false) => {} - // } - // ``` - - let mut matrix = matrix.clone(); - // We keep track of sub-branches separately depending on whether they come from this column - // or from others. - let mut unreachables_this_column: FxHashSet<Span> = FxHashSet::default(); - let mut unreachables_other_columns: Vec<FxHashSet<Span>> = Vec::default(); - // Whether at least one branch is reachable. - let mut any_is_useful = false; - - for v in vs { - let res = is_useful(cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false); - match res { - Useful(unreachables) => { - if let Some((this_column, other_columns)) = unreachables.split_last() { - // We keep the union of unreachables found in the first column. - unreachables_this_column.extend(this_column); - // We keep the intersection of unreachables found in other columns. - if unreachables_other_columns.is_empty() { - unreachables_other_columns = other_columns.to_vec(); - } else { - unreachables_other_columns = unreachables_other_columns - .into_iter() - .zip(other_columns) - .map(|(x, y)| x.intersection(&y).copied().collect()) - .collect(); - } - } - any_is_useful = true; - } - NotUseful => { - unreachables_this_column.insert(v.head().span); - } - UsefulWithWitness(_) => bug!( - "encountered or-pat in the expansion of `_` during exhaustiveness checking" - ), - } - - // If pattern has a guard don't add it to the matrix. - if !is_under_guard { - // We push the already-seen patterns into the matrix in order to detect redundant - // branches like `Some(_) | Some(0)`. - matrix.push(v); - } - } - - return if any_is_useful { - let mut unreachables = if unreachables_other_columns.is_empty() { - let n_columns = v.len(); - (0..n_columns - 1).map(|_| FxHashSet::default()).collect() - } else { - unreachables_other_columns - }; - unreachables.push(unreachables_this_column); - Useful(unreachables) - } else { - NotUseful - }; - } - - // FIXME(Nadrieril): Hack to work around type normalization issues (see #72476). - let ty = matrix.heads().next().map(|r| r.ty).unwrap_or(v.head().ty); - let pcx = PatCtxt { cx, matrix, ty, span: v.head().span, is_top_level }; - - debug!("is_useful_expand_first_col: ty={:#?}, expanding {:#?}", pcx.ty, v.head()); - - let ret = v - .head_ctor(cx) - .split(pcx, Some(hir_id)) - .into_iter() - .map(|ctor| { - // We cache the result of `Fields::wildcards` because it is used a lot. 
- let ctor_wild_subpatterns = Fields::wildcards(pcx, &ctor); - let matrix = pcx.matrix.specialize_constructor(pcx, &ctor, &ctor_wild_subpatterns); - let v = v.pop_head_constructor(&ctor_wild_subpatterns); - let usefulness = - is_useful(pcx.cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false); - usefulness.apply_constructor(pcx, &ctor, &ctor_wild_subpatterns) - }) - .find(|result| result.is_useful()) - .unwrap_or(NotUseful); - debug!("is_useful::returns({:#?}, {:#?}) = {:?}", matrix, v, ret); - ret -} - -/// Determines the constructor that the given pattern can be specialized to. -/// Returns `None` in case of a catch-all, which can't be specialized. -fn pat_constructor<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - pat: &'p Pat<'tcx>, -) -> Constructor<'tcx> { - match pat.kind.as_ref() { - PatKind::AscribeUserType { .. } => bug!(), // Handled by `expand_pattern` - PatKind::Binding { .. } | PatKind::Wild => Wildcard, - PatKind::Leaf { .. } | PatKind::Deref { .. } => Single, - &PatKind::Variant { adt_def, variant_index, .. } => { - Variant(adt_def.variants[variant_index].def_id) - } - PatKind::Constant { value } => { - if let Some(int_range) = IntRange::from_const(cx.tcx, cx.param_env, value, pat.span) { - IntRange(int_range) - } else { - match pat.ty.kind() { - ty::Float(_) => FloatRange(value, value, RangeEnd::Included), - // In `expand_pattern`, we convert string literals to `&CONST` patterns with - // `CONST` a pattern of type `str`. In truth this contains a constant of type - // `&str`. - ty::Str => Str(value), - // All constants that can be structurally matched have already been expanded - // into the corresponding `Pat`s by `const_to_pat`. Constants that remain are - // opaque. - _ => Opaque, - } - } - } - &PatKind::Range(PatRange { lo, hi, end }) => { - let ty = lo.ty; - if let Some(int_range) = IntRange::from_range( - cx.tcx, - lo.eval_bits(cx.tcx, cx.param_env, lo.ty), - hi.eval_bits(cx.tcx, cx.param_env, hi.ty), - ty, - &end, - pat.span, - ) { - IntRange(int_range) - } else { - FloatRange(lo, hi, end) - } - } - PatKind::Array { prefix, slice, suffix } | PatKind::Slice { prefix, slice, suffix } => { - let array_len = match pat.ty.kind() { - ty::Array(_, length) => Some(length.eval_usize(cx.tcx, cx.param_env)), - ty::Slice(_) => None, - _ => span_bug!(pat.span, "bad ty {:?} for slice pattern", pat.ty), - }; - let prefix = prefix.len() as u64; - let suffix = suffix.len() as u64; - let kind = - if slice.is_some() { VarLen(prefix, suffix) } else { FixedLen(prefix + suffix) }; - Slice(Slice::new(array_len, kind)) - } - PatKind::Or { .. } => bug!("Or-pattern should have been expanded earlier on."), - } -} - -/// The arm of a match expression. -#[derive(Clone, Copy)] -crate struct MatchArm<'p, 'tcx> { - /// The pattern must have been lowered through `MatchVisitor::lower_pattern`. - crate pat: &'p super::Pat<'tcx>, - crate hir_id: HirId, - crate has_guard: bool, -} - -/// The output of checking a match for exhaustiveness and arm reachability. -crate struct UsefulnessReport<'p, 'tcx> { - /// For each arm of the input, whether that arm is reachable after the arms above it. - crate arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Usefulness<'tcx>)>, - /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of - /// exhaustiveness. - crate non_exhaustiveness_witnesses: Vec<super::Pat<'tcx>>, -} - -/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which -/// of its arms are reachable. 
-/// -/// Note: the input patterns must have been lowered through `MatchVisitor::lower_pattern`. -crate fn compute_match_usefulness<'p, 'tcx>( - cx: &MatchCheckCtxt<'p, 'tcx>, - arms: &[MatchArm<'p, 'tcx>], - scrut_hir_id: HirId, - scrut_ty: Ty<'tcx>, -) -> UsefulnessReport<'p, 'tcx> { - let mut matrix = Matrix::empty(); - let arm_usefulness: Vec<_> = arms - .iter() - .copied() - .map(|arm| { - let v = PatStack::from_pattern(arm.pat); - let usefulness = - is_useful(cx, &matrix, &v, LeaveOutWitness, arm.hir_id, arm.has_guard, true); - if !arm.has_guard { - matrix.push(v); - } - (arm, usefulness) - }) - .collect(); - - let wild_pattern = cx.pattern_arena.alloc(super::Pat::wildcard_from_ty(scrut_ty)); - let v = PatStack::from_pattern(wild_pattern); - let usefulness = is_useful(cx, &matrix, &v, ConstructWitness, scrut_hir_id, false, true); - let non_exhaustiveness_witnesses = match usefulness { - NotUseful => vec![], // Wildcard pattern isn't useful, so the match is exhaustive. - UsefulWithWitness(pats) => { - if pats.is_empty() { - bug!("Exhaustiveness check returned no witnesses") - } else { - pats.into_iter().map(|w| w.single_pattern()).collect() - } - } - Useful(_) => bug!(), - }; - UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses } } diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index db0ecd701bc..7e9a3a37278 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -1,8 +1,9 @@ //! Validation of patterns/matches. -mod _match; mod check_match; mod const_to_pat; +mod deconstruct_pat; +mod usefulness; pub(crate) use self::check_match::check_match; diff --git a/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs b/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs new file mode 100644 index 00000000000..be96d5ae816 --- /dev/null +++ b/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs @@ -0,0 +1,992 @@ +//! Note: tests specific to this file can be found in: +//! +//! - `ui/pattern/usefulness` +//! - `ui/or-patterns` +//! - `ui/consts/const_in_pattern` +//! - `ui/rfc-2008-non-exhaustive` +//! - `ui/half-open-range-patterns` +//! - probably many others +//! +//! I (Nadrieril) prefer to put new tests in `ui/pattern/usefulness` unless there's a specific +//! reason not to, for example if they depend on a particular feature like `or_patterns`. +//! +//! ----- +//! +//! This file includes the logic for exhaustiveness and usefulness checking for +//! pattern-matching. Specifically, given a list of patterns for a type, we can +//! tell whether: +//! (a) the patterns cover every possible constructor for the type (exhaustiveness) +//! (b) each pattern is necessary (usefulness) +//! +//! The algorithm implemented here is a modified version of the one described in +//! [this paper](http://moscova.inria.fr/~maranget/papers/warn/index.html). +//! However, to save future implementors from reading the original paper, we +//! summarise the algorithm here to hopefully save time and be a little clearer +//! (without being so rigorous). +//! +//! # Premise +//! +//! The core of the algorithm revolves about a "usefulness" check. In particular, we +//! are trying to compute a predicate `U(P, p)` where `P` is a list of patterns (we refer to this as +//! a matrix). `U(P, p)` represents whether, given an existing list of patterns +//! `P_1 ..= P_m`, adding a new pattern `p` will be "useful" (that is, cover previously- +//! uncovered values of the type). +//! 
+//! If we have this predicate, then we can easily compute both exhaustiveness of an +//! entire set of patterns and the individual usefulness of each one. +//! (a) the set of patterns is exhaustive iff `U(P, _)` is false (i.e., adding a wildcard +//! match doesn't increase the number of values we're matching) +//! (b) a pattern `P_i` is not useful if `U(P[0..=(i-1), P_i)` is false (i.e., adding a +//! pattern to those that have come before it doesn't increase the number of values +//! we're matching). +//! +//! # Core concept +//! +//! The idea that powers everything that is done in this file is the following: a value is made +//! from a constructor applied to some fields. Examples of constructors are `Some`, `None`, `(,)` +//! (the 2-tuple constructor), `Foo {..}` (the constructor for a struct `Foo`), and `2` (the +//! constructor for the number `2`). Fields are just a (possibly empty) list of values. +//! +//! Some of the constructors listed above might feel weird: `None` and `2` don't take any +//! arguments. This is part of what makes constructors so general: we will consider plain values +//! like numbers and string literals to be constructors that take no arguments, also called "0-ary +//! constructors"; they are the simplest case of constructors. This allows us to see any value as +//! made up from a tree of constructors, each having a given number of children. For example: +//! `(None, Ok(0))` is made from 4 different constructors. +//! +//! This idea can be extended to patterns: a pattern captures a set of possible values, and we can +//! describe this set using constructors. For example, `Err(_)` captures all values of the type +//! `Result<T, E>` that start with the `Err` constructor (for some choice of `T` and `E`). The +//! wildcard `_` captures all values of the given type starting with any of the constructors for +//! that type. +//! +//! We use this to compute whether different patterns might capture a same value. Do the patterns +//! `Ok("foo")` and `Err(_)` capture a common value? The answer is no, because the first pattern +//! captures only values starting with the `Ok` constructor and the second only values starting +//! with the `Err` constructor. Do the patterns `Some(42)` and `Some(1..10)` intersect? They might, +//! since they both capture values starting with `Some`. To be certain, we need to dig under the +//! `Some` constructor and continue asking the question. This is the main idea behind the +//! exhaustiveness algorithm: by looking at patterns constructor-by-constructor, we can efficiently +//! figure out if some new pattern might capture a value that hadn't been captured by previous +//! patterns. +//! +//! Constructors are represented by the `Constructor` enum, and its fields by the `Fields` enum. +//! Most of the complexity of this file resides in transforming between patterns and +//! (`Constructor`, `Fields`) pairs, handling all the special cases correctly. +//! +//! Caveat: this constructors/fields distinction doesn't quite cover every Rust value. For example +//! a value of type `Rc<u64>` doesn't fit this idea very well, nor do various other things. +//! However, this idea covers most of the cases that are relevant to exhaustiveness checking. +//! +//! +//! # Algorithm +//! +//! Recall that `U(P, p)` represents whether, given an existing list of patterns (aka matrix) `P`, +//! adding a new pattern `p` will cover previously-uncovered values of the type. +//! 
During the course of the algorithm, the rows of the matrix won't just be individual patterns, +//! but rather partially-deconstructed patterns in the form of a list of fields. The paper +//! calls those pattern-vectors, and we will call them pattern-stacks. The same holds for the +//! new pattern `p`. +//! +//! For example, say we have the following: +//! +//! ``` +//! // x: (Option<bool>, Result<()>) +//! match x { +//! (Some(true), _) => {} +//! (None, Err(())) => {} +//! (None, Err(_)) => {} +//! } +//! ``` +//! +//! Here, the matrix `P` starts as: +//! +//! ``` +//! [ +//! [(Some(true), _)], +//! [(None, Err(()))], +//! [(None, Err(_))], +//! ] +//! ``` +//! +//! We can tell it's not exhaustive, because `U(P, _)` is true (we're not covering +//! `[(Some(false), _)]`, for instance). In addition, row 3 is not useful, because +//! all the values it covers are already covered by row 2. +//! +//! A list of patterns can be thought of as a stack, because we are mainly interested in the top of +//! the stack at any given point, and we can pop or apply constructors to get new pattern-stacks. +//! To match the paper, the top of the stack is at the beginning / on the left. +//! +//! There are two important operations on pattern-stacks necessary to understand the algorithm: +//! +//! 1. We can pop a given constructor off the top of a stack. This operation is called +//! `specialize`, and is denoted `S(c, p)` where `c` is a constructor (like `Some` or +//! `None`) and `p` a pattern-stack. +//! If the pattern on top of the stack can cover `c`, this removes the constructor and +//! pushes its arguments onto the stack. It also expands OR-patterns into distinct patterns. +//! Otherwise the pattern-stack is discarded. +//! This essentially filters those pattern-stacks whose top covers the constructor `c` and +//! discards the others. +//! +//! For example, the first pattern above initially gives a stack `[(Some(true), _)]`. If we +//! pop the tuple constructor, we are left with `[Some(true), _]`, and if we then pop the +//! `Some` constructor we get `[true, _]`. If we had popped `None` instead, we would get +//! nothing back. +//! +//! This returns zero or more new pattern-stacks, as follows. We look at the pattern `p_1` +//! on top of the stack, and we have four cases: +//! +//! 1.1. `p_1 = c(r_1, .., r_a)`, i.e. the top of the stack has constructor `c`. We +//! push onto the stack the arguments of this constructor, and return the result: +//! `r_1, .., r_a, p_2, .., p_n` +//! +//! 1.2. `p_1 = c'(r_1, .., r_a')` where `c ≠c'`. We discard the current stack and +//! return nothing. +//! +//! 1.3. `p_1 = _`. We push onto the stack as many wildcards as the constructor `c` has +//! arguments (its arity), and return the resulting stack: +//! `_, .., _, p_2, .., p_n` +//! +//! 1.4. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting +//! stack: +//! - `S(c, (r_1, p_2, .., p_n))` +//! - `S(c, (r_2, p_2, .., p_n))` +//! +//! 2. We can pop a wildcard off the top of the stack. This is called `S(_, p)`, where `p` is +//! a pattern-stack. Note: the paper calls this `D(p)`. +//! This is used when we know there are missing constructor cases, but there might be +//! existing wildcard patterns, so to check the usefulness of the matrix, we have to check +//! all its *other* components. +//! +//! It is computed as follows. We look at the pattern `p_1` on top of the stack, +//! and we have three cases: +//! 2.1. `p_1 = c(r_1, .., r_a)`. We discard the current stack and return nothing. +//! 2.2. 
`p_1 = _`. We return the rest of the stack: +//! p_2, .., p_n +//! 2.3. `p_1 = r_1 | r_2`. We expand the OR-pattern and then recurse on each resulting +//! stack. +//! - `S(_, (r_1, p_2, .., p_n))` +//! - `S(_, (r_2, p_2, .., p_n))` +//! +//! Note that the OR-patterns are not always used directly in Rust, but are used to derive the +//! exhaustive integer matching rules, so they're written here for posterity. +//! +//! Both those operations extend straightforwardly to a list or pattern-stacks, i.e. a matrix, by +//! working row-by-row. Popping a constructor ends up keeping only the matrix rows that start with +//! the given constructor, and popping a wildcard keeps those rows that start with a wildcard. +//! +//! +//! The algorithm for computing `U` +//! ------------------------------- +//! The algorithm is inductive (on the number of columns: i.e., components of tuple patterns). +//! That means we're going to check the components from left-to-right, so the algorithm +//! operates principally on the first component of the matrix and new pattern-stack `p`. +//! This algorithm is realised in the `is_useful` function. +//! +//! Base case. (`n = 0`, i.e., an empty tuple pattern) +//! - If `P` already contains an empty pattern (i.e., if the number of patterns `m > 0`), +//! then `U(P, p)` is false. +//! - Otherwise, `P` must be empty, so `U(P, p)` is true. +//! +//! Inductive step. (`n > 0`, i.e., whether there's at least one column +//! [which may then be expanded into further columns later]) +//! We're going to match on the top of the new pattern-stack, `p_1`. +//! - If `p_1 == c(r_1, .., r_a)`, i.e. we have a constructor pattern. +//! Then, the usefulness of `p_1` can be reduced to whether it is useful when +//! we ignore all the patterns in the first column of `P` that involve other constructors. +//! This is where `S(c, P)` comes in: +//! `U(P, p) := U(S(c, P), S(c, p))` +//! +//! For example, if `P` is: +//! +//! ``` +//! [ +//! [Some(true), _], +//! [None, 0], +//! ] +//! ``` +//! +//! and `p` is `[Some(false), 0]`, then we don't care about row 2 since we know `p` only +//! matches values that row 2 doesn't. For row 1 however, we need to dig into the +//! arguments of `Some` to know whether some new value is covered. So we compute +//! `U([[true, _]], [false, 0])`. +//! +//! - If `p_1 == _`, then we look at the list of constructors that appear in the first +//! component of the rows of `P`: +//! + If there are some constructors that aren't present, then we might think that the +//! wildcard `_` is useful, since it covers those constructors that weren't covered +//! before. +//! That's almost correct, but only works if there were no wildcards in those first +//! components. So we need to check that `p` is useful with respect to the rows that +//! start with a wildcard, if there are any. This is where `S(_, x)` comes in: +//! `U(P, p) := U(S(_, P), S(_, p))` +//! +//! For example, if `P` is: +//! +//! ``` +//! [ +//! [_, true, _], +//! [None, false, 1], +//! ] +//! ``` +//! +//! and `p` is `[_, false, _]`, the `Some` constructor doesn't appear in `P`. So if we +//! only had row 2, we'd know that `p` is useful. However row 1 starts with a +//! wildcard, so we need to check whether `U([[true, _]], [false, 1])`. +//! +//! + Otherwise, all possible constructors (for the relevant type) are present. In this +//! case we must check whether the wildcard pattern covers any unmatched value. For +//! that, we can think of the `_` pattern as a big OR-pattern that covers all +//! possible constructors. 
For `Option`, that would mean `_ = None | Some(_)` for +//! example. The wildcard pattern is useful in this case if it is useful when +//! specialized to one of the possible constructors. So we compute: +//! `U(P, p) := ∃(k ϵ constructors) U(S(k, P), S(k, p))` +//! +//! For example, if `P` is: +//! +//! ``` +//! [ +//! [Some(true), _], +//! [None, false], +//! ] +//! ``` +//! +//! and `p` is `[_, false]`, both `None` and `Some` constructors appear in the first +//! components of `P`. We will therefore try popping both constructors in turn: we +//! compute `U([[true, _]], [_, false])` for the `Some` constructor, and `U([[false]], +//! [false])` for the `None` constructor. The first case returns true, so we know that +//! `p` is useful for `P`. Indeed, it matches `[Some(false), _]` that wasn't matched +//! before. +//! +//! - If `p_1 == r_1 | r_2`, then the usefulness depends on each `r_i` separately: +//! `U(P, p) := U(P, (r_1, p_2, .., p_n)) +//! || U(P, (r_2, p_2, .., p_n))` +//! +//! Modifications to the algorithm +//! ------------------------------ +//! The algorithm in the paper doesn't cover some of the special cases that arise in Rust, for +//! example uninhabited types and variable-length slice patterns. These are drawn attention to +//! throughout the code below. I'll make a quick note here about how exhaustive integer matching is +//! accounted for, though. +//! +//! Exhaustive integer matching +//! --------------------------- +//! An integer type can be thought of as a (huge) sum type: 1 | 2 | 3 | ... +//! So to support exhaustive integer matching, we can make use of the logic in the paper for +//! OR-patterns. However, we obviously can't just treat ranges x..=y as individual sums, because +//! they are likely gigantic. So we instead treat ranges as constructors of the integers. This means +//! that we have a constructor *of* constructors (the integers themselves). We then need to work +//! through all the inductive step rules above, deriving how the ranges would be treated as +//! OR-patterns, and making sure that they're treated in the same way even when they're ranges. +//! There are really only four special cases here: +//! - When we match on a constructor that's actually a range, we have to treat it as if we would +//! an OR-pattern. +//! + It turns out that we can simply extend the case for single-value patterns in +//! `specialize` to either be *equal* to a value constructor, or *contained within* a range +//! constructor. +//! + When the pattern itself is a range, you just want to tell whether any of the values in +//! the pattern range coincide with values in the constructor range, which is precisely +//! intersection. +//! Since when encountering a range pattern for a value constructor, we also use inclusion, it +//! means that whenever the constructor is a value/range and the pattern is also a value/range, +//! we can simply use intersection to test usefulness. +//! - When we're testing for usefulness of a pattern and the pattern's first component is a +//! wildcard. +//! + If all the constructors appear in the matrix, we have a slight complication. By default, +//! the behaviour (i.e., a disjunction over specialised matrices for each constructor) is +//! invalid, because we want a disjunction over every *integer* in each range, not just a +//! disjunction over every range. This is a bit more tricky to deal with: essentially we need +//! to form equivalence classes of subranges of the constructor range for which the behaviour +//! 
of the matrix `P` and new pattern `p` are the same. This is described in more +//! detail in `Constructor::split`. +//! + If some constructors are missing from the matrix, it turns out we don't need to do +//! anything special (because we know none of the integers are actually wildcards: i.e., we +//! can't span wildcards using ranges). + +use self::Usefulness::*; +use self::WitnessPreference::*; + +use super::deconstruct_pat::{Constructor, Fields, MissingConstructors}; +use super::{Pat, PatKind}; +use super::{PatternFoldable, PatternFolder}; + +use rustc_data_structures::captures::Captures; +use rustc_data_structures::fx::FxHashSet; +use rustc_data_structures::sync::OnceCell; + +use rustc_arena::TypedArena; +use rustc_hir::def_id::DefId; +use rustc_hir::HirId; +use rustc_middle::ty::{self, Ty, TyCtxt}; +use rustc_span::Span; + +use smallvec::{smallvec, SmallVec}; +use std::fmt; +use std::iter::{FromIterator, IntoIterator}; + +crate struct MatchCheckCtxt<'a, 'tcx> { + crate tcx: TyCtxt<'tcx>, + /// The module in which the match occurs. This is necessary for + /// checking inhabited-ness of types because whether a type is (visibly) + /// inhabited can depend on whether it was defined in the current module or + /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty + /// outside its module and should not be matchable with an empty match statement. + crate module: DefId, + crate param_env: ty::ParamEnv<'tcx>, + crate pattern_arena: &'a TypedArena<Pat<'tcx>>, +} + +impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> { + pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool { + if self.tcx.features().exhaustive_patterns { + self.tcx.is_ty_uninhabited_from(self.module, ty, self.param_env) + } else { + false + } + } + + /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`. + pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool { + match ty.kind() { + ty::Adt(def, ..) => { + def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did.is_local() + } + _ => false, + } + } +} + +#[derive(Copy, Clone)] +pub(super) struct PatCtxt<'a, 'p, 'tcx> { + pub(super) cx: &'a MatchCheckCtxt<'p, 'tcx>, + /// Current state of the matrix. + pub(super) matrix: &'a Matrix<'p, 'tcx>, + /// Type of the current column under investigation. + pub(super) ty: Ty<'tcx>, + /// Span of the current pattern under investigation. + pub(super) span: Span, + /// Whether the current pattern is the whole pattern as found in a match arm, or if it's a + /// subpattern. + pub(super) is_top_level: bool, +} + +crate fn expand_pattern<'tcx>(pat: Pat<'tcx>) -> Pat<'tcx> { + LiteralExpander.fold_pattern(&pat) +} + +struct LiteralExpander; + +impl<'tcx> PatternFolder<'tcx> for LiteralExpander { + fn fold_pattern(&mut self, pat: &Pat<'tcx>) -> Pat<'tcx> { + debug!("fold_pattern {:?} {:?} {:?}", pat, pat.ty.kind(), pat.kind); + match (pat.ty.kind(), pat.kind.as_ref()) { + (_, PatKind::Binding { subpattern: Some(s), .. }) => s.fold_with(self), + (_, PatKind::AscribeUserType { subpattern: s, .. }) => s.fold_with(self), + (ty::Ref(_, t, _), PatKind::Constant { .. }) if t.is_str() => { + // Treat string literal patterns as deref patterns to a `str` constant, i.e. + // `&CONST`. This expands them like other const patterns. This could have been done + // in `const_to_pat`, but that causes issues with the rest of the matching code. + let mut new_pat = pat.super_fold_with(self); + // Make a fake const pattern of type `str` (instead of `&str`). 
Note that the carried
+                // constant value still knows it is of type `&str`.
+                new_pat.ty = t;
+                Pat {
+                    kind: Box::new(PatKind::Deref { subpattern: new_pat }),
+                    span: pat.span,
+                    ty: pat.ty,
+                }
+            }
+            _ => pat.super_fold_with(self),
+        }
+    }
+}
+
+impl<'tcx> Pat<'tcx> {
+    pub(super) fn is_wildcard(&self) -> bool {
+        matches!(*self.kind, PatKind::Binding { subpattern: None, .. } | PatKind::Wild)
+    }
+}
+
+/// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec<[_; 2]>`
+/// works well.
+#[derive(Debug, Clone)]
+struct PatStack<'p, 'tcx> {
+    pats: SmallVec<[&'p Pat<'tcx>; 2]>,
+    /// Cache for the constructor of the head
+    head_ctor: OnceCell<Constructor<'tcx>>,
+}
+
+impl<'p, 'tcx> PatStack<'p, 'tcx> {
+    fn from_pattern(pat: &'p Pat<'tcx>) -> Self {
+        Self::from_vec(smallvec![pat])
+    }
+
+    fn from_vec(vec: SmallVec<[&'p Pat<'tcx>; 2]>) -> Self {
+        PatStack { pats: vec, head_ctor: OnceCell::new() }
+    }
+
+    fn is_empty(&self) -> bool {
+        self.pats.is_empty()
+    }
+
+    fn len(&self) -> usize {
+        self.pats.len()
+    }
+
+    fn head(&self) -> &'p Pat<'tcx> {
+        self.pats[0]
+    }
+
+    fn head_ctor<'a>(&'a self, cx: &MatchCheckCtxt<'p, 'tcx>) -> &'a Constructor<'tcx> {
+        self.head_ctor.get_or_init(|| Constructor::from_pat(cx, self.head()))
+    }
+
+    fn iter(&self) -> impl Iterator<Item = &Pat<'tcx>> {
+        self.pats.iter().copied()
+    }
+
+    // If the first pattern is an or-pattern, expand this pattern. Otherwise, return `None`.
+    fn expand_or_pat(&self) -> Option<Vec<Self>> {
+        if self.is_empty() {
+            None
+        } else if let PatKind::Or { pats } = &*self.head().kind {
+            Some(
+                pats.iter()
+                    .map(|pat| {
+                        let mut new_patstack = PatStack::from_pattern(pat);
+                        new_patstack.pats.extend_from_slice(&self.pats[1..]);
+                        new_patstack
+                    })
+                    .collect(),
+            )
+        } else {
+            None
+        }
+    }
+
+    /// This computes `S(self.head_ctor(), self)`. See top of the file for explanations.
+    ///
+    /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
+    /// fields filled with wild patterns.
+    ///
+    /// This is roughly the inverse of `Constructor::apply`.
+    fn pop_head_constructor(&self, ctor_wild_subpatterns: &Fields<'p, 'tcx>) -> PatStack<'p, 'tcx> {
+        // We pop the head pattern and push the new fields extracted from the arguments of
+        // `self.head()`.
+        let mut new_fields =
+            ctor_wild_subpatterns.replace_with_pattern_arguments(self.head()).filtered_patterns();
+        new_fields.extend_from_slice(&self.pats[1..]);
+        PatStack::from_vec(new_fields)
+    }
+}
+
+impl<'p, 'tcx> Default for PatStack<'p, 'tcx> {
+    fn default() -> Self {
+        Self::from_vec(smallvec![])
+    }
+}
+
+impl<'p, 'tcx> PartialEq for PatStack<'p, 'tcx> {
+    fn eq(&self, other: &Self) -> bool {
+        self.pats == other.pats
+    }
+}
+
+impl<'p, 'tcx> FromIterator<&'p Pat<'tcx>> for PatStack<'p, 'tcx> {
+    fn from_iter<T>(iter: T) -> Self
+    where
+        T: IntoIterator<Item = &'p Pat<'tcx>>,
+    {
+        Self::from_vec(iter.into_iter().collect())
+    }
+}
+
+/// A 2D matrix.
+#[derive(Clone, PartialEq)]
+pub(super) struct Matrix<'p, 'tcx> {
+    patterns: Vec<PatStack<'p, 'tcx>>,
+}
+
+impl<'p, 'tcx> Matrix<'p, 'tcx> {
+    fn empty() -> Self {
+        Matrix { patterns: vec![] }
+    }
+
+    /// Number of columns of this matrix. `None` if the matrix is empty.
+    pub(super) fn column_count(&self) -> Option<usize> {
+        self.patterns.get(0).map(|r| r.len())
+    }
+
+    /// Pushes a new row to the matrix. If the row starts with an or-pattern, this expands it.
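For intuition about the or-pattern handling documented above (`expand_or_pat` and the expansion performed by `push`), here is a minimal standalone sketch with toy types; `ToyPat` and `push_row` are illustrative stand-ins, not the compiler's `PatStack`/`Matrix`:

```rust
// Minimal sketch: rows are vectors of patterns; pushing a row whose head is an
// or-pattern expands it into one row per alternative, recursively, so that
// `0 | (1 | 2)` ends up as three separate rows. Toy types only.
#[derive(Clone, Debug)]
enum ToyPat {
    Wild,
    Lit(i32),
    Or(Vec<ToyPat>),
}

fn push_row(matrix: &mut Vec<Vec<ToyPat>>, row: Vec<ToyPat>) {
    match row.first() {
        Some(ToyPat::Or(alts)) => {
            for alt in alts.clone() {
                let mut new_row = vec![alt];
                new_row.extend_from_slice(&row[1..]);
                // Recurse: an alternative may itself be an or-pattern.
                push_row(matrix, new_row);
            }
        }
        _ => matrix.push(row),
    }
}

fn main() {
    let mut matrix = Vec::new();
    // The row `[0 | (1 | 2), _]` ...
    push_row(
        &mut matrix,
        vec![
            ToyPat::Or(vec![
                ToyPat::Lit(0),
                ToyPat::Or(vec![ToyPat::Lit(1), ToyPat::Lit(2)]),
            ]),
            ToyPat::Wild,
        ],
    );
    // ...expands to three rows: `[0, _]`, `[1, _]`, `[2, _]`.
    assert_eq!(matrix.len(), 3);
    println!("{:?}", matrix);
}
```

Recursing inside `push_row` is what keeps nested alternatives like `0 | (1 | 2)` flat in the resulting matrix.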
+ fn push(&mut self, row: PatStack<'p, 'tcx>) { + if let Some(rows) = row.expand_or_pat() { + for row in rows { + // We recursively expand the or-patterns of the new rows. + // This is necessary as we might have `0 | (1 | 2)` or e.g., `x @ 0 | x @ (1 | 2)`. + self.push(row) + } + } else { + self.patterns.push(row); + } + } + + /// Iterate over the first component of each row + fn heads<'a>(&'a self) -> impl Iterator<Item = &'a Pat<'tcx>> + Captures<'p> { + self.patterns.iter().map(|r| r.head()) + } + + /// Iterate over the first constructor of each row + pub(super) fn head_ctors<'a>( + &'a self, + cx: &'a MatchCheckCtxt<'p, 'tcx>, + ) -> impl Iterator<Item = &'a Constructor<'tcx>> + Captures<'a> + Captures<'p> { + self.patterns.iter().map(move |r| r.head_ctor(cx)) + } + + /// This computes `S(constructor, self)`. See top of the file for explanations. + fn specialize_constructor( + &self, + pcx: PatCtxt<'_, 'p, 'tcx>, + ctor: &Constructor<'tcx>, + ctor_wild_subpatterns: &Fields<'p, 'tcx>, + ) -> Matrix<'p, 'tcx> { + self.patterns + .iter() + .filter(|r| ctor.is_covered_by(pcx, r.head_ctor(pcx.cx))) + .map(|r| r.pop_head_constructor(ctor_wild_subpatterns)) + .collect() + } +} + +/// Pretty-printer for matrices of patterns, example: +/// +/// ```text +/// +++++++++++++++++++++++++++++ +/// + _ + [] + +/// +++++++++++++++++++++++++++++ +/// + true + [First] + +/// +++++++++++++++++++++++++++++ +/// + true + [Second(true)] + +/// +++++++++++++++++++++++++++++ +/// + false + [_] + +/// +++++++++++++++++++++++++++++ +/// + _ + [_, _, tail @ ..] + +/// +++++++++++++++++++++++++++++ +/// ``` +impl<'p, 'tcx> fmt::Debug for Matrix<'p, 'tcx> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "\n")?; + + let Matrix { patterns: m, .. } = self; + let pretty_printed_matrix: Vec<Vec<String>> = + m.iter().map(|row| row.iter().map(|pat| format!("{:?}", pat)).collect()).collect(); + + let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0); + assert!(m.iter().all(|row| row.len() == column_count)); + let column_widths: Vec<usize> = (0..column_count) + .map(|col| pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0)) + .collect(); + + let total_width = column_widths.iter().cloned().sum::<usize>() + column_count * 3 + 1; + let br = "+".repeat(total_width); + write!(f, "{}\n", br)?; + for row in pretty_printed_matrix { + write!(f, "+")?; + for (column, pat_str) in row.into_iter().enumerate() { + write!(f, " ")?; + write!(f, "{:1$}", pat_str, column_widths[column])?; + write!(f, " +")?; + } + write!(f, "\n")?; + write!(f, "{}\n", br)?; + } + Ok(()) + } +} + +impl<'p, 'tcx> FromIterator<PatStack<'p, 'tcx>> for Matrix<'p, 'tcx> { + fn from_iter<T>(iter: T) -> Self + where + T: IntoIterator<Item = PatStack<'p, 'tcx>>, + { + let mut matrix = Matrix::empty(); + for x in iter { + // Using `push` ensures we correctly expand or-patterns. + matrix.push(x); + } + matrix + } +} + +#[derive(Clone, Debug)] +crate enum Usefulness<'tcx> { + /// Carries, for each column in the matrix, a set of sub-branches that have been found to be + /// unreachable. Used only in the presence of or-patterns, otherwise it stays empty. + Useful(Vec<FxHashSet<Span>>), + /// Carries a list of witnesses of non-exhaustiveness. 
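For a concrete picture of what the `Useful` variant's span sets record, consider ordinary Rust such as the following (illustrative example only, not part of this change): the arm as a whole is reachable, but the `Some(0)` sub-branch can never match, and its span is exactly what gets collected.

```rust
// An or-pattern whose second alternative is already covered by the first.
// The arm is reachable, so the whole match is fine, but the `Some(0)`
// sub-pattern is unreachable; tracking that span is what the per-column
// sets in `Useful(..)` are for.
fn describe(x: Option<i32>) -> &'static str {
    match x {
        Some(_) | Some(0) => "some", // `Some(0)` is an unreachable sub-pattern
        None => "none",
    }
}

fn main() {
    assert_eq!(describe(Some(0)), "some");
    assert_eq!(describe(None), "none");
}
```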
+ UsefulWithWitness(Vec<Witness<'tcx>>), + NotUseful, +} + +impl<'tcx> Usefulness<'tcx> { + fn new_useful(preference: WitnessPreference) -> Self { + match preference { + ConstructWitness => UsefulWithWitness(vec![Witness(vec![])]), + LeaveOutWitness => Useful(vec![]), + } + } + + fn is_useful(&self) -> bool { + !matches!(*self, NotUseful) + } + + fn apply_constructor<'p>( + self, + pcx: PatCtxt<'_, 'p, 'tcx>, + ctor: &Constructor<'tcx>, + ctor_wild_subpatterns: &Fields<'p, 'tcx>, + ) -> Self { + match self { + UsefulWithWitness(witnesses) => { + let new_witnesses = if ctor.is_wildcard() { + let missing_ctors = MissingConstructors::new(pcx); + let new_patterns = missing_ctors.report_patterns(pcx); + witnesses + .into_iter() + .flat_map(|witness| { + new_patterns.iter().map(move |pat| { + let mut witness = witness.clone(); + witness.0.push(pat.clone()); + witness + }) + }) + .collect() + } else { + witnesses + .into_iter() + .map(|witness| witness.apply_constructor(pcx, &ctor, ctor_wild_subpatterns)) + .collect() + }; + UsefulWithWitness(new_witnesses) + } + Useful(mut unreachables) => { + if !unreachables.is_empty() { + // When we apply a constructor, there are `arity` columns of the matrix that + // corresponded to its arguments. All the unreachables found in these columns + // will, after `apply`, come from the first column. So we take the union of all + // the corresponding sets and put them in the first column. + // Note that `arity` may be 0, in which case we just push a new empty set. + let len = unreachables.len(); + let arity = ctor_wild_subpatterns.len(); + let mut unioned = FxHashSet::default(); + for set in unreachables.drain((len - arity)..) { + unioned.extend(set) + } + unreachables.push(unioned); + } + Useful(unreachables) + } + x => x, + } + } +} + +#[derive(Copy, Clone, Debug)] +enum WitnessPreference { + ConstructWitness, + LeaveOutWitness, +} + +/// A witness of non-exhaustiveness for error reporting, represented +/// as a list of patterns (in reverse order of construction) with +/// wildcards inside to represent elements that can take any inhabitant +/// of the type as a value. +/// +/// A witness against a list of patterns should have the same types +/// and length as the pattern matched against. Because Rust `match` +/// is always against a single pattern, at the end the witness will +/// have length 1, but in the middle of the algorithm, it can contain +/// multiple patterns. +/// +/// For example, if we are constructing a witness for the match against +/// +/// ``` +/// struct Pair(Option<(u32, u32)>, bool); +/// +/// match (p: Pair) { +/// Pair(None, _) => {} +/// Pair(_, false) => {} +/// } +/// ``` +/// +/// We'll perform the following steps: +/// 1. Start with an empty witness +/// `Witness(vec![])` +/// 2. Push a witness `Some(_)` against the `None` +/// `Witness(vec![Some(_)])` +/// 3. Push a witness `true` against the `false` +/// `Witness(vec![Some(_), true])` +/// 4. Apply the `Pair` constructor to the witnesses +/// `Witness(vec![Pair(Some(_), true)])` +/// +/// The final `Pair(Some(_), true)` is then the resulting witness. +#[derive(Clone, Debug)] +crate struct Witness<'tcx>(Vec<Pat<'tcx>>); + +impl<'tcx> Witness<'tcx> { + /// Asserts that the witness contains a single pattern, and returns it. + fn single_pattern(self) -> Pat<'tcx> { + assert_eq!(self.0.len(), 1); + self.0.into_iter().next().unwrap() + } + + /// Constructs a partial witness for a pattern given a list of + /// patterns expanded by the specialization step. 
+ /// + /// When a pattern P is discovered to be useful, this function is used bottom-up + /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset + /// of values, V, where each value in that set is not covered by any previously + /// used patterns and is covered by the pattern P'. Examples: + /// + /// left_ty: tuple of 3 elements + /// pats: [10, 20, _] => (10, 20, _) + /// + /// left_ty: struct X { a: (bool, &'static str), b: usize} + /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 } + fn apply_constructor<'p>( + mut self, + pcx: PatCtxt<'_, 'p, 'tcx>, + ctor: &Constructor<'tcx>, + ctor_wild_subpatterns: &Fields<'p, 'tcx>, + ) -> Self { + let pat = { + let len = self.0.len(); + let arity = ctor_wild_subpatterns.len(); + let pats = self.0.drain((len - arity)..).rev(); + ctor_wild_subpatterns.replace_fields(pcx.cx, pats).apply(pcx, ctor) + }; + + self.0.push(pat); + + self + } +} + +/// Algorithm from <http://moscova.inria.fr/~maranget/papers/warn/index.html>. +/// The algorithm from the paper has been modified to correctly handle empty +/// types. The changes are: +/// (0) We don't exit early if the pattern matrix has zero rows. We just +/// continue to recurse over columns. +/// (1) all_constructors will only return constructors that are statically +/// possible. E.g., it will only return `Ok` for `Result<T, !>`. +/// +/// This finds whether a (row) vector `v` of patterns is 'useful' in relation +/// to a set of such vectors `m` - this is defined as there being a set of +/// inputs that will match `v` but not any of the sets in `m`. +/// +/// All the patterns at each column of the `matrix ++ v` matrix must have the same type. +/// +/// This is used both for reachability checking (if a pattern isn't useful in +/// relation to preceding patterns, it is not reachable) and exhaustiveness +/// checking (if a wildcard pattern is useful in relation to a matrix, the +/// matrix isn't exhaustive). +/// +/// `is_under_guard` is used to inform if the pattern has a guard. If it +/// has one it must not be inserted into the matrix. This shouldn't be +/// relied on for soundness. +fn is_useful<'p, 'tcx>( + cx: &MatchCheckCtxt<'p, 'tcx>, + matrix: &Matrix<'p, 'tcx>, + v: &PatStack<'p, 'tcx>, + witness_preference: WitnessPreference, + hir_id: HirId, + is_under_guard: bool, + is_top_level: bool, +) -> Usefulness<'tcx> { + let Matrix { patterns: rows, .. } = matrix; + debug!("is_useful({:#?}, {:#?})", matrix, v); + + // The base case. We are pattern-matching on () and the return value is + // based on whether our matrix has a row or not. + // NOTE: This could potentially be optimized by checking rows.is_empty() + // first and then, if v is non-empty, the return value is based on whether + // the type of the tuple we're checking is inhabited or not. + if v.is_empty() { + return if rows.is_empty() { + Usefulness::new_useful(witness_preference) + } else { + NotUseful + }; + }; + + assert!(rows.iter().all(|r| r.len() == v.len())); + + // If the first pattern is an or-pattern, expand it. + if let Some(vs) = v.expand_or_pat() { + // We expand the or pattern, trying each of its branches in turn and keeping careful track + // of possible unreachable sub-branches. + // + // If two branches have detected some unreachable sub-branches, we need to be careful. If + // they were detected in columns that are not the current one, we want to keep only the + // sub-branches that were unreachable in _all_ branches. Eg. 
in the following, the last + // `true` is unreachable in the second branch of the first or-pattern, but not otherwise. + // Therefore we don't want to lint that it is unreachable. + // + // ``` + // match (true, true) { + // (true, true) => {} + // (false | true, false | true) => {} + // } + // ``` + // If however the sub-branches come from the current column, they come from the inside of + // the current or-pattern, and we want to keep them all. Eg. in the following, we _do_ want + // to lint that the last `false` is unreachable. + // ``` + // match None { + // Some(false) => {} + // None | Some(true | false) => {} + // } + // ``` + + let mut matrix = matrix.clone(); + // We keep track of sub-branches separately depending on whether they come from this column + // or from others. + let mut unreachables_this_column: FxHashSet<Span> = FxHashSet::default(); + let mut unreachables_other_columns: Vec<FxHashSet<Span>> = Vec::default(); + // Whether at least one branch is reachable. + let mut any_is_useful = false; + + for v in vs { + let res = is_useful(cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false); + match res { + Useful(unreachables) => { + if let Some((this_column, other_columns)) = unreachables.split_last() { + // We keep the union of unreachables found in the first column. + unreachables_this_column.extend(this_column); + // We keep the intersection of unreachables found in other columns. + if unreachables_other_columns.is_empty() { + unreachables_other_columns = other_columns.to_vec(); + } else { + unreachables_other_columns = unreachables_other_columns + .into_iter() + .zip(other_columns) + .map(|(x, y)| x.intersection(&y).copied().collect()) + .collect(); + } + } + any_is_useful = true; + } + NotUseful => { + unreachables_this_column.insert(v.head().span); + } + UsefulWithWitness(_) => bug!( + "encountered or-pat in the expansion of `_` during exhaustiveness checking" + ), + } + + // If pattern has a guard don't add it to the matrix. + if !is_under_guard { + // We push the already-seen patterns into the matrix in order to detect redundant + // branches like `Some(_) | Some(0)`. + matrix.push(v); + } + } + + return if any_is_useful { + let mut unreachables = if unreachables_other_columns.is_empty() { + let n_columns = v.len(); + (0..n_columns - 1).map(|_| FxHashSet::default()).collect() + } else { + unreachables_other_columns + }; + unreachables.push(unreachables_this_column); + Useful(unreachables) + } else { + NotUseful + }; + } + + // FIXME(Nadrieril): Hack to work around type normalization issues (see #72476). + let ty = matrix.heads().next().map(|r| r.ty).unwrap_or(v.head().ty); + let pcx = PatCtxt { cx, matrix, ty, span: v.head().span, is_top_level }; + + debug!("is_useful_expand_first_col: ty={:#?}, expanding {:#?}", pcx.ty, v.head()); + + let ret = v + .head_ctor(cx) + .split(pcx, Some(hir_id)) + .into_iter() + .map(|ctor| { + // We cache the result of `Fields::wildcards` because it is used a lot. + let ctor_wild_subpatterns = Fields::wildcards(pcx, &ctor); + let matrix = pcx.matrix.specialize_constructor(pcx, &ctor, &ctor_wild_subpatterns); + let v = v.pop_head_constructor(&ctor_wild_subpatterns); + let usefulness = + is_useful(pcx.cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false); + usefulness.apply_constructor(pcx, &ctor, &ctor_wild_subpatterns) + }) + .find(|result| result.is_useful()) + .unwrap_or(NotUseful); + debug!("is_useful::returns({:#?}, {:#?}) = {:?}", matrix, v, ret); + ret +} + +/// The arm of a match expression. 
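To make the recursion in `is_useful` above concrete, the following is a deliberately tiny model of `U(P, p)` and `S(c, P)` specialised to tuples of booleans. It mirrors the specialise-by-constructor-and-recurse structure, but every type and name here is a toy, not the compiler's:

```rust
// Patterns over booleans: `T`, `F`, or `_`. Rows are slices of patterns.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Pat {
    T,
    F,
    Wild,
}

// S(c, row): keep the row if its head covers constructor `c`, dropping the head.
fn specialize(c: Pat, row: &[Pat]) -> Option<Vec<Pat>> {
    match row[0] {
        Pat::Wild => Some(row[1..].to_vec()),
        p if p == c => Some(row[1..].to_vec()),
        _ => None,
    }
}

// U(P, p): is there a value matched by `v` but by no row of `matrix`?
fn is_useful(matrix: &[Vec<Pat>], v: &[Pat]) -> bool {
    if v.is_empty() {
        // Base case: the empty row is useful iff the matrix has no rows.
        return matrix.is_empty();
    }
    // Try each constructor of `bool`; for a `_` head this is exactly the
    // "wildcard specialised to every constructor" rule from the comments above.
    [Pat::T, Pat::F].iter().any(|&c| {
        if v[0] != Pat::Wild && v[0] != c {
            return false;
        }
        let sub_matrix: Vec<Vec<Pat>> =
            matrix.iter().filter_map(|row| specialize(c, row)).collect();
        let sub_v = specialize(c, v).unwrap();
        is_useful(&sub_matrix, &sub_v)
    })
}

fn main() {
    use Pat::*;
    // match (bool, bool) { (true, _) => .., (_, false) => .. }
    let matrix = vec![vec![T, Wild], vec![Wild, F]];
    // A wildcard row is still useful: `(false, true)` is not covered,
    // so the match is non-exhaustive.
    assert!(is_useful(&matrix, &[Wild, Wild]));
    // `(true, false)` is covered by the first row, so this row is not useful.
    assert!(!is_useful(&matrix, &[T, F]));
}
```

The first assertion also shows the exhaustiveness use of the algorithm: a useful wildcard row means the matrix misses a value, here `(false, true)`.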
+#[derive(Clone, Copy)] +crate struct MatchArm<'p, 'tcx> { + /// The pattern must have been lowered through `MatchVisitor::lower_pattern`. + crate pat: &'p super::Pat<'tcx>, + crate hir_id: HirId, + crate has_guard: bool, +} + +/// The output of checking a match for exhaustiveness and arm reachability. +crate struct UsefulnessReport<'p, 'tcx> { + /// For each arm of the input, whether that arm is reachable after the arms above it. + crate arm_usefulness: Vec<(MatchArm<'p, 'tcx>, Usefulness<'tcx>)>, + /// If the match is exhaustive, this is empty. If not, this contains witnesses for the lack of + /// exhaustiveness. + crate non_exhaustiveness_witnesses: Vec<super::Pat<'tcx>>, +} + +/// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which +/// of its arms are reachable. +/// +/// Note: the input patterns must have been lowered through `MatchVisitor::lower_pattern`. +crate fn compute_match_usefulness<'p, 'tcx>( + cx: &MatchCheckCtxt<'p, 'tcx>, + arms: &[MatchArm<'p, 'tcx>], + scrut_hir_id: HirId, + scrut_ty: Ty<'tcx>, +) -> UsefulnessReport<'p, 'tcx> { + let mut matrix = Matrix::empty(); + let arm_usefulness: Vec<_> = arms + .iter() + .copied() + .map(|arm| { + let v = PatStack::from_pattern(arm.pat); + let usefulness = + is_useful(cx, &matrix, &v, LeaveOutWitness, arm.hir_id, arm.has_guard, true); + if !arm.has_guard { + matrix.push(v); + } + (arm, usefulness) + }) + .collect(); + + let wild_pattern = cx.pattern_arena.alloc(super::Pat::wildcard_from_ty(scrut_ty)); + let v = PatStack::from_pattern(wild_pattern); + let usefulness = is_useful(cx, &matrix, &v, ConstructWitness, scrut_hir_id, false, true); + let non_exhaustiveness_witnesses = match usefulness { + NotUseful => vec![], // Wildcard pattern isn't useful, so the match is exhaustive. + UsefulWithWitness(pats) => { + if pats.is_empty() { + bug!("Exhaustiveness check returned no witnesses") + } else { + pats.into_iter().map(|w| w.single_pattern()).collect() + } + } + Useful(_) => bug!(), + }; + UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses } +} diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index 8a6b0230023..44999c9b63a 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -6,15 +6,18 @@ #![feature(or_patterns)] use rustc_ast as ast; +use rustc_ast::attr::HasAttrs; use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind}; use rustc_ast::tokenstream::{self, LazyTokenStream, TokenStream, TokenTree}; use rustc_ast_pretty::pprust; +use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::sync::Lrc; use rustc_errors::{Diagnostic, FatalError, Level, PResult}; use rustc_session::parse::ParseSess; use rustc_span::{symbol::kw, FileName, SourceFile, Span, DUMMY_SP}; use smallvec::SmallVec; +use std::cell::RefCell; use std::mem; use std::path::Path; use std::str; @@ -249,29 +252,23 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke // before we fall back to the stringification. 
let convert_tokens =
-        |tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
+        |tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
 
     let tokens = match *nt {
         Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
-        Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
-        Nonterminal::NtStmt(ref stmt) => {
-            // FIXME: We currently only collect tokens for `:stmt`
-            // matchers in `macro_rules!` macros. When we start collecting
-            // tokens for attributes on statements, we will need to prepend
-            // attributes here
-            convert_tokens(&stmt.tokens)
-        }
-        Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
-        Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
+        Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
+        Nonterminal::NtStmt(ref stmt) => prepend_attrs(stmt.attrs(), stmt.tokens()),
+        Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
+        Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
         Nonterminal::NtIdent(ident, is_raw) => {
             Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
         }
         Nonterminal::NtLifetime(ident) => {
             Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
         }
-        Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
-        Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
-        Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
+        Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.as_ref()),
+        Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.as_ref()),
+        Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
         Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
         Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
             if expr.tokens.is_none() {
@@ -281,6 +278,25 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
         }
     };
 
+    // Caches the stringification of 'good' `TokenStreams` which passed
+    // `tokenstream_probably_equal_for_proc_macro`. This allows us to avoid
+    // repeatedly stringifying and comparing the same `TokenStream` for deeply
+    // nested nonterminals.
+    //
+    // We cache by the stringification instead of the `TokenStream` to avoid
+    // needing to implement `Hash` for `TokenStream`. Note that it's possible to
+    // have two distinct `TokenStream`s that stringify to the same result
+    // (e.g. if they differ only in hygiene information). However, any
+    // information lost during the stringification process is also intentionally
+    // ignored by `tokenstream_probably_equal_for_proc_macro`, so it's fine
+    // that a single cache entry may 'map' to multiple distinct `TokenStream`s.
+    //
+    // This is a temporary hack to prevent compilation blowup on certain inputs.
+    // The entire pretty-print/retokenize process will be removed soon.
+    thread_local! {
+        static GOOD_TOKEN_CACHE: RefCell<FxHashSet<String>> = Default::default();
+    }
+
     // FIXME(#43081): Avoid this pretty-print + reparse hack
     // Pretty-print the AST struct without inserting any parenthesis
     // beyond those explicitly written by the user (e.g. `ExpnKind::Paren`).
@@ -288,7 +304,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     // ever used for a comparison against the captured tokenstream.
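The `GOOD_TOKEN_CACHE` added above is a per-thread set keyed by the pretty-printed source, so a successful comparison never has to be repeated for the same stringification. A standalone sketch of that caching pattern using only `std` types follows; the `render` and `expensive_check` helpers are hypothetical stand-ins for pretty-printing and the token-stream comparison:

```rust
use std::cell::RefCell;
use std::collections::HashSet;

thread_local! {
    // One cache per thread, so no synchronisation is needed; keyed by the
    // rendered string, mirroring GOOD_TOKEN_CACHE above.
    static GOOD: RefCell<HashSet<String>> = RefCell::new(HashSet::new());
}

// Hypothetical stand-ins for pretty-printing and the expensive comparison.
fn render(input: &str) -> String {
    input.trim().to_string()
}
fn expensive_check(rendered: &str) -> bool {
    rendered.len() % 2 == 0 // placeholder for the real token-stream comparison
}

fn is_good(input: &str) -> bool {
    let rendered = render(input);
    if GOOD.with(|c| c.borrow().contains(&rendered)) {
        return true; // cache hit: skip the expensive comparison entirely
    }
    if expensive_check(&rendered) {
        GOOD.with(|c| c.borrow_mut().insert(rendered));
        return true;
    }
    false
}

fn main() {
    assert!(is_good("ab  ")); // computed, then cached
    assert!(is_good("ab"));   // same rendering: answered from the cache
}
```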
let source = pprust::nonterminal_to_string_no_extra_parens(nt);
     let filename = FileName::macro_expansion_source_code(&source);
-    let reparsed_tokens = parse_stream_from_source_str(filename, source, sess, Some(span));
+    let reparsed_tokens = parse_stream_from_source_str(filename, source.clone(), sess, Some(span));
 
     // During early phases of the compiler the AST could get modified
     // directly (e.g., attributes added or removed) and the internal cache
@@ -314,8 +330,13 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     // modifications, including adding/removing typically non-semantic
     // tokens such as extra braces and commas, don't happen.
     if let Some(tokens) = tokens {
+        if GOOD_TOKEN_CACHE.with(|cache| cache.borrow().contains(&source)) {
+            return tokens;
+        }
+
         // Compare with a non-relaxed delim match to start.
         if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess, false) {
+            GOOD_TOKEN_CACHE.with(|cache| cache.borrow_mut().insert(source.clone()));
             return tokens;
         }
 
@@ -324,6 +345,11 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
         // token stream to match up with inserted parenthesis in the reparsed stream.
         let source_with_parens = pprust::nonterminal_to_string(nt);
         let filename_with_parens = FileName::macro_expansion_source_code(&source_with_parens);
+
+        if GOOD_TOKEN_CACHE.with(|cache| cache.borrow().contains(&source_with_parens)) {
+            return tokens;
+        }
+
         let reparsed_tokens_with_parens = parse_stream_from_source_str(
             filename_with_parens,
             source_with_parens,
@@ -339,6 +365,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
             sess,
             true,
         ) {
+            GOOD_TOKEN_CACHE.with(|cache| cache.borrow_mut().insert(source.clone()));
             return tokens;
         }
 
@@ -418,9 +445,9 @@ pub fn tokenstream_probably_equal_for_proc_macro(
         // to iterate breaking tokens multiple times. For example:
         // '[BinOpEq(Shr)] => [Gt, Ge] -> [Gt, Gt, Eq]'
         let mut token_trees: SmallVec<[_; 2]>;
-        if let TokenTree::Token(token) = &tree {
+        if let TokenTree::Token(token) = tree {
            let mut out = SmallVec::<[_; 2]>::new();
-            out.push(token.clone());
+            out.push(token);
            // Iterate to fixpoint:
            // * We start off with 'out' containing our initial token, and `temp` empty
            // * If we are able to break any tokens in `out`, then `out` will have
diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs
index 41985757b57..fae09fa6fec 100644
--- a/compiler/rustc_parse/src/parser/attr.rs
+++ b/compiler/rustc_parse/src/parser/attr.rs
@@ -8,8 +8,9 @@ use rustc_span::{sym, Span};
 
 use tracing::debug;
 
+// Public for rustfmt usage
 #[derive(Debug)]
-pub(super) enum InnerAttrPolicy<'a> {
+pub enum InnerAttrPolicy<'a> {
     Permitted,
     Forbidden { reason: &'a str, saw_doc_comment: bool, prev_attr_sp: Option<Span> },
 }
@@ -78,7 +79,8 @@ impl<'a> Parser<'a> {
 
     /// Matches `attribute = # ! [ meta_item ]`.
     /// `inner_parse_policy` prescribes how to handle inner attributes.
-    fn parse_attribute(
+    // Public for rustfmt usage.
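The token-breaking loop in `tokenstream_probably_equal_for_proc_macro` above iterates to a fixpoint, e.g. `>>=` first becomes `>`, `>=` and then `>`, `>`, `=`. A toy model of that fixpoint iteration follows; the `Tok` enum and helper names are illustrative, not rustc's token types:

```rust
// Toy model of the "iterate to fixpoint" token breaking described above: a
// compound token is repeatedly split into simpler ones until nothing in the
// list can be broken any further.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok {
    ShrEq, // `>>=`
    Ge,    // `>=`
    Gt,    // `>`
    Eq,    // `=`
}

// One breaking step for a single token, if it is breakable.
fn break_once(t: Tok) -> Option<(Tok, Tok)> {
    match t {
        Tok::ShrEq => Some((Tok::Gt, Tok::Ge)),
        Tok::Ge => Some((Tok::Gt, Tok::Eq)),
        _ => None,
    }
}

fn break_to_fixpoint(start: Tok) -> Vec<Tok> {
    let mut out = vec![start];
    loop {
        let mut changed = false;
        let mut next = Vec::with_capacity(out.len());
        for &t in &out {
            match break_once(t) {
                Some((a, b)) => {
                    next.push(a);
                    next.push(b);
                    changed = true;
                }
                None => next.push(t),
            }
        }
        out = next;
        if !changed {
            return out;
        }
    }
}

fn main() {
    // `[>>=]` => `[>, >=]` => `[>, >, =]`, matching the comment above.
    assert_eq!(break_to_fixpoint(Tok::ShrEq), vec![Tok::Gt, Tok::Gt, Tok::Eq]);
}
```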
+ pub fn parse_attribute( &mut self, inner_parse_policy: InnerAttrPolicy<'_>, ) -> PResult<'a, ast::Attribute> { diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 40aa2db58c7..b746256f5fe 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -873,7 +873,8 @@ impl<'a> Parser<'a> { id: DUMMY_NODE_ID, value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()), }; - Ok(self.mk_expr(span, ExprKind::ConstBlock(anon_const), AttrVec::new())) + let blk_span = anon_const.value.span; + Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::new())) } /// Parses mutability (`mut` or nothing). @@ -1213,14 +1214,20 @@ impl<'a> Parser<'a> { // // This also makes `Parser` very cheap to clone, since // there is no intermediate collection buffer to clone. + #[derive(Clone)] struct LazyTokenStreamImpl { start_token: (Token, Spacing), cursor_snapshot: TokenCursor, num_calls: usize, desugar_doc_comments: bool, + trailing_semi: bool, } impl CreateTokenStream for LazyTokenStreamImpl { fn create_token_stream(&self) -> TokenStream { + let mut num_calls = self.num_calls; + if self.trailing_semi { + num_calls += 1; + } // The token produced by the final call to `next` or `next_desugared` // was not actually consumed by the callback. The combination // of chaining the initial token and using `take` produces the desired @@ -1228,17 +1235,25 @@ impl<'a> Parser<'a> { // and omit the final token otherwise. let mut cursor_snapshot = self.cursor_snapshot.clone(); let tokens = std::iter::once(self.start_token.clone()) - .chain((0..self.num_calls).map(|_| { + .chain((0..num_calls).map(|_| { if self.desugar_doc_comments { cursor_snapshot.next_desugared() } else { cursor_snapshot.next() } })) - .take(self.num_calls); + .take(num_calls); make_token_stream(tokens) } + fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> { + if self.trailing_semi { + panic!("Called `add_trailing_semi` twice!"); + } + let mut new = self.clone(); + new.trailing_semi = true; + Box::new(new) + } } let lazy_impl = LazyTokenStreamImpl { @@ -1246,6 +1261,7 @@ impl<'a> Parser<'a> { num_calls: self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls, cursor_snapshot, desugar_doc_comments: self.desugar_doc_comments, + trailing_semi: false, }; Ok((ret, Some(LazyTokenStream::new(lazy_impl)))) } diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index ab88362dad9..c007f96a798 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -117,8 +117,8 @@ impl<'a> Parser<'a> { let (stmt, tokens) = self.collect_tokens(|this| this.parse_stmt())?; match stmt { Some(mut s) => { - if s.tokens.is_none() { - s.tokens = tokens; + if s.tokens().is_none() { + s.set_tokens(tokens); } token::NtStmt(s) } diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index d64fd59b0a6..17e5bcf7605 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -3,10 +3,9 @@ use super::{Parser, TokenType}; use crate::maybe_whole; use rustc_ast::ptr::P; use rustc_ast::token::{self, Token}; -use rustc_ast::{ - self as ast, AngleBracketedArg, AngleBracketedArgs, GenericArg, ParenthesizedArgs, -}; +use rustc_ast::{self as ast, AngleBracketedArg, AngleBracketedArgs, ParenthesizedArgs}; use rustc_ast::{AnonConst, AssocTyConstraint, AssocTyConstraintKind, 
BlockCheckMode}; +use rustc_ast::{GenericArg, GenericArgs}; use rustc_ast::{Path, PathSegment, QSelf}; use rustc_errors::{pluralize, Applicability, PResult}; use rustc_span::source_map::{BytePos, Span}; @@ -414,32 +413,40 @@ impl<'a> Parser<'a> { /// Parses a single argument in the angle arguments `<...>` of a path segment. fn parse_angle_arg(&mut self) -> PResult<'a, Option<AngleBracketedArg>> { - if self.check_ident() && self.look_ahead(1, |t| matches!(t.kind, token::Eq | token::Colon)) - { - // Parse associated type constraint. - let lo = self.token.span; - let ident = self.parse_ident()?; - let kind = if self.eat(&token::Eq) { - let ty = self.parse_assoc_equality_term(ident, self.prev_token.span)?; - AssocTyConstraintKind::Equality { ty } - } else if self.eat(&token::Colon) { - let bounds = self.parse_generic_bounds(Some(self.prev_token.span))?; - AssocTyConstraintKind::Bound { bounds } - } else { - unreachable!(); - }; + let lo = self.token.span; + let arg = self.parse_generic_arg()?; + match arg { + Some(arg) => { + if self.check(&token::Colon) | self.check(&token::Eq) { + let (ident, gen_args) = self.get_ident_from_generic_arg(arg, lo)?; + let kind = if self.eat(&token::Colon) { + // Parse associated type constraint bound. + + let bounds = self.parse_generic_bounds(Some(self.prev_token.span))?; + AssocTyConstraintKind::Bound { bounds } + } else if self.eat(&token::Eq) { + // Parse associated type equality constraint + + let ty = self.parse_assoc_equality_term(ident, self.prev_token.span)?; + AssocTyConstraintKind::Equality { ty } + } else { + unreachable!(); + }; - let span = lo.to(self.prev_token.span); + let span = lo.to(self.prev_token.span); - // Gate associated type bounds, e.g., `Iterator<Item: Ord>`. - if let AssocTyConstraintKind::Bound { .. } = kind { - self.sess.gated_spans.gate(sym::associated_type_bounds, span); + // Gate associated type bounds, e.g., `Iterator<Item: Ord>`. + if let AssocTyConstraintKind::Bound { .. 
} = kind { + self.sess.gated_spans.gate(sym::associated_type_bounds, span); + } + let constraint = + AssocTyConstraint { id: ast::DUMMY_NODE_ID, ident, gen_args, kind, span }; + Ok(Some(AngleBracketedArg::Constraint(constraint))) + } else { + Ok(Some(AngleBracketedArg::Arg(arg))) + } } - - let constraint = AssocTyConstraint { id: ast::DUMMY_NODE_ID, ident, kind, span }; - Ok(Some(AngleBracketedArg::Constraint(constraint))) - } else { - Ok(self.parse_generic_arg()?.map(AngleBracketedArg::Arg)) + _ => Ok(None), } } @@ -542,4 +549,54 @@ impl<'a> Parser<'a> { }; Ok(Some(arg)) } + + fn get_ident_from_generic_arg( + &self, + gen_arg: GenericArg, + lo: Span, + ) -> PResult<'a, (Ident, Option<GenericArgs>)> { + let gen_arg_span = gen_arg.span(); + match gen_arg { + GenericArg::Type(t) => match t.into_inner().kind { + ast::TyKind::Path(qself, mut path) => { + if let Some(qself) = qself { + let mut err = self.struct_span_err( + gen_arg_span, + "qualified paths cannot be used in associated type constraints", + ); + err.span_label( + qself.path_span, + "not allowed in associated type constraints", + ); + return Err(err); + } + if path.segments.len() == 1 { + let path_seg = path.segments.remove(0); + let ident = path_seg.ident; + let gen_args = path_seg.args.map(|args| args.into_inner()); + return Ok((ident, gen_args)); + } + let err = self.struct_span_err( + path.span, + "paths with multiple segments cannot be used in associated type constraints", + ); + return Err(err); + } + _ => { + let span = lo.to(self.prev_token.span); + let err = self.struct_span_err( + span, + "only path types can be used in associated type constraints", + ); + return Err(err); + } + }, + _ => { + let span = lo.to(self.prev_token.span); + let err = self + .struct_span_err(span, "only types can be used in associated type constraints"); + return Err(err); + } + } + } } diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 131ff1ae6b3..e974556f43a 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -7,8 +7,10 @@ use super::{BlockMode, Parser, Restrictions, SemiColonMode}; use crate::maybe_whole; use rustc_ast as ast; +use rustc_ast::attr::HasAttrs; use rustc_ast::ptr::P; use rustc_ast::token::{self, TokenKind}; +use rustc_ast::tokenstream::LazyTokenStream; use rustc_ast::util::classify; use rustc_ast::{AttrStyle, AttrVec, Attribute, MacCall, MacCallStmt, MacStmtStyle}; use rustc_ast::{Block, BlockCheckMode, Expr, ExprKind, Local, Stmt, StmtKind, DUMMY_NODE_ID}; @@ -31,45 +33,75 @@ impl<'a> Parser<'a> { } fn parse_stmt_without_recovery(&mut self) -> PResult<'a, Option<Stmt>> { - maybe_whole!(self, NtStmt, |x| Some(x)); - - let attrs = self.parse_outer_attributes()?; + let mut attrs = self.parse_outer_attributes()?; + let has_attrs = !attrs.is_empty(); let lo = self.token.span; - let stmt = if self.eat_keyword(kw::Let) { - self.parse_local_mk(lo, attrs.into())? - } else if self.is_kw_followed_by_ident(kw::Mut) { - self.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")? - } else if self.is_kw_followed_by_ident(kw::Auto) { - self.bump(); // `auto` - let msg = "write `let` instead of `auto` to introduce a new variable"; - self.recover_stmt_local(lo, attrs.into(), msg, "let")? - } else if self.is_kw_followed_by_ident(sym::var) { - self.bump(); // `var` - let msg = "write `let` instead of `var` to introduce a new variable"; - self.recover_stmt_local(lo, attrs.into(), msg, "let")? 
-        } else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
-            // We have avoided contextual keywords like `union`, items with `crate` visibility,
-            // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
-            // that starts like a path (1 token), but it fact not a path.
-            // Also, we avoid stealing syntax from `parse_item_`.
-            self.parse_stmt_path_start(lo, attrs)?
-        } else if let Some(item) = self.parse_item_common(attrs.clone(), false, true, |_| true)? {
-            // FIXME: Bad copy of attrs
-            self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
-        } else if self.eat(&token::Semi) {
-            // Do not attempt to parse an expression if we're done here.
-            self.error_outer_attrs(&attrs);
-            self.mk_stmt(lo, StmtKind::Empty)
-        } else if self.token != token::CloseDelim(token::Brace) {
-            // Remainder are line-expr stmts.
-            let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
-            self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
+        maybe_whole!(self, NtStmt, |stmt| {
+            let mut stmt = stmt;
+            stmt.visit_attrs(|stmt_attrs| {
+                mem::swap(stmt_attrs, &mut attrs);
+                stmt_attrs.extend(attrs);
+            });
+            Some(stmt)
+        });
+
+        let parse_stmt_inner = |this: &mut Self| {
+            let stmt = if this.eat_keyword(kw::Let) {
+                this.parse_local_mk(lo, attrs.into())?
+            } else if this.is_kw_followed_by_ident(kw::Mut) {
+                this.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
+            } else if this.is_kw_followed_by_ident(kw::Auto) {
+                this.bump(); // `auto`
+                let msg = "write `let` instead of `auto` to introduce a new variable";
+                this.recover_stmt_local(lo, attrs.into(), msg, "let")?
+            } else if this.is_kw_followed_by_ident(sym::var) {
+                this.bump(); // `var`
+                let msg = "write `let` instead of `var` to introduce a new variable";
+                this.recover_stmt_local(lo, attrs.into(), msg, "let")?
+            } else if this.check_path()
+                && !this.token.is_qpath_start()
+                && !this.is_path_start_item()
+            {
+                // We have avoided contextual keywords like `union`, items with `crate` visibility,
+                // or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
+                // that starts like a path (1 token), but is in fact not a path.
+                // Also, we avoid stealing syntax from `parse_item_`.
+                this.parse_stmt_path_start(lo, attrs)?
+            } else if let Some(item) =
+                this.parse_item_common(attrs.clone(), false, true, |_| true)?
+            {
+                // FIXME: Bad copy of attrs
+                this.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
+            } else if this.eat(&token::Semi) {
+                // Do not attempt to parse an expression if we're done here.
+                this.error_outer_attrs(&attrs);
+                this.mk_stmt(lo, StmtKind::Empty)
+            } else if this.token != token::CloseDelim(token::Brace) {
+                // Remainder are line-expr stmts.
+                let e = this.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
+                this.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
+            } else {
+                this.error_outer_attrs(&attrs);
+                return Ok(None);
+            };
+            Ok(Some(stmt))
+        };
+
+        let stmt = if has_attrs {
+            let (mut stmt, tokens) = self.collect_tokens(parse_stmt_inner)?;
+            if let Some(stmt) = &mut stmt {
+                // If we already have tokens (e.g. due to encountering an `NtStmt`),
+                // use those instead.
+                if stmt.tokens().is_none() {
+                    stmt.set_tokens(tokens);
+                }
+            }
+            stmt
         } else {
-            self.error_outer_attrs(&attrs);
-            return Ok(None);
+            parse_stmt_inner(self)?
}; - Ok(Some(stmt)) + Ok(stmt) } fn parse_stmt_path_start(&mut self, lo: Span, attrs: Vec<Attribute>) -> PResult<'a, Stmt> { @@ -107,7 +139,7 @@ impl<'a> Parser<'a> { let kind = if delim == token::Brace || self.token == token::Semi || self.token == token::Eof { - StmtKind::MacCall(P(MacCallStmt { mac, style, attrs })) + StmtKind::MacCall(P(MacCallStmt { mac, style, attrs, tokens: None })) } else { // Since none of the above applied, this is an expression statement macro. let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new()); @@ -219,7 +251,7 @@ impl<'a> Parser<'a> { } }; let hi = if self.token == token::Semi { self.token.span } else { self.prev_token.span }; - Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs })) + Ok(P(ast::Local { ty, pat, init, id: DUMMY_NODE_ID, span: lo.to(hi), attrs, tokens: None })) } /// Parses the RHS of a local variable declaration (e.g., '= 14;'). @@ -376,6 +408,12 @@ impl<'a> Parser<'a> { None => return Ok(None), }; + let add_semi_token = |tokens: Option<&mut LazyTokenStream>| { + if let Some(tokens) = tokens { + *tokens = tokens.add_trailing_semi(); + } + }; + let mut eat_semi = true; match stmt.kind { // Expression without semicolon. @@ -417,6 +455,7 @@ impl<'a> Parser<'a> { *expr = self.mk_expr_err(sp); } } + StmtKind::Expr(_) | StmtKind::MacCall(_) => {} StmtKind::Local(ref mut local) => { if let Err(e) = self.expect_semi() { // We might be at the `,` in `let x = foo<bar, baz>;`. Try to recover. @@ -430,13 +469,18 @@ impl<'a> Parser<'a> { } } eat_semi = false; + // We just checked that there's a semicolon in the tokenstream, + // so capture it + add_semi_token(local.tokens.as_mut()); } - StmtKind::Empty => eat_semi = false, - _ => {} + StmtKind::Empty | StmtKind::Item(_) | StmtKind::Semi(_) => eat_semi = false, } if eat_semi && self.eat(&token::Semi) { stmt = stmt.add_trailing_semicolon(); + // We just checked that we have a semicolon in the tokenstream, + // so capture it + add_semi_token(stmt.tokens_mut()); } stmt.span = stmt.span.to(self.prev_token.span); Ok(Some(stmt)) @@ -447,7 +491,7 @@ impl<'a> Parser<'a> { } pub(super) fn mk_stmt(&self, span: Span, kind: StmtKind) -> Stmt { - Stmt { id: DUMMY_NODE_ID, kind, span, tokens: None } + Stmt { id: DUMMY_NODE_ID, kind, span } } pub(super) fn mk_stmt_err(&self, span: Span) -> Stmt { diff --git a/compiler/rustc_passes/src/dead.rs b/compiler/rustc_passes/src/dead.rs index f567dd83bc1..b87b13cff80 100644 --- a/compiler/rustc_passes/src/dead.rs +++ b/compiler/rustc_passes/src/dead.rs @@ -190,7 +190,7 @@ impl<'tcx> MarkSymbolVisitor<'tcx> { intravisit::walk_item(self, &item); } - hir::ItemKind::ForeignMod(..) => {} + hir::ItemKind::ForeignMod { .. 
} => {} _ => { intravisit::walk_item(self, &item); } @@ -447,6 +447,8 @@ impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> { fn visit_impl_item(&mut self, _item: &hir::ImplItem<'_>) { // ignore: we are handling this in `visit_item` above } + + fn visit_foreign_item(&mut self, _item: &'v hir::ForeignItem<'v>) {} } fn create_and_seed_worklist<'tcx>( diff --git a/compiler/rustc_passes/src/diagnostic_items.rs b/compiler/rustc_passes/src/diagnostic_items.rs index 5a087c41f58..699c96bc49d 100644 --- a/compiler/rustc_passes/src/diagnostic_items.rs +++ b/compiler/rustc_passes/src/diagnostic_items.rs @@ -37,6 +37,10 @@ impl<'v, 'tcx> ItemLikeVisitor<'v> for DiagnosticItemCollector<'tcx> { fn visit_impl_item(&mut self, impl_item: &hir::ImplItem<'_>) { self.observe_item(&impl_item.attrs, impl_item.hir_id); } + + fn visit_foreign_item(&mut self, foreign_item: &hir::ForeignItem<'_>) { + self.observe_item(foreign_item.attrs, foreign_item.hir_id); + } } impl<'tcx> DiagnosticItemCollector<'tcx> { @@ -100,18 +104,6 @@ fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> FxHashMap<Symbol, DefId> { // Collect diagnostic items in this crate. tcx.hir().krate().visit_all_item_likes(&mut collector); - // FIXME(visit_all_item_likes): Foreign items are not visited - // here, so we have to manually look at them for now. - for (_, foreign_module) in tcx.foreign_modules(LOCAL_CRATE).iter() { - for &foreign_item in foreign_module.foreign_items.iter() { - match tcx.hir().get(tcx.hir().local_def_id_to_hir_id(foreign_item.expect_local())) { - hir::Node::ForeignItem(item) => { - collector.observe_item(item.attrs, item.hir_id); - } - item => bug!("unexpected foreign item {:?}", item), - } - } - } for m in tcx.hir().krate().exported_macros { collector.observe_item(m.attrs, m.hir_id); diff --git a/compiler/rustc_passes/src/entry.rs b/compiler/rustc_passes/src/entry.rs index e87adb378e7..5ff631a2457 100644 --- a/compiler/rustc_passes/src/entry.rs +++ b/compiler/rustc_passes/src/entry.rs @@ -2,7 +2,7 @@ use rustc_ast::entry::EntryPointType; use rustc_errors::struct_span_err; use rustc_hir::def_id::{CrateNum, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc_hir::itemlikevisit::ItemLikeVisitor; -use rustc_hir::{HirId, ImplItem, Item, ItemKind, TraitItem}; +use rustc_hir::{ForeignItem, HirId, ImplItem, Item, ItemKind, TraitItem}; use rustc_middle::hir::map::Map; use rustc_middle::ty::query::Providers; use rustc_middle::ty::TyCtxt; @@ -45,6 +45,10 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for EntryContext<'a, 'tcx> { fn visit_impl_item(&mut self, _impl_item: &'tcx ImplItem<'tcx>) { // Entry fn is never a trait item. } + + fn visit_foreign_item(&mut self, _: &'tcx ForeignItem<'tcx>) { + // Entry fn is never a foreign item. 
+ } } fn entry_fn(tcx: TyCtxt<'_>, cnum: CrateNum) -> Option<(LocalDefId, EntryFnType)> { diff --git a/compiler/rustc_passes/src/hir_id_validator.rs b/compiler/rustc_passes/src/hir_id_validator.rs index 6d1a5fcc10b..c7e057927ab 100644 --- a/compiler/rustc_passes/src/hir_id_validator.rs +++ b/compiler/rustc_passes/src/hir_id_validator.rs @@ -68,6 +68,11 @@ impl<'a, 'hir> ItemLikeVisitor<'hir> for OuterVisitor<'a, 'hir> { let mut inner_visitor = self.new_inner_visitor(self.hir_map); inner_visitor.check(i.hir_id, |this| intravisit::walk_impl_item(this, i)); } + + fn visit_foreign_item(&mut self, i: &'hir hir::ForeignItem<'hir>) { + let mut inner_visitor = self.new_inner_visitor(self.hir_map); + inner_visitor.check(i.hir_id, |this| intravisit::walk_foreign_item(this, i)); + } } impl<'a, 'hir> HirIdValidator<'a, 'hir> { diff --git a/compiler/rustc_passes/src/lang_items.rs b/compiler/rustc_passes/src/lang_items.rs index 0ae0c381a11..3132661e5f5 100644 --- a/compiler/rustc_passes/src/lang_items.rs +++ b/compiler/rustc_passes/src/lang_items.rs @@ -54,6 +54,8 @@ impl ItemLikeVisitor<'v> for LanguageItemCollector<'tcx> { impl_item.attrs, ) } + + fn visit_foreign_item(&mut self, _: &hir::ForeignItem<'_>) {} } impl LanguageItemCollector<'tcx> { diff --git a/compiler/rustc_passes/src/layout_test.rs b/compiler/rustc_passes/src/layout_test.rs index 504cbbfcb76..9e83cbd6680 100644 --- a/compiler/rustc_passes/src/layout_test.rs +++ b/compiler/rustc_passes/src/layout_test.rs @@ -40,6 +40,7 @@ impl ItemLikeVisitor<'tcx> for LayoutTest<'tcx> { fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem<'tcx>) {} + fn visit_foreign_item(&mut self, _: &'tcx hir::ForeignItem<'tcx>) {} } impl LayoutTest<'tcx> { diff --git a/compiler/rustc_passes/src/reachable.rs b/compiler/rustc_passes/src/reachable.rs index 8d5c980609c..fde83af99a5 100644 --- a/compiler/rustc_passes/src/reachable.rs +++ b/compiler/rustc_passes/src/reachable.rs @@ -262,7 +262,7 @@ impl<'tcx> ReachableContext<'tcx> { | hir::ItemKind::TyAlias(..) | hir::ItemKind::Static(..) | hir::ItemKind::Mod(..) - | hir::ItemKind::ForeignMod(..) + | hir::ItemKind::ForeignMod { .. } | hir::ItemKind::Impl { .. } | hir::ItemKind::Trait(..) | hir::ItemKind::TraitAlias(..) @@ -378,6 +378,10 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, 'tcx fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) { // processed in visit_item above } + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) { + // We never export foreign functions as they have no body to export. + } } fn reachable_set<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) -> FxHashSet<LocalDefId> { diff --git a/compiler/rustc_passes/src/stability.rs b/compiler/rustc_passes/src/stability.rs index 04b5c65e464..f6bbbd80bf1 100644 --- a/compiler/rustc_passes/src/stability.rs +++ b/compiler/rustc_passes/src/stability.rs @@ -326,7 +326,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> { // they don't have their own stability. They still can be annotated as unstable // and propagate this unstability to children, but this annotation is completely // optional. They inherit stability from their parents when unannotated. - hir::ItemKind::Impl { of_trait: None, .. } | hir::ItemKind::ForeignMod(..) => { + hir::ItemKind::Impl { of_trait: None, .. } | hir::ItemKind::ForeignMod { .. 
} => { self.in_trait_impl = false; kind = AnnotationKind::Container; } @@ -499,7 +499,7 @@ impl<'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'tcx> { // optional. They inherit stability from their parents when unannotated. if !matches!( i.kind, - hir::ItemKind::Impl { of_trait: None, .. } | hir::ItemKind::ForeignMod(..) + hir::ItemKind::Impl { of_trait: None, .. } | hir::ItemKind::ForeignMod { .. } ) { self.check_missing_stability(i.hir_id, i.span); } diff --git a/compiler/rustc_plugin_impl/src/build.rs b/compiler/rustc_plugin_impl/src/build.rs index d16dd701a12..4796d9a80b6 100644 --- a/compiler/rustc_plugin_impl/src/build.rs +++ b/compiler/rustc_plugin_impl/src/build.rs @@ -25,6 +25,8 @@ impl<'v, 'tcx> ItemLikeVisitor<'v> for RegistrarFinder<'tcx> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} } /// Finds the function marked with `#[plugin_registrar]`, if any. diff --git a/compiler/rustc_privacy/src/lib.rs b/compiler/rustc_privacy/src/lib.rs index b8fa9081aa3..4414bf57c6b 100644 --- a/compiler/rustc_privacy/src/lib.rs +++ b/compiler/rustc_privacy/src/lib.rs @@ -592,7 +592,7 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { Option::<AccessLevel>::of_impl(item.hir_id, self.tcx, &self.access_levels) } // Foreign modules inherit level from parents. - hir::ItemKind::ForeignMod(..) => self.prev_level, + hir::ItemKind::ForeignMod { .. } => self.prev_level, // Other `pub` items inherit levels from parents. hir::ItemKind::Const(..) | hir::ItemKind::Enum(..) @@ -654,10 +654,10 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { } } } - hir::ItemKind::ForeignMod(ref foreign_mod) => { - for foreign_item in foreign_mod.items { + hir::ItemKind::ForeignMod { items, .. } => { + for foreign_item in items { if foreign_item.vis.node.is_pub() { - self.update(foreign_item.hir_id, item_level); + self.update(foreign_item.id.hir_id, item_level); } } } @@ -770,11 +770,11 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { } } // Visit everything, but foreign items have their own levels. - hir::ItemKind::ForeignMod(ref foreign_mod) => { - for foreign_item in foreign_mod.items { - let foreign_item_level = self.get(foreign_item.hir_id); + hir::ItemKind::ForeignMod { items, .. } => { + for foreign_item in items { + let foreign_item_level = self.get(foreign_item.id.hir_id); if foreign_item_level.is_some() { - self.reach(foreign_item.hir_id, foreign_item_level) + self.reach(foreign_item.id.hir_id, foreign_item_level) .generics() .predicates() .ty(); @@ -1430,7 +1430,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { // An `extern {}` doesn't introduce a new privacy // namespace (the contents have their own privacies). - hir::ItemKind::ForeignMod(_) => {} + hir::ItemKind::ForeignMod { .. } => {} hir::ItemKind::Trait(.., ref bounds, _) => { if !self.trait_is_public(item.hir_id) { @@ -1948,10 +1948,10 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> } } // Subitems of foreign modules have their own publicity. - hir::ItemKind::ForeignMod(ref foreign_mod) => { - for foreign_item in foreign_mod.items { - let vis = tcx.visibility(tcx.hir().local_def_id(foreign_item.hir_id)); - self.check(foreign_item.hir_id, vis).generics().predicates().ty(); + hir::ItemKind::ForeignMod { items, .. 
} => { + for foreign_item in items { + let vis = tcx.visibility(tcx.hir().local_def_id(foreign_item.id.hir_id)); + self.check(foreign_item.id.hir_id, vis).generics().predicates().ty(); } } // Subitems of structs and unions have their own publicity. diff --git a/compiler/rustc_resolve/src/late/lifetimes.rs b/compiler/rustc_resolve/src/late/lifetimes.rs index c79d670737e..91edbebc05f 100644 --- a/compiler/rustc_resolve/src/late/lifetimes.rs +++ b/compiler/rustc_resolve/src/late/lifetimes.rs @@ -388,7 +388,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> { hir::ItemKind::ExternCrate(_) | hir::ItemKind::Use(..) | hir::ItemKind::Mod(..) - | hir::ItemKind::ForeignMod(..) + | hir::ItemKind::ForeignMod { .. } | hir::ItemKind::GlobalAsm(..) => { // These sorts of items have no lifetime parameters at all. intravisit::walk_item(self, item); diff --git a/compiler/rustc_save_analysis/src/sig.rs b/compiler/rustc_save_analysis/src/sig.rs index 2f82d0546ba..ff445d727fa 100644 --- a/compiler/rustc_save_analysis/src/sig.rs +++ b/compiler/rustc_save_analysis/src/sig.rs @@ -550,7 +550,7 @@ impl<'hir> Sig for hir::Item<'hir> { // FIXME where clause } - hir::ItemKind::ForeignMod(_) => Err("extern mod"), + hir::ItemKind::ForeignMod { .. } => Err("extern mod"), hir::ItemKind::GlobalAsm(_) => Err("global asm"), hir::ItemKind::ExternCrate(_) => Err("extern crate"), hir::ItemKind::OpaqueTy(..) => Err("opaque type"), diff --git a/compiler/rustc_symbol_mangling/src/test.rs b/compiler/rustc_symbol_mangling/src/test.rs index a28c8cac728..8c5e438a728 100644 --- a/compiler/rustc_symbol_mangling/src/test.rs +++ b/compiler/rustc_symbol_mangling/src/test.rs @@ -71,4 +71,8 @@ impl hir::itemlikevisit::ItemLikeVisitor<'tcx> for SymbolNamesTest<'tcx> { fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem<'tcx>) { self.process_attrs(impl_item.hir_id); } + + fn visit_foreign_item(&mut self, foreign_item: &'tcx hir::ForeignItem<'tcx>) { + self.process_attrs(foreign_item.hir_id); + } } diff --git a/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs b/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs new file mode 100644 index 00000000000..3a881975236 --- /dev/null +++ b/compiler/rustc_target/src/spec/aarch64_apple_ios_macabi.rs @@ -0,0 +1,31 @@ +use super::apple_sdk_base::{opts, Arch}; +use crate::spec::{Target, TargetOptions}; + +pub fn target() -> Target { + let base = opts("ios", Arch::Arm64_macabi); + Target { + llvm_target: "arm64-apple-ios-macabi".to_string(), + pointer_width: 64, + data_layout: "e-m:o-i64:64-i128:128-n32:64-S128".to_string(), + arch: "aarch64".to_string(), + options: TargetOptions { + features: "+neon,+fp-armv8,+apple-a7".to_string(), + eliminate_frame_pointer: false, + max_atomic_width: Some(128), + unsupported_abis: super::arm_base::unsupported_abis(), + forces_embed_bitcode: true, + // Taken from a clang build on Xcode 11.4.1. + // These arguments are not actually invoked - they just have + // to look right to pass App Store validation. 
+ bitcode_llvm_cmdline: "-triple\0\ + arm64-apple-ios-macabi\0\ + -emit-obj\0\ + -disable-llvm-passes\0\ + -target-abi\0\ + darwinpcs\0\ + -Os\0" + .to_string(), + ..base + }, + } +} diff --git a/compiler/rustc_target/src/spec/apple_sdk_base.rs b/compiler/rustc_target/src/spec/apple_sdk_base.rs index 092401f1146..d894f759937 100644 --- a/compiler/rustc_target/src/spec/apple_sdk_base.rs +++ b/compiler/rustc_target/src/spec/apple_sdk_base.rs @@ -10,6 +10,7 @@ pub enum Arch { I386, X86_64, X86_64_macabi, + Arm64_macabi, } fn target_cpu(arch: Arch) -> String { @@ -20,6 +21,7 @@ fn target_cpu(arch: Arch) -> String { I386 => "yonah", X86_64 => "core2", X86_64_macabi => "core2", + Arm64_macabi => "apple-a12", } .to_string() } @@ -27,7 +29,7 @@ fn target_cpu(arch: Arch) -> String { fn link_env_remove(arch: Arch) -> Vec<String> { match arch { Armv7 | Armv7s | Arm64 | I386 | X86_64 => vec!["MACOSX_DEPLOYMENT_TARGET".to_string()], - X86_64_macabi => vec!["IPHONEOS_DEPLOYMENT_TARGET".to_string()], + X86_64_macabi | Arm64_macabi => vec!["IPHONEOS_DEPLOYMENT_TARGET".to_string()], } } diff --git a/compiler/rustc_target/src/spec/mod.rs b/compiler/rustc_target/src/spec/mod.rs index 1bda33939b8..b4414e53331 100644 --- a/compiler/rustc_target/src/spec/mod.rs +++ b/compiler/rustc_target/src/spec/mod.rs @@ -579,6 +579,7 @@ supported_targets! { ("armv7-apple-ios", armv7_apple_ios), ("armv7s-apple-ios", armv7s_apple_ios), ("x86_64-apple-ios-macabi", x86_64_apple_ios_macabi), + ("aarch64-apple-ios-macabi", aarch64_apple_ios_macabi), ("aarch64-apple-tvos", aarch64_apple_tvos), ("x86_64-apple-tvos", x86_64_apple_tvos), diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs index 2d7df2ddd11..2fb9b3cd5d3 100644 --- a/compiler/rustc_trait_selection/src/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/mod.rs @@ -97,13 +97,13 @@ impl Default for SkipLeakCheck { /// The mode that trait queries run in. #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum TraitQueryMode { - // Standard/un-canonicalized queries get accurate - // spans etc. passed in and hence can do reasonable - // error reporting on their own. + /// Standard/un-canonicalized queries get accurate + /// spans etc. passed in and hence can do reasonable + /// error reporting on their own. Standard, - // Canonicalized queries get dummy spans and hence - // must generally propagate errors to - // pre-canonicalization callsites. + /// Canonicalized queries get dummy spans and hence + /// must generally propagate errors to + /// pre-canonicalization callsites. Canonical, } diff --git a/compiler/rustc_ty_utils/src/ty.rs b/compiler/rustc_ty_utils/src/ty.rs index 720ad42da2a..aa1de6d51cb 100644 --- a/compiler/rustc_ty_utils/src/ty.rs +++ b/compiler/rustc_ty_utils/src/ty.rs @@ -309,7 +309,7 @@ fn well_formed_types_in_env<'tcx>( InherentImpl, Fn, Other, - }; + } let node_kind = match node { Node::TraitItem(item) => match item.kind { diff --git a/compiler/rustc_typeck/src/astconv/mod.rs b/compiler/rustc_typeck/src/astconv/mod.rs index 9b814f6b7ee..7888cb1b9f5 100644 --- a/compiler/rustc_typeck/src/astconv/mod.rs +++ b/compiler/rustc_typeck/src/astconv/mod.rs @@ -337,6 +337,14 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { infer_args, ); + // Skip processing if type has no generic parameters. 
+ // Traits always have `Self` as a generic parameter, which means they will not return early + // here and so associated type bindings will be handled regardless of whether there are any + // non-`Self` generic parameters. + if generic_params.params.len() == 0 { + return (tcx.intern_substs(&[]), vec![], arg_count); + } + let is_object = self_ty.map_or(false, |ty| ty == self.tcx().types.trait_object_dummy_self); struct SubstsForAstPathCtxt<'a, 'tcx> { @@ -815,34 +823,25 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o { ast_bounds: &[hir::GenericBound<'_>], bounds: &mut Bounds<'tcx>, ) { - let mut trait_bounds = Vec::new(); - let mut region_bounds = Vec::new(); - let constness = self.default_constness_for_trait_bounds(); for ast_bound in ast_bounds { match *ast_bound { hir::GenericBound::Trait(ref b, hir::TraitBoundModifier::None) => { - trait_bounds.push((b, constness)) + self.instantiate_poly_trait_ref(b, constness, param_ty, bounds); } hir::GenericBound::Trait(ref b, hir::TraitBoundModifier::MaybeConst) => { - trait_bounds.push((b, Constness::NotConst)) + self.instantiate_poly_trait_ref(b, Constness::NotConst, param_ty, bounds); } hir::GenericBound::Trait(_, hir::TraitBoundModifier::Maybe) => {} hir::GenericBound::LangItemTrait(lang_item, span, hir_id, args) => self .instantiate_lang_item_trait_ref( lang_item, span, hir_id, args, param_ty, bounds, ), - hir::GenericBound::Outlives(ref l) => region_bounds.push(l), + hir::GenericBound::Outlives(ref l) => { + bounds.region_bounds.push((self.ast_region_to_region(l, None), l.span)) + } } } - - for (bound, constness) in trait_bounds { - let _ = self.instantiate_poly_trait_ref(bound, constness, param_ty, bounds); - } - - bounds.region_bounds.extend( - region_bounds.into_iter().map(|r| (self.ast_region_to_region(r, None), r.span)), - ); } /// Translates a list of bounds from the HIR into the `Bounds` data structure. 
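The comment added to `rustc_typeck/src/astconv/mod.rs` above explains that the new early return only fires when a path has no generic parameters at all, and that trait paths never take it because every trait carries an implicit `Self` parameter, so associated type bindings are still processed. A minimal illustration of the case that comment guards (assumed user code, not part of this patch):

    // `Iterator` gets no explicit generic arguments here, but it still has the
    // implicit `Self` parameter, so the early return added above is skipped and
    // the `Item = u32` binding is lowered as before.
    fn first(it: &mut dyn Iterator<Item = u32>) -> Option<u32> {
        it.next()
    }
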
diff --git a/compiler/rustc_typeck/src/check/check.rs b/compiler/rustc_typeck/src/check/check.rs index d5518dfc15a..55c815b21ad 100644 --- a/compiler/rustc_typeck/src/check/check.rs +++ b/compiler/rustc_typeck/src/check/check.rs @@ -477,7 +477,7 @@ pub(super) fn check_opaque_for_inheriting_lifetimes( struct ProhibitOpaqueVisitor<'tcx> { opaque_identity_ty: Ty<'tcx>, generics: &'tcx ty::Generics, - }; + } impl<'tcx> ty::fold::TypeVisitor<'tcx> for ProhibitOpaqueVisitor<'tcx> { type BreakTy = Option<Ty<'tcx>>; @@ -746,20 +746,22 @@ pub fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, it: &'tcx hir::Item<'tcx>) { let generics = tcx.generics_of(def_id); check_type_params_are_used(tcx, &generics, pty_ty); } - hir::ItemKind::ForeignMod(ref m) => { - check_abi(tcx, it.span, m.abi); + hir::ItemKind::ForeignMod { abi, items } => { + check_abi(tcx, it.span, abi); - if m.abi == Abi::RustIntrinsic { - for item in m.items { + if abi == Abi::RustIntrinsic { + for item in items { + let item = tcx.hir().foreign_item(item.id); intrinsic::check_intrinsic_type(tcx, item); } - } else if m.abi == Abi::PlatformIntrinsic { - for item in m.items { + } else if abi == Abi::PlatformIntrinsic { + for item in items { + let item = tcx.hir().foreign_item(item.id); intrinsic::check_platform_intrinsic_type(tcx, item); } } else { - for item in m.items { - let def_id = tcx.hir().local_def_id(item.hir_id); + for item in items { + let def_id = tcx.hir().local_def_id(item.id.hir_id); let generics = tcx.generics_of(def_id); let own_counts = generics.own_counts(); if generics.params.len() - own_counts.lifetimes != 0 { @@ -791,9 +793,10 @@ pub fn check_item_type<'tcx>(tcx: TyCtxt<'tcx>, it: &'tcx hir::Item<'tcx>) { .emit(); } + let item = tcx.hir().foreign_item(item.id); match item.kind { hir::ForeignItemKind::Fn(ref fn_decl, _, _) => { - require_c_abi_if_c_variadic(tcx, fn_decl, m.abi, item.span); + require_c_abi_if_c_variadic(tcx, fn_decl, abi, item.span); } hir::ForeignItemKind::Static(..) 
=> { check_static_inhabited(tcx, def_id, item.span); diff --git a/compiler/rustc_typeck/src/check/method/suggest.rs b/compiler/rustc_typeck/src/check/method/suggest.rs index a979bc470d8..7ed2933c08b 100644 --- a/compiler/rustc_typeck/src/check/method/suggest.rs +++ b/compiler/rustc_typeck/src/check/method/suggest.rs @@ -1308,6 +1308,8 @@ fn compute_all_traits(tcx: TyCtxt<'_>) -> Vec<DefId> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} } tcx.hir().krate().visit_all_item_likes(&mut Visitor { map: &tcx.hir(), traits: &mut traits }); diff --git a/compiler/rustc_typeck/src/check/mod.rs b/compiler/rustc_typeck/src/check/mod.rs index 1479eadf1b0..d27a68ccf1b 100644 --- a/compiler/rustc_typeck/src/check/mod.rs +++ b/compiler/rustc_typeck/src/check/mod.rs @@ -1134,6 +1134,7 @@ impl ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'tcx> { } fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem<'tcx>) {} + fn visit_foreign_item(&mut self, _: &'tcx hir::ForeignItem<'tcx>) {} } fn typeck_item_bodies(tcx: TyCtxt<'_>, crate_num: CrateNum) { diff --git a/compiler/rustc_typeck/src/check/wfcheck.rs b/compiler/rustc_typeck/src/check/wfcheck.rs index aeca801a4ee..c09f8cce5b4 100644 --- a/compiler/rustc_typeck/src/check/wfcheck.rs +++ b/compiler/rustc_typeck/src/check/wfcheck.rs @@ -156,8 +156,9 @@ pub fn check_item_well_formed(tcx: TyCtxt<'_>, def_id: LocalDefId) { hir::ItemKind::Const(ref ty, ..) => { check_item_type(tcx, item.hir_id, ty.span, false); } - hir::ItemKind::ForeignMod(ref module) => { - for it in module.items.iter() { + hir::ItemKind::ForeignMod { items, .. } => { + for it in items.iter() { + let it = tcx.hir().foreign_item(it.id); match it.kind { hir::ForeignItemKind::Fn(ref decl, ..) 
=> { check_item_fn(tcx, it.hir_id, it.ident, it.span, decl) @@ -1345,6 +1346,10 @@ impl ParItemLikeVisitor<'tcx> for CheckTypeWellFormedVisitor<'tcx> { fn visit_impl_item(&self, impl_item: &'tcx hir::ImplItem<'tcx>) { Visitor::visit_impl_item(&mut self.clone(), impl_item); } + + fn visit_foreign_item(&self, foreign_item: &'tcx hir::ForeignItem<'tcx>) { + Visitor::visit_foreign_item(&mut self.clone(), foreign_item) + } } impl Visitor<'tcx> for CheckTypeWellFormedVisitor<'tcx> { diff --git a/compiler/rustc_typeck/src/check_unused.rs b/compiler/rustc_typeck/src/check_unused.rs index 4fda8932e21..31121ece898 100644 --- a/compiler/rustc_typeck/src/check_unused.rs +++ b/compiler/rustc_typeck/src/check_unused.rs @@ -35,6 +35,8 @@ impl ItemLikeVisitor<'v> for CheckVisitor<'tcx> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} } struct CheckVisitor<'tcx> { @@ -225,4 +227,6 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for CollectExternCrateVisitor<'a, 'tcx> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} } diff --git a/compiler/rustc_typeck/src/coherence/inherent_impls.rs b/compiler/rustc_typeck/src/coherence/inherent_impls.rs index 373acb95c9e..483ab2f58f2 100644 --- a/compiler/rustc_typeck/src/coherence/inherent_impls.rs +++ b/compiler/rustc_typeck/src/coherence/inherent_impls.rs @@ -334,6 +334,8 @@ impl ItemLikeVisitor<'v> for InherentCollect<'tcx> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} } impl InherentCollect<'tcx> { diff --git a/compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs b/compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs index ce157f809ef..dd90724e93f 100644 --- a/compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs +++ b/compiler/rustc_typeck/src/coherence/inherent_impls_overlap.rs @@ -149,4 +149,6 @@ impl ItemLikeVisitor<'v> for InherentOverlapChecker<'tcx> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'v>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'v>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'v>) {} } diff --git a/compiler/rustc_typeck/src/coherence/orphan.rs b/compiler/rustc_typeck/src/coherence/orphan.rs index b2009962aba..253dcf06e01 100644 --- a/compiler/rustc_typeck/src/coherence/orphan.rs +++ b/compiler/rustc_typeck/src/coherence/orphan.rs @@ -244,4 +244,6 @@ impl ItemLikeVisitor<'v> for OrphanChecker<'tcx> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} } diff --git a/compiler/rustc_typeck/src/coherence/unsafety.rs b/compiler/rustc_typeck/src/coherence/unsafety.rs index b281092ea63..2d9128e7dc0 100644 --- a/compiler/rustc_typeck/src/coherence/unsafety.rs +++ b/compiler/rustc_typeck/src/coherence/unsafety.rs @@ -94,4 +94,6 @@ impl ItemLikeVisitor<'v> for UnsafetyChecker<'tcx> { fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} + + fn 
visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} } diff --git a/compiler/rustc_typeck/src/collect.rs b/compiler/rustc_typeck/src/collect.rs index dee0e6c2ebb..0ff10abb60a 100644 --- a/compiler/rustc_typeck/src/collect.rs +++ b/compiler/rustc_typeck/src/collect.rs @@ -646,8 +646,9 @@ fn convert_item(tcx: TyCtxt<'_>, item_id: hir::HirId) { | hir::ItemKind::Use(..) | hir::ItemKind::Mod(_) | hir::ItemKind::GlobalAsm(_) => {} - hir::ItemKind::ForeignMod(ref foreign_mod) => { - for item in foreign_mod.items { + hir::ItemKind::ForeignMod { items, .. } => { + for item in items { + let item = tcx.hir().foreign_item(item.id); let def_id = tcx.hir().local_def_id(item.hir_id); tcx.ensure().generics_of(def_id); tcx.ensure().type_of(def_id); diff --git a/compiler/rustc_typeck/src/collect/type_of.rs b/compiler/rustc_typeck/src/collect/type_of.rs index c4f4c8bc76b..88ba5788b05 100644 --- a/compiler/rustc_typeck/src/collect/type_of.rs +++ b/compiler/rustc_typeck/src/collect/type_of.rs @@ -259,7 +259,7 @@ pub(super) fn type_of(tcx: TyCtxt<'_>, def_id: DefId) -> Ty<'_> { ItemKind::Trait(..) | ItemKind::TraitAlias(..) | ItemKind::Mod(..) - | ItemKind::ForeignMod(..) + | ItemKind::ForeignMod { .. } | ItemKind::GlobalAsm(..) | ItemKind::ExternCrate(..) | ItemKind::Use(..) => { diff --git a/compiler/rustc_typeck/src/impl_wf_check.rs b/compiler/rustc_typeck/src/impl_wf_check.rs index 4901d6041d6..14daa97c2c5 100644 --- a/compiler/rustc_typeck/src/impl_wf_check.rs +++ b/compiler/rustc_typeck/src/impl_wf_check.rs @@ -93,6 +93,8 @@ impl ItemLikeVisitor<'tcx> for ImplWfCheck<'tcx> { fn visit_trait_item(&mut self, _trait_item: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _impl_item: &'tcx hir::ImplItem<'tcx>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &'tcx hir::ForeignItem<'tcx>) {} } fn enforce_impl_params_are_constrained( diff --git a/compiler/rustc_typeck/src/outlives/implicit_infer.rs b/compiler/rustc_typeck/src/outlives/implicit_infer.rs index e7a9e078a73..3d0635e3fe4 100644 --- a/compiler/rustc_typeck/src/outlives/implicit_infer.rs +++ b/compiler/rustc_typeck/src/outlives/implicit_infer.rs @@ -109,6 +109,8 @@ impl<'cx, 'tcx> ItemLikeVisitor<'tcx> for InferVisitor<'cx, 'tcx> { fn visit_trait_item(&mut self, _trait_item: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _impl_item: &'tcx hir::ImplItem<'tcx>) {} + + fn visit_foreign_item(&mut self, _foreign_item: &'tcx hir::ForeignItem<'tcx>) {} } fn insert_required_predicates_to_be_wf<'tcx>( diff --git a/compiler/rustc_typeck/src/outlives/test.rs b/compiler/rustc_typeck/src/outlives/test.rs index abe9319d71c..56d42f756c4 100644 --- a/compiler/rustc_typeck/src/outlives/test.rs +++ b/compiler/rustc_typeck/src/outlives/test.rs @@ -26,4 +26,5 @@ impl ItemLikeVisitor<'tcx> for OutlivesTest<'tcx> { fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem<'tcx>) {} + fn visit_foreign_item(&mut self, _: &'tcx hir::ForeignItem<'tcx>) {} } diff --git a/compiler/rustc_typeck/src/variance/constraints.rs b/compiler/rustc_typeck/src/variance/constraints.rs index b2b062e4095..a8fbdfb7c65 100644 --- a/compiler/rustc_typeck/src/variance/constraints.rs +++ b/compiler/rustc_typeck/src/variance/constraints.rs @@ -92,14 +92,6 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> { self.visit_node_helper(item.hir_id); } - hir::ItemKind::ForeignMod(ref foreign_mod) => { - for foreign_item in foreign_mod.items { - if let 
hir::ForeignItemKind::Fn(..) = foreign_item.kind { - self.visit_node_helper(foreign_item.hir_id); - } - } - } - _ => {} } } @@ -115,6 +107,12 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> { self.visit_node_helper(impl_item.hir_id); } } + + fn visit_foreign_item(&mut self, foreign_item: &hir::ForeignItem<'_>) { + if let hir::ForeignItemKind::Fn(..) = foreign_item.kind { + self.visit_node_helper(foreign_item.hir_id); + } + } } impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { diff --git a/compiler/rustc_typeck/src/variance/terms.rs b/compiler/rustc_typeck/src/variance/terms.rs index 81c858c53cb..3b2a1c24ddd 100644 --- a/compiler/rustc_typeck/src/variance/terms.rs +++ b/compiler/rustc_typeck/src/variance/terms.rs @@ -153,14 +153,6 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for TermsContext<'a, 'tcx> { self.add_inferreds_for_item(item.hir_id); } - hir::ItemKind::ForeignMod(ref foreign_mod) => { - for foreign_item in foreign_mod.items { - if let hir::ForeignItemKind::Fn(..) = foreign_item.kind { - self.add_inferreds_for_item(foreign_item.hir_id); - } - } - } - _ => {} } } @@ -176,4 +168,10 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for TermsContext<'a, 'tcx> { self.add_inferreds_for_item(impl_item.hir_id); } } + + fn visit_foreign_item(&mut self, foreign_item: &hir::ForeignItem<'_>) { + if let hir::ForeignItemKind::Fn(..) = foreign_item.kind { + self.add_inferreds_for_item(foreign_item.hir_id); + } + } } diff --git a/compiler/rustc_typeck/src/variance/test.rs b/compiler/rustc_typeck/src/variance/test.rs index 1aab89310c6..d6e43b6d669 100644 --- a/compiler/rustc_typeck/src/variance/test.rs +++ b/compiler/rustc_typeck/src/variance/test.rs @@ -26,4 +26,5 @@ impl ItemLikeVisitor<'tcx> for VarianceTest<'tcx> { fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem<'tcx>) {} fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem<'tcx>) {} + fn visit_foreign_item(&mut self, _: &'tcx hir::ForeignItem<'tcx>) {} } diff --git a/library/alloc/tests/vec.rs b/library/alloc/tests/vec.rs index 47ebe56f9fd..e19406d7a06 100644 --- a/library/alloc/tests/vec.rs +++ b/library/alloc/tests/vec.rs @@ -919,7 +919,7 @@ fn test_from_iter_partially_drained_in_place_specialization() { #[test] fn test_from_iter_specialization_with_iterator_adapters() { - fn assert_in_place_trait<T: InPlaceIterable>(_: &T) {}; + fn assert_in_place_trait<T: InPlaceIterable>(_: &T) {} let src: Vec<usize> = vec![0usize; 256]; let srcptr = src.as_ptr(); let iter = src @@ -1198,7 +1198,7 @@ fn drain_filter_consumed_panic() { struct Check { index: usize, drop_counts: Rc<Mutex<Vec<usize>>>, - }; + } impl Drop for Check { fn drop(&mut self) { @@ -1250,7 +1250,7 @@ fn drain_filter_unconsumed_panic() { struct Check { index: usize, drop_counts: Rc<Mutex<Vec<usize>>>, - }; + } impl Drop for Check { fn drop(&mut self) { diff --git a/library/core/src/fmt/mod.rs b/library/core/src/fmt/mod.rs index 506d7780686..23e8d1d856a 100644 --- a/library/core/src/fmt/mod.rs +++ b/library/core/src/fmt/mod.rs @@ -1182,7 +1182,7 @@ impl<'a> Formatter<'a> { /// ``` /// use std::fmt; /// - /// struct Foo { nb: i32 }; + /// struct Foo { nb: i32 } /// /// impl Foo { /// fn new(nb: i32) -> Foo { diff --git a/library/core/src/future/poll_fn.rs b/library/core/src/future/poll_fn.rs index f302cda09e7..af63e1bb097 100644 --- a/library/core/src/future/poll_fn.rs +++ b/library/core/src/future/poll_fn.rs @@ -21,7 +21,7 @@ use crate::task::{Context, Poll}; /// /// let read_future = poll_fn(read_line); /// assert_eq!(read_future.await, "Hello, World!".to_owned()); 
-/// # }; +/// # } /// ``` #[unstable(feature = "future_poll_fn", issue = "72302")] pub fn poll_fn<T, F>(f: F) -> PollFn<F> diff --git a/library/core/src/iter/adapters/peekable.rs b/library/core/src/iter/adapters/peekable.rs index ebdc2555db2..2f8b9653c59 100644 --- a/library/core/src/iter/adapters/peekable.rs +++ b/library/core/src/iter/adapters/peekable.rs @@ -230,20 +230,24 @@ impl<I: Iterator> Peekable<I> { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// #![feature(peekable_peek_mut)] /// let mut iter = [1, 2, 3].iter().peekable(); /// + /// // Like with `peek()`, we can see into the future without advancing the iterator. + /// assert_eq!(iter.peek_mut(), Some(&mut &1)); /// assert_eq!(iter.peek_mut(), Some(&mut &1)); /// assert_eq!(iter.next(), Some(&1)); /// - /// // Peek into the iterator and modify the value which will be returned next - /// if let Some(mut p) = iter.peek_mut() { - /// if *p == &2 { - /// *p = &5; - /// } + /// // Peek into the iterator and set the value behind the mutable reference. + /// if let Some(p) = iter.peek_mut() { + /// assert_eq!(*p, &2); + /// *p = &5; /// } /// + /// // The value we put in reappears as the iterator continues. /// assert_eq!(iter.collect::<Vec<_>>(), vec![&5, &3]); /// ``` #[inline] diff --git a/library/core/src/mem/maybe_uninit.rs b/library/core/src/mem/maybe_uninit.rs index 94ac16954a7..1924720b949 100644 --- a/library/core/src/mem/maybe_uninit.rs +++ b/library/core/src/mem/maybe_uninit.rs @@ -348,7 +348,7 @@ impl<T> MaybeUninit<T> { /// ```rust,no_run /// use std::mem::MaybeUninit; /// - /// enum NotZero { One = 1, Two = 2 }; + /// enum NotZero { One = 1, Two = 2 } /// /// let x = MaybeUninit::<(u8, NotZero)>::zeroed(); /// let x = unsafe { x.assume_init() }; diff --git a/library/core/tests/ptr.rs b/library/core/tests/ptr.rs index bf977c141cb..1970b17e267 100644 --- a/library/core/tests/ptr.rs +++ b/library/core/tests/ptr.rs @@ -18,7 +18,7 @@ fn test() { struct Pair { fst: isize, snd: isize, - }; + } let mut p = Pair { fst: 10, snd: 20 }; let pptr: *mut Pair = &mut p; let iptr: *mut isize = pptr as *mut isize; diff --git a/library/std/src/keyword_docs.rs b/library/std/src/keyword_docs.rs index 80b74a9ba9b..dad3add5c55 100644 --- a/library/std/src/keyword_docs.rs +++ b/library/std/src/keyword_docs.rs @@ -20,19 +20,30 @@ /// explicitly using `as` allows a few more coercions that aren't allowed implicitly, such as /// changing the type of a raw pointer or turning closures into raw pointers. /// -/// `as` is also used to rename imports in [`use`] and [`extern crate`] statements: +/// `as` can be seen as the primitive for `From` and `Into`: `as` only works with primitives +/// (`u8`, `bool`, `str`, pointers, ...) whereas `From` and `Into` also works with types like +/// `String` or `Vec`. +/// +/// `as` can also be used with the `_` placeholder when the destination type can be inferred. Note +/// that this can cause inference breakage and usually such code should use an explicit type for +/// both clarity and stability. This is most useful when converting pointers using `as *const _` or +/// `as *mut _` though the [`cast`][const-cast] method is recommended over `as *const _` and it is +/// [the same][mut-cast] for `as *mut _`: those methods make the intent clearer. 
+/// +/// `as` is also used to rename imports in [`use`] and [`extern crate`][`crate`] statements: /// /// ``` /// # #[allow(unused_imports)] /// use std::{mem as memory, net as network}; /// // Now you can use the names `memory` and `network` to refer to `std::mem` and `std::net`. /// ``` -/// /// For more information on what `as` is capable of, see the [Reference]. /// /// [Reference]: ../reference/expressions/operator-expr.html#type-cast-expressions +/// [`crate`]: keyword.crate.html /// [`use`]: keyword.use.html -/// [`extern crate`]: keyword.crate.html +/// [const-cast]: primitive.pointer.html#method.cast +/// [mut-cast]: primitive.pointer.html#method.cast-1 mod as_keyword {} #[doc(keyword = "break")] @@ -707,8 +718,8 @@ mod impl_keyword {} /// /// ## Literal Examples: /// -/// * `for _ **in** 1..3 {}` - Iterate over an exclusive range up to but excluding 3. -/// * `for _ **in** 1..=3 {}` - Iterate over an inclusive range up to and including 3. +/// * `for _ in 1..3 {}` - Iterate over an exclusive range up to but excluding 3. +/// * `for _ in 1..=3 {}` - Iterate over an inclusive range up to and including 3. /// /// (Read more about [range patterns]) /// diff --git a/library/test/src/lib.rs b/library/test/src/lib.rs index c95404d0946..5c12a54eef1 100644 --- a/library/test/src/lib.rs +++ b/library/test/src/lib.rs @@ -265,14 +265,14 @@ where running_tests.remove(test); } timed_out - }; + } fn calc_timeout(running_tests: &TestMap) -> Option<Duration> { running_tests.values().min().map(|next_timeout| { let now = Instant::now(); if *next_timeout >= now { *next_timeout - now } else { Duration::new(0, 0) } }) - }; + } if concurrency == 1 { while !remaining.is_empty() { diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 9b77e38a847..354be109cf2 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -1183,7 +1183,11 @@ impl Step for PlainSourceTarball { // characters and on `C:\` paths, so normalize both of them away. pub fn sanitize_sh(path: &Path) -> String { let path = path.to_str().unwrap().replace("\\", "/"); - return change_drive(&path).unwrap_or(path); + return change_drive(unc_to_lfs(&path)).unwrap_or(path); + + fn unc_to_lfs(s: &str) -> &str { + if s.starts_with("//?/") { &s[4..] } else { s } + } fn change_drive(s: &str) -> Option<String> { let mut ch = s.chars(); diff --git a/src/doc/rustc/src/platform-support.md b/src/doc/rustc/src/platform-support.md index 8eb4821396d..99e0a2b177f 100644 --- a/src/doc/rustc/src/platform-support.md +++ b/src/doc/rustc/src/platform-support.md @@ -152,6 +152,7 @@ not available. target | std | host | notes -------|-----|------|------- +`aarch64-apple-ios-macabi` | ? | | Apple Catalyst on ARM64 `aarch64-apple-tvos` | * | | ARM64 tvOS `aarch64-unknown-freebsd` | ✓ | ✓ | ARM64 FreeBSD `aarch64-unknown-hermit` | ? | | @@ -207,7 +208,7 @@ target | std | host | notes `thumbv7a-uwp-windows-msvc` | ✓ | | `thumbv7neon-unknown-linux-musleabihf` | ? | | Thumb2-mode ARMv7a Linux with NEON, MUSL `thumbv4t-none-eabi` | * | | ARMv4T T32 -`x86_64-apple-ios-macabi` | ✓ | | Apple Catalyst +`x86_64-apple-ios-macabi` | ✓ | | Apple Catalyst on x86_64 `x86_64-apple-tvos` | * | | x86 64-bit tvOS `x86_64-linux-kernel` | * | | Linux kernel modules `x86_64-pc-solaris` | ? 
| | diff --git a/src/etc/gdb_providers.py b/src/etc/gdb_providers.py index b74d47a8002..cabf5dccbfe 100644 --- a/src/etc/gdb_providers.py +++ b/src/etc/gdb_providers.py @@ -352,7 +352,7 @@ class StdHashMapProvider: ctrl = table["ctrl"]["pointer"] self.size = int(table["items"]) - self.pair_type = table.type.template_argument(0) + self.pair_type = table.type.template_argument(0).strip_typedefs() self.new_layout = not table.type.has_key("data") if self.new_layout: diff --git a/src/etc/lldb_providers.py b/src/etc/lldb_providers.py index 64cb9837943..9c7b07efbaa 100644 --- a/src/etc/lldb_providers.py +++ b/src/etc/lldb_providers.py @@ -531,7 +531,7 @@ class StdHashMapSyntheticProvider: ctrl = table.GetChildMemberWithName("ctrl").GetChildAtIndex(0) self.size = table.GetChildMemberWithName("items").GetValueAsUnsigned() - self.pair_type = table.type.template_args[0] + self.pair_type = table.type.template_args[0].GetTypedefedType() self.pair_type_size = self.pair_type.GetByteSize() self.new_layout = not table.GetChildMemberWithName("data").IsValid() diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index cc3e8707e52..61121c776f4 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -124,7 +124,7 @@ crate fn try_inline( let attrs = merge_attrs(cx, Some(parent_module), target_attrs, attrs_clone); cx.renderinfo.borrow_mut().inlined.insert(did); - let what_rustc_thinks = clean::Item::from_def_id_and_parts(did, Some(name), kind, cx); + let what_rustc_thinks = clean::Item::from_def_id_and_parts(did, Some(name.clean(cx)), kind, cx); ret.push(clean::Item { attrs, ..what_rustc_thinks }); Some(ret) } diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index ea34085823f..13643fbf3d3 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -122,7 +122,7 @@ impl Clean<ExternalCrate> for CrateNum { } } } - return prim.map(|p| (def_id, p, attrs)); + return prim.map(|p| (def_id, p)); } None }; @@ -144,9 +144,9 @@ impl Clean<ExternalCrate> for CrateNum { hir::ItemKind::Use(ref path, hir::UseKind::Single) if item.vis.node.is_pub() => { - as_primitive(path.res).map(|(_, prim, attrs)| { + as_primitive(path.res).map(|(_, prim)| { // Pretend the primitive is local. 
- (cx.tcx.hir().local_def_id(id.id).to_def_id(), prim, attrs) + (cx.tcx.hir().local_def_id(id.id).to_def_id(), prim) }) } _ => None, @@ -177,7 +177,7 @@ impl Clean<ExternalCrate> for CrateNum { } } } - return keyword.map(|p| (def_id, p, attrs)); + return keyword.map(|p| (def_id, p)); } None }; @@ -199,8 +199,8 @@ impl Clean<ExternalCrate> for CrateNum { hir::ItemKind::Use(ref path, hir::UseKind::Single) if item.vis.node.is_pub() => { - as_keyword(path.res).map(|(_, prim, attrs)| { - (cx.tcx.hir().local_def_id(id.id).to_def_id(), prim, attrs) + as_keyword(path.res).map(|(_, prim)| { + (cx.tcx.hir().local_def_id(id.id).to_def_id(), prim) }) } _ => None, @@ -1099,7 +1099,7 @@ impl Clean<Item> for hir::TraitItem<'_> { AssocTypeItem(bounds.clean(cx), default.clean(cx)) } }; - Item::from_def_id_and_parts(local_did, Some(self.ident.name), inner, cx) + Item::from_def_id_and_parts(local_did, Some(self.ident.name.clean(cx)), inner, cx) }) } } @@ -1127,7 +1127,7 @@ impl Clean<Item> for hir::ImplItem<'_> { TypedefItem(Typedef { type_, generics: Generics::default(), item_type }, true) } }; - Item::from_def_id_and_parts(local_did, Some(self.ident.name), inner, cx) + Item::from_def_id_and_parts(local_did, Some(self.ident.name.clean(cx)), inner, cx) }) } } @@ -1284,7 +1284,7 @@ impl Clean<Item> for ty::AssocItem { } }; - Item::from_def_id_and_parts(self.def_id, Some(self.ident.name), kind, cx) + Item::from_def_id_and_parts(self.def_id, Some(self.ident.name.clean(cx)), kind, cx) } } @@ -1503,7 +1503,9 @@ impl Clean<Type> for hir::Ty<'_> { } /// Returns `None` if the type could not be normalized +#[allow(unreachable_code, unused_variables)] fn normalize(cx: &DocContext<'tcx>, ty: Ty<'_>) -> Option<Ty<'tcx>> { + return None; // HACK: low-churn fix for #79459 while we wait for a trait normalization fix use crate::rustc_trait_selection::infer::TyCtxtInferExt; use crate::rustc_trait_selection::traits::query::normalize::AtExt; use rustc_middle::traits::ObligationCause; @@ -1769,7 +1771,7 @@ impl Clean<Item> for ty::FieldDef { fn clean(&self, cx: &DocContext<'_>) -> Item { let what_rustc_thinks = Item::from_def_id_and_parts( self.did, - Some(self.ident.name), + Some(self.ident.name.clean(cx)), StructFieldItem(cx.tcx.type_of(self.did).clean(cx)), cx, ); @@ -1844,22 +1846,20 @@ impl Clean<Item> for ty::VariantDef { fields: self .fields .iter() - .map(|field| Item { - source: cx.tcx.def_span(field.did).clean(cx), - name: Some(field.ident.name.clean(cx)), - attrs: cx.tcx.get_attrs(field.did).clean(cx), - visibility: Visibility::Inherited, - def_id: field.did, - stability: get_stability(cx, field.did), - deprecation: get_deprecation(cx, field.did), - kind: StructFieldItem(cx.tcx.type_of(field.did).clean(cx)), + .map(|field| { + let name = Some(field.ident.name.clean(cx)); + let kind = StructFieldItem(cx.tcx.type_of(field.did).clean(cx)); + let what_rustc_thinks = + Item::from_def_id_and_parts(field.did, name, kind, cx); + // don't show `pub` for fields, which are always public + Item { visibility: Visibility::Inherited, ..what_rustc_thinks } }) .collect(), }), }; let what_rustc_thinks = Item::from_def_id_and_parts( self.def_id, - Some(self.ident.name), + Some(self.ident.name.clean(cx)), VariantItem(Variant { kind }), cx, ); @@ -2057,7 +2057,7 @@ impl Clean<Vec<Item>> for (&hir::Item<'_>, Option<Ident>) { _ => unreachable!("not yet converted"), }; - vec![Item::from_def_id_and_parts(def_id, Some(name), kind, cx)] + vec![Item::from_def_id_and_parts(def_id, Some(name.clean(cx)), kind, cx)] }) } } @@ -2319,7 +2319,7 @@ 
impl Clean<Item> for doctree::Macro { fn clean(&self, cx: &DocContext<'_>) -> Item { Item::from_def_id_and_parts( self.def_id, - Some(self.name), + Some(self.name.clean(cx)), MacroItem(Macro { source: format!( "macro_rules! {} {{\n{}}}", diff --git a/src/librustdoc/clean/types.rs b/src/librustdoc/clean/types.rs index 43b986aae1c..2283b71a94f 100644 --- a/src/librustdoc/clean/types.rs +++ b/src/librustdoc/clean/types.rs @@ -34,6 +34,7 @@ use crate::clean::cfg::Cfg; use crate::clean::external_path; use crate::clean::inline; use crate::clean::types::Type::{QPath, ResolvedPath}; +use crate::clean::Clean; use crate::core::DocContext; use crate::doctree; use crate::formats::cache::cache; @@ -54,7 +55,7 @@ crate struct Crate { crate src: FileName, crate module: Option<Item>, crate externs: Vec<(CrateNum, ExternalCrate)>, - crate primitives: Vec<(DefId, PrimitiveType, Attributes)>, + crate primitives: Vec<(DefId, PrimitiveType)>, // These are later on moved into `CACHEKEY`, leaving the map empty. // Only here so that they can be filtered through the rustdoc passes. crate external_traits: Rc<RefCell<FxHashMap<DefId, Trait>>>, @@ -67,8 +68,8 @@ crate struct ExternalCrate { crate name: String, crate src: FileName, crate attrs: Attributes, - crate primitives: Vec<(DefId, PrimitiveType, Attributes)>, - crate keywords: Vec<(DefId, String, Attributes)>, + crate primitives: Vec<(DefId, PrimitiveType)>, + crate keywords: Vec<(DefId, String)>, } /// Anything with a source location and set of attributes and, optionally, a @@ -120,17 +121,20 @@ impl Item { kind: ItemKind, cx: &DocContext<'_>, ) -> Item { - Item::from_def_id_and_parts(cx.tcx.hir().local_def_id(hir_id).to_def_id(), name, kind, cx) + Item::from_def_id_and_parts( + cx.tcx.hir().local_def_id(hir_id).to_def_id(), + name.clean(cx), + kind, + cx, + ) } pub fn from_def_id_and_parts( def_id: DefId, - name: Option<Symbol>, + name: Option<String>, kind: ItemKind, cx: &DocContext<'_>, ) -> Item { - use super::Clean; - debug!("name={:?}, def_id={:?}", name, def_id); // `span_if_local()` lies about functions and only gives the span of the function signature @@ -145,7 +149,7 @@ impl Item { Item { def_id, kind, - name: name.clean(cx), + name, source: source.clean(cx), attrs: cx.tcx.get_attrs(def_id).clean(cx), visibility: cx.tcx.visibility(def_id).clean(cx), diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs index 22917fbceb4..1b22d26f49b 100644 --- a/src/librustdoc/clean/utils.rs +++ b/src/librustdoc/clean/utils.rs @@ -1,15 +1,14 @@ use crate::clean::auto_trait::AutoTraitFinder; use crate::clean::blanket_impl::BlanketImplFinder; use crate::clean::{ - inline, Clean, Crate, Deprecation, ExternalCrate, FnDecl, FnRetTy, Generic, GenericArg, - GenericArgs, GenericBound, Generics, GetDefId, ImportSource, Item, ItemKind, Lifetime, - MacroKind, Path, PathSegment, Primitive, PrimitiveType, ResolvedPath, Span, Type, TypeBinding, - TypeKind, Visibility, WherePredicate, + inline, Clean, Crate, ExternalCrate, FnDecl, FnRetTy, Generic, GenericArg, GenericArgs, + GenericBound, Generics, GetDefId, ImportSource, Item, ItemKind, Lifetime, MacroKind, Path, + PathSegment, Primitive, PrimitiveType, ResolvedPath, Type, TypeBinding, TypeKind, + WherePredicate, }; use crate::core::DocContext; use itertools::Itertools; -use rustc_attr::Stability; use rustc_data_structures::fx::FxHashSet; use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; @@ -66,25 +65,16 @@ crate fn krate(mut cx: &mut DocContext<'_>) -> Crate { ItemKind::ModuleItem(ref mut m) => m, _ 
=> unreachable!(), }; - m.items.extend(primitives.iter().map(|&(def_id, prim, ref attrs)| Item { - source: Span::empty(), - name: Some(prim.to_url_str().to_string()), - attrs: attrs.clone(), - visibility: Visibility::Public, - stability: get_stability(cx, def_id), - deprecation: get_deprecation(cx, def_id), - def_id, - kind: ItemKind::PrimitiveItem(prim), + m.items.extend(primitives.iter().map(|&(def_id, prim)| { + Item::from_def_id_and_parts( + def_id, + Some(prim.to_url_str().to_owned()), + ItemKind::PrimitiveItem(prim), + cx, + ) })); - m.items.extend(keywords.into_iter().map(|(def_id, kw, attrs)| Item { - source: Span::empty(), - name: Some(kw.clone()), - attrs, - visibility: Visibility::Public, - stability: get_stability(cx, def_id), - deprecation: get_deprecation(cx, def_id), - def_id, - kind: ItemKind::KeywordItem(kw), + m.items.extend(keywords.into_iter().map(|(def_id, kw)| { + Item::from_def_id_and_parts(def_id, Some(kw.clone()), ItemKind::KeywordItem(kw), cx) })); } @@ -101,15 +91,6 @@ crate fn krate(mut cx: &mut DocContext<'_>) -> Crate { } } -// extract the stability index for a node from tcx, if possible -crate fn get_stability(cx: &DocContext<'_>, def_id: DefId) -> Option<Stability> { - cx.tcx.lookup_stability(def_id).cloned() -} - -crate fn get_deprecation(cx: &DocContext<'_>, def_id: DefId) -> Option<Deprecation> { - cx.tcx.lookup_deprecation(def_id).clean(cx) -} - fn external_generic_args( cx: &DocContext<'_>, trait_did: Option<DefId>, diff --git a/src/librustdoc/formats/cache.rs b/src/librustdoc/formats/cache.rs index 39b750279ac..c3153f2d4b6 100644 --- a/src/librustdoc/formats/cache.rs +++ b/src/librustdoc/formats/cache.rs @@ -187,11 +187,11 @@ impl Cache { // Favor linking to as local extern as possible, so iterate all crates in // reverse topological order. for &(_, ref e) in krate.externs.iter().rev() { - for &(def_id, prim, _) in &e.primitives { + for &(def_id, prim) in &e.primitives { cache.primitive_locations.insert(prim, def_id); } } - for &(def_id, prim, _) in &krate.primitives { + for &(def_id, prim) in &krate.primitives { cache.primitive_locations.insert(prim, def_id); } diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index e57717dab76..02152edbbc2 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -240,8 +240,9 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { } match item.kind { - hir::ItemKind::ForeignMod(ref fm) => { - for item in fm.items { + hir::ItemKind::ForeignMod { items, .. 
} => { + for item in items { + let item = self.cx.tcx.hir().foreign_item(item.id); self.visit_foreign_item(item, None, om); } } diff --git a/src/test/incremental/hashes/extern_mods.rs b/src/test/incremental/hashes/extern_mods.rs index 0b9a0fd7945..dd775167757 100644 --- a/src/test/incremental/hashes/extern_mods.rs +++ b/src/test/incremental/hashes/extern_mods.rs @@ -13,114 +13,99 @@ #![feature(rustc_attrs)] #![feature(unboxed_closures)] #![feature(link_args)] -#![crate_type="rlib"] - +#![crate_type = "rlib"] // Change function name -------------------------------------------------------- #[cfg(cfail1)] -extern { +extern "C" { pub fn change_function_name1(c: i64) -> i32; } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn change_function_name2(c: i64) -> i32; } - - // Change parameter name ------------------------------------------------------- #[cfg(cfail1)] -extern { +extern "C" { pub fn change_parameter_name(c: i64) -> i32; } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner,hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn change_parameter_name(d: i64) -> i32; } - - // Change parameter type ------------------------------------------------------- #[cfg(cfail1)] -extern { +extern "C" { pub fn change_parameter_type(c: i64) -> i32; } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner,hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn change_parameter_type(c: i32) -> i32; } - - // Change return type ---------------------------------------------------------- #[cfg(cfail1)] -extern { +extern "C" { pub fn change_return_type(c: i32) -> i32; } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner,hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn change_return_type(c: i32) -> i8; } - - // Add parameter --------------------------------------------------------------- #[cfg(cfail1)] -extern { +extern "C" { pub fn add_parameter(c: i32) -> i32; } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner,hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn add_parameter(c: i32, d: i32) -> i32; } - - // Add return type ------------------------------------------------------------- #[cfg(cfail1)] -extern { +extern "C" { pub fn add_return_type(c: i32); } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner,hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn add_return_type(c: i32) -> i32; } - - // Make function variadic ------------------------------------------------------ #[cfg(cfail1)] -extern { +extern "C" { pub fn make_function_variadic(c: i32); } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner,hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn make_function_variadic(c: i32, ...); } - - // Change calling convention --------------------------------------------------- #[cfg(cfail1)] extern "C" { @@ -128,74 
+113,66 @@ extern "C" { } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] +#[rustc_dirty(cfg = "cfail2", except = "hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] extern "rust-call" { pub fn change_calling_convention(c: i32); } - - // Make function public -------------------------------------------------------- #[cfg(cfail1)] -extern { +extern "C" { fn make_function_public(c: i32); } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn make_function_public(c: i32); } - - // Add function ---------------------------------------------------------------- #[cfg(cfail1)] -extern { +extern "C" { pub fn add_function1(c: i32); } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] -extern { +#[rustc_dirty(cfg = "cfail2", except = "hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] +extern "C" { pub fn add_function1(c: i32); pub fn add_function2(); } - - // Change link-args ------------------------------------------------------------ #[cfg(cfail1)] #[link_args = "-foo -bar"] -extern { +extern "C" { pub fn change_link_args(c: i32); } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] +#[rustc_dirty(cfg = "cfail2", except = "hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] #[link_args = "-foo -bar -baz"] -extern { +extern "C" { pub fn change_link_args(c: i32); } - - // Change link-name ------------------------------------------------------------ #[cfg(cfail1)] #[link(name = "foo")] -extern { +extern "C" { pub fn change_link_name(c: i32); } #[cfg(not(cfail1))] -#[rustc_dirty(cfg="cfail2")] -#[rustc_clean(cfg="cfail3")] +#[rustc_dirty(cfg = "cfail2", except = "hir_owner_nodes")] +#[rustc_clean(cfg = "cfail3")] #[link(name = "bar")] -extern { +extern "C" { pub fn change_link_name(c: i32); } @@ -209,15 +186,13 @@ mod indirectly_change_parameter_type { #[cfg(not(cfail1))] use super::c_i64 as c_int; - #[rustc_dirty(cfg="cfail2")] - #[rustc_clean(cfg="cfail3")] - extern { + #[rustc_dirty(cfg = "cfail2", except = "hir_owner,hir_owner_nodes")] + #[rustc_clean(cfg = "cfail3")] + extern "C" { pub fn indirectly_change_parameter_type(c: c_int); } } - - // Indirectly change return type -------------------------------------------- mod indirectly_change_return_type { #[cfg(cfail1)] @@ -225,9 +200,9 @@ mod indirectly_change_return_type { #[cfg(not(cfail1))] use super::c_i64 as c_int; - #[rustc_dirty(cfg="cfail2")] - #[rustc_clean(cfg="cfail3")] - extern { + #[rustc_dirty(cfg = "cfail2", except = "hir_owner,hir_owner_nodes")] + #[rustc_clean(cfg = "cfail3")] + extern "C" { pub fn indirectly_change_return_type() -> c_int; } } diff --git a/src/test/rustdoc-ui/coverage/exotic.stdout b/src/test/rustdoc-ui/coverage/exotic.stdout index e282ff12843..27798b81310 100644 --- a/src/test/rustdoc-ui/coverage/exotic.stdout +++ b/src/test/rustdoc-ui/coverage/exotic.stdout @@ -1,8 +1,7 @@ +-------------------------------------+------------+------------+------------+------------+ | File | Documented | Percentage | Examples | Percentage | +-------------------------------------+------------+------------+------------+------------+ -| ...st/rustdoc-ui/coverage/exotic.rs | 1 | 100.0% | 0 | 0.0% | -| <anon> | 2 | 100.0% | 0 | 0.0% | +| ...st/rustdoc-ui/coverage/exotic.rs | 3 | 100.0% | 0 | 0.0% | +-------------------------------------+------------+------------+------------+------------+ | Total | 3 | 
100.0% | 0 | 0.0% | +-------------------------------------+------------+------------+------------+------------+ diff --git a/src/test/rustdoc/normalize-assoc-item.rs b/src/test/rustdoc/normalize-assoc-item.rs index 137fd354a87..70b3c66fd2b 100644 --- a/src/test/rustdoc/normalize-assoc-item.rs +++ b/src/test/rustdoc/normalize-assoc-item.rs @@ -1,6 +1,7 @@ // ignore-tidy-linelength // aux-build:normalize-assoc-item.rs // build-aux-docs +// ignore-test pub trait Trait { type X; diff --git a/src/test/ui/asm/naked-invalid-attr.stderr b/src/test/ui/asm/naked-invalid-attr.stderr index beaa34140c9..565c2986a66 100644 --- a/src/test/ui/asm/naked-invalid-attr.stderr +++ b/src/test/ui/asm/naked-invalid-attr.stderr @@ -1,12 +1,4 @@ error: attribute should be applied to a function definition - --> $DIR/naked-invalid-attr.rs:9:5 - | -LL | #[naked] - | ^^^^^^^^ -LL | fn f(); - | ------- not a function definition - -error: attribute should be applied to a function definition --> $DIR/naked-invalid-attr.rs:13:1 | LL | #[naked] @@ -33,6 +25,14 @@ LL | extern "C" fn invoke(&self); | ---------------------------- not a function definition error: attribute should be applied to a function definition + --> $DIR/naked-invalid-attr.rs:9:5 + | +LL | #[naked] + | ^^^^^^^^ +LL | fn f(); + | ------- not a function definition + +error: attribute should be applied to a function definition --> $DIR/naked-invalid-attr.rs:6:1 | LL | #![naked] diff --git a/src/test/ui/associated-type-bounds/dyn-impl-trait-type.rs b/src/test/ui/associated-type-bounds/dyn-impl-trait-type.rs index fd9e52a6ff2..a8d00803a53 100644 --- a/src/test/ui/associated-type-bounds/dyn-impl-trait-type.rs +++ b/src/test/ui/associated-type-bounds/dyn-impl-trait-type.rs @@ -30,7 +30,7 @@ fn def_et3() -> Et3 { impl Tr1 for A { type As1 = core::ops::Range<u8>; fn mk(&self) -> Self::As1 { 0..10 } - }; + } Box::new(A) } pub fn use_et3() { diff --git a/src/test/ui/associated-type-bounds/dyn-lcsit.rs b/src/test/ui/associated-type-bounds/dyn-lcsit.rs index c936fe0550a..b7869e22b4a 100644 --- a/src/test/ui/associated-type-bounds/dyn-lcsit.rs +++ b/src/test/ui/associated-type-bounds/dyn-lcsit.rs @@ -33,7 +33,7 @@ const cdef_et3: &dyn Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>> impl Tr1 for A { type As1 = core::ops::Range<u8>; fn mk(&self) -> Self::As1 { 0..10 } - }; + } &A }; pub fn use_et3() { diff --git a/src/test/ui/associated-type-bounds/dyn-rpit-and-let.rs b/src/test/ui/associated-type-bounds/dyn-rpit-and-let.rs index f22a6c44cb8..08f965452ef 100644 --- a/src/test/ui/associated-type-bounds/dyn-rpit-and-let.rs +++ b/src/test/ui/associated-type-bounds/dyn-rpit-and-let.rs @@ -35,7 +35,7 @@ fn def_et3() -> Box<dyn Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8> impl Tr1 for A { type As1 = core::ops::Range<u8>; fn mk(&self) -> Self::As1 { 0..10 } - }; + } let x /* : Box<dyn Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>>> */ = Box::new(A); x diff --git a/src/test/ui/associated-type-bounds/lcsit.rs b/src/test/ui/associated-type-bounds/lcsit.rs index 497205f9f18..5364f25f89a 100644 --- a/src/test/ui/associated-type-bounds/lcsit.rs +++ b/src/test/ui/associated-type-bounds/lcsit.rs @@ -39,7 +39,7 @@ const cdef_et3: impl Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>> impl Tr1 for A { type As1 = core::ops::Range<u8>; fn mk(&self) -> Self::As1 { 0..10 } - }; + } let x: impl Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>> = A; x }; diff --git a/src/test/ui/associated-type-bounds/rpit.rs 
b/src/test/ui/associated-type-bounds/rpit.rs index 7b640d5a457..47cadf3310b 100644 --- a/src/test/ui/associated-type-bounds/rpit.rs +++ b/src/test/ui/associated-type-bounds/rpit.rs @@ -27,7 +27,7 @@ fn def_et3() -> impl Tr1<As1: Clone + Iterator<Item: Add<u8, Output: Into<u8>>>> impl Tr1 for A { type As1 = core::ops::Range<u8>; fn mk(self) -> Self::As1 { 0..10 } - }; + } A } diff --git a/src/test/ui/associated-type-bounds/trait-alias-impl-trait.rs b/src/test/ui/associated-type-bounds/trait-alias-impl-trait.rs index 9ee33e4149a..025540ce200 100644 --- a/src/test/ui/associated-type-bounds/trait-alias-impl-trait.rs +++ b/src/test/ui/associated-type-bounds/trait-alias-impl-trait.rs @@ -31,7 +31,7 @@ fn def_et3() -> Et3 { impl Tr1 for A { type As1 = core::ops::Range<u8>; fn mk(self) -> Self::As1 { 0..10 } - }; + } A } pub fn use_et3() { diff --git a/src/test/ui/const-generics/min_const_generics/macro.rs b/src/test/ui/const-generics/min_const_generics/macro.rs index 85ecce551d4..575fbd33572 100644 --- a/src/test/ui/const-generics/min_const_generics/macro.rs +++ b/src/test/ui/const-generics/min_const_generics/macro.rs @@ -15,14 +15,14 @@ impl<const N: usize> Marker<N> for Example<N> {} fn make_marker() -> impl Marker<{ #[macro_export] - macro_rules! const_macro { () => {{ 3 }} }; inline!() + macro_rules! const_macro { () => {{ 3 }} } inline!() }> { Example::<{ const_macro!() }> } fn from_marker(_: impl Marker<{ #[macro_export] - macro_rules! inline { () => {{ 3 }} }; inline!() + macro_rules! inline { () => {{ 3 }} } inline!() }>) {} fn main() { @@ -30,7 +30,7 @@ fn main() { #[macro_export] macro_rules! gimme_a_const { ($rusty: ident) => {{ let $rusty = 3; *&$rusty }} - }; + } gimme_a_const!(run) }>; @@ -42,13 +42,13 @@ fn main() { let _ok: [u8; { #[macro_export] - macro_rules! const_two { () => {{ 2 }} }; + macro_rules! const_two { () => {{ 2 }} } const_two!() }]; let _ok = [0; { #[macro_export] - macro_rules! const_three { () => {{ 3 }} }; + macro_rules! 
const_three { () => {{ 3 }} } const_three!() }]; let _ok = [0; const_three!()]; diff --git a/src/test/ui/generic-associated-types/parse/trait-path-expected-token.rs b/src/test/ui/generic-associated-types/parse/trait-path-expected-token.rs new file mode 100644 index 00000000000..b10bfea9feb --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-expected-token.rs @@ -0,0 +1,11 @@ +#![feature(generic_associated_types)] +//~^ WARNING: the feature `generic_associated_types` is incomplete + +trait X { + type Y<'a>; +} + +fn f1<'a>(arg : Box<dyn X<Y = B = &'a ()>>) {} + //~^ ERROR: expected one of `!`, `(`, `+`, `,`, `::`, `<`, or `>`, found `=` + +fn main() {} diff --git a/src/test/ui/generic-associated-types/parse/trait-path-expected-token.stderr b/src/test/ui/generic-associated-types/parse/trait-path-expected-token.stderr new file mode 100644 index 00000000000..051253cadc6 --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-expected-token.stderr @@ -0,0 +1,17 @@ +error: expected one of `!`, `(`, `+`, `,`, `::`, `<`, or `>`, found `=` + --> $DIR/trait-path-expected-token.rs:8:33 + | +LL | fn f1<'a>(arg : Box<dyn X<Y = B = &'a ()>>) {} + | ^ expected one of 7 possible tokens + +warning: the feature `generic_associated_types` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/trait-path-expected-token.rs:1:12 + | +LL | #![feature(generic_associated_types)] + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(incomplete_features)]` on by default + = note: see issue #44265 <https://github.com/rust-lang/rust/issues/44265> for more information + +error: aborting due to previous error; 1 warning emitted + diff --git a/src/test/ui/generic-associated-types/parse/trait-path-expressions.rs b/src/test/ui/generic-associated-types/parse/trait-path-expressions.rs new file mode 100644 index 00000000000..de61cfa1cf7 --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-expressions.rs @@ -0,0 +1,23 @@ +#![feature(generic_associated_types)] +//~^ WARNING: the feature `generic_associated_types` is incomplete + +mod error1 { + trait X { + type Y<'a>; + } + + fn f1<'a>(arg : Box<dyn X< 1 = 32 >>) {} + //~^ ERROR: expected expression, found `)` +} + +mod error2 { + + trait X { + type Y<'a>; + } + + fn f2<'a>(arg : Box<dyn X< { 1 } = 32 >>) {} + //~^ ERROR: only types can be used in associated type constraints +} + +fn main() {} diff --git a/src/test/ui/generic-associated-types/parse/trait-path-expressions.stderr b/src/test/ui/generic-associated-types/parse/trait-path-expressions.stderr new file mode 100644 index 00000000000..a9ba8adcaba --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-expressions.stderr @@ -0,0 +1,25 @@ +error: expected expression, found `)` + --> $DIR/trait-path-expressions.rs:9:39 + | +LL | fn f1<'a>(arg : Box<dyn X< 1 = 32 >>) {} + | - ^ expected expression + | | + | while parsing a const generic argument starting here + +error: only types can be used in associated type constraints + --> $DIR/trait-path-expressions.rs:19:30 + | +LL | fn f2<'a>(arg : Box<dyn X< { 1 } = 32 >>) {} + | ^^^^^ + +warning: the feature `generic_associated_types` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/trait-path-expressions.rs:1:12 + | +LL | #![feature(generic_associated_types)] + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(incomplete_features)]` on by default + = note: see issue #44265 <https://github.com/rust-lang/rust/issues/44265> for more information + 
+error: aborting due to 2 previous errors; 1 warning emitted + diff --git a/src/test/ui/generic-associated-types/parse/trait-path-missing-gen_arg.rs b/src/test/ui/generic-associated-types/parse/trait-path-missing-gen_arg.rs new file mode 100644 index 00000000000..dad8c2a2909 --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-missing-gen_arg.rs @@ -0,0 +1,21 @@ +#![feature(generic_associated_types)] +//~^ WARNING: the feature `generic_associated_types` is incomplete + +trait X { + type Y<'a>; +} + +const _: () = { + fn f1<'a>(arg : Box<dyn X< : 32 >>) {} + //~^ ERROR: expected one of `>`, const, lifetime, or type, found `:` + //~| ERROR: expected parameter name, found `>` + //~| ERROR: expected one of `!`, `)`, `+`, `,`, or `::`, found `>` + //~| ERROR: constant provided when a type was expected +}; + +const _: () = { + fn f1<'a>(arg : Box<dyn X< = 32 >>) {} + //~^ ERROR: expected one of `>`, const, lifetime, or type, found `=` +}; + +fn main() {} diff --git a/src/test/ui/generic-associated-types/parse/trait-path-missing-gen_arg.stderr b/src/test/ui/generic-associated-types/parse/trait-path-missing-gen_arg.stderr new file mode 100644 index 00000000000..583697f0b67 --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-missing-gen_arg.stderr @@ -0,0 +1,50 @@ +error: expected one of `>`, const, lifetime, or type, found `:` + --> $DIR/trait-path-missing-gen_arg.rs:9:30 + | +LL | fn f1<'a>(arg : Box<dyn X< : 32 >>) {} + | ^ expected one of `>`, const, lifetime, or type + | +help: expressions must be enclosed in braces to be used as const generic arguments + | +LL | fn f1<'a>(arg : Box<{ dyn X< : 32 } >>) {} + | ^ ^ + +error: expected parameter name, found `>` + --> $DIR/trait-path-missing-gen_arg.rs:9:36 + | +LL | fn f1<'a>(arg : Box<dyn X< : 32 >>) {} + | ^ expected parameter name + +error: expected one of `!`, `)`, `+`, `,`, or `::`, found `>` + --> $DIR/trait-path-missing-gen_arg.rs:9:36 + | +LL | fn f1<'a>(arg : Box<dyn X< : 32 >>) {} + | ^ + | | + | expected one of `!`, `)`, `+`, `,`, or `::` + | help: missing `,` + +error: expected one of `>`, const, lifetime, or type, found `=` + --> $DIR/trait-path-missing-gen_arg.rs:17:30 + | +LL | fn f1<'a>(arg : Box<dyn X< = 32 >>) {} + | ^ expected one of `>`, const, lifetime, or type + +warning: the feature `generic_associated_types` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/trait-path-missing-gen_arg.rs:1:12 + | +LL | #![feature(generic_associated_types)] + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(incomplete_features)]` on by default + = note: see issue #44265 <https://github.com/rust-lang/rust/issues/44265> for more information + +error[E0747]: constant provided when a type was expected + --> $DIR/trait-path-missing-gen_arg.rs:9:23 + | +LL | fn f1<'a>(arg : Box<dyn X< : 32 >>) {} + | ^^^^^^^^^^^ + +error: aborting due to 5 previous errors; 1 warning emitted + +For more information about this error, try `rustc --explain E0747`. 
diff --git a/src/test/ui/generic-associated-types/parse/trait-path-segments.rs b/src/test/ui/generic-associated-types/parse/trait-path-segments.rs new file mode 100644 index 00000000000..0bf48b1f418 --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-segments.rs @@ -0,0 +1,35 @@ +#![feature(generic_associated_types)] +//~^ WARNING: the feature `generic_associated_types` is incomplete + +const _: () = { + trait X { + type Y<'a>; + } + + fn f1<'a>(arg : Box<dyn X<X::Y = u32>>) {} + //~^ ERROR: paths with multiple segments cannot be used in associated type constraints + }; + +const _: () = { + trait X { + type Y<'a>; + } + + trait Z {} + + impl<T : X<<Self as X>::Y<'a> = &'a u32>> Z for T {} + //~^ ERROR: qualified paths cannot be used in associated type constraints +}; + +const _: () = { + trait X { + type Y<'a>; + } + + trait Z {} + + impl<T : X<X::Y<'a> = &'a u32>> Z for T {} + //~^ ERROR: paths with multiple segments cannot be used in associated type constraints +}; + +fn main() {} diff --git a/src/test/ui/generic-associated-types/parse/trait-path-segments.stderr b/src/test/ui/generic-associated-types/parse/trait-path-segments.stderr new file mode 100644 index 00000000000..4e2b84d0182 --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-segments.stderr @@ -0,0 +1,31 @@ +error: paths with multiple segments cannot be used in associated type constraints + --> $DIR/trait-path-segments.rs:9:31 + | +LL | fn f1<'a>(arg : Box<dyn X<X::Y = u32>>) {} + | ^^^^ + +error: qualified paths cannot be used in associated type constraints + --> $DIR/trait-path-segments.rs:20:16 + | +LL | impl<T : X<<Self as X>::Y<'a> = &'a u32>> Z for T {} + | ^^^^^^^^^-^^^^^^^^ + | | + | not allowed in associated type constraints + +error: paths with multiple segments cannot be used in associated type constraints + --> $DIR/trait-path-segments.rs:31:16 + | +LL | impl<T : X<X::Y<'a> = &'a u32>> Z for T {} + | ^^^^^^^^ + +warning: the feature `generic_associated_types` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/trait-path-segments.rs:1:12 + | +LL | #![feature(generic_associated_types)] + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(incomplete_features)]` on by default + = note: see issue #44265 <https://github.com/rust-lang/rust/issues/44265> for more information + +error: aborting due to 3 previous errors; 1 warning emitted + diff --git a/src/test/ui/generic-associated-types/parse/trait-path-type-error-once-implemented.rs b/src/test/ui/generic-associated-types/parse/trait-path-type-error-once-implemented.rs new file mode 100644 index 00000000000..e203a5e0d2d --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-type-error-once-implemented.rs @@ -0,0 +1,10 @@ +#![feature(generic_associated_types)] + +trait X { + type Y<'a>; +} + +const _: () = { + fn f2<'a>(arg : Box<dyn X<Y<1> = &'a ()>>) {} + //~^ ERROR: generic associated types in trait paths are currently not implemented +}; diff --git a/src/test/ui/generic-associated-types/parse/trait-path-type-error-once-implemented.stderr b/src/test/ui/generic-associated-types/parse/trait-path-type-error-once-implemented.stderr new file mode 100644 index 00000000000..e59a72a99ee --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-type-error-once-implemented.stderr @@ -0,0 +1,8 @@ +error: generic associated types in trait paths are currently not implemented + --> $DIR/trait-path-type-error-once-implemented.rs:8:30 + | +LL | fn f2<'a>(arg : Box<dyn X<Y<1> = &'a 
()>>) {} + | ^^^ + +error: aborting due to previous error + diff --git a/src/test/ui/generic-associated-types/parse/trait-path-types.rs b/src/test/ui/generic-associated-types/parse/trait-path-types.rs new file mode 100644 index 00000000000..6cdb501ec65 --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-types.rs @@ -0,0 +1,23 @@ +#![feature(generic_associated_types)] +//~^ WARNING: the feature `generic_associated_types` is incomplete + +trait X { + type Y<'a>; +} + +const _: () = { + fn f<'a>(arg : Box<dyn X< [u8; 1] = u32>>) {} + //~^ ERROR: only path types can be used in associated type constraints +}; + +const _: () = { + fn f1<'a>(arg : Box<dyn X<(Y<'a>) = &'a ()>>) {} + //~^ ERROR: only path types can be used in associated type constraints +}; + +const _: () = { + fn f1<'a>(arg : Box<dyn X< 'a = u32 >>) {} + //~^ ERROR: only types can be used in associated type constraints +}; + +fn main() {} diff --git a/src/test/ui/generic-associated-types/parse/trait-path-types.stderr b/src/test/ui/generic-associated-types/parse/trait-path-types.stderr new file mode 100644 index 00000000000..f5be084613b --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-types.stderr @@ -0,0 +1,29 @@ +error: only path types can be used in associated type constraints + --> $DIR/trait-path-types.rs:9:29 + | +LL | fn f<'a>(arg : Box<dyn X< [u8; 1] = u32>>) {} + | ^^^^^^^ + +error: only path types can be used in associated type constraints + --> $DIR/trait-path-types.rs:14:29 + | +LL | fn f1<'a>(arg : Box<dyn X<(Y<'a>) = &'a ()>>) {} + | ^^^^^^^ + +error: only types can be used in associated type constraints + --> $DIR/trait-path-types.rs:19:30 + | +LL | fn f1<'a>(arg : Box<dyn X< 'a = u32 >>) {} + | ^^ + +warning: the feature `generic_associated_types` is incomplete and may not be safe to use and/or cause compiler crashes + --> $DIR/trait-path-types.rs:1:12 + | +LL | #![feature(generic_associated_types)] + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `#[warn(incomplete_features)]` on by default + = note: see issue #44265 <https://github.com/rust-lang/rust/issues/44265> for more information + +error: aborting due to 3 previous errors; 1 warning emitted + diff --git a/src/test/ui/generic-associated-types/parse/trait-path-unimplemented.rs b/src/test/ui/generic-associated-types/parse/trait-path-unimplemented.rs new file mode 100644 index 00000000000..02d53d5faee --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-unimplemented.rs @@ -0,0 +1,17 @@ +#![feature(generic_associated_types)] + +trait X { + type Y<'a>; +} + +const _: () = { + fn f1<'a>(arg : Box<dyn X<Y<'a> = &'a ()>>) {} + //~^ ERROR: generic associated types in trait paths are currently not implemented +}; + +const _: () = { + fn f1<'a>(arg : Box<dyn X<Y('a) = &'a ()>>) {} + //~^ ERROR: lifetime in trait object type must be followed by `+` +}; + +fn main() {} diff --git a/src/test/ui/generic-associated-types/parse/trait-path-unimplemented.stderr b/src/test/ui/generic-associated-types/parse/trait-path-unimplemented.stderr new file mode 100644 index 00000000000..1fba9cebd24 --- /dev/null +++ b/src/test/ui/generic-associated-types/parse/trait-path-unimplemented.stderr @@ -0,0 +1,14 @@ +error: lifetime in trait object type must be followed by `+` + --> $DIR/trait-path-unimplemented.rs:13:31 + | +LL | fn f1<'a>(arg : Box<dyn X<Y('a) = &'a ()>>) {} + | ^^ + +error: generic associated types in trait paths are currently not implemented + --> $DIR/trait-path-unimplemented.rs:8:30 + | +LL | fn f1<'a>(arg : 
Box<dyn X<Y<'a> = &'a ()>>) {} + | ^^^^ + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/issues/issue-10767.rs b/src/test/ui/issues/issue-10767.rs index fa10f073b45..f40815fdbdb 100644 --- a/src/test/ui/issues/issue-10767.rs +++ b/src/test/ui/issues/issue-10767.rs @@ -5,6 +5,6 @@ pub fn main() { fn f() { - }; + } let _: Box<fn()> = box (f as fn()); } diff --git a/src/test/ui/issues/issue-20616-2.rs b/src/test/ui/issues/issue-20616-2.rs index 2f2c6903a9f..f108ae5de14 100644 --- a/src/test/ui/issues/issue-20616-2.rs +++ b/src/test/ui/issues/issue-20616-2.rs @@ -9,7 +9,7 @@ type Type_1_<'a, T> = &'a T; //type Type_1<'a T> = &'a T; // error: expected `,` or `>` after lifetime name, found `T` -type Type_2 = Type_1_<'static ()>; //~ error: expected one of `,` or `>`, found `(` +type Type_2 = Type_1_<'static ()>; //~ error: expected one of `,`, `:`, `=`, or `>`, found `(` //type Type_3<T> = Box<T,,>; // error: expected type, found `,` diff --git a/src/test/ui/issues/issue-20616-2.stderr b/src/test/ui/issues/issue-20616-2.stderr index 50ec7a304c5..01e3d3dd7cc 100644 --- a/src/test/ui/issues/issue-20616-2.stderr +++ b/src/test/ui/issues/issue-20616-2.stderr @@ -1,8 +1,8 @@ -error: expected one of `,` or `>`, found `(` +error: expected one of `,`, `:`, `=`, or `>`, found `(` --> $DIR/issue-20616-2.rs:12:31 | LL | type Type_2 = Type_1_<'static ()>; - | ^ expected one of `,` or `>` + | ^ expected one of `,`, `:`, `=`, or `>` error: aborting due to previous error diff --git a/src/test/ui/issues/issue-20616-3.rs b/src/test/ui/issues/issue-20616-3.rs index 9bfd5bf2313..780038c11b8 100644 --- a/src/test/ui/issues/issue-20616-3.rs +++ b/src/test/ui/issues/issue-20616-3.rs @@ -11,7 +11,7 @@ type Type_1_<'a, T> = &'a T; type Type_3<T> = Box<T,,>; -//~^ error: expected one of `>`, const, identifier, lifetime, or type, found `,` +//~^ error: expected one of `>`, const, lifetime, or type, found `,` //type Type_4<T> = Type_1_<'static,, T>; // error: expected type, found `,` diff --git a/src/test/ui/issues/issue-20616-3.stderr b/src/test/ui/issues/issue-20616-3.stderr index cc4d79484e7..2f8cf8a79ed 100644 --- a/src/test/ui/issues/issue-20616-3.stderr +++ b/src/test/ui/issues/issue-20616-3.stderr @@ -1,8 +1,8 @@ -error: expected one of `>`, const, identifier, lifetime, or type, found `,` +error: expected one of `>`, const, lifetime, or type, found `,` --> $DIR/issue-20616-3.rs:13:24 | LL | type Type_3<T> = Box<T,,>; - | ^ expected one of `>`, const, identifier, lifetime, or type + | ^ expected one of `>`, const, lifetime, or type error: aborting due to previous error diff --git a/src/test/ui/issues/issue-20616-4.rs b/src/test/ui/issues/issue-20616-4.rs index e9a34a04667..85aa9c1146d 100644 --- a/src/test/ui/issues/issue-20616-4.rs +++ b/src/test/ui/issues/issue-20616-4.rs @@ -14,7 +14,7 @@ type Type_1_<'a, T> = &'a T; type Type_4<T> = Type_1_<'static,, T>; -//~^ error: expected one of `>`, const, identifier, lifetime, or type, found `,` +//~^ error: expected one of `>`, const, lifetime, or type, found `,` type Type_5_<'a> = Type_1_<'a, ()>; diff --git a/src/test/ui/issues/issue-20616-4.stderr b/src/test/ui/issues/issue-20616-4.stderr index 254e4d6a34d..3be6c2e78ce 100644 --- a/src/test/ui/issues/issue-20616-4.stderr +++ b/src/test/ui/issues/issue-20616-4.stderr @@ -1,8 +1,8 @@ -error: expected one of `>`, const, identifier, lifetime, or type, found `,` +error: expected one of `>`, const, lifetime, or type, found `,` --> $DIR/issue-20616-4.rs:16:34 | LL | type Type_4<T> = Type_1_<'static,, T>; 
- | ^ expected one of `>`, const, identifier, lifetime, or type + | ^ expected one of `>`, const, lifetime, or type error: aborting due to previous error diff --git a/src/test/ui/issues/issue-20616-5.rs b/src/test/ui/issues/issue-20616-5.rs index 23862516d2c..c0c6bc6dd97 100644 --- a/src/test/ui/issues/issue-20616-5.rs +++ b/src/test/ui/issues/issue-20616-5.rs @@ -20,7 +20,7 @@ type Type_5_<'a> = Type_1_<'a, ()>; type Type_5<'a> = Type_1_<'a, (),,>; -//~^ error: expected one of `>`, const, identifier, lifetime, or type, found `,` +//~^ error: expected one of `>`, const, lifetime, or type, found `,` //type Type_6 = Type_5_<'a,,>; // error: expected type, found `,` diff --git a/src/test/ui/issues/issue-20616-5.stderr b/src/test/ui/issues/issue-20616-5.stderr index aee8bf01a43..b90fbf60051 100644 --- a/src/test/ui/issues/issue-20616-5.stderr +++ b/src/test/ui/issues/issue-20616-5.stderr @@ -1,8 +1,8 @@ -error: expected one of `>`, const, identifier, lifetime, or type, found `,` +error: expected one of `>`, const, lifetime, or type, found `,` --> $DIR/issue-20616-5.rs:22:34 | LL | type Type_5<'a> = Type_1_<'a, (),,>; - | ^ expected one of `>`, const, identifier, lifetime, or type + | ^ expected one of `>`, const, lifetime, or type error: aborting due to previous error diff --git a/src/test/ui/issues/issue-20616-6.rs b/src/test/ui/issues/issue-20616-6.rs index dc327f3f788..73c75bdc45f 100644 --- a/src/test/ui/issues/issue-20616-6.rs +++ b/src/test/ui/issues/issue-20616-6.rs @@ -23,7 +23,7 @@ type Type_5_<'a> = Type_1_<'a, ()>; type Type_6 = Type_5_<'a,,>; -//~^ error: expected one of `>`, const, identifier, lifetime, or type, found `,` +//~^ error: expected one of `>`, const, lifetime, or type, found `,` //type Type_7 = Box<(),,>; // error: expected type, found `,` diff --git a/src/test/ui/issues/issue-20616-6.stderr b/src/test/ui/issues/issue-20616-6.stderr index 7192a87bc18..ea1c15ba423 100644 --- a/src/test/ui/issues/issue-20616-6.stderr +++ b/src/test/ui/issues/issue-20616-6.stderr @@ -1,8 +1,8 @@ -error: expected one of `>`, const, identifier, lifetime, or type, found `,` +error: expected one of `>`, const, lifetime, or type, found `,` --> $DIR/issue-20616-6.rs:25:26 | LL | type Type_6 = Type_5_<'a,,>; - | ^ expected one of `>`, const, identifier, lifetime, or type + | ^ expected one of `>`, const, lifetime, or type error: aborting due to previous error diff --git a/src/test/ui/issues/issue-20616-7.rs b/src/test/ui/issues/issue-20616-7.rs index ffd1620c1d3..8beeebd7a95 100644 --- a/src/test/ui/issues/issue-20616-7.rs +++ b/src/test/ui/issues/issue-20616-7.rs @@ -26,7 +26,7 @@ type Type_5_<'a> = Type_1_<'a, ()>; type Type_7 = Box<(),,>; -//~^ error: expected one of `>`, const, identifier, lifetime, or type, found `,` +//~^ error: expected one of `>`, const, lifetime, or type, found `,` //type Type_8<'a,,> = &'a (); // error: expected ident, found `,` diff --git a/src/test/ui/issues/issue-20616-7.stderr b/src/test/ui/issues/issue-20616-7.stderr index 123dc1e2b7d..dcd199902fc 100644 --- a/src/test/ui/issues/issue-20616-7.stderr +++ b/src/test/ui/issues/issue-20616-7.stderr @@ -1,8 +1,8 @@ -error: expected one of `>`, const, identifier, lifetime, or type, found `,` +error: expected one of `>`, const, lifetime, or type, found `,` --> $DIR/issue-20616-7.rs:28:22 | LL | type Type_7 = Box<(),,>; - | ^ expected one of `>`, const, identifier, lifetime, or type + | ^ expected one of `>`, const, lifetime, or type error: aborting due to previous error diff --git a/src/test/ui/issues/issue-2074.rs 
b/src/test/ui/issues/issue-2074.rs index bd5f015cca0..a6bea385804 100644 --- a/src/test/ui/issues/issue-2074.rs +++ b/src/test/ui/issues/issue-2074.rs @@ -5,11 +5,11 @@ pub fn main() { let one = || { - enum r { a }; + enum r { a } r::a as usize }; let two = || { - enum r { a }; + enum r { a } r::a as usize }; one(); two(); diff --git a/src/test/ui/issues/issue-34334.rs b/src/test/ui/issues/issue-34334.rs index bf2d091a01e..b45c00f6943 100644 --- a/src/test/ui/issues/issue-34334.rs +++ b/src/test/ui/issues/issue-34334.rs @@ -1,6 +1,6 @@ fn main () { let sr: Vec<(u32, _, _) = vec![]; - //~^ ERROR expected one of `,` or `>`, found `=` + //~^ ERROR only path types can be used in associated type constraints let sr2: Vec<(u32, _, _)> = sr.iter().map(|(faction, th_sender, th_receiver)| {}).collect(); //~^ ERROR a value of type `Vec<(u32, _, _)>` cannot be built } diff --git a/src/test/ui/issues/issue-34334.stderr b/src/test/ui/issues/issue-34334.stderr index c10a4144305..a9b9bf06d7f 100644 --- a/src/test/ui/issues/issue-34334.stderr +++ b/src/test/ui/issues/issue-34334.stderr @@ -1,8 +1,8 @@ -error: expected one of `,` or `>`, found `=` - --> $DIR/issue-34334.rs:2:29 +error: only path types can be used in associated type constraints + --> $DIR/issue-34334.rs:2:17 | LL | let sr: Vec<(u32, _, _) = vec![]; - | -- ^ expected one of `,` or `>` + | -- ^^^^^^^^^^^ | | | while parsing the type for `sr` diff --git a/src/test/ui/lint/inline-trait-and-foreign-items.stderr b/src/test/ui/lint/inline-trait-and-foreign-items.stderr index ae04612a4dd..6ac884c12ce 100644 --- a/src/test/ui/lint/inline-trait-and-foreign-items.stderr +++ b/src/test/ui/lint/inline-trait-and-foreign-items.stderr @@ -1,19 +1,3 @@ -error[E0518]: attribute should be applied to function or closure - --> $DIR/inline-trait-and-foreign-items.rs:30:5 - | -LL | #[inline] - | ^^^^^^^^^ -LL | static X: u32; - | -------------- not a function or closure - -error[E0518]: attribute should be applied to function or closure - --> $DIR/inline-trait-and-foreign-items.rs:33:5 - | -LL | #[inline] - | ^^^^^^^^^ -LL | type T; - | ------- not a function or closure - warning: `#[inline]` is ignored on constants --> $DIR/inline-trait-and-foreign-items.rs:7:5 | @@ -61,6 +45,22 @@ LL | #[inline] LL | type U = impl Trait; | -------------------- not a function or closure +error[E0518]: attribute should be applied to function or closure + --> $DIR/inline-trait-and-foreign-items.rs:30:5 + | +LL | #[inline] + | ^^^^^^^^^ +LL | static X: u32; + | -------------- not a function or closure + +error[E0518]: attribute should be applied to function or closure + --> $DIR/inline-trait-and-foreign-items.rs:33:5 + | +LL | #[inline] + | ^^^^^^^^^ +LL | type T; + | ------- not a function or closure + error: could not find defining uses --> $DIR/inline-trait-and-foreign-items.rs:26:14 | diff --git a/src/test/ui/lint/redundant-semicolon/item-stmt-semi.rs b/src/test/ui/lint/redundant-semicolon/item-stmt-semi.rs new file mode 100644 index 00000000000..4592bc31a39 --- /dev/null +++ b/src/test/ui/lint/redundant-semicolon/item-stmt-semi.rs @@ -0,0 +1,10 @@ +// check-pass +// This test should stop compiling if +// we decide to enable this lint for item statements.
+ +#![deny(redundant_semicolons)] + +fn main() { + fn inner() {}; + struct Bar {}; +} diff --git a/src/test/ui/lint/warn-unused-inline-on-fn-prototypes.stderr b/src/test/ui/lint/warn-unused-inline-on-fn-prototypes.stderr index 843db8ce815..ab19d80e732 100644 --- a/src/test/ui/lint/warn-unused-inline-on-fn-prototypes.stderr +++ b/src/test/ui/lint/warn-unused-inline-on-fn-prototypes.stderr @@ -1,5 +1,5 @@ error: `#[inline]` is ignored on function prototypes - --> $DIR/warn-unused-inline-on-fn-prototypes.rs:9:5 + --> $DIR/warn-unused-inline-on-fn-prototypes.rs:4:5 | LL | #[inline] | ^^^^^^^^^ @@ -11,7 +11,7 @@ LL | #![deny(unused_attributes)] | ^^^^^^^^^^^^^^^^^ error: `#[inline]` is ignored on function prototypes - --> $DIR/warn-unused-inline-on-fn-prototypes.rs:4:5 + --> $DIR/warn-unused-inline-on-fn-prototypes.rs:9:5 | LL | #[inline] | ^^^^^^^^^ diff --git a/src/test/ui/macros/macro-2.rs b/src/test/ui/macros/macro-2.rs index 4890c991dcd..a315981b6a6 100644 --- a/src/test/ui/macros/macro-2.rs +++ b/src/test/ui/macros/macro-2.rs @@ -3,7 +3,7 @@ pub fn main() { macro_rules! mylambda_tt { ($x:ident, $body:expr) => ({ - fn f($x: isize) -> isize { return $body; }; + fn f($x: isize) -> isize { return $body; } f }) } diff --git a/src/test/ui/macros/macro-path.rs b/src/test/ui/macros/macro-path.rs index be59d8d139b..6c011c897da 100644 --- a/src/test/ui/macros/macro-path.rs +++ b/src/test/ui/macros/macro-path.rs @@ -8,7 +8,7 @@ mod m { macro_rules! foo { ($p:path) => ({ - fn f() -> $p { 10 }; + fn f() -> $p { 10 } f() }) } diff --git a/src/test/ui/parser/issue-62660.rs b/src/test/ui/parser/issue-62660.rs index 33c8a9fa328..4f866b78976 100644 --- a/src/test/ui/parser/issue-62660.rs +++ b/src/test/ui/parser/issue-62660.rs @@ -5,7 +5,7 @@ struct Foo; impl Foo { pub fn foo(_: i32, self: Box<Self) {} - //~^ ERROR expected one of `!`, `(`, `+`, `,`, `::`, `<`, or `>`, found `)` + //~^ ERROR expected one of `!`, `(`, `+`, `,`, `::`, `:`, `<`, `=`, or `>`, found `)` } fn main() {} diff --git a/src/test/ui/parser/issue-62660.stderr b/src/test/ui/parser/issue-62660.stderr index 0844da1bd92..a50ada9056b 100644 --- a/src/test/ui/parser/issue-62660.stderr +++ b/src/test/ui/parser/issue-62660.stderr @@ -1,8 +1,8 @@ -error: expected one of `!`, `(`, `+`, `,`, `::`, `<`, or `>`, found `)` +error: expected one of `!`, `(`, `+`, `,`, `::`, `:`, `<`, `=`, or `>`, found `)` --> $DIR/issue-62660.rs:7:38 | LL | pub fn foo(_: i32, self: Box<Self) {} - | ^ expected one of 7 possible tokens + | ^ expected one of 9 possible tokens error: aborting due to previous error diff --git a/src/test/ui/parser/issue-63116.stderr b/src/test/ui/parser/issue-63116.stderr index 80a450dbd36..e249a93df92 100644 --- a/src/test/ui/parser/issue-63116.stderr +++ b/src/test/ui/parser/issue-63116.stderr @@ -12,7 +12,7 @@ error: expected one of `!`, `(`, `)`, `+`, `,`, `::`, or `<`, found `;` LL | impl W <s(f;Y(;] | ^ expected one of 7 possible tokens -error: expected one of `!`, `&&`, `&`, `(`, `)`, `*`, `+`, `,`, `->`, `...`, `::`, `<`, `>`, `?`, `[`, `_`, `async`, `const`, `dyn`, `extern`, `fn`, `for`, `impl`, `unsafe`, lifetime, or path, found `;` +error: expected one of `!`, `&&`, `&`, `(`, `)`, `*`, `+`, `,`, `->`, `...`, `::`, `:`, `<`, `=`, `>`, `?`, `[`, `_`, `async`, `const`, `dyn`, `extern`, `fn`, `for`, `impl`, `unsafe`, lifetime, or path, found `;` --> $DIR/issue-63116.rs:3:15 | LL | impl W <s(f;Y(;] diff --git a/src/test/ui/parser/lifetime-semicolon.rs b/src/test/ui/parser/lifetime-semicolon.rs index 1f147216ea6..7cc14971f63 100644 
--- a/src/test/ui/parser/lifetime-semicolon.rs +++ b/src/test/ui/parser/lifetime-semicolon.rs @@ -3,6 +3,6 @@ struct Foo<'a, 'b> { } fn foo<'a, 'b>(x: &mut Foo<'a; 'b>) {} -//~^ ERROR expected one of `,` or `>`, found `;` +//~^ ERROR expected one of `,`, `:`, `=`, or `>`, found `;` fn main() {} diff --git a/src/test/ui/parser/lifetime-semicolon.stderr b/src/test/ui/parser/lifetime-semicolon.stderr index 4641c286cb8..3b67705aae9 100644 --- a/src/test/ui/parser/lifetime-semicolon.stderr +++ b/src/test/ui/parser/lifetime-semicolon.stderr @@ -1,8 +1,8 @@ -error: expected one of `,` or `>`, found `;` +error: expected one of `,`, `:`, `=`, or `>`, found `;` --> $DIR/lifetime-semicolon.rs:5:30 | LL | fn foo<'a, 'b>(x: &mut Foo<'a; 'b>) {} - | ^ expected one of `,` or `>` + | ^ expected one of `,`, `:`, `=`, or `>` error: aborting due to previous error diff --git a/src/test/ui/parser/removed-syntax-closure-lifetime.rs b/src/test/ui/parser/removed-syntax-closure-lifetime.rs index ceac9408006..e807a179473 100644 --- a/src/test/ui/parser/removed-syntax-closure-lifetime.rs +++ b/src/test/ui/parser/removed-syntax-closure-lifetime.rs @@ -1,2 +1,2 @@ type closure = Box<lt/fn()>; -//~^ ERROR expected one of `!`, `(`, `+`, `,`, `::`, `<`, or `>`, found `/` +//~^ ERROR expected one of `!`, `(`, `+`, `,`, `::`, `:`, `<`, `=`, or `>`, found `/` diff --git a/src/test/ui/parser/removed-syntax-closure-lifetime.stderr b/src/test/ui/parser/removed-syntax-closure-lifetime.stderr index a100f689fb8..63b6e138ce5 100644 --- a/src/test/ui/parser/removed-syntax-closure-lifetime.stderr +++ b/src/test/ui/parser/removed-syntax-closure-lifetime.stderr @@ -1,8 +1,8 @@ -error: expected one of `!`, `(`, `+`, `,`, `::`, `<`, or `>`, found `/` +error: expected one of `!`, `(`, `+`, `,`, `::`, `:`, `<`, `=`, or `>`, found `/` --> $DIR/removed-syntax-closure-lifetime.rs:1:22 | LL | type closure = Box<lt/fn()>; - | ^ expected one of 7 possible tokens + | ^ expected one of 9 possible tokens error: aborting due to previous error diff --git a/src/test/ui/proc-macro/allowed-attr-stmt-expr.rs b/src/test/ui/proc-macro/allowed-attr-stmt-expr.rs index 03c10a43248..25243aeef3b 100644 --- a/src/test/ui/proc-macro/allowed-attr-stmt-expr.rs +++ b/src/test/ui/proc-macro/allowed-attr-stmt-expr.rs @@ -13,19 +13,28 @@ extern crate std; extern crate attr_stmt_expr; extern crate test_macros; -use attr_stmt_expr::{expect_let, expect_print_stmt, expect_expr, expect_print_expr}; +use attr_stmt_expr::{expect_let, expect_my_macro_stmt, expect_expr, expect_my_macro_expr}; use test_macros::print_attr; -use std::println; + +// We don't use `std::println` so that we avoid loading hygiene +// information from libstd, which would affect the SyntaxContext ids +macro_rules! my_macro { + ($($tt:tt)*) => { () } +} + fn print_str(string: &'static str) { // macros are handled a bit differently - #[expect_print_expr] - println!("{}", string) + #[expect_my_macro_expr] + my_macro!("{}", string) } macro_rules! make_stmt { ($stmt:stmt) => { - $stmt + #[print_attr] + #[rustc_dummy] + $stmt; // This semicolon is *not* passed to the macro, + // since `$stmt` is already a statement. } } @@ -35,6 +44,10 @@ macro_rules! 
second_make_stmt { } } +// The macro will see a semicolon here +#[print_attr] +struct ItemWithSemi; + fn main() { make_stmt!(struct Foo {}); @@ -44,8 +57,8 @@ fn main() { let string = "Hello, world!"; #[print_attr] - #[expect_print_stmt] - println!("{}", string); + #[expect_my_macro_stmt] + my_macro!("{}", string); #[print_attr] second_make_stmt!(#[allow(dead_code)] struct Bar {}); @@ -54,6 +67,12 @@ fn main() { #[rustc_dummy] struct Other {}; + // The macro also sees a semicolon, + // for consistency with the `ItemWithSemi` case above. + #[print_attr] + #[rustc_dummy] + struct NonBracedStruct; + #[expect_expr] print_str("string") } diff --git a/src/test/ui/proc-macro/allowed-attr-stmt-expr.stdout b/src/test/ui/proc-macro/allowed-attr-stmt-expr.stdout index 0c7ac4fb682..6cf864f3590 100644 --- a/src/test/ui/proc-macro/allowed-attr-stmt-expr.stdout +++ b/src/test/ui/proc-macro/allowed-attr-stmt-expr.stdout @@ -1,70 +1,117 @@ +PRINT-ATTR INPUT (DISPLAY): struct ItemWithSemi ; +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Ident { + ident: "struct", + span: $DIR/allowed-attr-stmt-expr.rs:49:1: 49:7 (#0), + }, + Ident { + ident: "ItemWithSemi", + span: $DIR/allowed-attr-stmt-expr.rs:49:8: 49:20 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:49:20: 49:21 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Foo { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:35:9: 35:10 (#11), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "rustc_dummy", + span: $DIR/allowed-attr-stmt-expr.rs:35:11: 35:22 (#11), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:35:10: 35:23 (#11), + }, + Ident { + ident: "struct", + span: $DIR/allowed-attr-stmt-expr.rs:53:16: 53:22 (#0), + }, + Ident { + ident: "Foo", + span: $DIR/allowed-attr-stmt-expr.rs:53:23: 53:26 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/allowed-attr-stmt-expr.rs:53:27: 53:29 (#0), + }, +] PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:56:5: 56:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "expect_let", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:56:7: 56:17 (#0), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:56:6: 56:18 (#0), }, Ident { ident: "let", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:57:5: 57:8 (#0), }, Ident { ident: "string", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:57:9: 57:15 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:57:16: 57:17 (#0), }, Literal { kind: Str, symbol: "Hello, world!", suffix: None, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:57:18: 57:33 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:57:33: 57:34 (#0), }, ] -PRINT-ATTR INPUT (DISPLAY): #[expect_print_stmt] println ! ("{}", string) ; +PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro ! 
("{}", string) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:60:5: 60:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { - ident: "expect_print_stmt", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + ident: "expect_my_macro_stmt", + span: $DIR/allowed-attr-stmt-expr.rs:60:7: 60:27 (#0), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:60:6: 60:28 (#0), }, Ident { - ident: "println", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + ident: "my_macro", + span: $DIR/allowed-attr-stmt-expr.rs:61:5: 61:13 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:61:13: 61:14 (#0), }, Group { delimiter: Parenthesis, @@ -73,36 +120,36 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ kind: Str, symbol: "{}", suffix: None, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:61:15: 61:19 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:61:19: 61:20 (#0), }, Ident { ident: "string", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:61:21: 61:27 (#0), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:61:14: 61:28 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:61:28: 61:29 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): second_make_stmt ! (#[allow(dead_code)] struct Bar { }) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "second_make_stmt", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:5: 64:21 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:21: 64:22 (#0), }, Group { delimiter: Parenthesis, @@ -110,48 +157,104 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:23: 64:24 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "allow", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:25: 64:30 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "dead_code", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:31: 64:40 (#0), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:30: 64:41 (#0), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:24: 64:42 (#0), }, Ident { ident: "struct", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:43: 64:49 (#0), }, Ident { ident: "Bar", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:50: 64:53 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:54: 64:56 (#0), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:22: 64:57 (#0), }, Punct { ch: ';', spacing: Alone, - 
span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:64:57: 64:58 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] #[allow(dead_code)] struct Bar { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:35:9: 35:10 (#32), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "rustc_dummy", + span: $DIR/allowed-attr-stmt-expr.rs:35:11: 35:22 (#32), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:35:10: 35:23 (#32), + }, + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:64:23: 64:24 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/allowed-attr-stmt-expr.rs:64:25: 64:30 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "dead_code", + span: $DIR/allowed-attr-stmt-expr.rs:64:31: 64:40 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:64:30: 64:41 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:64:24: 64:42 (#0), + }, + Ident { + ident: "struct", + span: $DIR/allowed-attr-stmt-expr.rs:64:43: 64:49 (#0), + }, + Ident { + ident: "Bar", + span: $DIR/allowed-attr-stmt-expr.rs:64:50: 64:53 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/allowed-attr-stmt-expr.rs:64:54: 64:56 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Other { } @@ -159,29 +262,60 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:67:5: 67:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "rustc_dummy", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:67:7: 67:18 (#0), }, ], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:67:6: 67:19 (#0), }, Ident { ident: "struct", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:68:5: 68:11 (#0), }, Ident { ident: "Other", - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:68:12: 68:17 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/allowed-attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/allowed-attr-stmt-expr.rs:68:18: 68:20 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct NonBracedStruct ; +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:73:5: 73:6 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "rustc_dummy", + span: $DIR/allowed-attr-stmt-expr.rs:73:7: 73:18 (#0), + }, + ], + span: $DIR/allowed-attr-stmt-expr.rs:73:6: 73:19 (#0), + }, + Ident { + ident: "struct", + span: $DIR/allowed-attr-stmt-expr.rs:74:5: 74:11 (#0), + }, + Ident { + ident: "NonBracedStruct", + span: $DIR/allowed-attr-stmt-expr.rs:74:12: 74:27 (#0), + }, + Punct { + ch: ';', + spacing: Alone, + span: $DIR/allowed-attr-stmt-expr.rs:74:27: 74:28 (#0), }, ] diff --git a/src/test/ui/proc-macro/attr-stmt-expr.rs b/src/test/ui/proc-macro/attr-stmt-expr.rs index ca1b163c986..0403684cda0 100644 --- a/src/test/ui/proc-macro/attr-stmt-expr.rs +++ b/src/test/ui/proc-macro/attr-stmt-expr.rs @@ -11,19 +11,26 @@ extern crate test_macros; extern crate attr_stmt_expr; use test_macros::print_attr; -use std::println; -use attr_stmt_expr::{expect_let, 
expect_print_stmt, expect_expr, expect_print_expr}; +use attr_stmt_expr::{expect_let, expect_my_macro_stmt, expect_expr, expect_my_macro_expr}; + +// We don't use `std::println` so that we avoid loading hygiene +// information from libstd, which would affect the SyntaxContext ids +macro_rules! my_macro { + ($($tt:tt)*) => { () } +} fn print_str(string: &'static str) { // macros are handled a bit differently - #[expect_print_expr] + #[expect_my_macro_expr] //~^ ERROR attributes on expressions are experimental //~| HELP add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable - println!("{}", string) + my_macro!("{}", string) } macro_rules! make_stmt { ($stmt:stmt) => { + #[print_attr] + #[rustc_dummy] $stmt } } @@ -42,8 +49,8 @@ fn main() { let string = "Hello, world!"; #[print_attr] - #[expect_print_stmt] - println!("{}", string); + #[expect_my_macro_stmt] + my_macro!("{}", string); #[print_attr] second_make_stmt!(#[allow(dead_code)] struct Bar {}); diff --git a/src/test/ui/proc-macro/attr-stmt-expr.stderr b/src/test/ui/proc-macro/attr-stmt-expr.stderr index 7bd60e8ee77..56178259d43 100644 --- a/src/test/ui/proc-macro/attr-stmt-expr.stderr +++ b/src/test/ui/proc-macro/attr-stmt-expr.stderr @@ -1,14 +1,14 @@ error[E0658]: attributes on expressions are experimental - --> $DIR/attr-stmt-expr.rs:19:5 + --> $DIR/attr-stmt-expr.rs:24:5 | -LL | #[expect_print_expr] - | ^^^^^^^^^^^^^^^^^^^^ +LL | #[expect_my_macro_expr] + | ^^^^^^^^^^^^^^^^^^^^^^^ | = note: see issue #15701 <https://github.com/rust-lang/rust/issues/15701> for more information = help: add `#![feature(stmt_expr_attributes)]` to the crate attributes to enable error[E0658]: attributes on expressions are experimental - --> $DIR/attr-stmt-expr.rs:55:5 + --> $DIR/attr-stmt-expr.rs:62:5 | LL | #[expect_expr] | ^^^^^^^^^^^^^^ diff --git a/src/test/ui/proc-macro/attr-stmt-expr.stdout b/src/test/ui/proc-macro/attr-stmt-expr.stdout index 5c1b586725b..f75309e6872 100644 --- a/src/test/ui/proc-macro/attr-stmt-expr.stdout +++ b/src/test/ui/proc-macro/attr-stmt-expr.stdout @@ -1,70 +1,101 @@ +PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Foo { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/attr-stmt-expr.rs:33:9: 33:10 (#8), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "rustc_dummy", + span: $DIR/attr-stmt-expr.rs:33:11: 33:22 (#8), + }, + ], + span: $DIR/attr-stmt-expr.rs:33:10: 33:23 (#8), + }, + Ident { + ident: "struct", + span: $DIR/attr-stmt-expr.rs:45:16: 45:22 (#0), + }, + Ident { + ident: "Foo", + span: $DIR/attr-stmt-expr.rs:45:23: 45:26 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/attr-stmt-expr.rs:45:27: 45:29 (#0), + }, +] PRINT-ATTR INPUT (DISPLAY): #[expect_let] let string = "Hello, world!" 
; PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:48:5: 48:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "expect_let", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:48:7: 48:17 (#0), }, ], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:48:6: 48:18 (#0), }, Ident { ident: "let", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:49:5: 49:8 (#0), }, Ident { ident: "string", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:49:9: 49:15 (#0), }, Punct { ch: '=', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:49:16: 49:17 (#0), }, Literal { kind: Str, symbol: "Hello, world!", suffix: None, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:49:18: 49:33 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:49:33: 49:34 (#0), }, ] -PRINT-ATTR INPUT (DISPLAY): #[expect_print_stmt] println ! ("{}", string) ; +PRINT-ATTR INPUT (DISPLAY): #[expect_my_macro_stmt] my_macro ! ("{}", string) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:52:5: 52:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { - ident: "expect_print_stmt", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + ident: "expect_my_macro_stmt", + span: $DIR/attr-stmt-expr.rs:52:7: 52:27 (#0), }, ], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:52:6: 52:28 (#0), }, Ident { - ident: "println", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + ident: "my_macro", + span: $DIR/attr-stmt-expr.rs:53:5: 53:13 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:53:13: 53:14 (#0), }, Group { delimiter: Parenthesis, @@ -73,36 +104,36 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ kind: Str, symbol: "{}", suffix: None, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:53:15: 53:19 (#0), }, Punct { ch: ',', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:53:19: 53:20 (#0), }, Ident { ident: "string", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:53:21: 53:27 (#0), }, ], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:53:14: 53:28 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:53:28: 53:29 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): second_make_stmt ! 
(#[allow(dead_code)] struct Bar { }) ; PRINT-ATTR INPUT (DEBUG): TokenStream [ Ident { ident: "second_make_stmt", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:5: 56:21 (#0), }, Punct { ch: '!', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:21: 56:22 (#0), }, Group { delimiter: Parenthesis, @@ -110,48 +141,104 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:23: 56:24 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "allow", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:25: 56:30 (#0), }, Group { delimiter: Parenthesis, stream: TokenStream [ Ident { ident: "dead_code", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:31: 56:40 (#0), }, ], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:30: 56:41 (#0), }, ], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:24: 56:42 (#0), }, Ident { ident: "struct", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:43: 56:49 (#0), }, Ident { ident: "Bar", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:50: 56:53 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:54: 56:56 (#0), }, ], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:22: 56:57 (#0), }, Punct { ch: ';', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:56:57: 56:58 (#0), + }, +] +PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] #[allow(dead_code)] struct Bar { } +PRINT-ATTR INPUT (DEBUG): TokenStream [ + Punct { + ch: '#', + spacing: Alone, + span: $DIR/attr-stmt-expr.rs:33:9: 33:10 (#29), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "rustc_dummy", + span: $DIR/attr-stmt-expr.rs:33:11: 33:22 (#29), + }, + ], + span: $DIR/attr-stmt-expr.rs:33:10: 33:23 (#29), + }, + Punct { + ch: '#', + spacing: Alone, + span: $DIR/attr-stmt-expr.rs:56:23: 56:24 (#0), + }, + Group { + delimiter: Bracket, + stream: TokenStream [ + Ident { + ident: "allow", + span: $DIR/attr-stmt-expr.rs:56:25: 56:30 (#0), + }, + Group { + delimiter: Parenthesis, + stream: TokenStream [ + Ident { + ident: "dead_code", + span: $DIR/attr-stmt-expr.rs:56:31: 56:40 (#0), + }, + ], + span: $DIR/attr-stmt-expr.rs:56:30: 56:41 (#0), + }, + ], + span: $DIR/attr-stmt-expr.rs:56:24: 56:42 (#0), + }, + Ident { + ident: "struct", + span: $DIR/attr-stmt-expr.rs:56:43: 56:49 (#0), + }, + Ident { + ident: "Bar", + span: $DIR/attr-stmt-expr.rs:56:50: 56:53 (#0), + }, + Group { + delimiter: Brace, + stream: TokenStream [], + span: $DIR/attr-stmt-expr.rs:56:54: 56:56 (#0), }, ] PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] struct Other { } @@ -159,29 +246,29 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:59:5: 59:6 (#0), }, Group { delimiter: Bracket, stream: TokenStream [ Ident { ident: "rustc_dummy", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:59:7: 59:18 (#0), }, ], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:59:6: 59:19 (#0), }, Ident { ident: "struct", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 
(#0), + span: $DIR/attr-stmt-expr.rs:60:5: 60:11 (#0), }, Ident { ident: "Other", - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:60:12: 60:17 (#0), }, Group { delimiter: Brace, stream: TokenStream [], - span: $DIR/attr-stmt-expr.rs:1:1: 1:1 (#0), + span: $DIR/attr-stmt-expr.rs:60:18: 60:20 (#0), }, ] diff --git a/src/test/ui/proc-macro/auxiliary/attr-stmt-expr.rs b/src/test/ui/proc-macro/auxiliary/attr-stmt-expr.rs index 213f999e9d0..19183c61651 100644 --- a/src/test/ui/proc-macro/auxiliary/attr-stmt-expr.rs +++ b/src/test/ui/proc-macro/auxiliary/attr-stmt-expr.rs @@ -15,9 +15,9 @@ pub fn expect_let(attr: TokenStream, item: TokenStream) -> TokenStream { } #[proc_macro_attribute] -pub fn expect_print_stmt(attr: TokenStream, item: TokenStream) -> TokenStream { +pub fn expect_my_macro_stmt(attr: TokenStream, item: TokenStream) -> TokenStream { assert!(attr.to_string().is_empty()); - assert_eq!(item.to_string(), "println ! (\"{}\", string) ;"); + assert_eq!(item.to_string(), "my_macro ! (\"{}\", string) ;"); item } @@ -29,9 +29,9 @@ pub fn expect_expr(attr: TokenStream, item: TokenStream) -> TokenStream { } #[proc_macro_attribute] -pub fn expect_print_expr(attr: TokenStream, item: TokenStream) -> TokenStream { +pub fn expect_my_macro_expr(attr: TokenStream, item: TokenStream) -> TokenStream { assert!(attr.to_string().is_empty()); - assert_eq!(item.to_string(), "println ! (\"{}\", string)"); + assert_eq!(item.to_string(), "my_macro ! (\"{}\", string)"); item } diff --git a/src/test/ui/proc-macro/auxiliary/issue-79242.rs b/src/test/ui/proc-macro/auxiliary/issue-79242.rs new file mode 100644 index 00000000000..e586980f0ad --- /dev/null +++ b/src/test/ui/proc-macro/auxiliary/issue-79242.rs @@ -0,0 +1,16 @@ +// force-host +// no-prefer-dynamic + +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +#[proc_macro] +pub fn dummy(input: TokenStream) -> TokenStream { + // Iterate to force internal conversion of nonterminals + // to `proc_macro` structs + for _ in input {} + TokenStream::new() +} diff --git a/src/test/ui/proc-macro/issue-79242-slow-retokenize-check.rs b/src/test/ui/proc-macro/issue-79242-slow-retokenize-check.rs new file mode 100644 index 00000000000..b68f19c5dd2 --- /dev/null +++ b/src/test/ui/proc-macro/issue-79242-slow-retokenize-check.rs @@ -0,0 +1,34 @@ +// check-pass +// aux-build:issue-79242.rs + +// Regression test for issue #79242 +// Tests that compilation time doesn't blow up for a proc-macro +// invocation with deeply nested nonterminals + +#![allow(unused)] + +extern crate issue_79242; + +macro_rules! declare_nats { + ($prev:ty) => {}; + ($prev:ty, $n:literal$(, $tail:literal)*) => { + + issue_79242::dummy! { + $prev + } + + declare_nats!(Option<$prev>$(, $tail)*); + }; + (0, $($n:literal),+) => { + pub struct N0; + declare_nats!(N0, $($n),+); + }; +} + +declare_nats! { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28 +} + + +fn main() {} diff --git a/src/test/ui/structs-enums/nested-enum-same-names.rs b/src/test/ui/structs-enums/nested-enum-same-names.rs index dece3dcd54b..111b9ba9477 100644 --- a/src/test/ui/structs-enums/nested-enum-same-names.rs +++ b/src/test/ui/structs-enums/nested-enum-same-names.rs @@ -17,10 +17,10 @@ as it does not include the method name in the symbol name. 
pub struct Foo; impl Foo { pub fn foo() { - enum Panic { Common }; + enum Panic { Common } } pub fn bar() { - enum Panic { Common }; + enum Panic { Common } } } diff --git a/src/test/ui/try-is-identifier-edition2015.rs b/src/test/ui/try-is-identifier-edition2015.rs index dfb05599be6..90f56d5fa71 100644 --- a/src/test/ui/try-is-identifier-edition2015.rs +++ b/src/test/ui/try-is-identifier-edition2015.rs @@ -5,7 +5,7 @@ fn main() { let try = 2; - struct try { try: u32 }; + struct try { try: u32 } let try: try = try { try }; assert_eq!(try.try, 2); } diff --git a/src/test/ui/zero-sized/zero-size-type-destructors.rs b/src/test/ui/zero-sized/zero-size-type-destructors.rs index 98b5a439c82..fb87d8ea0ba 100644 --- a/src/test/ui/zero-sized/zero-size-type-destructors.rs +++ b/src/test/ui/zero-sized/zero-size-type-destructors.rs @@ -10,7 +10,7 @@ pub fn foo() { fn drop(&mut self) { unsafe { destructions -= 1 }; } - }; + } let _x = [Foo, Foo, Foo]; } diff --git a/src/tools/clippy/clippy_lints/src/missing_doc.rs b/src/tools/clippy/clippy_lints/src/missing_doc.rs index 009e3d8937e..4678f6872f3 100644 --- a/src/tools/clippy/clippy_lints/src/missing_doc.rs +++ b/src/tools/clippy/clippy_lints/src/missing_doc.rs @@ -147,7 +147,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingDoc { hir::ItemKind::Union(..) => "a union", hir::ItemKind::OpaqueTy(..) => "an existential type", hir::ItemKind::ExternCrate(..) - | hir::ItemKind::ForeignMod(..) + | hir::ItemKind::ForeignMod { .. } | hir::ItemKind::GlobalAsm(..) | hir::ItemKind::Impl { .. } | hir::ItemKind::Use(..) => return, diff --git a/src/tools/clippy/clippy_lints/src/missing_inline.rs b/src/tools/clippy/clippy_lints/src/missing_inline.rs index 53abe6086ea..913d9daff46 100644 --- a/src/tools/clippy/clippy_lints/src/missing_inline.rs +++ b/src/tools/clippy/clippy_lints/src/missing_inline.rs @@ -125,7 +125,7 @@ impl<'tcx> LateLintPass<'tcx> for MissingInline { | hir::ItemKind::Union(..) | hir::ItemKind::OpaqueTy(..) | hir::ItemKind::ExternCrate(..) - | hir::ItemKind::ForeignMod(..) + | hir::ItemKind::ForeignMod { .. } | hir::ItemKind::Impl { .. } | hir::ItemKind::Use(..) => {}, }; diff --git a/src/tools/clippy/clippy_lints/src/utils/inspector.rs b/src/tools/clippy/clippy_lints/src/utils/inspector.rs index 4fbfb3be32c..8f0ef9150d4 100644 --- a/src/tools/clippy/clippy_lints/src/utils/inspector.rs +++ b/src/tools/clippy/clippy_lints/src/utils/inspector.rs @@ -395,7 +395,7 @@ fn print_item(cx: &LateContext<'_>, item: &hir::Item<'_>) { println!("function of type {:#?}", item_ty); }, hir::ItemKind::Mod(..) => println!("module"), - hir::ItemKind::ForeignMod(ref fm) => println!("foreign module with abi: {}", fm.abi), + hir::ItemKind::ForeignMod { abi, .. } => println!("foreign module with abi: {}", abi), hir::ItemKind::GlobalAsm(ref asm) => println!("global asm: {:?}", asm), hir::ItemKind::TyAlias(..) => { println!("type alias for {:?}", cx.tcx.type_of(did)); |
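
Note on the new generic-associated-types parse tests above: the constraint forms they exercise reduce to the short sketch below. This is an illustrative summary, not part of the change itself; the f0..f3 names are invented here, while the trait X, its type Y<'a>, and the error wording are taken from the test files and their .stderr expectations.

#![feature(generic_associated_types)]
#![allow(dead_code, incomplete_features)]

trait X {
    type Y<'a>;
}

// Now accepted by the parser, but still reported as
// "generic associated types in trait paths are currently not implemented":
// fn f0<'a>(arg: Box<dyn X<Y<'a> = &'a ()>>) {}

// Still rejected at parse time, with the new, more targeted diagnostics:
// fn f1<'a>(arg: Box<dyn X<X::Y = u32>>) {}    // paths with multiple segments cannot be used in associated type constraints
// fn f2<'a>(arg: Box<dyn X<[u8; 1] = u32>>) {} // only path types can be used in associated type constraints
// fn f3<'a>(arg: Box<dyn X<'a = u32>>) {}      // only types can be used in associated type constraints

fn main() {}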
