From 94f121ff3f47fecdcf458b691f1bf87f8b1f1f1d Mon Sep 17 00:00:00 2001 From: Taiki Endo Date: Mon, 4 Feb 2019 21:49:54 +0900 Subject: libsyntax_ext => 2018 --- src/libsyntax_ext/Cargo.toml | 1 + src/libsyntax_ext/asm.rs | 11 +-- src/libsyntax_ext/assert.rs | 5 +- src/libsyntax_ext/cfg.rs | 8 +- src/libsyntax_ext/compile_error.rs | 5 +- src/libsyntax_ext/concat.rs | 3 +- src/libsyntax_ext/concat_idents.rs | 5 +- src/libsyntax_ext/deriving/bounds.rs | 11 +-- src/libsyntax_ext/deriving/clone.rs | 25 +++--- src/libsyntax_ext/deriving/cmp/eq.rs | 19 +++-- src/libsyntax_ext/deriving/cmp/ord.rs | 12 +-- src/libsyntax_ext/deriving/cmp/partial_eq.rs | 18 ++--- src/libsyntax_ext/deriving/cmp/partial_ord.rs | 22 +++--- src/libsyntax_ext/deriving/custom.rs | 15 ++-- src/libsyntax_ext/deriving/debug.rs | 12 +-- src/libsyntax_ext/deriving/decodable.rs | 22 +++--- src/libsyntax_ext/deriving/default.rs | 14 ++-- src/libsyntax_ext/deriving/encodable.rs | 18 ++--- src/libsyntax_ext/deriving/generic/mod.rs | 108 +++++++++++++------------- src/libsyntax_ext/deriving/generic/ty.rs | 29 ++++--- src/libsyntax_ext/deriving/hash.rs | 10 +-- src/libsyntax_ext/deriving/mod.rs | 4 +- src/libsyntax_ext/diagnostics.rs | 2 + src/libsyntax_ext/env.rs | 7 +- src/libsyntax_ext/format.rs | 33 ++++---- src/libsyntax_ext/format_foreign.rs | 30 ++++--- src/libsyntax_ext/global_asm.rs | 9 ++- src/libsyntax_ext/lib.rs | 16 +--- src/libsyntax_ext/log_syntax.rs | 2 +- src/libsyntax_ext/proc_macro_decls.rs | 7 +- src/libsyntax_ext/proc_macro_impl.rs | 26 +++---- src/libsyntax_ext/proc_macro_server.rs | 7 +- src/libsyntax_ext/test.rs | 14 ++-- src/libsyntax_ext/test_case.rs | 2 +- src/libsyntax_ext/trace_macros.rs | 5 +- 35 files changed, 269 insertions(+), 268 deletions(-) (limited to 'src/libsyntax_ext') diff --git a/src/libsyntax_ext/Cargo.toml b/src/libsyntax_ext/Cargo.toml index 7ad08f75e8b..c22b55b8c13 100644 --- a/src/libsyntax_ext/Cargo.toml +++ b/src/libsyntax_ext/Cargo.toml @@ -2,6 +2,7 @@ authors 
= ["The Rust Project Developers"] name = "syntax_ext" version = "0.0.0" +edition = "2018" [lib] name = "syntax_ext" diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 41ee6e91b3d..ebcdceea7c5 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -1,13 +1,13 @@ // Inline assembly support. // -use self::State::*; +use State::*; use rustc_data_structures::thin_vec::ThinVec; -use errors::DiagnosticBuilder; +use crate::errors::DiagnosticBuilder; + use syntax::ast; -use syntax::ext::base; -use syntax::ext::base::*; +use syntax::ext::base::{self, *}; use syntax::feature_gate; use syntax::parse::{self, token}; use syntax::ptr::P; @@ -15,6 +15,7 @@ use syntax::symbol::Symbol; use syntax::ast::AsmDialect; use syntax_pos::Span; use syntax::tokenstream; +use syntax::{span_err, struct_span_err}; enum State { Asm, @@ -40,7 +41,7 @@ impl State { const OPTIONS: &[&str] = &["volatile", "alignstack", "intel"]; -pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, +pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs index b27f495322a..984ef26f5ab 100644 --- a/src/libsyntax_ext/assert.rs +++ b/src/libsyntax_ext/assert.rs @@ -1,4 +1,5 @@ -use errors::DiagnosticBuilder; +use crate::errors::DiagnosticBuilder; + use syntax::ast::{self, *}; use syntax::source_map::Spanned; use syntax::ext::base::*; @@ -11,7 +12,7 @@ use syntax::tokenstream::{TokenStream, TokenTree}; use syntax_pos::{Span, DUMMY_SP}; pub fn expand_assert<'cx>( - cx: &'cx mut ExtCtxt, + cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[TokenTree], ) -> Box { diff --git a/src/libsyntax_ext/cfg.rs b/src/libsyntax_ext/cfg.rs index 3b47b03cbe8..e2104550878 100644 --- a/src/libsyntax_ext/cfg.rs +++ b/src/libsyntax_ext/cfg.rs @@ -2,17 +2,17 @@ /// a literal `true` or `false` based on whether the given cfg matches the /// current compilation environment. 
-use errors::DiagnosticBuilder; +use crate::errors::DiagnosticBuilder; + use syntax::ast; -use syntax::ext::base::*; -use syntax::ext::base; +use syntax::ext::base::{self, *}; use syntax::ext::build::AstBuilder; use syntax::attr; use syntax::tokenstream; use syntax::parse::token; use syntax_pos::Span; -pub fn expand_cfg<'cx>(cx: &mut ExtCtxt, +pub fn expand_cfg<'cx>(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { diff --git a/src/libsyntax_ext/compile_error.rs b/src/libsyntax_ext/compile_error.rs index 8f7f5deb091..59d3f2c9c78 100644 --- a/src/libsyntax_ext/compile_error.rs +++ b/src/libsyntax_ext/compile_error.rs @@ -1,11 +1,10 @@ // The compiler code necessary to support the compile_error! extension. -use syntax::ext::base::*; -use syntax::ext::base; +use syntax::ext::base::{self, *}; use syntax_pos::Span; use syntax::tokenstream; -pub fn expand_compile_error<'cx>(cx: &'cx mut ExtCtxt, +pub fn expand_compile_error<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs index f148f8e003d..230b00c0f8f 100644 --- a/src/libsyntax_ext/concat.rs +++ b/src/libsyntax_ext/concat.rs @@ -3,12 +3,11 @@ use syntax::ext::base; use syntax::ext::build::AstBuilder; use syntax::symbol::Symbol; use syntax::tokenstream; -use syntax_pos; use std::string::String; pub fn expand_syntax_ext( - cx: &mut base::ExtCtxt, + cx: &mut base::ExtCtxt<'_>, sp: syntax_pos::Span, tts: &[tokenstream::TokenTree], ) -> Box { diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index de96de4bdc2..8c9eb4bf2d8 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -1,8 +1,7 @@ use rustc_data_structures::thin_vec::ThinVec; use syntax::ast; -use syntax::ext::base::*; -use syntax::ext::base; +use syntax::ext::base::{self, *}; use syntax::feature_gate; use syntax::parse::token; use syntax::ptr::P; @@ -10,7 +9,7 @@ use 
syntax_pos::Span; use syntax_pos::symbol::Symbol; use syntax::tokenstream::TokenTree; -pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, +pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[TokenTree]) -> Box { diff --git a/src/libsyntax_ext/deriving/bounds.rs b/src/libsyntax_ext/deriving/bounds.rs index dcfc6ab0391..c7b805e0bdc 100644 --- a/src/libsyntax_ext/deriving/bounds.rs +++ b/src/libsyntax_ext/deriving/bounds.rs @@ -1,11 +1,12 @@ -use deriving::path_std; -use deriving::generic::*; -use deriving::generic::ty::*; +use crate::deriving::path_std; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; + use syntax::ast::MetaItem; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax_pos::Span; -pub fn expand_deriving_unsafe_bound(cx: &mut ExtCtxt, +pub fn expand_deriving_unsafe_bound(cx: &mut ExtCtxt<'_>, span: Span, _: &MetaItem, _: &Annotatable, @@ -13,7 +14,7 @@ pub fn expand_deriving_unsafe_bound(cx: &mut ExtCtxt, cx.span_err(span, "this unsafe trait should be implemented explicitly"); } -pub fn expand_deriving_copy(cx: &mut ExtCtxt, +pub fn expand_deriving_copy(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index 38d433e842c..b347092e1bc 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -1,9 +1,8 @@ -use deriving::path_std; -use deriving::generic::*; -use deriving::generic::ty::*; +use crate::deriving::path_std; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; -use syntax::ast::{self, Expr, Generics, ItemKind, MetaItem, VariantData}; -use syntax::ast::GenericArg; +use syntax::ast::{self, Expr, GenericArg, Generics, ItemKind, MetaItem, VariantData}; use syntax::attr; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; @@ -11,7 +10,7 @@ use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; use 
syntax_pos::Span; -pub fn expand_deriving_clone(cx: &mut ExtCtxt, +pub fn expand_deriving_clone(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -105,12 +104,12 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt, } fn cs_clone_shallow(name: &str, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, trait_span: Span, - substr: &Substructure, + substr: &Substructure<'_>, is_union: bool) -> P { - fn assert_ty_bounds(cx: &mut ExtCtxt, stmts: &mut Vec, + fn assert_ty_bounds(cx: &mut ExtCtxt<'_>, stmts: &mut Vec, ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. @@ -120,7 +119,7 @@ fn cs_clone_shallow(name: &str, vec![GenericArg::Type(ty)], vec![]); stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path))); } - fn process_variant(cx: &mut ExtCtxt, stmts: &mut Vec, variant: &VariantData) { + fn process_variant(cx: &mut ExtCtxt<'_>, stmts: &mut Vec, variant: &VariantData) { for field in variant.fields() { // let _: AssertParamIsClone; assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsClone"); @@ -151,14 +150,14 @@ fn cs_clone_shallow(name: &str, } fn cs_clone(name: &str, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, trait_span: Span, - substr: &Substructure) + substr: &Substructure<'_>) -> P { let ctor_path; let all_fields; let fn_path = cx.std_path(&["clone", "Clone", "clone"]); - let subcall = |cx: &mut ExtCtxt, field: &FieldInfo| { + let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo<'_>| { let args = vec![cx.expr_addr_of(field.span, field.self_.clone())]; cx.expr_call_global(field.span, fn_path.clone(), args) }; diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index dbba8c3b7a0..a1035ff641f 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -1,6 +1,6 @@ -use deriving::path_std; -use deriving::generic::*; -use deriving::generic::ty::*; +use 
crate::deriving::path_std; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; use syntax::ast::{self, Expr, MetaItem, GenericArg}; use syntax::ext::base::{Annotatable, ExtCtxt}; @@ -9,7 +9,7 @@ use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; -pub fn expand_deriving_eq(cx: &mut ExtCtxt, +pub fn expand_deriving_eq(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -44,8 +44,11 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, trait_def.expand_ext(cx, mitem, item, push, true) } -fn cs_total_eq_assert(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { - fn assert_ty_bounds(cx: &mut ExtCtxt, stmts: &mut Vec, +fn cs_total_eq_assert(cx: &mut ExtCtxt<'_>, + trait_span: Span, + substr: &Substructure<'_>) + -> P { + fn assert_ty_bounds(cx: &mut ExtCtxt<'_>, stmts: &mut Vec, ty: P, span: Span, helper_name: &str) { // Generate statement `let _: helper_name;`, // set the expn ID so we can use the unstable struct. @@ -55,7 +58,9 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) vec![GenericArg::Type(ty)], vec![]); stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path))); } - fn process_variant(cx: &mut ExtCtxt, stmts: &mut Vec, variant: &ast::VariantData) { + fn process_variant(cx: &mut ExtCtxt<'_>, + stmts: &mut Vec, + variant: &ast::VariantData) { for field in variant.fields() { // let _: AssertParamIsEq; assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsEq"); diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs index 21bd56710ac..e4f939c151f 100644 --- a/src/libsyntax_ext/deriving/cmp/ord.rs +++ b/src/libsyntax_ext/deriving/cmp/ord.rs @@ -1,6 +1,6 @@ -use deriving::path_std; -use deriving::generic::*; -use deriving::generic::ty::*; +use crate::deriving::path_std; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; use syntax::ast::{self, Expr, MetaItem}; use 
syntax::ext::base::{Annotatable, ExtCtxt}; @@ -9,7 +9,7 @@ use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; -pub fn expand_deriving_ord(cx: &mut ExtCtxt, +pub fn expand_deriving_ord(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -44,7 +44,7 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt, } -pub fn ordering_collapsed(cx: &mut ExtCtxt, +pub fn ordering_collapsed(cx: &mut ExtCtxt<'_>, span: Span, self_arg_tags: &[ast::Ident]) -> P { @@ -53,7 +53,7 @@ pub fn ordering_collapsed(cx: &mut ExtCtxt, cx.expr_method_call(span, lft, cx.ident_of("cmp"), vec![rgt]) } -pub fn cs_cmp(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P { +pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P { let test_id = cx.ident_of("cmp").gensym(); let equals_path = cx.path_global(span, cx.std_path(&["cmp", "Ordering", "Equal"])); diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index 4ec24bce4cd..07026ae3739 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -1,6 +1,6 @@ -use deriving::{path_local, path_std}; -use deriving::generic::*; -use deriving::generic::ty::*; +use crate::deriving::{path_local, path_std}; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; use syntax::ast::{BinOpKind, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; @@ -9,22 +9,22 @@ use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; -pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, +pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { // structures are equal if all fields are equal, and non equal, if // any fields are not equal or if the enum variants are different - fn cs_op(cx: &mut ExtCtxt, + fn cs_op(cx: &mut ExtCtxt<'_>, span: Span, - substr: &Substructure, + substr: 
&Substructure<'_>, op: BinOpKind, combiner: BinOpKind, base: bool) -> P { - let op = |cx: &mut ExtCtxt, span: Span, self_f: P, other_fs: &[P]| { + let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P, other_fs: &[P]| { let other_f = match (other_fs.len(), other_fs.get(0)) { (1, Some(o_f)) => o_f, _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"), @@ -53,10 +53,10 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, substr) } - fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P { + fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P { cs_op(cx, span, substr, BinOpKind::Eq, BinOpKind::And, true) } - fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P { + fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P { cs_op(cx, span, substr, BinOpKind::Ne, BinOpKind::Or, false) } diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs index 9ef481edf51..e99abeb118e 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs @@ -1,8 +1,8 @@ -pub use self::OrderingOp::*; +pub use OrderingOp::*; -use deriving::{path_local, pathvec_std, path_std}; -use deriving::generic::*; -use deriving::generic::ty::*; +use crate::deriving::{path_local, pathvec_std, path_std}; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; use syntax::ast::{self, BinOpKind, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; @@ -11,7 +11,7 @@ use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; -pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, +pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -95,7 +95,7 @@ pub enum OrderingOp { GeOp, } -pub fn some_ordering_collapsed(cx: &mut ExtCtxt, +pub fn some_ordering_collapsed(cx: &mut ExtCtxt<'_>, span: Span, op: OrderingOp, self_arg_tags: &[ast::Ident]) 
@@ -112,7 +112,7 @@ pub fn some_ordering_collapsed(cx: &mut ExtCtxt, cx.expr_method_call(span, lft, cx.ident_of(op_str), vec![rgt]) } -pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P { +pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P { let test_id = cx.ident_of("cmp").gensym(); let ordering = cx.path_global(span, cx.std_path(&["cmp", "Ordering", "Equal"])); let ordering_expr = cx.expr_path(ordering.clone()); @@ -184,14 +184,14 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P< /// Strict inequality. fn cs_op(less: bool, inclusive: bool, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, span: Span, - substr: &Substructure) -> P { - let ordering_path = |cx: &mut ExtCtxt, name: &str| { + substr: &Substructure<'_>) -> P { + let ordering_path = |cx: &mut ExtCtxt<'_>, name: &str| { cx.expr_path(cx.path_global(span, cx.std_path(&["cmp", "Ordering", name]))) }; - let par_cmp = |cx: &mut ExtCtxt, span, self_f: P, other_fs: &[P], default| { + let par_cmp = |cx: &mut ExtCtxt<'_>, span, self_f: P, other_fs: &[P], default| { let other_f = match (other_fs.len(), other_fs.get(0)) { (1, Some(o_f)) => o_f, _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"), diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index 2f20814ef3e..7d9b8402cac 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -1,4 +1,7 @@ -use errors::FatalError; +use crate::errors::FatalError; +use crate::proc_macro_impl::EXEC_STRATEGY; +use crate::proc_macro_server; + use syntax::ast::{self, ItemKind, Attribute, Mac}; use syntax::attr::{mark_used, mark_known}; use syntax::source_map::Span; @@ -9,8 +12,6 @@ use syntax::tokenstream; use syntax::visit::Visitor; use syntax_pos::DUMMY_SP; -use proc_macro_impl::EXEC_STRATEGY; - struct MarkAttrs<'a>(&'a [ast::Name]); impl<'a> Visitor<'a> for MarkAttrs<'a> { @@ -25,15 +26,15 
@@ impl<'a> Visitor<'a> for MarkAttrs<'a> { } pub struct ProcMacroDerive { - pub client: ::proc_macro::bridge::client::Client< - fn(::proc_macro::TokenStream) -> ::proc_macro::TokenStream, + pub client: proc_macro::bridge::client::Client< + fn(proc_macro::TokenStream) -> proc_macro::TokenStream, >, pub attrs: Vec, } impl MultiItemModifier for ProcMacroDerive { fn expand(&self, - ecx: &mut ExtCtxt, + ecx: &mut ExtCtxt<'_>, span: Span, _meta_item: &ast::MetaItem, item: Annotatable) @@ -67,7 +68,7 @@ impl MultiItemModifier for ProcMacroDerive { let token = Token::interpolated(token::NtItem(item)); let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into(); - let server = ::proc_macro_server::Rustc::new(ecx); + let server = proc_macro_server::Rustc::new(ecx); let stream = match self.client.run(&EXEC_STRATEGY, server, input) { Ok(stream) => stream, Err(e) => { diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index b3e5bd9283e..7dc2d007d73 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -1,6 +1,6 @@ -use deriving::path_std; -use deriving::generic::*; -use deriving::generic::ty::*; +use crate::deriving::path_std; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; use rustc_data_structures::thin_vec::ThinVec; @@ -11,7 +11,7 @@ use syntax::ext::build::AstBuilder; use syntax::ptr::P; use syntax_pos::{DUMMY_SP, Span}; -pub fn expand_deriving_debug(cx: &mut ExtCtxt, +pub fn expand_deriving_debug(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -47,7 +47,7 @@ pub fn expand_deriving_debug(cx: &mut ExtCtxt, } /// We use the debug builders to do the heavy lifting here -fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P { +fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P { // build fmt.debug_struct().field(, &)....build() // or fmt.debug_tuple().field(&)....build() // based on the 
"shape". @@ -124,7 +124,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P) -> ast::Stmt { +fn stmt_let_undescore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P) -> ast::Stmt { let local = P(ast::Local { pat: cx.pat_wild(sp), ty: None, diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index 89c630e9915..b082351d5f6 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ b/src/libsyntax_ext/deriving/decodable.rs @@ -1,9 +1,9 @@ //! The compiler code necessary for `#[derive(Decodable)]`. See encodable.rs for more. -use deriving::{self, pathvec_std}; -use deriving::generic::*; -use deriving::generic::ty::*; -use deriving::warn_if_deprecated; +use crate::deriving::{self, pathvec_std}; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; +use crate::deriving::warn_if_deprecated; use syntax::ast; use syntax::ast::{Expr, MetaItem, Mutability}; @@ -13,7 +13,7 @@ use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; -pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt, +pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -21,7 +21,7 @@ pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt, expand_deriving_decodable_imp(cx, span, mitem, item, push, "rustc_serialize") } -pub fn expand_deriving_decodable(cx: &mut ExtCtxt, +pub fn expand_deriving_decodable(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -30,7 +30,7 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt, expand_deriving_decodable_imp(cx, span, mitem, item, push, "serialize") } -fn expand_deriving_decodable_imp(cx: &mut ExtCtxt, +fn expand_deriving_decodable_imp(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -79,9 +79,9 @@ fn expand_deriving_decodable_imp(cx: &mut ExtCtxt, trait_def.expand(cx, mitem, item, push) } -fn decodable_substructure(cx: &mut ExtCtxt, +fn decodable_substructure(cx: 
&mut ExtCtxt<'_>, trait_span: Span, - substr: &Substructure, + substr: &Substructure<'_>, krate: &str) -> P { let decoder = substr.nonself_args[0].clone(); @@ -168,13 +168,13 @@ fn decodable_substructure(cx: &mut ExtCtxt, /// Create a decoder for a single enum variant/struct: /// - `outer_pat_path` is the path to this enum variant/struct /// - `getarg` should retrieve the `usize`-th field with name `@str`. -fn decode_static_fields(cx: &mut ExtCtxt, +fn decode_static_fields(cx: &mut ExtCtxt<'_>, trait_span: Span, outer_pat_path: ast::Path, fields: &StaticFields, mut getarg: F) -> P - where F: FnMut(&mut ExtCtxt, Span, Symbol, usize) -> P + where F: FnMut(&mut ExtCtxt<'_>, Span, Symbol, usize) -> P { match *fields { Unnamed(ref fields, is_tuple) => { diff --git a/src/libsyntax_ext/deriving/default.rs b/src/libsyntax_ext/deriving/default.rs index 32d02bec798..6db0a29165a 100644 --- a/src/libsyntax_ext/deriving/default.rs +++ b/src/libsyntax_ext/deriving/default.rs @@ -1,15 +1,16 @@ -use deriving::path_std; -use deriving::generic::*; -use deriving::generic::ty::*; +use crate::deriving::path_std; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; use syntax::ast::{Expr, MetaItem}; use syntax::ext::base::{Annotatable, DummyResult, ExtCtxt}; use syntax::ext::build::AstBuilder; use syntax::ptr::P; use syntax::symbol::Symbol; +use syntax::span_err; use syntax_pos::Span; -pub fn expand_deriving_default(cx: &mut ExtCtxt, +pub fn expand_deriving_default(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -42,7 +43,10 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt, trait_def.expand(cx, mitem, item, push) } -fn default_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { +fn default_substructure(cx: &mut ExtCtxt<'_>, + trait_span: Span, + substr: &Substructure<'_>) + -> P { let default_ident = cx.std_path(&["default", "Default", "default"]); let default_call = |span| cx.expr_call_global(span, 
default_ident.clone(), Vec::new()); diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index c8935874158..dd5646342b3 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -82,10 +82,10 @@ //! } //! ``` -use deriving::{self, pathvec_std}; -use deriving::generic::*; -use deriving::generic::ty::*; -use deriving::warn_if_deprecated; +use crate::deriving::{self, pathvec_std}; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; +use crate::deriving::warn_if_deprecated; use syntax::ast::{Expr, ExprKind, MetaItem, Mutability}; use syntax::ext::base::{Annotatable, ExtCtxt}; @@ -94,7 +94,7 @@ use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; -pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt, +pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -102,7 +102,7 @@ pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt, expand_deriving_encodable_imp(cx, span, mitem, item, push, "rustc_serialize") } -pub fn expand_deriving_encodable(cx: &mut ExtCtxt, +pub fn expand_deriving_encodable(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -111,7 +111,7 @@ pub fn expand_deriving_encodable(cx: &mut ExtCtxt, expand_deriving_encodable_imp(cx, span, mitem, item, push, "serialize") } -fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, +fn expand_deriving_encodable_imp(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -162,9 +162,9 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, trait_def.expand(cx, mitem, item, push) } -fn encodable_substructure(cx: &mut ExtCtxt, +fn encodable_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, - substr: &Substructure, + substr: &Substructure<'_>, krate: &'static str) -> P { let encoder = substr.nonself_args[0].clone(); diff --git a/src/libsyntax_ext/deriving/generic/mod.rs 
b/src/libsyntax_ext/deriving/generic/mod.rs index 22643db5016..0c88ae0311d 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -174,8 +174,8 @@ //! (, , Named(vec![(, )]))]) //! ``` -pub use self::StaticFields::*; -pub use self::SubstructureFields::*; +pub use StaticFields::*; +pub use SubstructureFields::*; use std::cell::RefCell; use std::iter; @@ -195,9 +195,9 @@ use syntax::symbol::{Symbol, keywords}; use syntax::parse::ParseSess; use syntax_pos::{DUMMY_SP, Span}; -use self::ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty}; +use ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty}; -use deriving; +use crate::deriving; pub mod ty; @@ -321,7 +321,7 @@ pub enum SubstructureFields<'a> { /// Combine the values of all the fields together. The last argument is /// all the fields of all the structures. pub type CombineSubstructureFunc<'a> = - Box P + 'a>; + Box, Span, &Substructure<'_>) -> P + 'a>; /// Deal with non-matching enum variants. The tuple is a list of /// identifiers (one for each `Self` argument, which could be any of the @@ -329,7 +329,7 @@ pub type CombineSubstructureFunc<'a> = /// holding the variant index value for each of the `Self` arguments. The /// last argument is all the non-`Self` args of the method being derived. 
pub type EnumNonMatchCollapsedFunc<'a> = - Box]) -> P + 'a>; + Box, Span, (&[Ident], &[Ident]), &[P]) -> P + 'a>; pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>) -> RefCell> { @@ -342,7 +342,7 @@ pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>) fn find_type_parameters(ty: &ast::Ty, ty_param_names: &[ast::Name], span: Span, - cx: &ExtCtxt) + cx: &ExtCtxt<'_>) -> Vec> { use syntax::visit; @@ -386,7 +386,7 @@ fn find_type_parameters(ty: &ast::Ty, impl<'a> TraitDef<'a> { pub fn expand(self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, mitem: &ast::MetaItem, item: &'a Annotatable, push: &mut dyn FnMut(Annotatable)) { @@ -394,7 +394,7 @@ impl<'a> TraitDef<'a> { } pub fn expand_ext(self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, mitem: &ast::MetaItem, item: &'a Annotatable, push: &mut dyn FnMut(Annotatable), @@ -513,7 +513,7 @@ impl<'a> TraitDef<'a> { /// where B1, ..., BN are the bounds given by `bounds_paths`.'. Z is a phantom type, and /// therefore does not get bound by the derived trait. 
fn create_derived_impl(&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, type_ident: Ident, generics: &Generics, field_tys: Vec>, @@ -696,7 +696,7 @@ impl<'a> TraitDef<'a> { } fn expand_struct_def(&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, struct_def: &'a VariantData, type_ident: Ident, generics: &Generics, @@ -746,7 +746,7 @@ impl<'a> TraitDef<'a> { } fn expand_enum_def(&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, enum_def: &'a EnumDef, type_attrs: &[ast::Attribute], type_ident: Ident, @@ -832,12 +832,12 @@ fn find_repr_type_name(sess: &ParseSess, type_attrs: &[ast::Attribute]) -> &'sta impl<'a> MethodDef<'a> { fn call_substructure_method(&self, - cx: &mut ExtCtxt, - trait_: &TraitDef, + cx: &mut ExtCtxt<'_>, + trait_: &TraitDef<'_>, type_ident: Ident, self_args: &[P], nonself_args: &[P], - fields: &SubstructureFields) + fields: &SubstructureFields<'_>) -> P { let substructure = Substructure { type_ident, @@ -847,13 +847,13 @@ impl<'a> MethodDef<'a> { fields, }; let mut f = self.combine_substructure.borrow_mut(); - let f: &mut CombineSubstructureFunc = &mut *f; + let f: &mut CombineSubstructureFunc<'_> = &mut *f; f(cx, trait_.span, &substructure) } fn get_ret_ty(&self, - cx: &mut ExtCtxt, - trait_: &TraitDef, + cx: &mut ExtCtxt<'_>, + trait_: &TraitDef<'_>, generics: &Generics, type_ident: Ident) -> P { @@ -866,8 +866,8 @@ impl<'a> MethodDef<'a> { fn split_self_nonself_args (&self, - cx: &mut ExtCtxt, - trait_: &TraitDef, + cx: &mut ExtCtxt<'_>, + trait_: &TraitDef<'_>, type_ident: Ident, generics: &Generics) -> (Option, Vec>, Vec>, Vec<(Ident, P)>) { @@ -912,8 +912,8 @@ impl<'a> MethodDef<'a> { } fn create_method(&self, - cx: &mut ExtCtxt, - trait_: &TraitDef, + cx: &mut ExtCtxt<'_>, + trait_: &TraitDef<'_>, type_ident: Ident, generics: &Generics, abi: Abi, @@ -1005,7 +1005,7 @@ impl<'a> MethodDef<'a> { /// } /// ``` fn expand_struct_method_body<'b>(&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'b>, struct_def: &'b 
VariantData, type_ident: Ident, @@ -1077,8 +1077,8 @@ impl<'a> MethodDef<'a> { } fn expand_static_struct_method_body(&self, - cx: &mut ExtCtxt, - trait_: &TraitDef, + cx: &mut ExtCtxt<'_>, + trait_: &TraitDef<'_>, struct_def: &VariantData, type_ident: Ident, self_args: &[P], @@ -1125,7 +1125,7 @@ impl<'a> MethodDef<'a> { /// as their results are unused. The point of `__self_vi` and /// `__arg_1_vi` is for `PartialOrd`; see #15503.) fn expand_enum_method_body<'b>(&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'b>, enum_def: &'b EnumDef, type_attrs: &[ast::Attribute], @@ -1179,7 +1179,7 @@ impl<'a> MethodDef<'a> { /// } /// ``` fn build_enum_match_tuple<'b>(&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, trait_: &TraitDef<'b>, enum_def: &'b EnumDef, type_attrs: &[ast::Attribute], @@ -1230,7 +1230,7 @@ impl<'a> MethodDef<'a> { .enumerate() .filter(|&(_, v)| !(self.unify_fieldless_variants && v.node.data.fields().is_empty())) .map(|(index, variant)| { - let mk_self_pat = |cx: &mut ExtCtxt, self_arg_name: &str| { + let mk_self_pat = |cx: &mut ExtCtxt<'_>, self_arg_name: &str| { let (p, idents) = trait_.create_enum_variant_pattern(cx, type_ident, variant, @@ -1296,7 +1296,7 @@ impl<'a> MethodDef<'a> { other: others, attrs, } - }).collect::>(); + }).collect::>>(); // Now, for some given VariantK, we have built up // expressions for referencing every field of every @@ -1501,8 +1501,8 @@ impl<'a> MethodDef<'a> { } fn expand_static_enum_method_body(&self, - cx: &mut ExtCtxt, - trait_: &TraitDef, + cx: &mut ExtCtxt<'_>, + trait_: &TraitDef<'_>, enum_def: &EnumDef, type_ident: Ident, self_args: &[P], @@ -1527,7 +1527,7 @@ impl<'a> MethodDef<'a> { // general helper methods. 
impl<'a> TraitDef<'a> { - fn summarise_struct(&self, cx: &mut ExtCtxt, struct_def: &VariantData) -> StaticFields { + fn summarise_struct(&self, cx: &mut ExtCtxt<'_>, struct_def: &VariantData) -> StaticFields { let mut named_idents = Vec::new(); let mut just_spans = Vec::new(); for field in struct_def.fields() { @@ -1553,7 +1553,7 @@ impl<'a> TraitDef<'a> { } fn create_subpatterns(&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, field_paths: Vec, mutbl: ast::Mutability, use_temporaries: bool) @@ -1573,7 +1573,7 @@ impl<'a> TraitDef<'a> { fn create_struct_pattern (&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, struct_path: ast::Path, struct_def: &'a VariantData, prefix: &str, @@ -1633,7 +1633,7 @@ impl<'a> TraitDef<'a> { fn create_enum_variant_pattern (&self, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, enum_ident: ast::Ident, variant: &'a ast::Variant, prefix: &str, @@ -1652,10 +1652,10 @@ impl<'a> TraitDef<'a> { pub fn cs_fold_fields<'a, F>(use_foldl: bool, mut f: F, base: P, - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, all_fields: &[FieldInfo<'a>]) -> P - where F: FnMut(&mut ExtCtxt, Span, P, P, &[P]) -> P + where F: FnMut(&mut ExtCtxt<'_>, Span, P, P, &[P]) -> P { if use_foldl { all_fields.iter().fold(base, |old, field| { @@ -1668,10 +1668,10 @@ pub fn cs_fold_fields<'a, F>(use_foldl: bool, } } -pub fn cs_fold_enumnonmatch(mut enum_nonmatch_f: EnumNonMatchCollapsedFunc, - cx: &mut ExtCtxt, +pub fn cs_fold_enumnonmatch(mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, + cx: &mut ExtCtxt<'_>, trait_span: Span, - substructure: &Substructure) + substructure: &Substructure<'_>) -> P { match *substructure.fields { @@ -1685,7 +1685,7 @@ pub fn cs_fold_enumnonmatch(mut enum_nonmatch_f: EnumNonMatchCollapsedFunc, } } -pub fn cs_fold_static(cx: &mut ExtCtxt, +pub fn cs_fold_static(cx: &mut ExtCtxt<'_>, trait_span: Span) -> P { @@ -1697,12 +1697,12 @@ pub fn cs_fold_static(cx: &mut ExtCtxt, pub fn cs_fold(use_foldl: bool, f: F, base: P, - enum_nonmatch_f: 
EnumNonMatchCollapsedFunc, - cx: &mut ExtCtxt, + enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, + cx: &mut ExtCtxt<'_>, trait_span: Span, - substructure: &Substructure) + substructure: &Substructure<'_>) -> P - where F: FnMut(&mut ExtCtxt, Span, P, P, &[P]) -> P + where F: FnMut(&mut ExtCtxt<'_>, Span, P, P, &[P]) -> P { match *substructure.fields { EnumMatching(.., ref all_fields) | @@ -1730,13 +1730,13 @@ pub fn cs_fold(use_foldl: bool, pub fn cs_fold1(use_foldl: bool, f: F, mut b: B, - enum_nonmatch_f: EnumNonMatchCollapsedFunc, - cx: &mut ExtCtxt, + enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, + cx: &mut ExtCtxt<'_>, trait_span: Span, - substructure: &Substructure) + substructure: &Substructure<'_>) -> P - where F: FnMut(&mut ExtCtxt, Span, P, P, &[P]) -> P, - B: FnMut(&mut ExtCtxt, Option<(Span, P, &[P])>) -> P + where F: FnMut(&mut ExtCtxt<'_>, Span, P, P, &[P]) -> P, + B: FnMut(&mut ExtCtxt<'_>, Option<(Span, P, &[P])>) -> P { match *substructure.fields { EnumMatching(.., ref all_fields) | @@ -1776,12 +1776,12 @@ pub fn cs_fold1(use_foldl: bool, /// ``` #[inline] pub fn cs_same_method(f: F, - mut enum_nonmatch_f: EnumNonMatchCollapsedFunc, - cx: &mut ExtCtxt, + mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>, + cx: &mut ExtCtxt<'_>, trait_span: Span, - substructure: &Substructure) + substructure: &Substructure<'_>) -> P - where F: FnOnce(&mut ExtCtxt, Span, Vec>) -> P + where F: FnOnce(&mut ExtCtxt<'_>, Span, Vec>) -> P { match *substructure.fields { EnumMatching(.., ref all_fields) | diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs index 83ec99b3573..ea6e07922b2 100644 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ b/src/libsyntax_ext/deriving/generic/ty.rs @@ -1,11 +1,10 @@ //! A mini version of ast::Ty, which is easier to use, and features an explicit `Self` type to use //! when specifying impls to be derived. 
-pub use self::PtrTy::*; -pub use self::Ty::*; +pub use PtrTy::*; +pub use Ty::*; -use syntax::ast; -use syntax::ast::{Expr, GenericParamKind, Generics, Ident, SelfKind, GenericArg}; +use syntax::ast::{self, Expr, GenericParamKind, Generics, Ident, SelfKind, GenericArg}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::source_map::{respan, DUMMY_SP}; @@ -60,7 +59,7 @@ impl<'a> Path<'a> { } pub fn to_ty(&self, - cx: &ExtCtxt, + cx: &ExtCtxt<'_>, span: Span, self_ty: Ident, self_generics: &Generics) @@ -68,7 +67,7 @@ impl<'a> Path<'a> { cx.ty_path(self.to_path(cx, span, self_ty, self_generics)) } pub fn to_path(&self, - cx: &ExtCtxt, + cx: &ExtCtxt<'_>, span: Span, self_ty: Ident, self_generics: &Generics) @@ -127,19 +126,19 @@ pub fn nil_ty<'r>() -> Ty<'r> { Tuple(Vec::new()) } -fn mk_lifetime(cx: &ExtCtxt, span: Span, lt: &Option<&str>) -> Option { +fn mk_lifetime(cx: &ExtCtxt<'_>, span: Span, lt: &Option<&str>) -> Option { lt.map(|s| cx.lifetime(span, Ident::from_str(s)) ) } -fn mk_lifetimes(cx: &ExtCtxt, span: Span, lt: &Option<&str>) -> Vec { +fn mk_lifetimes(cx: &ExtCtxt<'_>, span: Span, lt: &Option<&str>) -> Vec { mk_lifetime(cx, span, lt).into_iter().collect() } impl<'a> Ty<'a> { pub fn to_ty(&self, - cx: &ExtCtxt, + cx: &ExtCtxt<'_>, span: Span, self_ty: Ident, self_generics: &Generics) @@ -167,7 +166,7 @@ impl<'a> Ty<'a> { } pub fn to_path(&self, - cx: &ExtCtxt, + cx: &ExtCtxt<'_>, span: Span, self_ty: Ident, generics: &Generics) @@ -193,11 +192,11 @@ impl<'a> Ty<'a> { } -fn mk_ty_param(cx: &ExtCtxt, +fn mk_ty_param(cx: &ExtCtxt<'_>, span: Span, name: &str, attrs: &[ast::Attribute], - bounds: &[Path], + bounds: &[Path<'_>], self_ident: Ident, self_generics: &Generics) -> ast::GenericParam { @@ -237,7 +236,7 @@ impl<'a> LifetimeBounds<'a> { } } pub fn to_generics(&self, - cx: &ExtCtxt, + cx: &ExtCtxt<'_>, span: Span, self_ty: Ident, self_generics: &Generics) @@ -262,9 +261,9 @@ impl<'a> LifetimeBounds<'a> { } } -pub fn 
get_explicit_self(cx: &ExtCtxt, +pub fn get_explicit_self(cx: &ExtCtxt<'_>, span: Span, - self_ptr: &Option) + self_ptr: &Option>) -> (P, ast::ExplicitSelf) { // this constructs a fresh `self` path let self_path = cx.expr_self(span); diff --git a/src/libsyntax_ext/deriving/hash.rs b/src/libsyntax_ext/deriving/hash.rs index 4af2bd57b00..0d4f2ddc3be 100644 --- a/src/libsyntax_ext/deriving/hash.rs +++ b/src/libsyntax_ext/deriving/hash.rs @@ -1,6 +1,6 @@ -use deriving::{self, pathvec_std, path_std}; -use deriving::generic::*; -use deriving::generic::ty::*; +use crate::deriving::{self, pathvec_std, path_std}; +use crate::deriving::generic::*; +use crate::deriving::generic::ty::*; use syntax::ast::{Expr, MetaItem, Mutability}; use syntax::ext::base::{Annotatable, ExtCtxt}; @@ -8,7 +8,7 @@ use syntax::ext::build::AstBuilder; use syntax::ptr::P; use syntax_pos::Span; -pub fn expand_deriving_hash(cx: &mut ExtCtxt, +pub fn expand_deriving_hash(cx: &mut ExtCtxt<'_>, span: Span, mitem: &MetaItem, item: &Annotatable, @@ -50,7 +50,7 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt, hash_trait_def.expand(cx, mitem, item, push); } -fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { +fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P { let state_expr = match (substr.nonself_args.len(), substr.nonself_args.get(0)) { (1, Some(o_f)) => o_f, _ => { diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index 7548d43f184..2c8a996cdb0 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -90,7 +90,7 @@ derive_traits! 
{ } #[inline] // because `name` is a compile-time constant -fn warn_if_deprecated(ecx: &mut ExtCtxt, sp: Span, name: &str) { +fn warn_if_deprecated(ecx: &mut ExtCtxt<'_>, sp: Span, name: &str) { if let Some(replacement) = match name { "Encodable" => Some("RustcEncodable"), "Decodable" => Some("RustcDecodable"), @@ -131,7 +131,7 @@ fn hygienic_type_parameter(item: &Annotatable, base: &str) -> String { } /// Constructs an expression that calls an intrinsic -fn call_intrinsic(cx: &ExtCtxt, +fn call_intrinsic(cx: &ExtCtxt<'_>, mut span: Span, intrinsic: &str, args: Vec>) diff --git a/src/libsyntax_ext/diagnostics.rs b/src/libsyntax_ext/diagnostics.rs index e8ad4af6850..9bbd9fdec17 100644 --- a/src/libsyntax_ext/diagnostics.rs +++ b/src/libsyntax_ext/diagnostics.rs @@ -1,5 +1,7 @@ #![allow(non_snake_case)] +use syntax::{register_diagnostic, register_long_diagnostics}; + // Error messages for EXXXX errors. // Each message should start and end with a new line, and be wrapped to 80 characters. // In vim you can `:set tw=80` and use `gq` to wrap paragraphs. Use `:set tw=0` to disable. 
diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs index 16fb64a5f39..ccff4aec2c8 100644 --- a/src/libsyntax_ext/env.rs +++ b/src/libsyntax_ext/env.rs @@ -4,8 +4,7 @@ // use syntax::ast::{self, Ident, GenericArg}; -use syntax::ext::base::*; -use syntax::ext::base; +use syntax::ext::base::{self, *}; use syntax::ext::build::AstBuilder; use syntax::symbol::{keywords, Symbol}; use syntax_pos::Span; @@ -13,7 +12,7 @@ use syntax::tokenstream; use std::env; -pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, +pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { @@ -44,7 +43,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, MacEager::expr(e) } -pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, +pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 4c473fe7612..6bb7ee1d5dd 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -1,9 +1,11 @@ -use self::ArgumentType::*; -use self::Position::*; +use ArgumentType::*; +use Position::*; use fmt_macros as parse; -use errors::DiagnosticBuilder; +use crate::errors::DiagnosticBuilder; +use crate::errors::Applicability; + use syntax::ast; use syntax::ext::base::{self, *}; use syntax::ext::build::AstBuilder; @@ -13,7 +15,6 @@ use syntax::ptr::P; use syntax::symbol::Symbol; use syntax::tokenstream; use syntax_pos::{MultiSpan, Span, DUMMY_SP}; -use errors::Applicability; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use std::borrow::Cow; @@ -184,7 +185,7 @@ fn parse_args<'a>( } impl<'a, 'b> Context<'a, 'b> { - fn resolve_name_inplace(&self, p: &mut parse::Piece) { + fn resolve_name_inplace(&self, p: &mut parse::Piece<'_>) { // NOTE: the `unwrap_or` branch is needed in case of invalid format // arguments, e.g., `format_args!("{foo}")`. 
let lookup = |s| *self.names.get(s).unwrap_or(&0); @@ -208,7 +209,7 @@ impl<'a, 'b> Context<'a, 'b> { /// Verifies one piece of a parse string, and remembers it if valid. /// All errors are not emitted as fatal so we can continue giving errors /// about this and possibly other format strings. - fn verify_piece(&mut self, p: &parse::Piece) { + fn verify_piece(&mut self, p: &parse::Piece<'_>) { match *p { parse::String(..) => {} parse::NextArgument(ref arg) => { @@ -231,7 +232,7 @@ impl<'a, 'b> Context<'a, 'b> { } } - fn verify_count(&mut self, c: parse::Count) { + fn verify_count(&mut self, c: parse::Count<'_>) { match c { parse::CountImplied | parse::CountIs(..) => {} @@ -244,7 +245,7 @@ impl<'a, 'b> Context<'a, 'b> { } } - fn describe_num_args(&self) -> Cow { + fn describe_num_args(&self) -> Cow<'_, str> { match self.args.len() { 0 => "no arguments were given".into(), 1 => "there is 1 argument".into(), @@ -385,11 +386,11 @@ impl<'a, 'b> Context<'a, 'b> { self.count_args_index_offset = sofar; } - fn rtpath(ecx: &ExtCtxt, s: &str) -> Vec { + fn rtpath(ecx: &ExtCtxt<'_>, s: &str) -> Vec { ecx.std_path(&["fmt", "rt", "v1", s]) } - fn build_count(&self, c: parse::Count) -> P { + fn build_count(&self, c: parse::Count<'_>) -> P { let sp = self.macsp; let count = |c, arg| { let mut path = Context::rtpath(self.ecx, "Count"); @@ -426,7 +427,7 @@ impl<'a, 'b> Context<'a, 'b> { /// Build a static `rt::Argument` from a `parse::Piece` or append /// to the `literal` string. 
fn build_piece(&mut self, - piece: &parse::Piece, + piece: &parse::Piece<'_>, arg_index_consumed: &mut Vec) -> Option> { let sp = self.macsp; @@ -647,7 +648,7 @@ impl<'a, 'b> Context<'a, 'b> { self.ecx.expr_call_global(self.macsp, path, fn_args) } - fn format_arg(ecx: &ExtCtxt, + fn format_arg(ecx: &ExtCtxt<'_>, macsp: Span, mut sp: Span, ty: &ArgumentType, @@ -686,7 +687,7 @@ impl<'a, 'b> Context<'a, 'b> { } } -pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt, +pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt<'_>, mut sp: Span, tts: &[tokenstream::TokenTree]) -> Box { @@ -703,7 +704,7 @@ pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt, } pub fn expand_format_args_nl<'cx>( - ecx: &'cx mut ExtCtxt, + ecx: &'cx mut ExtCtxt<'_>, mut sp: Span, tts: &[tokenstream::TokenTree], ) -> Box { @@ -734,7 +735,7 @@ pub fn expand_format_args_nl<'cx>( /// Take the various parts of `format_args!(efmt, args..., name=names...)` /// and construct the appropriate formatting expression. -pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, +pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt<'_>, sp: Span, efmt: P, args: Vec>, @@ -952,7 +953,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, piece }).collect::>(); - let numbered_position_args = pieces.iter().any(|arg: &parse::Piece| { + let numbered_position_args = pieces.iter().any(|arg: &parse::Piece<'_>| { match *arg { parse::String(_) => false, parse::NextArgument(arg) => { diff --git a/src/libsyntax_ext/format_foreign.rs b/src/libsyntax_ext/format_foreign.rs index 8ac6d460ec3..381325b2963 100644 --- a/src/libsyntax_ext/format_foreign.rs +++ b/src/libsyntax_ext/format_foreign.rs @@ -68,7 +68,7 @@ pub mod printf { pub position: (usize, usize), } - impl<'a> Format<'a> { + impl Format<'_> { /// Translate this directive into an equivalent Rust formatting directive. 
/// /// Returns `None` in cases where the `printf` directive does not have an exact Rust @@ -249,12 +249,12 @@ pub mod printf { } } - fn translate(&self, s: &mut String) -> ::std::fmt::Result { + fn translate(&self, s: &mut String) -> std::fmt::Result { use std::fmt::Write; match *self { Num::Num(n) => write!(s, "{}", n), Num::Arg(n) => { - let n = n.checked_sub(1).ok_or(::std::fmt::Error)?; + let n = n.checked_sub(1).ok_or(std::fmt::Error)?; write!(s, "{}$", n) }, Num::Next => write!(s, "*"), @@ -263,7 +263,7 @@ pub mod printf { } /// Returns an iterator over all substitutions in a given string. - pub fn iter_subs(s: &str) -> Substitutions { + pub fn iter_subs(s: &str) -> Substitutions<'_> { Substitutions { s, pos: 0, @@ -309,7 +309,7 @@ pub mod printf { } /// Parse the next substitution from the input string. - pub fn parse_next_substitution(s: &str) -> Option<(Substitution, &str)> { + pub fn parse_next_substitution(s: &str) -> Option<(Substitution<'_>, &str)> { use self::State::*; let at = { @@ -389,7 +389,7 @@ pub mod printf { let mut precision: Option = None; let mut length: Option<&str> = None; let mut type_: &str = ""; - let end: Cur; + let end: Cur<'_>; if let Start = state { match c { @@ -575,7 +575,7 @@ pub mod printf { Some((Substitution::Format(f), end.slice_after())) } - fn at_next_cp_while(mut cur: Cur, mut pred: F) -> Cur + fn at_next_cp_while(mut cur: Cur<'_>, mut pred: F) -> Cur<'_> where F: FnMut(char) -> bool { loop { match cur.next_cp() { @@ -769,7 +769,7 @@ pub mod shell { Escape((usize, usize)), } - impl<'a> Substitution<'a> { + impl Substitution<'_> { pub fn as_str(&self) -> String { match self { Substitution::Ordinal(n, _) => format!("${}", n), @@ -804,7 +804,7 @@ pub mod shell { } /// Returns an iterator over all substitutions in a given string. 
- pub fn iter_subs(s: &str) -> Substitutions { + pub fn iter_subs(s: &str) -> Substitutions<'_> { Substitutions { s, pos: 0, @@ -839,7 +839,7 @@ pub mod shell { } /// Parse the next substitution from the input string. - pub fn parse_next_substitution(s: &str) -> Option<(Substitution, &str)> { + pub fn parse_next_substitution(s: &str) -> Option<(Substitution<'_>, &str)> { let at = { let start = s.find('$')?; match s[start+1..].chars().next()? { @@ -868,7 +868,7 @@ pub mod shell { } } - fn at_next_cp_while(mut cur: Cur, mut pred: F) -> Cur + fn at_next_cp_while(mut cur: Cur<'_>, mut pred: F) -> Cur<'_> where F: FnMut(char) -> bool { loop { match cur.next_cp() { @@ -962,8 +962,6 @@ pub mod shell { } mod strcursor { - use std; - pub struct StrCursor<'a> { s: &'a str, pub at: usize, @@ -1028,7 +1026,7 @@ mod strcursor { } } - impl<'a> Copy for StrCursor<'a> {} + impl Copy for StrCursor<'_> {} impl<'a> Clone for StrCursor<'a> { fn clone(&self) -> StrCursor<'a> { @@ -1036,8 +1034,8 @@ mod strcursor { } } - impl<'a> std::fmt::Debug for StrCursor<'a> { - fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { + impl std::fmt::Debug for StrCursor<'_> { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(fmt, "StrCursor({:?} | {:?})", self.slice_before(), self.slice_after()) } } diff --git a/src/libsyntax_ext/global_asm.rs b/src/libsyntax_ext/global_asm.rs index 0a12e27c4fc..14dbd930023 100644 --- a/src/libsyntax_ext/global_asm.rs +++ b/src/libsyntax_ext/global_asm.rs @@ -8,21 +8,22 @@ /// LLVM's `module asm "some assembly here"`. All of LLVM's caveats /// therefore apply. 
-use errors::DiagnosticBuilder; +use crate::errors::DiagnosticBuilder; + use syntax::ast; use syntax::source_map::respan; -use syntax::ext::base; -use syntax::ext::base::*; +use syntax::ext::base::{self, *}; use syntax::feature_gate; use syntax::parse::token; use syntax::ptr::P; use syntax::symbol::Symbol; use syntax_pos::Span; use syntax::tokenstream; +use smallvec::smallvec; pub const MACRO: &str = "global_asm"; -pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt, +pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { if !cx.ecfg.enable_global_asm() { diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 5e767d237cc..9308cfb3a4f 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -4,29 +4,21 @@ html_favicon_url = "https://doc.rust-lang.org/favicon.ico", html_root_url = "https://doc.rust-lang.org/nightly/")] +#![deny(rust_2018_idioms)] + #![feature(in_band_lifetimes)] #![feature(proc_macro_diagnostic)] #![feature(proc_macro_internals)] #![feature(proc_macro_span)] #![feature(decl_macro)] -#![feature(nll)] #![feature(str_escape)] #![feature(rustc_diagnostic_macros)] #![recursion_limit="256"] -extern crate fmt_macros; -#[macro_use] -extern crate syntax; -extern crate syntax_pos; extern crate proc_macro; -extern crate rustc_data_structures; -extern crate rustc_errors as errors; -extern crate rustc_target; -#[macro_use] -extern crate smallvec; -#[macro_use] -extern crate log; + +use rustc_errors as errors; mod diagnostics; diff --git a/src/libsyntax_ext/log_syntax.rs b/src/libsyntax_ext/log_syntax.rs index a143186b945..658ce98d268 100644 --- a/src/libsyntax_ext/log_syntax.rs +++ b/src/libsyntax_ext/log_syntax.rs @@ -4,7 +4,7 @@ use syntax::print; use syntax::tokenstream; use syntax_pos; -pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt, +pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt<'_>, sp: syntax_pos::Span, tts: &[tokenstream::TokenTree]) -> Box { diff --git 
a/src/libsyntax_ext/proc_macro_decls.rs b/src/libsyntax_ext/proc_macro_decls.rs index 46c502965ee..fbc4d899074 100644 --- a/src/libsyntax_ext/proc_macro_decls.rs +++ b/src/libsyntax_ext/proc_macro_decls.rs @@ -1,6 +1,7 @@ use std::mem; -use errors; +use crate::deriving; +use crate::errors; use syntax::ast::{self, Ident}; use syntax::attr; @@ -18,8 +19,6 @@ use syntax::visit::{self, Visitor}; use syntax_pos::{Span, DUMMY_SP}; -use deriving; - const PROC_MACRO_KINDS: [&str; 3] = ["proc_macro_derive", "proc_macro_attribute", "proc_macro"]; struct ProcMacroDerive { @@ -324,7 +323,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { // ]; // } fn mk_decls( - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, custom_derives: &[ProcMacroDerive], custom_attrs: &[ProcMacroDef], custom_macros: &[ProcMacroDef], diff --git a/src/libsyntax_ext/proc_macro_impl.rs b/src/libsyntax_ext/proc_macro_impl.rs index 60d167d01ee..88e20e3dc7c 100644 --- a/src/libsyntax_ext/proc_macro_impl.rs +++ b/src/libsyntax_ext/proc_macro_impl.rs @@ -1,27 +1,27 @@ -use errors::FatalError; +use crate::errors::FatalError; +use crate::proc_macro_server; use syntax::source_map::Span; -use syntax::ext::base::*; +use syntax::ext::base::{self, *}; use syntax::tokenstream::TokenStream; -use syntax::ext::base; -pub const EXEC_STRATEGY: ::proc_macro::bridge::server::SameThread = - ::proc_macro::bridge::server::SameThread; +pub const EXEC_STRATEGY: proc_macro::bridge::server::SameThread = + proc_macro::bridge::server::SameThread; pub struct AttrProcMacro { - pub client: ::proc_macro::bridge::client::Client< - fn(::proc_macro::TokenStream, ::proc_macro::TokenStream) -> ::proc_macro::TokenStream, + pub client: proc_macro::bridge::client::Client< + fn(proc_macro::TokenStream, proc_macro::TokenStream) -> proc_macro::TokenStream, >, } impl base::AttrProcMacro for AttrProcMacro { fn expand<'cx>(&self, - ecx: &'cx mut ExtCtxt, + ecx: &'cx mut ExtCtxt<'_>, span: Span, annotation: TokenStream, annotated: TokenStream) -> 
TokenStream { - let server = ::proc_macro_server::Rustc::new(ecx); + let server = proc_macro_server::Rustc::new(ecx); match self.client.run(&EXEC_STRATEGY, server, annotation, annotated) { Ok(stream) => stream, Err(e) => { @@ -39,18 +39,18 @@ impl base::AttrProcMacro for AttrProcMacro { } pub struct BangProcMacro { - pub client: ::proc_macro::bridge::client::Client< - fn(::proc_macro::TokenStream) -> ::proc_macro::TokenStream, + pub client: proc_macro::bridge::client::Client< + fn(proc_macro::TokenStream) -> proc_macro::TokenStream, >, } impl base::ProcMacro for BangProcMacro { fn expand<'cx>(&self, - ecx: &'cx mut ExtCtxt, + ecx: &'cx mut ExtCtxt<'_>, span: Span, input: TokenStream) -> TokenStream { - let server = ::proc_macro_server::Rustc::new(ecx); + let server = proc_macro_server::Rustc::new(ecx); match self.client.run(&EXEC_STRATEGY, server, input) { Ok(stream) => stream, Err(e) => { diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index 7de9b9343a8..730262683c0 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -1,4 +1,5 @@ -use errors::{self, Diagnostic, DiagnosticBuilder}; +use crate::errors::{self, Diagnostic, DiagnosticBuilder}; + use std::panic; use proc_macro::bridge::{server, TokenTree}; @@ -369,7 +370,7 @@ pub(crate) struct Rustc<'a> { } impl<'a> Rustc<'a> { - pub fn new(cx: &'a ExtCtxt) -> Self { + pub fn new(cx: &'a ExtCtxt<'_>) -> Self { // No way to determine def location for a proc macro right now, so use call location. 
let location = cx.current_expansion.mark.expn_info().unwrap().call_site; let to_span = |transparency| { @@ -650,7 +651,7 @@ impl server::Literal for Rustc<'_> { } } -impl<'a> server::SourceFile for Rustc<'a> { +impl server::SourceFile for Rustc<'_> { fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool { Lrc::ptr_eq(file1, file2) } diff --git a/src/libsyntax_ext/test.rs b/src/libsyntax_ext/test.rs index 11c734b299c..832bebb6113 100644 --- a/src/libsyntax_ext/test.rs +++ b/src/libsyntax_ext/test.rs @@ -13,7 +13,7 @@ use syntax::source_map::{ExpnInfo, MacroAttribute}; use std::iter; pub fn expand_test( - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, attr_sp: Span, _meta_item: &ast::MetaItem, item: Annotatable, @@ -22,7 +22,7 @@ pub fn expand_test( } pub fn expand_bench( - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, attr_sp: Span, _meta_item: &ast::MetaItem, item: Annotatable, @@ -31,7 +31,7 @@ pub fn expand_bench( } pub fn expand_test_or_bench( - cx: &mut ExtCtxt, + cx: &mut ExtCtxt<'_>, attr_sp: Span, item: Annotatable, is_bench: bool @@ -180,7 +180,7 @@ pub fn expand_test_or_bench( ast::ItemKind::ExternCrate(Some(Symbol::intern("test"))) ); - debug!("Synthetic test item:\n{}\n", pprust::item_to_string(&test_const)); + log::debug!("Synthetic test item:\n{}\n", pprust::item_to_string(&test_const)); vec![ // Access to libtest under a gensymed name @@ -210,7 +210,7 @@ fn should_fail(i: &ast::Item) -> bool { attr::contains_name(&i.attrs, "allow_fail") } -fn should_panic(cx: &ExtCtxt, i: &ast::Item) -> ShouldPanic { +fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic { match attr::find_by_name(&i.attrs, "should_panic") { Some(attr) => { let ref sd = cx.parse_sess.span_diagnostic; @@ -243,7 +243,7 @@ fn should_panic(cx: &ExtCtxt, i: &ast::Item) -> ShouldPanic { } } -fn has_test_signature(cx: &ExtCtxt, i: &ast::Item) -> bool { +fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool { let has_should_panic_attr = 
attr::contains_name(&i.attrs, "should_panic"); let ref sd = cx.parse_sess.span_diagnostic; if let ast::ItemKind::Fn(ref decl, ref header, ref generics, _) = i.node { @@ -296,7 +296,7 @@ fn has_test_signature(cx: &ExtCtxt, i: &ast::Item) -> bool { } } -fn has_bench_signature(cx: &ExtCtxt, i: &ast::Item) -> bool { +fn has_bench_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool { let has_sig = if let ast::ItemKind::Fn(ref decl, _, _, _) = i.node { // N.B., inadequate check, but we're running // well before resolve, can't get too deep. diff --git a/src/libsyntax_ext/test_case.rs b/src/libsyntax_ext/test_case.rs index 04e33671872..63417b702d5 100644 --- a/src/libsyntax_ext/test_case.rs +++ b/src/libsyntax_ext/test_case.rs @@ -20,7 +20,7 @@ use syntax::source_map::{ExpnInfo, MacroAttribute}; use syntax::feature_gate; pub fn expand( - ecx: &mut ExtCtxt, + ecx: &mut ExtCtxt<'_>, attr_sp: Span, _meta_item: &ast::MetaItem, anno_item: Annotatable diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs index 638d7b5568b..4d35daf3de9 100644 --- a/src/libsyntax_ext/trace_macros.rs +++ b/src/libsyntax_ext/trace_macros.rs @@ -1,11 +1,10 @@ -use syntax::ext::base::ExtCtxt; -use syntax::ext::base; +use syntax::ext::base::{self, ExtCtxt}; use syntax::feature_gate; use syntax::symbol::keywords; use syntax_pos::Span; use syntax::tokenstream::TokenTree; -pub fn expand_trace_macros(cx: &mut ExtCtxt, +pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>, sp: Span, tt: &[TokenTree]) -> Box { -- cgit 1.4.1-3-g733a5 From 9fcb1658ab13a7f722e4747c5a4b691291e88a3b Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Tue, 5 Feb 2019 15:20:55 +1100 Subject: Overhaul `syntax::fold::Folder`. This commit changes `syntax::fold::Folder` from a functional style (where most methods take a `T` and produce a new `T`) to a more imperative style (where most methods take and modify a `&mut T`), and renames it `syntax::mut_visit::MutVisitor`. The first benefit is speed. 
The functional style does not require any reallocations, due to the use of `P::map` and `MoveMap::move_{,flat_}map`. However, every field in the AST must be overwritten; even those fields that are unchanged are overwritten with the same value. This causes a lot of unnecessary memory writes. The imperative style reduces instruction counts by 1--3% across a wide range of workloads, particularly incremental workloads. The second benefit is conciseness; the imperative style is usually more concise. E.g. compare the old functional style: ``` fn fold_abc(&mut self, abc: ABC) { ABC { a: fold_a(abc.a), b: fold_b(abc.b), c: abc.c, } } ``` with the imperative style: ``` fn visit_abc(&mut self, ABC { a, b, c: _ }: &mut ABC) { visit_a(a); visit_b(b); } ``` (The reductions get larger in more complex examples.) Overall, the patch removes over 200 lines of code -- even though the new code has more comments -- and a lot of the remaining lines have fewer characters. Some notes: - The old style used methods called `fold_*`. The new style mostly uses methods called `visit_*`, but there are a few methods that map a `T` to something other than a `T`, which are called `flat_map_*` (`T` maps to multiple `T`s) or `filter_map_*` (`T` maps to 0 or 1 `T`s). - `move_map.rs`/`MoveMap`/`move_map`/`move_flat_map` are renamed `map_in_place.rs`/`MapInPlace`/`map_in_place`/`flat_map_in_place` to reflect their slightly changed signatures. - Although this commit renames the `fold` module as `mut_visit`, it keeps it in the `fold.rs` file, so as not to confuse git. The next commit will rename the file. 
--- src/librustc_allocator/expand.rs | 27 +- src/librustc_data_structures/thin_vec.rs | 9 + src/librustc_driver/driver.rs | 12 +- src/librustc_driver/lib.rs | 9 +- src/librustc_driver/pretty.rs | 42 +- src/libsyntax/attr/mod.rs | 60 +- src/libsyntax/config.rs | 166 +- src/libsyntax/ext/base.rs | 48 +- src/libsyntax/ext/derive.rs | 7 +- src/libsyntax/ext/expand.rs | 290 +-- src/libsyntax/ext/placeholders.rs | 79 +- src/libsyntax/ext/tt/transcribe.rs | 6 +- src/libsyntax/fold.rs | 2002 ++++++++++---------- src/libsyntax/lib.rs | 4 +- src/libsyntax/parse/parser.rs | 3 +- src/libsyntax/parse/token.rs | 2 +- src/libsyntax/test.rs | 64 +- src/libsyntax/tokenstream.rs | 2 +- src/libsyntax/util/map_in_place.rs | 102 + src/libsyntax/util/move_map.rs | 115 -- src/libsyntax_ext/deriving/generic/mod.rs | 12 +- src/libsyntax_ext/proc_macro_decls.rs | 4 +- .../run-pass-fulldeps/pprust-expr-roundtrip.rs | 52 +- src/test/ui/issues/issue-49934.rs | 4 +- 24 files changed, 1511 insertions(+), 1610 deletions(-) create mode 100644 src/libsyntax/util/map_in_place.rs delete mode 100644 src/libsyntax/util/move_map.rs (limited to 'src/libsyntax_ext') diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs index 73a35c7cdcd..1fb1794d514 100644 --- a/src/librustc_allocator/expand.rs +++ b/src/librustc_allocator/expand.rs @@ -16,7 +16,7 @@ use syntax::{ expand::ExpansionConfig, hygiene::{self, Mark, SyntaxContext}, }, - fold::{self, Folder}, + mut_visit::{self, MutVisitor}, parse::ParseSess, ptr::P, symbol::Symbol @@ -28,10 +28,10 @@ use {AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS}; pub fn modify( sess: &ParseSess, resolver: &mut dyn Resolver, - krate: Crate, + krate: &mut Crate, crate_name: String, handler: &rustc_errors::Handler, -) -> ast::Crate { +) { ExpandAllocatorDirectives { handler, sess, @@ -39,7 +39,7 @@ pub fn modify( found: false, crate_name: Some(crate_name), in_submod: -1, // -1 to account for the "root" module - }.fold_crate(krate) + 
}.visit_crate(krate); } struct ExpandAllocatorDirectives<'a> { @@ -54,14 +54,14 @@ struct ExpandAllocatorDirectives<'a> { in_submod: isize, } -impl<'a> Folder for ExpandAllocatorDirectives<'a> { - fn fold_item(&mut self, item: P) -> SmallVec<[P; 1]> { +impl<'a> MutVisitor for ExpandAllocatorDirectives<'a> { + fn flat_map_item(&mut self, item: P) -> SmallVec<[P; 1]> { debug!("in submodule {}", self.in_submod); let name = if attr::contains_name(&item.attrs, "global_allocator") { "global_allocator" } else { - return fold::noop_fold_item(item, self); + return mut_visit::noop_flat_map_item(item, self); }; match item.node { ItemKind::Static(..) => {} @@ -139,25 +139,24 @@ impl<'a> Folder for ExpandAllocatorDirectives<'a> { let name = f.kind.fn_name("allocator_abi"); let allocator_abi = Ident::with_empty_ctxt(Symbol::gensym(&name)); let module = f.cx.item_mod(span, span, allocator_abi, Vec::new(), items); - let module = f.cx.monotonic_expander().fold_item(module).pop().unwrap(); + let module = f.cx.monotonic_expander().flat_map_item(module).pop().unwrap(); // Return the item and new submodule smallvec![item, module] } // If we enter a submodule, take note. - fn fold_mod(&mut self, m: Mod) -> Mod { + fn visit_mod(&mut self, m: &mut Mod) { debug!("enter submodule"); self.in_submod += 1; - let ret = fold::noop_fold_mod(m, self); + mut_visit::noop_visit_mod(m, self); self.in_submod -= 1; debug!("exit submodule"); - ret } - // `fold_mac` is disabled by default. Enable it here. - fn fold_mac(&mut self, mac: Mac) -> Mac { - fold::noop_fold_mac(mac, self) + // `visit_mac` is disabled by default. Enable it here. 
+ fn visit_mac(&mut self, mac: &mut Mac) { + mut_visit::noop_visit_mac(mac, self) } } diff --git a/src/librustc_data_structures/thin_vec.rs b/src/librustc_data_structures/thin_vec.rs index 359f9b7842d..ed57c528f51 100644 --- a/src/librustc_data_structures/thin_vec.rs +++ b/src/librustc_data_structures/thin_vec.rs @@ -39,6 +39,15 @@ impl ::std::ops::Deref for ThinVec { } } +impl ::std::ops::DerefMut for ThinVec { + fn deref_mut(&mut self) -> &mut [T] { + match *self { + ThinVec(None) => &mut [], + ThinVec(Some(ref mut vec)) => vec, + } + } +} + impl Extend for ThinVec { fn extend>(&mut self, iter: I) { match *self { diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index d3412ec2dd9..4549b20899d 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -32,7 +32,7 @@ use rustc_typeck as typeck; use syntax::{self, ast, attr, diagnostics, visit}; use syntax::early_buffered_lints::BufferedEarlyLint; use syntax::ext::base::ExtCtxt; -use syntax::fold::Folder; +use syntax::mut_visit::MutVisitor; use syntax::parse::{self, PResult}; use syntax::util::node_count::NodeCounter; use syntax::util::lev_distance::find_best_match_for_name; @@ -1000,12 +1000,12 @@ where }); sess.profiler(|p| p.end_activity(ProfileCategory::Expansion)); - krate = time(sess, "maybe building test harness", || { + time(sess, "maybe building test harness", || { syntax::test::modify_for_testing( &sess.parse_sess, &mut resolver, sess.opts.test, - krate, + &mut krate, sess.diagnostic(), &sess.features_untracked(), ) @@ -1014,7 +1014,7 @@ where // If we're actually rustdoc then there's no need to actually compile // anything, so switch everything to just looping if sess.opts.actually_rustdoc { - krate = ReplaceBodyWithLoop::new(sess).fold_crate(krate); + ReplaceBodyWithLoop::new(sess).visit_crate(&mut krate); } let (has_proc_macro_decls, has_global_allocator) = time(sess, "AST validation", || { @@ -1045,11 +1045,11 @@ where if has_global_allocator { // Expand 
global allocators, which are treated as an in-tree proc macro - krate = time(sess, "creating allocators", || { + time(sess, "creating allocators", || { allocator::expand::modify( &sess.parse_sess, &mut resolver, - krate, + &mut krate, crate_name.to_string(), sess.diagnostic(), ) diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index a95ce810ffa..d0dc7799c7b 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -870,9 +870,9 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { control.after_hir_lowering.stop = Compilation::Stop; control.after_parse.callback = box move |state| { - state.krate = Some(pretty::fold_crate(state.session, - state.krate.take().unwrap(), - ppm)); + let mut krate = state.krate.take().unwrap(); + pretty::visit_crate(state.session, &mut krate, ppm); + state.krate = Some(krate); }; control.after_hir_lowering.callback = box move |state| { pretty::print_after_hir_lowering(state.session, @@ -891,7 +891,8 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { control.after_parse.stop = Compilation::Stop; control.after_parse.callback = box move |state| { - let krate = pretty::fold_crate(state.session, state.krate.take().unwrap(), ppm); + let mut krate = state.krate.take().unwrap(); + pretty::visit_crate(state.session, &mut krate, ppm); pretty::print_after_parsing(state.session, state.input, &krate, diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index d980c5a3d29..4caf2ec676f 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -16,7 +16,7 @@ use rustc_metadata::cstore::CStore; use rustc_mir::util::{write_mir_pretty, write_mir_graphviz}; use syntax::ast::{self, BlockCheckMode}; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{*, MutVisitor, visit_clobber}; use syntax::print::{pprust}; use syntax::print::pprust::PrintState; use syntax::ptr::P; @@ -28,6 +28,7 @@ use smallvec::SmallVec; use std::cell::Cell; use std::fs::File; use std::io::{self, 
Write}; +use std::ops::DerefMut; use std::option; use std::path::Path; use std::str::FromStr; @@ -703,42 +704,42 @@ impl<'a> ReplaceBodyWithLoop<'a> { } } -impl<'a> fold::Folder for ReplaceBodyWithLoop<'a> { - fn fold_item_kind(&mut self, i: ast::ItemKind) -> ast::ItemKind { +impl<'a> MutVisitor for ReplaceBodyWithLoop<'a> { + fn visit_item_kind(&mut self, i: &mut ast::ItemKind) { let is_const = match i { ast::ItemKind::Static(..) | ast::ItemKind::Const(..) => true, ast::ItemKind::Fn(ref decl, ref header, _, _) => header.constness.node == ast::Constness::Const || Self::should_ignore_fn(decl), _ => false, }; - self.run(is_const, |s| fold::noop_fold_item_kind(i, s)) + self.run(is_const, |s| noop_visit_item_kind(i, s)) } - fn fold_trait_item(&mut self, i: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { + fn flat_map_trait_item(&mut self, i: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { let is_const = match i.node { ast::TraitItemKind::Const(..) => true, ast::TraitItemKind::Method(ast::MethodSig { ref decl, ref header, .. }, _) => header.constness.node == ast::Constness::Const || Self::should_ignore_fn(decl), _ => false, }; - self.run(is_const, |s| fold::noop_fold_trait_item(i, s)) + self.run(is_const, |s| noop_flat_map_trait_item(i, s)) } - fn fold_impl_item(&mut self, i: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> { + fn flat_map_impl_item(&mut self, i: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> { let is_const = match i.node { ast::ImplItemKind::Const(..) => true, ast::ImplItemKind::Method(ast::MethodSig { ref decl, ref header, .. 
}, _) => header.constness.node == ast::Constness::Const || Self::should_ignore_fn(decl), _ => false, }; - self.run(is_const, |s| fold::noop_fold_impl_item(i, s)) + self.run(is_const, |s| noop_flat_map_impl_item(i, s)) } - fn fold_anon_const(&mut self, c: ast::AnonConst) -> ast::AnonConst { - self.run(true, |s| fold::noop_fold_anon_const(c, s)) + fn visit_anon_const(&mut self, c: &mut ast::AnonConst) { + self.run(true, |s| noop_visit_anon_const(c, s)) } - fn fold_block(&mut self, b: P) -> P { + fn visit_block(&mut self, b: &mut P) { fn stmt_to_block(rules: ast::BlockCheckMode, s: Option, sess: &Session) -> ast::Block { @@ -780,14 +781,14 @@ impl<'a> fold::Folder for ReplaceBodyWithLoop<'a> { }; if self.within_static_or_const { - fold::noop_fold_block(b, self) + noop_visit_block(b, self) } else { - b.map(|b| { + visit_clobber(b.deref_mut(), |b| { let mut stmts = vec![]; for s in b.stmts { let old_blocks = self.nested_blocks.replace(vec![]); - stmts.extend(self.fold_stmt(s).into_iter().filter(|s| s.is_item())); + stmts.extend(self.flat_map_stmt(s).into_iter().filter(|s| s.is_item())); // we put a Some in there earlier with that replace(), so this is valid let new_blocks = self.nested_blocks.take().unwrap(); @@ -818,9 +819,9 @@ impl<'a> fold::Folder for ReplaceBodyWithLoop<'a> { } // in general the pretty printer processes unexpanded code, so - // we override the default `fold_mac` method which panics. - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - fold::noop_fold_mac(mac, self) + // we override the default `visit_mac` method which panics. 
+ fn visit_mac(&mut self, mac: &mut ast::Mac) { + noop_visit_mac(mac, self) } } @@ -889,12 +890,9 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec, } } -pub fn fold_crate(sess: &Session, krate: ast::Crate, ppm: PpMode) -> ast::Crate { +pub fn visit_crate(sess: &Session, krate: &mut ast::Crate, ppm: PpMode) { if let PpmSource(PpmEveryBodyLoops) = ppm { - let mut fold = ReplaceBodyWithLoop::new(sess); - fold.fold_crate(krate) - } else { - krate + ReplaceBodyWithLoop::new(sess).visit_crate(krate); } } diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 58be7c3e085..c5a397e0480 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -15,6 +15,7 @@ use ast; use ast::{AttrId, Attribute, AttrStyle, Name, Ident, Path, PathSegment}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind, GenericParam}; +use mut_visit::visit_clobber; use source_map::{BytePos, Spanned, respan, dummy_spanned}; use syntax_pos::{FileName, Span}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; @@ -28,6 +29,7 @@ use tokenstream::{TokenStream, TokenTree, DelimSpan}; use GLOBALS; use std::iter; +use std::ops::DerefMut; pub fn mark_used(attr: &Attribute) { debug!("Marking {:?} as used.", attr); @@ -695,13 +697,13 @@ impl LitKind { pub trait HasAttrs: Sized { fn attrs(&self) -> &[ast::Attribute]; - fn map_attrs) -> Vec>(self, f: F) -> Self; + fn visit_attrs)>(&mut self, f: F); } impl HasAttrs for Spanned { fn attrs(&self) -> &[ast::Attribute] { self.node.attrs() } - fn map_attrs) -> Vec>(self, f: F) -> Self { - respan(self.span, self.node.map_attrs(f)) + fn visit_attrs)>(&mut self, f: F) { + self.node.visit_attrs(f); } } @@ -709,7 +711,7 @@ impl HasAttrs for Vec { fn attrs(&self) -> &[Attribute] { self } - fn map_attrs) -> Vec>(self, f: F) -> Self { + fn visit_attrs)>(&mut self, f: F) { f(self) } } @@ -718,8 +720,12 @@ impl HasAttrs for 
ThinVec { fn attrs(&self) -> &[Attribute] { self } - fn map_attrs) -> Vec>(self, f: F) -> Self { - f(self.into()).into() + fn visit_attrs)>(&mut self, f: F) { + visit_clobber(self, |this| { + let mut vec = this.into(); + f(&mut vec); + vec.into() + }); } } @@ -727,8 +733,8 @@ impl HasAttrs for P { fn attrs(&self) -> &[Attribute] { (**self).attrs() } - fn map_attrs) -> Vec>(self, f: F) -> Self { - self.map(|t| t.map_attrs(f)) + fn visit_attrs)>(&mut self, f: F) { + (**self).visit_attrs(f); } } @@ -745,23 +751,27 @@ impl HasAttrs for StmtKind { } } - fn map_attrs) -> Vec>(self, f: F) -> Self { + fn visit_attrs)>(&mut self, f: F) { match self { - StmtKind::Local(local) => StmtKind::Local(local.map_attrs(f)), - StmtKind::Item(..) => self, - StmtKind::Expr(expr) => StmtKind::Expr(expr.map_attrs(f)), - StmtKind::Semi(expr) => StmtKind::Semi(expr.map_attrs(f)), - StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, style, attrs)| { - (mac, style, attrs.map_attrs(f)) - })), + StmtKind::Local(local) => local.visit_attrs(f), + StmtKind::Item(..) => {} + StmtKind::Expr(expr) => expr.visit_attrs(f), + StmtKind::Semi(expr) => expr.visit_attrs(f), + StmtKind::Mac(mac) => { + let (_mac, _style, attrs) = mac.deref_mut(); + attrs.visit_attrs(f); + } } } } impl HasAttrs for Stmt { - fn attrs(&self) -> &[ast::Attribute] { self.node.attrs() } - fn map_attrs) -> Vec>(self, f: F) -> Self { - Stmt { id: self.id, node: self.node.map_attrs(f), span: self.span } + fn attrs(&self) -> &[ast::Attribute] { + self.node.attrs() + } + + fn visit_attrs)>(&mut self, f: F) { + self.node.visit_attrs(f); } } @@ -770,9 +780,8 @@ impl HasAttrs for GenericParam { &self.attrs } - fn map_attrs) -> Vec>(mut self, f: F) -> Self { - self.attrs = self.attrs.map_attrs(f); - self + fn visit_attrs)>(&mut self, f: F) { + self.attrs.visit_attrs(f); } } @@ -783,11 +792,8 @@ macro_rules! 
derive_has_attrs { &self.attrs } - fn map_attrs(mut self, f: F) -> Self - where F: FnOnce(Vec) -> Vec, - { - self.attrs = self.attrs.map_attrs(f); - self + fn visit_attrs)>(&mut self, f: F) { + self.attrs.visit_attrs(f); } } )* } diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index b35730bf238..fce2601e3aa 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -6,16 +6,15 @@ use feature_gate::{ get_features, GateIssue, }; -use {fold, attr}; +use attr; use ast; -use source_map::Spanned; use edition::Edition; -use parse::{token, ParseSess}; -use smallvec::SmallVec; use errors::Applicability; -use util::move_map::MoveMap; - +use mut_visit::*; +use parse::{token, ParseSess}; use ptr::P; +use smallvec::SmallVec; +use util::map_in_place::MapInPlace; /// A folder that strips out items that do not belong in the current configuration. pub struct StripUnconfigured<'a> { @@ -65,8 +64,8 @@ macro_rules! configure { } impl<'a> StripUnconfigured<'a> { - pub fn configure(&mut self, node: T) -> Option { - let node = self.process_cfg_attrs(node); + pub fn configure(&mut self, mut node: T) -> Option { + self.process_cfg_attrs(&mut node); if self.in_cfg(node.attrs()) { Some(node) } else { None } } @@ -76,10 +75,10 @@ impl<'a> StripUnconfigured<'a> { /// Gives compiler warnigns if any `cfg_attr` does not contain any /// attributes and is in the original source code. Gives compiler errors if /// the syntax of any `cfg_attr` is incorrect. 
- pub fn process_cfg_attrs(&mut self, node: T) -> T { - node.map_attrs(|attrs| { - attrs.into_iter().flat_map(|attr| self.process_cfg_attr(attr)).collect() - }) + pub fn process_cfg_attrs(&mut self, node: &mut T) { + node.visit_attrs(|attrs| { + attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr)); + }); } /// Parse and expand a single `cfg_attr` attribute into a list of attributes @@ -218,70 +217,47 @@ impl<'a> StripUnconfigured<'a> { } } - pub fn configure_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod { - ast::ForeignMod { - abi: foreign_mod.abi, - items: foreign_mod.items.move_flat_map(|item| self.configure(item)), - } + pub fn configure_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) { + let ast::ForeignMod { abi: _, items } = foreign_mod; + items.flat_map_in_place(|item| self.configure(item)); } - fn configure_variant_data(&mut self, vdata: ast::VariantData) -> ast::VariantData { + fn configure_variant_data(&mut self, vdata: &mut ast::VariantData) { match vdata { - ast::VariantData::Struct(fields, id) => { - let fields = fields.move_flat_map(|field| self.configure(field)); - ast::VariantData::Struct(fields, id) - } - ast::VariantData::Tuple(fields, id) => { - let fields = fields.move_flat_map(|field| self.configure(field)); - ast::VariantData::Tuple(fields, id) - } - ast::VariantData::Unit(id) => ast::VariantData::Unit(id) + ast::VariantData::Struct(fields, _id) | + ast::VariantData::Tuple(fields, _id) => + fields.flat_map_in_place(|field| self.configure(field)), + ast::VariantData::Unit(_id) => {} } } - pub fn configure_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind { + pub fn configure_item_kind(&mut self, item: &mut ast::ItemKind) { match item { - ast::ItemKind::Struct(def, generics) => { - ast::ItemKind::Struct(self.configure_variant_data(def), generics) - } - ast::ItemKind::Union(def, generics) => { - ast::ItemKind::Union(self.configure_variant_data(def), generics) - } - ast::ItemKind::Enum(def, 
generics) => { - let variants = def.variants.move_flat_map(|v| { - self.configure(v).map(|v| { - Spanned { - node: ast::Variant_ { - ident: v.node.ident, - attrs: v.node.attrs, - data: self.configure_variant_data(v.node.data), - disr_expr: v.node.disr_expr, - }, - span: v.span - } - }) - }); - ast::ItemKind::Enum(ast::EnumDef { variants }, generics) + ast::ItemKind::Struct(def, _generics) | + ast::ItemKind::Union(def, _generics) => self.configure_variant_data(def), + ast::ItemKind::Enum(ast::EnumDef { variants }, _generics) => { + variants.flat_map_in_place(|variant| self.configure(variant)); + for variant in variants { + self.configure_variant_data(&mut variant.node.data); + } } - item => item, + _ => {} } } - pub fn configure_expr_kind(&mut self, expr_kind: ast::ExprKind) -> ast::ExprKind { + pub fn configure_expr_kind(&mut self, expr_kind: &mut ast::ExprKind) { match expr_kind { - ast::ExprKind::Match(m, arms) => { - let arms = arms.move_flat_map(|a| self.configure(a)); - ast::ExprKind::Match(m, arms) + ast::ExprKind::Match(_m, arms) => { + arms.flat_map_in_place(|arm| self.configure(arm)); } - ast::ExprKind::Struct(path, fields, base) => { - let fields = fields.move_flat_map(|field| self.configure(field)); - ast::ExprKind::Struct(path, fields, base) + ast::ExprKind::Struct(_path, fields, _base) => { + fields.flat_map_in_place(|field| self.configure(field)); } - _ => expr_kind, + _ => {} } } - pub fn configure_expr(&mut self, expr: P) -> P { + pub fn configure_expr(&mut self, expr: &mut P) { self.visit_expr_attrs(expr.attrs()); // If an expr is valid to cfg away it will have been removed by the @@ -289,8 +265,8 @@ impl<'a> StripUnconfigured<'a> { // Anything else is always required, and thus has to error out // in case of a cfg attr. 
// - // N.B., this is intentionally not part of the fold_expr() function - // in order for fold_opt_expr() to be able to avoid this check + // N.B., this is intentionally not part of the visit_expr() function + // in order for filter_map_expr() to be able to avoid this check if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a)) { let msg = "removing an expression is not supported in this position"; self.sess.span_diagnostic.span_err(attr.span, msg); @@ -299,14 +275,10 @@ impl<'a> StripUnconfigured<'a> { self.process_cfg_attrs(expr) } - pub fn configure_pat(&mut self, pattern: P) -> P { - pattern.map(|mut pattern| { - if let ast::PatKind::Struct(path, fields, etc) = pattern.node { - let fields = fields.move_flat_map(|field| self.configure(field)); - pattern.node = ast::PatKind::Struct(path, fields, etc); - } - pattern - }) + pub fn configure_pat(&mut self, pat: &mut P) { + if let ast::PatKind::Struct(_path, fields, _etc) = &mut pat.node { + fields.flat_map_in_place(|field| self.configure(field)); + } } // deny #[cfg] on generic parameters until we decide what to do with it. 
@@ -326,54 +298,54 @@ impl<'a> StripUnconfigured<'a> { } } -impl<'a> fold::Folder for StripUnconfigured<'a> { - fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod { - let foreign_mod = self.configure_foreign_mod(foreign_mod); - fold::noop_fold_foreign_mod(foreign_mod, self) +impl<'a> MutVisitor for StripUnconfigured<'a> { + fn visit_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) { + self.configure_foreign_mod(foreign_mod); + noop_visit_foreign_mod(foreign_mod, self); } - fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind { - let item = self.configure_item_kind(item); - fold::noop_fold_item_kind(item, self) + fn visit_item_kind(&mut self, item: &mut ast::ItemKind) { + self.configure_item_kind(item); + noop_visit_item_kind(item, self); } - fn fold_expr(&mut self, expr: P) -> P { - let mut expr = self.configure_expr(expr).into_inner(); - expr.node = self.configure_expr_kind(expr.node); - P(fold::noop_fold_expr(expr, self)) + fn visit_expr(&mut self, expr: &mut P) { + self.configure_expr(expr); + self.configure_expr_kind(&mut expr.node); + noop_visit_expr(expr, self); } - fn fold_opt_expr(&mut self, expr: P) -> Option> { - let mut expr = configure!(self, expr).into_inner(); - expr.node = self.configure_expr_kind(expr.node); - Some(P(fold::noop_fold_expr(expr, self))) + fn filter_map_expr(&mut self, expr: P) -> Option> { + let mut expr = configure!(self, expr); + self.configure_expr_kind(&mut expr.node); + noop_visit_expr(&mut expr, self); + Some(expr) } - fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { - fold::noop_fold_stmt(configure!(self, stmt), self) + fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { + noop_flat_map_stmt(configure!(self, stmt), self) } - fn fold_item(&mut self, item: P) -> SmallVec<[P; 1]> { - fold::noop_fold_item(configure!(self, item), self) + fn flat_map_item(&mut self, item: P) -> SmallVec<[P; 1]> { + noop_flat_map_item(configure!(self, 
item), self) } - fn fold_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> - { - fold::noop_fold_impl_item(configure!(self, item), self) + fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> { + noop_flat_map_impl_item(configure!(self, item), self) } - fn fold_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { - fold::noop_fold_trait_item(configure!(self, item), self) + fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { + noop_flat_map_trait_item(configure!(self, item), self) } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { + fn visit_mac(&mut self, _mac: &mut ast::Mac) { // Don't configure interpolated AST (cf. issue #34171). // Interpolated AST will get configured once the surrounding tokens are parsed. - mac } - fn fold_pat(&mut self, pattern: P) -> P { - fold::noop_fold_pat(self.configure_pat(pattern), self) + fn visit_pat(&mut self, pat: &mut P) { + self.configure_pat(pat); + noop_visit_pat(pat, self) } } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 09e7e57f78c..b53068f5bc2 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -8,7 +8,7 @@ use edition::Edition; use errors::{DiagnosticBuilder, DiagnosticId}; use ext::expand::{self, AstFragment, Invocation}; use ext::hygiene::{self, Mark, SyntaxContext, Transparency}; -use fold::{self, Folder}; +use mut_visit::{self, MutVisitor}; use parse::{self, parser, DirectoryOwnership}; use parse::token; use ptr::P; @@ -47,15 +47,14 @@ impl HasAttrs for Annotatable { } } - fn map_attrs) -> Vec>(self, f: F) -> Self { + fn visit_attrs)>(&mut self, f: F) { match self { - Annotatable::Item(item) => Annotatable::Item(item.map_attrs(f)), - Annotatable::TraitItem(trait_item) => Annotatable::TraitItem(trait_item.map_attrs(f)), - Annotatable::ImplItem(impl_item) => Annotatable::ImplItem(impl_item.map_attrs(f)), - Annotatable::ForeignItem(foreign_item) => 
- Annotatable::ForeignItem(foreign_item.map_attrs(f)), - Annotatable::Stmt(stmt) => Annotatable::Stmt(stmt.map_attrs(f)), - Annotatable::Expr(expr) => Annotatable::Expr(expr.map_attrs(f)), + Annotatable::Item(item) => item.visit_attrs(f), + Annotatable::TraitItem(trait_item) => trait_item.visit_attrs(f), + Annotatable::ImplItem(impl_item) => impl_item.visit_attrs(f), + Annotatable::ForeignItem(foreign_item) => foreign_item.visit_attrs(f), + Annotatable::Stmt(stmt) => stmt.visit_attrs(f), + Annotatable::Expr(expr) => expr.visit_attrs(f), } } } @@ -263,24 +262,24 @@ impl TTMacroExpander for F ) -> Box { struct AvoidInterpolatedIdents; - impl Folder for AvoidInterpolatedIdents { - fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree { - if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt { + impl MutVisitor for AvoidInterpolatedIdents { + fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) { + if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt { if let token::NtIdent(ident, is_raw) = nt.0 { - return tokenstream::TokenTree::Token(ident.span, - token::Ident(ident, is_raw)); + *tt = tokenstream::TokenTree::Token(ident.span, + token::Ident(ident, is_raw)); } } - fold::noop_fold_tt(tt, self) + mut_visit::noop_visit_tt(tt, self) } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut ast::Mac) { + mut_visit::noop_visit_mac(mac, self) } } let input: Vec<_> = - input.trees().map(|tt| AvoidInterpolatedIdents.fold_tt(tt)).collect(); + input.trees().map(|mut tt| { AvoidInterpolatedIdents.visit_tt(&mut tt); tt }).collect(); (*self)(ecx, span, &input) } } @@ -981,17 +980,14 @@ impl<'a> ExtCtxt<'a> { /// compilation on error, merely emits a non-fatal error and returns None. 
pub fn expr_to_spanned_string<'a>( cx: &'a mut ExtCtxt, - expr: P, + mut expr: P, err_msg: &str, ) -> Result, Option>> { // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation. - let expr = expr.map(|mut expr| { - expr.span = expr.span.apply_mark(cx.current_expansion.mark); - expr - }); + expr.span = expr.span.apply_mark(cx.current_expansion.mark); // we want to be able to handle e.g., `concat!("foo", "bar")` - let expr = cx.expander().fold_expr(expr); + cx.expander().visit_expr(&mut expr); Err(match expr.node { ast::ExprKind::Lit(ref l) => match l.node { ast::LitKind::Str(s, style) => return Ok(respan(expr.span, (s, style))), @@ -1055,7 +1051,9 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt, let mut p = cx.new_parser_from_tts(tts); let mut es = Vec::new(); while p.token != token::Eof { - es.push(cx.expander().fold_expr(panictry!(p.parse_expr()))); + let mut expr = panictry!(p.parse_expr()); + cx.expander().visit_expr(&mut expr); + es.push(expr); if p.eat(&token::Comma) { continue; } diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 7ef09ce5fbd..fa8cf6c496a 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -40,7 +40,7 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec result } -pub fn add_derived_markers(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: T) -> T +pub fn add_derived_markers(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: &mut T) where T: HasAttrs, { let (mut names, mut pretty_name) = (FxHashSet::default(), "derive(".to_owned()); @@ -64,7 +64,7 @@ pub fn add_derived_markers(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path] }); let span = span.with_ctxt(cx.backtrace()); - item.map_attrs(|mut attrs| { + item.visit_attrs(|attrs| { if names.contains(&Symbol::intern("Eq")) && names.contains(&Symbol::intern("PartialEq")) { let meta = cx.meta_word(span, Symbol::intern("structural_match")); attrs.push(cx.attribute(span, meta)); @@ -73,6 +73,5 @@ 
pub fn add_derived_markers(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path] let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker")); attrs.push(cx.attribute(span, meta)); } - attrs - }) + }); } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 2effd910e85..a0ccce98659 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -9,7 +9,7 @@ use ext::derive::{add_derived_markers, collect_derives}; use ext::hygiene::{self, Mark, SyntaxContext}; use ext::placeholders::{placeholder, PlaceholderExpander}; use feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err}; -use fold::*; +use mut_visit::*; use parse::{DirectoryOwnership, PResult, ParseSess}; use parse::token::{self, Token}; use parse::parser::Parser; @@ -21,11 +21,13 @@ use syntax_pos::{Span, DUMMY_SP, FileName}; use syntax_pos::hygiene::ExpnFormat; use tokenstream::{TokenStream, TokenTree}; use visit::{self, Visitor}; +use util::map_in_place::MapInPlace; use rustc_data_structures::fx::FxHashMap; use std::fs; use std::io::ErrorKind; use std::{iter, mem}; +use std::ops::DerefMut; use std::rc::Rc; use std::path::PathBuf; @@ -35,8 +37,8 @@ macro_rules! ast_fragments { $kind_name:expr; // FIXME: HACK: this should be `$(one ...)?` and `$(many ...)?` but `?` macro // repetition was removed from 2015 edition in #51587 because of ambiguities. - $(one fn $fold_ast:ident; fn $visit_ast:ident;)* - $(many fn $fold_ast_elt:ident; fn $visit_ast_elt:ident;)* + $(one fn $mut_visit_ast:ident; fn $visit_ast:ident;)* + $(many fn $flat_map_ast_elt:ident; fn $visit_ast_elt:ident;)* fn $make_ast:ident; })* ) => { @@ -86,16 +88,20 @@ macro_rules! 
ast_fragments { } })* - pub fn fold_with(self, folder: &mut F) -> Self { + pub fn mut_visit_with(&mut self, vis: &mut F) { match self { - AstFragment::OptExpr(expr) => - AstFragment::OptExpr(expr.and_then(|expr| folder.fold_opt_expr(expr))), - $($(AstFragment::$Kind(ast) => - AstFragment::$Kind(folder.$fold_ast(ast)),)*)* + AstFragment::OptExpr(opt_expr) => { + visit_clobber(opt_expr, |opt_expr| { + if let Some(expr) = opt_expr { + vis.filter_map_expr(expr) + } else { + None + } + }); + } + $($(AstFragment::$Kind(ast) => vis.$mut_visit_ast(ast),)*)* $($(AstFragment::$Kind(ast) => - AstFragment::$Kind(ast.into_iter() - .flat_map(|ast| folder.$fold_ast_elt(ast)) - .collect()),)*)* + ast.flat_map_in_place(|ast| vis.$flat_map_ast_elt(ast)),)*)* } } @@ -111,14 +117,14 @@ macro_rules! ast_fragments { } } - impl<'a, 'b> Folder for MacroExpander<'a, 'b> { - fn fold_opt_expr(&mut self, expr: P) -> Option> { + impl<'a, 'b> MutVisitor for MacroExpander<'a, 'b> { + fn filter_map_expr(&mut self, expr: P) -> Option> { self.expand_fragment(AstFragment::OptExpr(Some(expr))).make_opt_expr() } - $($(fn $fold_ast(&mut self, ast: $AstTy) -> $AstTy { - self.expand_fragment(AstFragment::$Kind(ast)).$make_ast() + $($(fn $mut_visit_ast(&mut self, ast: &mut $AstTy) { + visit_clobber(ast, |ast| self.expand_fragment(AstFragment::$Kind(ast)).$make_ast()); })*)* - $($(fn $fold_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy { + $($(fn $flat_map_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy { self.expand_fragment(AstFragment::$Kind(smallvec![ast_elt])).$make_ast() })*)* } @@ -133,23 +139,23 @@ macro_rules! ast_fragments { } ast_fragments! 
{ - Expr(P) { "expression"; one fn fold_expr; fn visit_expr; fn make_expr; } - Pat(P) { "pattern"; one fn fold_pat; fn visit_pat; fn make_pat; } - Ty(P) { "type"; one fn fold_ty; fn visit_ty; fn make_ty; } + Expr(P) { "expression"; one fn visit_expr; fn visit_expr; fn make_expr; } + Pat(P) { "pattern"; one fn visit_pat; fn visit_pat; fn make_pat; } + Ty(P) { "type"; one fn visit_ty; fn visit_ty; fn make_ty; } Stmts(SmallVec<[ast::Stmt; 1]>) { - "statement"; many fn fold_stmt; fn visit_stmt; fn make_stmts; + "statement"; many fn flat_map_stmt; fn visit_stmt; fn make_stmts; } Items(SmallVec<[P; 1]>) { - "item"; many fn fold_item; fn visit_item; fn make_items; + "item"; many fn flat_map_item; fn visit_item; fn make_items; } TraitItems(SmallVec<[ast::TraitItem; 1]>) { - "trait item"; many fn fold_trait_item; fn visit_trait_item; fn make_trait_items; + "trait item"; many fn flat_map_trait_item; fn visit_trait_item; fn make_trait_items; } ImplItems(SmallVec<[ast::ImplItem; 1]>) { - "impl item"; many fn fold_impl_item; fn visit_impl_item; fn make_impl_items; + "impl item"; many fn flat_map_impl_item; fn visit_impl_item; fn make_impl_items; } ForeignItems(SmallVec<[ast::ForeignItem; 1]>) { - "foreign item"; many fn fold_foreign_item; fn visit_foreign_item; fn make_foreign_items; + "foreign item"; many fn flat_map_foreign_item; fn visit_foreign_item; fn make_foreign_items; } } @@ -297,7 +303,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.current_expansion.depth = 0; // Collect all macro invocations and replace them with placeholders. 
- let (fragment_with_placeholders, mut invocations) + let (mut fragment_with_placeholders, mut invocations) = self.collect_invocations(input_fragment, &[]); // Optimization: if we resolve all imports now, @@ -369,10 +375,10 @@ impl<'a, 'b> MacroExpander<'a, 'b> { err.emit(); } - let item = self.fully_configure(item) - .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs }); - let item_with_markers = - add_derived_markers(&mut self.cx, item.span(), &traits, item.clone()); + let mut item = self.fully_configure(item); + item.visit_attrs(|attrs| attrs.retain(|a| a.path != "derive")); + let mut item_with_markers = item.clone(); + add_derived_markers(&mut self.cx, item.span(), &traits, &mut item_with_markers); let derives = derives.entry(invoc.expansion_data.mark).or_default(); derives.reserve(traits.len()); @@ -427,7 +433,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { expanded_fragment, derives); } } - fragment_with_placeholders.fold_with(&mut placeholder_expander) + fragment_with_placeholders.mut_visit_with(&mut placeholder_expander); + fragment_with_placeholders } fn resolve_imports(&mut self) { @@ -440,12 +447,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> { /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s. /// Then call into resolver that builds a skeleton ("reduced graph") of the fragment and /// prepares data for resolving paths of macro invocations. - fn collect_invocations(&mut self, fragment: AstFragment, derives: &[Mark]) + fn collect_invocations(&mut self, mut fragment: AstFragment, derives: &[Mark]) -> (AstFragment, Vec) { // Resolve `$crate`s in the fragment for pretty-printing. 
self.cx.resolver.resolve_dollar_crates(&fragment); - let (fragment_with_placeholders, invocations) = { + let invocations = { let mut collector = InvocationCollector { cfg: StripUnconfigured { sess: self.cx.parse_sess, @@ -455,16 +462,16 @@ impl<'a, 'b> MacroExpander<'a, 'b> { invocations: Vec::new(), monotonic: self.monotonic, }; - (fragment.fold_with(&mut collector), collector.invocations) + fragment.mut_visit_with(&mut collector); + collector.invocations }; if self.monotonic { self.cx.resolver.visit_ast_fragment_with_placeholders( - self.cx.current_expansion.mark, &fragment_with_placeholders, derives - ); + self.cx.current_expansion.mark, &fragment, derives); } - (fragment_with_placeholders, invocations) + (fragment, invocations) } fn fully_configure(&mut self, item: Annotatable) -> Annotatable { @@ -476,24 +483,25 @@ impl<'a, 'b> MacroExpander<'a, 'b> { // we know that fold result vector will contain exactly one element match item { Annotatable::Item(item) => { - Annotatable::Item(cfg.fold_item(item).pop().unwrap()) + Annotatable::Item(cfg.flat_map_item(item).pop().unwrap()) } Annotatable::TraitItem(item) => { - Annotatable::TraitItem(item.map(|item| cfg.fold_trait_item(item).pop().unwrap())) + Annotatable::TraitItem( + item.map(|item| cfg.flat_map_trait_item(item).pop().unwrap())) } Annotatable::ImplItem(item) => { - Annotatable::ImplItem(item.map(|item| cfg.fold_impl_item(item).pop().unwrap())) + Annotatable::ImplItem(item.map(|item| cfg.flat_map_impl_item(item).pop().unwrap())) } Annotatable::ForeignItem(item) => { Annotatable::ForeignItem( - item.map(|item| cfg.fold_foreign_item(item).pop().unwrap()) + item.map(|item| cfg.flat_map_foreign_item(item).pop().unwrap()) ) } Annotatable::Stmt(stmt) => { - Annotatable::Stmt(stmt.map(|stmt| cfg.fold_stmt(stmt).pop().unwrap())) + Annotatable::Stmt(stmt.map(|stmt| cfg.flat_map_stmt(stmt).pop().unwrap())) } - Annotatable::Expr(expr) => { - Annotatable::Expr(cfg.fold_expr(expr)) + Annotatable::Expr(mut expr) => { + 
Annotatable::Expr({ cfg.visit_expr(&mut expr); expr }) } } } @@ -535,7 +543,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { invoc: Invocation, ext: &SyntaxExtension) -> Option { - let (attr, item) = match invoc.kind { + let (attr, mut item) = match invoc.kind { InvocationKind::Attr { attr, item, .. } => (attr?, item), _ => unreachable!(), }; @@ -558,7 +566,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { match *ext { NonMacroAttr { .. } => { attr::mark_known(&attr); - let item = item.map_attrs(|mut attrs| { attrs.push(attr); attrs }); + item.visit_attrs(|attrs| attrs.push(attr)); Some(invoc.fragment_kind.expect_from_annotatables(iter::once(item))) } MultiModifier(ref mac) => { @@ -1113,34 +1121,32 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { } /// If `item` is an attr invocation, remove and return the macro attribute and derive traits. - fn classify_item(&mut self, mut item: T) - -> (Option, Vec, T, /* after_derive */ bool) + fn classify_item(&mut self, item: &mut T) + -> (Option, Vec, /* after_derive */ bool) where T: HasAttrs, { let (mut attr, mut traits, mut after_derive) = (None, Vec::new(), false); - item = item.map_attrs(|mut attrs| { + item.visit_attrs(|mut attrs| { attr = self.find_attr_invoc(&mut attrs, &mut after_derive); traits = collect_derives(&mut self.cx, &mut attrs); - attrs }); - (attr, traits, item, after_derive) + (attr, traits, after_derive) } - /// Alternative of `classify_item()` that ignores `#[derive]` so invocations fallthrough + /// Alternative to `classify_item()` that ignores `#[derive]` so invocations fallthrough /// to the unused-attributes lint (making it an error on statements and expressions /// is a breaking change) - fn classify_nonitem(&mut self, mut item: T) - -> (Option, T, /* after_derive */ bool) { + fn classify_nonitem(&mut self, nonitem: &mut T) + -> (Option, /* after_derive */ bool) { let (mut attr, mut after_derive) = (None, false); - item = item.map_attrs(|mut attrs| { + nonitem.visit_attrs(|mut attrs| { attr = 
self.find_attr_invoc(&mut attrs, &mut after_derive); - attrs }); - (attr, item, after_derive) + (attr, after_derive) } fn configure(&mut self, node: T) -> Option { @@ -1173,14 +1179,14 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { } } -impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { - fn fold_expr(&mut self, expr: P) -> P { - let expr = self.cfg.configure_expr(expr); - expr.map(|mut expr| { - expr.node = self.cfg.configure_expr_kind(expr.node); +impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { + fn visit_expr(&mut self, expr: &mut P) { + self.cfg.configure_expr(expr); + visit_clobber(expr.deref_mut(), |mut expr| { + self.cfg.configure_expr_kind(&mut expr.node); // ignore derives so they remain unused - let (attr, expr, after_derive) = self.classify_nonitem(expr); + let (attr, after_derive) = self.classify_nonitem(&mut expr); if attr.is_some() { // Collect the invoc regardless of whether or not attributes are permitted here @@ -1189,7 +1195,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { // AstFragmentKind::Expr requires the macro to emit an expression. return self.collect_attr(attr, vec![], Annotatable::Expr(P(expr)), - AstFragmentKind::Expr, after_derive) + AstFragmentKind::Expr, after_derive) .make_expr() .into_inner() } @@ -1200,18 +1206,19 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { .make_expr() .into_inner() } else { - noop_fold_expr(expr, self) + noop_visit_expr(&mut expr, self); + expr } - }) + }); } - fn fold_opt_expr(&mut self, expr: P) -> Option> { + fn filter_map_expr(&mut self, expr: P) -> Option> { let expr = configure!(self, expr); expr.filter_map(|mut expr| { - expr.node = self.cfg.configure_expr_kind(expr.node); + self.cfg.configure_expr_kind(&mut expr.node); // Ignore derives so they remain unused. 
- let (attr, expr, after_derive) = self.classify_nonitem(expr); + let (attr, after_derive) = self.classify_nonitem(&mut expr); if attr.is_some() { attr.as_ref().map(|a| self.cfg.maybe_emit_expr_attr_err(a)); @@ -1228,44 +1235,45 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { .make_opt_expr() .map(|expr| expr.into_inner()) } else { - Some(noop_fold_expr(expr, self)) + Some({ noop_visit_expr(&mut expr, self); expr }) } }) } - fn fold_pat(&mut self, pat: P) -> P { - let pat = self.cfg.configure_pat(pat); + fn visit_pat(&mut self, pat: &mut P) { + self.cfg.configure_pat(pat); match pat.node { PatKind::Mac(_) => {} - _ => return noop_fold_pat(pat, self), + _ => return noop_visit_pat(pat, self), } - pat.and_then(|pat| match pat.node { - PatKind::Mac(mac) => self.collect_bang(mac, pat.span, AstFragmentKind::Pat).make_pat(), - _ => unreachable!(), - }) + visit_clobber(pat, |mut pat| { + match mem::replace(&mut pat.node, PatKind::Wild) { + PatKind::Mac(mac) => + self.collect_bang(mac, pat.span, AstFragmentKind::Pat).make_pat(), + _ => unreachable!(), + } + }); } - fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { + fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { let mut stmt = configure!(self, stmt); // we'll expand attributes on expressions separately if !stmt.is_expr() { - let (attr, derives, stmt_, after_derive) = if stmt.is_item() { - self.classify_item(stmt) + let (attr, derives, after_derive) = if stmt.is_item() { + self.classify_item(&mut stmt) } else { // ignore derives on non-item statements so it falls through // to the unused-attributes lint - let (attr, stmt, after_derive) = self.classify_nonitem(stmt); - (attr, vec![], stmt, after_derive) + let (attr, after_derive) = self.classify_nonitem(&mut stmt); + (attr, vec![], after_derive) }; if attr.is_some() || !derives.is_empty() { - return self.collect_attr(attr, derives, Annotatable::Stmt(P(stmt_)), + return self.collect_attr(attr, derives, 
Annotatable::Stmt(P(stmt)), AstFragmentKind::Stmts, after_derive).make_stmts(); } - - stmt = stmt_; } if let StmtKind::Mac(mac) = stmt.node { @@ -1287,24 +1295,23 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { // The placeholder expander gives ids to statements, so we avoid folding the id here. let ast::Stmt { id, node, span } = stmt; - noop_fold_stmt_kind(node, self).into_iter().map(|node| { + noop_flat_map_stmt_kind(node, self).into_iter().map(|node| { ast::Stmt { id, node, span } }).collect() } - fn fold_block(&mut self, block: P) -> P { + fn visit_block(&mut self, block: &mut P) { let old_directory_ownership = self.cx.current_expansion.directory_ownership; self.cx.current_expansion.directory_ownership = DirectoryOwnership::UnownedViaBlock; - let result = noop_fold_block(block, self); + noop_visit_block(block, self); self.cx.current_expansion.directory_ownership = old_directory_ownership; - result } - fn fold_item(&mut self, item: P) -> SmallVec<[P; 1]> { - let item = configure!(self, item); + fn flat_map_item(&mut self, item: P) -> SmallVec<[P; 1]> { + let mut item = configure!(self, item); - let (attr, traits, item, after_derive) = self.classify_item(item); + let (attr, traits, after_derive) = self.classify_item(&mut item); if attr.is_some() || !traits.is_empty() { return self.collect_attr(attr, traits, Annotatable::Item(item), AstFragmentKind::Items, after_derive).make_items(); @@ -1326,7 +1333,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { } ast::ItemKind::Mod(ast::Mod { inner, .. 
}) => { if item.ident == keywords::Invalid.ident() { - return noop_fold_item(item, self); + return noop_flat_map_item(item, self); } let orig_directory_ownership = self.cx.current_expansion.directory_ownership; @@ -1366,20 +1373,20 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { let orig_module = mem::replace(&mut self.cx.current_expansion.module, Rc::new(module)); - let result = noop_fold_item(item, self); + let result = noop_flat_map_item(item, self); self.cx.current_expansion.module = orig_module; self.cx.current_expansion.directory_ownership = orig_directory_ownership; result } - _ => noop_fold_item(item, self), + _ => noop_flat_map_item(item, self), } } - fn fold_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { - let item = configure!(self, item); + fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { + let mut item = configure!(self, item); - let (attr, traits, item, after_derive) = self.classify_item(item); + let (attr, traits, after_derive) = self.classify_item(&mut item); if attr.is_some() || !traits.is_empty() { return self.collect_attr(attr, traits, Annotatable::TraitItem(P(item)), AstFragmentKind::TraitItems, after_derive).make_trait_items() @@ -1391,14 +1398,14 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { self.check_attributes(&attrs); self.collect_bang(mac, span, AstFragmentKind::TraitItems).make_trait_items() } - _ => noop_fold_trait_item(item, self), + _ => noop_flat_map_trait_item(item, self), } } - fn fold_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> { - let item = configure!(self, item); + fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> { + let mut item = configure!(self, item); - let (attr, traits, item, after_derive) = self.classify_item(item); + let (attr, traits, after_derive) = self.classify_item(&mut item); if attr.is_some() || !traits.is_empty() { return self.collect_attr(attr, traits, 
Annotatable::ImplItem(P(item)), AstFragmentKind::ImplItems, after_derive).make_impl_items(); @@ -1410,30 +1417,34 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { self.check_attributes(&attrs); self.collect_bang(mac, span, AstFragmentKind::ImplItems).make_impl_items() } - _ => noop_fold_impl_item(item, self), + _ => noop_flat_map_impl_item(item, self), } } - fn fold_ty(&mut self, ty: P) -> P { - let ty = match ty.node { - ast::TyKind::Mac(_) => ty.into_inner(), - _ => return noop_fold_ty(ty, self), + fn visit_ty(&mut self, ty: &mut P) { + match ty.node { + ast::TyKind::Mac(_) => {} + _ => return noop_visit_ty(ty, self), }; - match ty.node { - ast::TyKind::Mac(mac) => self.collect_bang(mac, ty.span, AstFragmentKind::Ty).make_ty(), - _ => unreachable!(), - } + visit_clobber(ty, |mut ty| { + match mem::replace(&mut ty.node, ast::TyKind::Err) { + ast::TyKind::Mac(mac) => + self.collect_bang(mac, ty.span, AstFragmentKind::Ty).make_ty(), + _ => unreachable!(), + } + }); } - fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod { - noop_fold_foreign_mod(self.cfg.configure_foreign_mod(foreign_mod), self) + fn visit_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) { + self.cfg.configure_foreign_mod(foreign_mod); + noop_visit_foreign_mod(foreign_mod, self); } - fn fold_foreign_item(&mut self, foreign_item: ast::ForeignItem) + fn flat_map_foreign_item(&mut self, mut foreign_item: ast::ForeignItem) -> SmallVec<[ast::ForeignItem; 1]> { - let (attr, traits, foreign_item, after_derive) = self.classify_item(foreign_item); + let (attr, traits, after_derive) = self.classify_item(&mut foreign_item); if attr.is_some() || !traits.is_empty() { return self.collect_attr(attr, traits, Annotatable::ForeignItem(P(foreign_item)), @@ -1447,38 +1458,41 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { .make_foreign_items(); } - noop_fold_foreign_item(foreign_item, self) + noop_flat_map_foreign_item(foreign_item, self) } - fn fold_item_kind(&mut 
self, item: ast::ItemKind) -> ast::ItemKind { + fn visit_item_kind(&mut self, item: &mut ast::ItemKind) { match item { - ast::ItemKind::MacroDef(..) => item, - _ => noop_fold_item_kind(self.cfg.configure_item_kind(item), self), + ast::ItemKind::MacroDef(..) => {} + _ => { + self.cfg.configure_item_kind(item); + noop_visit_item_kind(item, self); + } } } - fn fold_generic_param(&mut self, param: ast::GenericParam) -> ast::GenericParam { + fn visit_generic_param(&mut self, param: &mut ast::GenericParam) { self.cfg.disallow_cfg_on_generic_param(¶m); - noop_fold_generic_param(param, self) + noop_visit_generic_param(param, self) } - fn fold_attribute(&mut self, at: ast::Attribute) -> ast::Attribute { + fn visit_attribute(&mut self, at: &mut ast::Attribute) { // turn `#[doc(include="filename")]` attributes into `#[doc(include(file="filename", // contents="file contents")]` attributes if !at.check_name("doc") { - return noop_fold_attribute(at, self); + return noop_visit_attribute(at, self); } if let Some(list) = at.meta_item_list() { if !list.iter().any(|it| it.check_name("include")) { - return noop_fold_attribute(at, self); + return noop_visit_attribute(at, self); } let mut items = vec![]; - for it in list { + for mut it in list { if !it.check_name("include") { - items.push(noop_fold_meta_list_item(it, self)); + items.push({ noop_visit_meta_list_item(&mut it, self); it }); continue; } @@ -1487,7 +1501,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { self.check_attribute(&at); if self.cx.parse_sess.span_diagnostic.err_count() > err_count { // avoid loading the file if they haven't enabled the feature - return noop_fold_attribute(at, self); + return noop_visit_attribute(at, self); } let filename = self.cx.root_path.join(file.to_string()); @@ -1582,20 +1596,18 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { let meta = attr::mk_list_item(DUMMY_SP, Ident::from_str("doc"), items); match at.style { - ast::AttrStyle::Inner => 
attr::mk_spanned_attr_inner(at.span, at.id, meta), - ast::AttrStyle::Outer => attr::mk_spanned_attr_outer(at.span, at.id, meta), + ast::AttrStyle::Inner => *at = attr::mk_spanned_attr_inner(at.span, at.id, meta), + ast::AttrStyle::Outer => *at = attr::mk_spanned_attr_outer(at.span, at.id, meta), } } else { - noop_fold_attribute(at, self) + noop_visit_attribute(at, self) } } - fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId { + fn visit_id(&mut self, id: &mut ast::NodeId) { if self.monotonic { - assert_eq!(id, ast::DUMMY_NODE_ID); - self.cx.resolver.next_node_id() - } else { - id + debug_assert_eq!(*id, ast::DUMMY_NODE_ID); + *id = self.cx.resolver.next_node_id() } } } @@ -1660,12 +1672,12 @@ impl<'feat> ExpansionConfig<'feat> { #[derive(Debug)] pub struct Marker(pub Mark); -impl Folder for Marker { - fn new_span(&mut self, span: Span) -> Span { - span.apply_mark(self.0) +impl MutVisitor for Marker { + fn visit_span(&mut self, span: &mut Span) { + *span = span.apply_mark(self.0) } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut ast::Mac) { + noop_visit_mac(mac, self) } } diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index 0928bc80404..23b34c2660b 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -4,12 +4,11 @@ use ext::base::ExtCtxt; use ext::expand::{AstFragment, AstFragmentKind}; use ext::hygiene::Mark; use tokenstream::TokenStream; -use fold::*; +use mut_visit::*; use ptr::P; use smallvec::SmallVec; use symbol::keywords; use ThinVec; -use util::move_map::MoveMap; use rustc_data_structures::fx::FxHashMap; @@ -85,8 +84,8 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> { } } - pub fn add(&mut self, id: ast::NodeId, fragment: AstFragment, derives: Vec) { - let mut fragment = fragment.fold_with(self); + pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment, derives: Vec) { + fragment.mut_visit_with(self); if let 
AstFragment::Items(mut items) = fragment { for derive in derives { match self.remove(NodeId::placeholder_from_mark(derive)) { @@ -104,56 +103,56 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> { } } -impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> { - fn fold_item(&mut self, item: P) -> SmallVec<[P; 1]> { +impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> { + fn flat_map_item(&mut self, item: P) -> SmallVec<[P; 1]> { match item.node { ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(), ast::ItemKind::MacroDef(_) => return smallvec![item], _ => {} } - noop_fold_item(item, self) + noop_flat_map_item(item, self) } - fn fold_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { + fn flat_map_trait_item(&mut self, item: ast::TraitItem) -> SmallVec<[ast::TraitItem; 1]> { match item.node { ast::TraitItemKind::Macro(_) => self.remove(item.id).make_trait_items(), - _ => noop_fold_trait_item(item, self), + _ => noop_flat_map_trait_item(item, self), } } - fn fold_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> { + fn flat_map_impl_item(&mut self, item: ast::ImplItem) -> SmallVec<[ast::ImplItem; 1]> { match item.node { ast::ImplItemKind::Macro(_) => self.remove(item.id).make_impl_items(), - _ => noop_fold_impl_item(item, self), + _ => noop_flat_map_impl_item(item, self), } } - fn fold_foreign_item(&mut self, item: ast::ForeignItem) -> SmallVec<[ast::ForeignItem; 1]> { + fn flat_map_foreign_item(&mut self, item: ast::ForeignItem) -> SmallVec<[ast::ForeignItem; 1]> { match item.node { ast::ForeignItemKind::Macro(_) => self.remove(item.id).make_foreign_items(), - _ => noop_fold_foreign_item(item, self), + _ => noop_flat_map_foreign_item(item, self), } } - fn fold_expr(&mut self, expr: P) -> P { + fn visit_expr(&mut self, expr: &mut P) { match expr.node { - ast::ExprKind::Mac(_) => self.remove(expr.id).make_expr(), - _ => expr.map(|expr| noop_fold_expr(expr, self)), + ast::ExprKind::Mac(_) => *expr = 
self.remove(expr.id).make_expr(), + _ => noop_visit_expr(expr, self), } } - fn fold_opt_expr(&mut self, expr: P) -> Option> { + fn filter_map_expr(&mut self, expr: P) -> Option> { match expr.node { ast::ExprKind::Mac(_) => self.remove(expr.id).make_opt_expr(), - _ => noop_fold_opt_expr(expr, self), + _ => noop_filter_map_expr(expr, self), } } - fn fold_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { + fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> { let (style, mut stmts) = match stmt.node { ast::StmtKind::Mac(mac) => (mac.1, self.remove(stmt.id).make_stmts()), - _ => return noop_fold_stmt(stmt, self), + _ => return noop_flat_map_stmt(stmt, self), }; if style == ast::MacStmtStyle::Semicolon { @@ -165,44 +164,40 @@ impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> { stmts } - fn fold_pat(&mut self, pat: P) -> P { + fn visit_pat(&mut self, pat: &mut P) { match pat.node { - ast::PatKind::Mac(_) => self.remove(pat.id).make_pat(), - _ => noop_fold_pat(pat, self), + ast::PatKind::Mac(_) => *pat = self.remove(pat.id).make_pat(), + _ => noop_visit_pat(pat, self), } } - fn fold_ty(&mut self, ty: P) -> P { + fn visit_ty(&mut self, ty: &mut P) { match ty.node { - ast::TyKind::Mac(_) => self.remove(ty.id).make_ty(), - _ => noop_fold_ty(ty, self), + ast::TyKind::Mac(_) => *ty = self.remove(ty.id).make_ty(), + _ => noop_visit_ty(ty, self), } } - fn fold_block(&mut self, block: P) -> P { - noop_fold_block(block, self).map(|mut block| { - block.stmts = block.stmts.move_map(|mut stmt| { - if self.monotonic { - assert_eq!(stmt.id, ast::DUMMY_NODE_ID); - stmt.id = self.cx.resolver.next_node_id(); - } - stmt - }); + fn visit_block(&mut self, block: &mut P) { + noop_visit_block(block, self); - block - }) + for stmt in block.stmts.iter_mut() { + if self.monotonic { + assert_eq!(stmt.id, ast::DUMMY_NODE_ID); + stmt.id = self.cx.resolver.next_node_id(); + } + } } - fn fold_mod(&mut self, module: ast::Mod) -> ast::Mod { - let mut module = 
noop_fold_mod(module, self); + fn visit_mod(&mut self, module: &mut ast::Mod) { + noop_visit_mod(module, self); module.items.retain(|item| match item.node { ast::ItemKind::Mac(_) if !self.cx.ecfg.keep_macs => false, // remove macro definitions _ => true, }); - module } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - mac + fn visit_mac(&mut self, _mac: &mut ast::Mac) { + // Do nothing. } } diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 0ef2d3b749d..08f34b22328 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -3,7 +3,7 @@ use ext::base::ExtCtxt; use ext::expand::Marker; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::quoted; -use fold::noop_fold_tt; +use mut_visit::noop_visit_tt; use parse::token::{self, Token, NtTT}; use smallvec::SmallVec; use syntax_pos::DUMMY_SP; @@ -170,7 +170,9 @@ pub fn transcribe(cx: &ExtCtxt, } quoted::TokenTree::Token(sp, tok) => { let mut marker = Marker(cx.current_expansion.mark); - result.push(noop_fold_tt(TokenTree::Token(sp, tok), &mut marker).into()) + let mut tt = TokenTree::Token(sp, tok); + noop_visit_tt(&mut tt, &mut marker); + result.push(tt.into()); } quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"), } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 5fb0132ad45..93fedb73d27 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -1,11 +1,10 @@ -//! A Folder represents an AST->AST fold; it accepts an AST piece, -//! and returns a piece of the same type. So, for instance, macro -//! expansion is a Folder that walks over an AST and produces another -//! AST. +//! A MutVisitor represents an AST modification; it accepts an AST piece and +//! and mutates it in place. So, for instance, macro expansion is a MutVisitor +//! that walks over an AST and modifies it. //! -//! Note: using a Folder (other than the MacroExpander Folder) on +//! 
Note: using a MutVisitor (other than the MacroExpander MutVisitor) on //! an AST before macro expansion is probably a bad idea. For instance, -//! a folder renaming item names in a module will miss all of those +//! a MutVisitor renaming item names in a module will miss all of those //! that are created by the expansion of a macro. use ast::*; @@ -14,10 +13,11 @@ use source_map::{Spanned, respan}; use parse::token::{self, Token}; use ptr::P; use smallvec::{Array, SmallVec}; +use std::ops::DerefMut; use symbol::keywords; use ThinVec; use tokenstream::*; -use util::move_map::MoveMap; +use util::map_in_place::MapInPlace; use rustc_data_structures::sync::Lrc; @@ -32,1308 +32,1225 @@ impl ExpectOne for SmallVec { } } -pub trait Folder : Sized { - // Any additions to this trait should happen in form - // of a call to a public `noop_*` function that only calls - // out to the folder again, not other `noop_*` functions. +pub trait MutVisitor: Sized { + // Methods in this trait have one of three forms: // - // This is a necessary API workaround to the problem of not - // being able to call out to the super default method - // in an overridden default method. + // fn visit_t(&mut self, t: &mut T); // common + // fn flat_map_t(&mut self, t: T) -> SmallVec<[T; 1]>; // rare + // fn filter_map_t(&mut self, t: T) -> Option; // rarest + // + // Any additions to this trait should happen in form of a call to a public + // `noop_*` function that only calls out to the visitor again, not other + // `noop_*` functions. This is a necessary API workaround to the problem of + // not being able to call out to the super default method in an overridden + // default method. 
+ // + // When writing these methods, it is better to use destructuring like this: + // + // fn visit_abc(&mut self, ABC { a, b, c: _ }: &mut ABC) { + // visit_a(a); + // visit_b(b); + // } + // + // than to use field access like this: + // + // fn visit_abc(&mut self, abc: &mut ABC) { + // visit_a(&mut abc.a); + // visit_b(&mut abc.b); + // // ignore abc.c + // } + // + // As well as being more concise, the former is explicit about which fields + // are skipped. Furthermore, if a new field is added, the destructuring + // version will cause a compile error, which is good. In comparison, the + // field access version will continue working and it would be easy to + // forget to add handling for it. - fn fold_crate(&mut self, c: Crate) -> Crate { - noop_fold_crate(c, self) + fn visit_crate(&mut self, c: &mut Crate) { + noop_visit_crate(c, self) } - fn fold_meta_list_item(&mut self, list_item: NestedMetaItem) -> NestedMetaItem { - noop_fold_meta_list_item(list_item, self) + fn visit_meta_list_item(&mut self, list_item: &mut NestedMetaItem) { + noop_visit_meta_list_item(list_item, self); } - fn fold_meta_item(&mut self, meta_item: MetaItem) -> MetaItem { - noop_fold_meta_item(meta_item, self) + fn visit_meta_item(&mut self, meta_item: &mut MetaItem) { + noop_visit_meta_item(meta_item, self); } - fn fold_use_tree(&mut self, use_tree: UseTree) -> UseTree { - noop_fold_use_tree(use_tree, self) + fn visit_use_tree(&mut self, use_tree: &mut UseTree) { + noop_visit_use_tree(use_tree, self); } - fn fold_foreign_item(&mut self, ni: ForeignItem) -> SmallVec<[ForeignItem; 1]> { - noop_fold_foreign_item(ni, self) + fn flat_map_foreign_item(&mut self, ni: ForeignItem) -> SmallVec<[ForeignItem; 1]> { + noop_flat_map_foreign_item(ni, self) } - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { - noop_fold_item(i, self) + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { + noop_flat_map_item(i, self) } - fn fold_fn_header(&mut self, header: FnHeader) -> FnHeader { - 
noop_fold_fn_header(header, self) + fn visit_fn_header(&mut self, header: &mut FnHeader) { + noop_visit_fn_header(header, self); } - fn fold_struct_field(&mut self, sf: StructField) -> StructField { - noop_fold_struct_field(sf, self) + fn visit_struct_field(&mut self, sf: &mut StructField) { + noop_visit_struct_field(sf, self); } - fn fold_item_kind(&mut self, i: ItemKind) -> ItemKind { - noop_fold_item_kind(i, self) + fn visit_item_kind(&mut self, i: &mut ItemKind) { + noop_visit_item_kind(i, self); } - fn fold_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> { - noop_fold_trait_item(i, self) + fn flat_map_trait_item(&mut self, i: TraitItem) -> SmallVec<[TraitItem; 1]> { + noop_flat_map_trait_item(i, self) } - fn fold_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { - noop_fold_impl_item(i, self) + fn flat_map_impl_item(&mut self, i: ImplItem) -> SmallVec<[ImplItem; 1]> { + noop_flat_map_impl_item(i, self) } - fn fold_fn_decl(&mut self, d: P) -> P { - noop_fold_fn_decl(d, self) + fn visit_fn_decl(&mut self, d: &mut P) { + noop_visit_fn_decl(d, self); } - fn fold_asyncness(&mut self, a: IsAsync) -> IsAsync { - noop_fold_asyncness(a, self) + fn visit_asyncness(&mut self, a: &mut IsAsync) { + noop_visit_asyncness(a, self); } - fn fold_block(&mut self, b: P) -> P { - noop_fold_block(b, self) + fn visit_block(&mut self, b: &mut P) { + noop_visit_block(b, self); } - fn fold_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { - noop_fold_stmt(s, self) + fn flat_map_stmt(&mut self, s: Stmt) -> SmallVec<[Stmt; 1]> { + noop_flat_map_stmt(s, self) } - fn fold_arm(&mut self, a: Arm) -> Arm { - noop_fold_arm(a, self) + fn visit_arm(&mut self, a: &mut Arm) { + noop_visit_arm(a, self); } - fn fold_guard(&mut self, g: Guard) -> Guard { - noop_fold_guard(g, self) + fn visit_guard(&mut self, g: &mut Guard) { + noop_visit_guard(g, self); } - fn fold_pat(&mut self, p: P) -> P { - noop_fold_pat(p, self) + fn visit_pat(&mut self, p: &mut P) { + 
noop_visit_pat(p, self); } - fn fold_anon_const(&mut self, c: AnonConst) -> AnonConst { - noop_fold_anon_const(c, self) + fn visit_anon_const(&mut self, c: &mut AnonConst) { + noop_visit_anon_const(c, self); } - fn fold_expr(&mut self, e: P) -> P { - e.map(|e| noop_fold_expr(e, self)) + fn visit_expr(&mut self, e: &mut P) { + noop_visit_expr(e, self); } - fn fold_opt_expr(&mut self, e: P) -> Option> { - noop_fold_opt_expr(e, self) + fn filter_map_expr(&mut self, e: P) -> Option> { + noop_filter_map_expr(e, self) } - fn fold_generic_arg(&mut self, arg: GenericArg) -> GenericArg { - match arg { - GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.fold_lifetime(lt)), - GenericArg::Type(ty) => GenericArg::Type(self.fold_ty(ty)), - } + fn visit_generic_arg(&mut self, arg: &mut GenericArg) { + noop_visit_generic_arg(arg, self); } - fn fold_ty(&mut self, t: P) -> P { - noop_fold_ty(t, self) + fn visit_ty(&mut self, t: &mut P) { + noop_visit_ty(t, self); } - fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime { - noop_fold_lifetime(l, self) + fn visit_lifetime(&mut self, l: &mut Lifetime) { + noop_visit_lifetime(l, self); } - fn fold_ty_binding(&mut self, t: TypeBinding) -> TypeBinding { - noop_fold_ty_binding(t, self) + fn visit_ty_binding(&mut self, t: &mut TypeBinding) { + noop_visit_ty_binding(t, self); } - fn fold_mod(&mut self, m: Mod) -> Mod { - noop_fold_mod(m, self) + fn visit_mod(&mut self, m: &mut Mod) { + noop_visit_mod(m, self); } - fn fold_foreign_mod(&mut self, nm: ForeignMod) -> ForeignMod { - noop_fold_foreign_mod(nm, self) + fn visit_foreign_mod(&mut self, nm: &mut ForeignMod) { + noop_visit_foreign_mod(nm, self); } - fn fold_variant(&mut self, v: Variant) -> Variant { - noop_fold_variant(v, self) + fn visit_variant(&mut self, v: &mut Variant) { + noop_visit_variant(v, self); } - fn fold_ident(&mut self, i: Ident) -> Ident { - noop_fold_ident(i, self) + fn visit_ident(&mut self, i: &mut Ident) { + noop_visit_ident(i, self); } - fn fold_path(&mut self, 
p: Path) -> Path { - noop_fold_path(p, self) + fn visit_path(&mut self, p: &mut Path) { + noop_visit_path(p, self); } - fn fold_qself(&mut self, qs: Option) -> Option { - noop_fold_qself(qs, self) + fn visit_qself(&mut self, qs: &mut Option) { + noop_visit_qself(qs, self); } - fn fold_generic_args(&mut self, p: GenericArgs) -> GenericArgs { - noop_fold_generic_args(p, self) + fn visit_generic_args(&mut self, p: &mut GenericArgs) { + noop_visit_generic_args(p, self); } - fn fold_angle_bracketed_parameter_data(&mut self, p: AngleBracketedArgs) - -> AngleBracketedArgs - { - noop_fold_angle_bracketed_parameter_data(p, self) + fn visit_angle_bracketed_parameter_data(&mut self, p: &mut AngleBracketedArgs) { + noop_visit_angle_bracketed_parameter_data(p, self); } - fn fold_parenthesized_parameter_data(&mut self, p: ParenthesizedArgs) - -> ParenthesizedArgs - { - noop_fold_parenthesized_parameter_data(p, self) + fn visit_parenthesized_parameter_data(&mut self, p: &mut ParenthesizedArgs) { + noop_visit_parenthesized_parameter_data(p, self); } - fn fold_local(&mut self, l: P) -> P { - noop_fold_local(l, self) + fn visit_local(&mut self, l: &mut P) { + noop_visit_local(l, self); } - fn fold_mac(&mut self, _mac: Mac) -> Mac { - panic!("fold_mac disabled by default"); - // N.B., see note about macros above. - // if you really want a folder that - // works on macros, use this - // definition in your trait impl: - // fold::noop_fold_mac(_mac, self) + fn visit_mac(&mut self, _mac: &mut Mac) { + panic!("visit_mac disabled by default"); + // N.B., see note about macros above. 
If you really want a visitor that + // works on macros, use this definition in your trait impl: + // mut_visit::noop_visit_mac(_mac, self); } - fn fold_macro_def(&mut self, def: MacroDef) -> MacroDef { - noop_fold_macro_def(def, self) + fn visit_macro_def(&mut self, def: &mut MacroDef) { + noop_visit_macro_def(def, self); } - fn fold_label(&mut self, label: Label) -> Label { - noop_fold_label(label, self) + fn visit_label(&mut self, label: &mut Label) { + noop_visit_label(label, self); } - fn fold_attribute(&mut self, at: Attribute) -> Attribute { - noop_fold_attribute(at, self) + fn visit_attribute(&mut self, at: &mut Attribute) { + noop_visit_attribute(at, self); } - fn fold_arg(&mut self, a: Arg) -> Arg { - noop_fold_arg(a, self) + fn visit_arg(&mut self, a: &mut Arg) { + noop_visit_arg(a, self); } - fn fold_generics(&mut self, generics: Generics) -> Generics { - noop_fold_generics(generics, self) + fn visit_generics(&mut self, generics: &mut Generics) { + noop_visit_generics(generics, self); } - fn fold_trait_ref(&mut self, p: TraitRef) -> TraitRef { - noop_fold_trait_ref(p, self) + fn visit_trait_ref(&mut self, tr: &mut TraitRef) { + noop_visit_trait_ref(tr, self); } - fn fold_poly_trait_ref(&mut self, p: PolyTraitRef) -> PolyTraitRef { - noop_fold_poly_trait_ref(p, self) + fn visit_poly_trait_ref(&mut self, p: &mut PolyTraitRef) { + noop_visit_poly_trait_ref(p, self); } - fn fold_variant_data(&mut self, vdata: VariantData) -> VariantData { - noop_fold_variant_data(vdata, self) + fn visit_variant_data(&mut self, vdata: &mut VariantData) { + noop_visit_variant_data(vdata, self); } - fn fold_generic_param(&mut self, param: GenericParam) -> GenericParam { - noop_fold_generic_param(param, self) + fn visit_generic_param(&mut self, param: &mut GenericParam) { + noop_visit_generic_param(param, self); } - fn fold_generic_params(&mut self, params: Vec) -> Vec { - noop_fold_generic_params(params, self) + fn visit_generic_params(&mut self, params: &mut Vec) { + 
noop_visit_generic_params(params, self); } - fn fold_tt(&mut self, tt: TokenTree) -> TokenTree { - noop_fold_tt(tt, self) + fn visit_tt(&mut self, tt: &mut TokenTree) { + noop_visit_tt(tt, self); } - fn fold_tts(&mut self, tts: TokenStream) -> TokenStream { - noop_fold_tts(tts, self) + fn visit_tts(&mut self, tts: &mut TokenStream) { + noop_visit_tts(tts, self); } - fn fold_token(&mut self, t: token::Token) -> token::Token { - noop_fold_token(t, self) + fn visit_token(&mut self, t: &mut Token) { + noop_visit_token(t, self); } - fn fold_interpolated(&mut self, nt: token::Nonterminal) -> token::Nonterminal { - noop_fold_interpolated(nt, self) + fn visit_interpolated(&mut self, nt: &mut token::Nonterminal) { + noop_visit_interpolated(nt, self); } - fn fold_param_bound(&mut self, tpb: GenericBound) -> GenericBound { - noop_fold_param_bound(tpb, self) + fn visit_param_bound(&mut self, tpb: &mut GenericBound) { + noop_visit_param_bound(tpb, self); } - fn fold_mt(&mut self, mt: MutTy) -> MutTy { - noop_fold_mt(mt, self) + fn visit_mt(&mut self, mt: &mut MutTy) { + noop_visit_mt(mt, self); } - fn fold_field(&mut self, field: Field) -> Field { - noop_fold_field(field, self) + fn visit_field(&mut self, field: &mut Field) { + noop_visit_field(field, self); } - fn fold_where_clause(&mut self, where_clause: WhereClause) - -> WhereClause { - noop_fold_where_clause(where_clause, self) + fn visit_where_clause(&mut self, where_clause: &mut WhereClause) { + noop_visit_where_clause(where_clause, self); } - fn fold_where_predicate(&mut self, where_predicate: WherePredicate) - -> WherePredicate { - noop_fold_where_predicate(where_predicate, self) + fn visit_where_predicate(&mut self, where_predicate: &mut WherePredicate) { + noop_visit_where_predicate(where_predicate, self); } - fn fold_vis(&mut self, vis: Visibility) -> Visibility { - noop_fold_vis(vis, self) + fn visit_vis(&mut self, vis: &mut Visibility) { + noop_visit_vis(vis, self); } - fn new_id(&mut self, i: NodeId) -> NodeId { 
- i + fn visit_id(&mut self, _id: &mut NodeId) { + // Do nothing. } - fn new_span(&mut self, sp: Span) -> Span { - sp + fn visit_span(&mut self, _sp: &mut Span) { + // Do nothing. } } -// No `noop_` prefix because there isn't a corresponding method in `Folder`. -fn fold_attrs(attrs: Vec, fld: &mut T) -> Vec { - attrs.move_map(|x| fld.fold_attribute(x)) +/// Use a map-style function (`FnOnce(T) -> T`) to overwrite a `&mut T`. Useful +/// when using a `flat_map_*` or `filter_map_*` method within a `visit_` +/// method. +// +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. +pub fn visit_clobber(t: &mut T, f: F) where F: FnOnce(T) -> T { + unsafe { std::ptr::write(t, f(std::ptr::read(t))); } } -// No `noop_` prefix because there isn't a corresponding method in `Folder`. -fn fold_thin_attrs(attrs: ThinVec, fld: &mut T) -> ThinVec { - fold_attrs(attrs.into(), fld).into() +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. +#[inline] +pub fn visit_vec(elems: &mut Vec, mut visit_elem: F) where F: FnMut(&mut T) { + for elem in elems { + visit_elem(elem); + } } -// No `noop_` prefix because there isn't a corresponding method in `Folder`. -fn fold_exprs(es: Vec>, fld: &mut T) -> Vec> { - es.move_flat_map(|e| fld.fold_opt_expr(e)) +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. +#[inline] +pub fn visit_opt(opt: &mut Option, mut visit_elem: F) where F: FnMut(&mut T) { + if let Some(elem) = opt { + visit_elem(elem); + } } -// No `noop_` prefix because there isn't a corresponding method in `Folder`. -fn fold_bounds(bounds: GenericBounds, folder: &mut T) -> GenericBounds { - bounds.move_map(|bound| folder.fold_param_bound(bound)) +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. 
+pub fn visit_attrs(attrs: &mut Vec, vis: &mut T) { + visit_vec(attrs, |attr| vis.visit_attribute(attr)); } -// No `noop_` prefix because there isn't a corresponding method in `Folder`. -fn fold_method_sig(sig: MethodSig, folder: &mut T) -> MethodSig { - MethodSig { - header: folder.fold_fn_header(sig.header), - decl: folder.fold_fn_decl(sig.decl) +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. +pub fn visit_thin_attrs(attrs: &mut ThinVec, vis: &mut T) { + for attr in attrs.iter_mut() { + vis.visit_attribute(attr); } } -pub fn noop_fold_use_tree(use_tree: UseTree, fld: &mut T) -> UseTree { - UseTree { - span: fld.new_span(use_tree.span), - prefix: fld.fold_path(use_tree.prefix), - kind: match use_tree.kind { - UseTreeKind::Simple(rename, id1, id2) => - UseTreeKind::Simple(rename.map(|ident| fld.fold_ident(ident)), - fld.new_id(id1), fld.new_id(id2)), - UseTreeKind::Glob => UseTreeKind::Glob, - UseTreeKind::Nested(items) => UseTreeKind::Nested(items.move_map(|(tree, id)| { - (fld.fold_use_tree(tree), fld.new_id(id)) - })), - }, - } +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. +pub fn visit_exprs(exprs: &mut Vec>, vis: &mut T) { + exprs.flat_map_in_place(|expr| vis.filter_map_expr(expr)) } -pub fn noop_fold_arm(Arm {attrs, pats, guard, body}: Arm, - fld: &mut T) -> Arm { - Arm { - attrs: fold_attrs(attrs, fld), - pats: pats.move_map(|x| fld.fold_pat(x)), - guard: guard.map(|x| fld.fold_guard(x)), - body: fld.fold_expr(body), - } +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. +pub fn visit_bounds(bounds: &mut GenericBounds, vis: &mut T) { + visit_vec(bounds, |bound| vis.visit_param_bound(bound)); } -pub fn noop_fold_guard(g: Guard, fld: &mut T) -> Guard { - match g { - Guard::If(e) => Guard::If(fld.fold_expr(e)), - } +// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`. 
+pub fn visit_method_sig(MethodSig { header, decl }: &mut MethodSig, vis: &mut T) { + vis.visit_fn_header(header); + vis.visit_fn_decl(decl); } -pub fn noop_fold_ty_binding(b: TypeBinding, fld: &mut T) -> TypeBinding { - TypeBinding { - id: fld.new_id(b.id), - ident: fld.fold_ident(b.ident), - ty: fld.fold_ty(b.ty), - span: fld.new_span(b.span), +pub fn noop_visit_use_tree(use_tree: &mut UseTree, vis: &mut T) { + let UseTree { prefix, kind, span } = use_tree; + vis.visit_path(prefix); + match kind { + UseTreeKind::Simple(rename, id1, id2) => { + visit_opt(rename, |rename| vis.visit_ident(rename)); + vis.visit_id(id1); + vis.visit_id(id2); + } + UseTreeKind::Nested(items) => { + for (tree, id) in items { + vis.visit_use_tree(tree); + vis.visit_id(id); + } + } + UseTreeKind::Glob => {} } + vis.visit_span(span); } -pub fn noop_fold_ty(t: P, fld: &mut T) -> P { - t.map(|Ty {id, node, span}| Ty { - id: fld.new_id(id), - node: match node { - TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err => node, - TyKind::Slice(ty) => TyKind::Slice(fld.fold_ty(ty)), - TyKind::Ptr(mt) => TyKind::Ptr(fld.fold_mt(mt)), - TyKind::Rptr(region, mt) => { - TyKind::Rptr(region.map(|lt| noop_fold_lifetime(lt, fld)), fld.fold_mt(mt)) - } - TyKind::BareFn(f) => { - TyKind::BareFn(f.map(|BareFnTy {generic_params, unsafety, abi, decl}| BareFnTy { - generic_params: fld.fold_generic_params(generic_params), - unsafety, - abi, - decl: fld.fold_fn_decl(decl) - })) - } - TyKind::Never => node, - TyKind::Tup(tys) => TyKind::Tup(tys.move_map(|ty| fld.fold_ty(ty))), - TyKind::Paren(ty) => TyKind::Paren(fld.fold_ty(ty)), - TyKind::Path(qself, path) => { - TyKind::Path(fld.fold_qself(qself), fld.fold_path(path)) - } - TyKind::Array(ty, length) => { - TyKind::Array(fld.fold_ty(ty), fld.fold_anon_const(length)) - } - TyKind::Typeof(expr) => { - TyKind::Typeof(fld.fold_anon_const(expr)) - } - TyKind::TraitObject(bounds, syntax) => { - TyKind::TraitObject(bounds.move_map(|b| fld.fold_param_bound(b)), syntax) - 
} - TyKind::ImplTrait(id, bounds) => { - TyKind::ImplTrait(fld.new_id(id), bounds.move_map(|b| fld.fold_param_bound(b))) - } - TyKind::Mac(mac) => { - TyKind::Mac(fld.fold_mac(mac)) - } - }, - span: fld.new_span(span) - }) +pub fn noop_visit_arm(Arm { attrs, pats, guard, body }: &mut Arm, vis: &mut T) { + visit_attrs(attrs, vis); + visit_vec(pats, |pat| vis.visit_pat(pat)); + visit_opt(guard, |guard| vis.visit_guard(guard)); + vis.visit_expr(body); } -pub fn noop_fold_foreign_mod(ForeignMod {abi, items}: ForeignMod, - fld: &mut T) -> ForeignMod { - ForeignMod { - abi, - items: items.move_flat_map(|x| fld.fold_foreign_item(x)), +pub fn noop_visit_guard(g: &mut Guard, vis: &mut T) { + match g { + Guard::If(e) => vis.visit_expr(e), } } -pub fn noop_fold_variant(v: Variant, fld: &mut T) -> Variant { - Spanned { - node: Variant_ { - ident: fld.fold_ident(v.node.ident), - attrs: fold_attrs(v.node.attrs, fld), - data: fld.fold_variant_data(v.node.data), - disr_expr: v.node.disr_expr.map(|e| fld.fold_anon_const(e)), - }, - span: fld.new_span(v.span), +pub fn noop_visit_ty_binding(TypeBinding { id, ident, ty, span }: &mut TypeBinding, + vis: &mut T) { + vis.visit_id(id); + vis.visit_ident(ident); + vis.visit_ty(ty); + vis.visit_span(span); +} + +pub fn noop_visit_ty(ty: &mut P, vis: &mut T) { + let Ty { id, node, span } = ty.deref_mut(); + vis.visit_id(id); + match node { + TyKind::Infer | TyKind::ImplicitSelf | TyKind::Err | TyKind::Never => {} + TyKind::Slice(ty) => vis.visit_ty(ty), + TyKind::Ptr(mt) => vis.visit_mt(mt), + TyKind::Rptr(lt, mt) => { + visit_opt(lt, |lt| noop_visit_lifetime(lt, vis)); + vis.visit_mt(mt); + } + TyKind::BareFn(bft) => { + let BareFnTy { unsafety: _, abi: _, generic_params, decl } = bft.deref_mut(); + vis.visit_generic_params(generic_params); + vis.visit_fn_decl(decl); + } + TyKind::Tup(tys) => visit_vec(tys, |ty| vis.visit_ty(ty)), + TyKind::Paren(ty) => vis.visit_ty(ty), + TyKind::Path(qself, path) => { + vis.visit_qself(qself); + 
vis.visit_path(path); + } + TyKind::Array(ty, length) => { + vis.visit_ty(ty); + vis.visit_anon_const(length); + } + TyKind::Typeof(expr) => vis.visit_anon_const(expr), + TyKind::TraitObject(bounds, _syntax) => + visit_vec(bounds, |bound| vis.visit_param_bound(bound)), + TyKind::ImplTrait(id, bounds) => { + vis.visit_id(id); + visit_vec(bounds, |bound| vis.visit_param_bound(bound)); + } + TyKind::Mac(mac) => vis.visit_mac(mac), } + vis.visit_span(span); +} + +pub fn noop_visit_foreign_mod(foreign_mod: &mut ForeignMod, vis: &mut T) { + let ForeignMod { abi: _, items} = foreign_mod; + items.flat_map_in_place(|item| vis.flat_map_foreign_item(item)); +} + +pub fn noop_visit_variant(variant: &mut Variant, vis: &mut T) { + let Spanned { node: Variant_ { ident, attrs, data, disr_expr }, span } = variant; + vis.visit_ident(ident); + visit_attrs(attrs, vis); + vis.visit_variant_data(data); + visit_opt(disr_expr, |disr_expr| vis.visit_anon_const(disr_expr)); + vis.visit_span(span); } -pub fn noop_fold_ident(ident: Ident, fld: &mut T) -> Ident { - Ident::new(ident.name, fld.new_span(ident.span)) +pub fn noop_visit_ident(Ident { name: _, span }: &mut Ident, vis: &mut T) { + vis.visit_span(span); } -pub fn noop_fold_path(Path { segments, span }: Path, fld: &mut T) -> Path { - Path { - segments: segments.move_map(|PathSegment { ident, id, args }| PathSegment { - ident: fld.fold_ident(ident), - id: fld.new_id(id), - args: args.map(|args| args.map(|args| fld.fold_generic_args(args))), - }), - span: fld.new_span(span) +pub fn noop_visit_path(Path { segments, span }: &mut Path, vis: &mut T) { + vis.visit_span(span); + for PathSegment { ident, id, args } in segments { + vis.visit_ident(ident); + vis.visit_id(id); + visit_opt(args, |args| vis.visit_generic_args(args)); } } -pub fn noop_fold_qself(qself: Option, fld: &mut T) -> Option { - qself.map(|QSelf { ty, path_span, position }| { - QSelf { - ty: fld.fold_ty(ty), - path_span: fld.new_span(path_span), - position, - } +pub fn 
noop_visit_qself(qself: &mut Option, vis: &mut T) { + visit_opt(qself, |QSelf { ty, path_span, position: _ }| { + vis.visit_ty(ty); + vis.visit_span(path_span); }) } -pub fn noop_fold_generic_args(generic_args: GenericArgs, fld: &mut T) -> GenericArgs -{ +pub fn noop_visit_generic_args(generic_args: &mut GenericArgs, vis: &mut T) { match generic_args { - GenericArgs::AngleBracketed(data) => { - GenericArgs::AngleBracketed(fld.fold_angle_bracketed_parameter_data(data)) - } - GenericArgs::Parenthesized(data) => { - GenericArgs::Parenthesized(fld.fold_parenthesized_parameter_data(data)) - } + GenericArgs::AngleBracketed(data) => vis.visit_angle_bracketed_parameter_data(data), + GenericArgs::Parenthesized(data) => vis.visit_parenthesized_parameter_data(data), } } -pub fn noop_fold_angle_bracketed_parameter_data(data: AngleBracketedArgs, - fld: &mut T) - -> AngleBracketedArgs -{ - let AngleBracketedArgs { args, bindings, span } = data; - AngleBracketedArgs { - args: args.move_map(|arg| fld.fold_generic_arg(arg)), - bindings: bindings.move_map(|b| fld.fold_ty_binding(b)), - span: fld.new_span(span) +pub fn noop_visit_generic_arg(arg: &mut GenericArg, vis: &mut T) { + match arg { + GenericArg::Lifetime(lt) => vis.visit_lifetime(lt), + GenericArg::Type(ty) => vis.visit_ty(ty), } } -pub fn noop_fold_parenthesized_parameter_data(data: ParenthesizedArgs, - fld: &mut T) - -> ParenthesizedArgs -{ - let ParenthesizedArgs { inputs, output, span } = data; - ParenthesizedArgs { - inputs: inputs.move_map(|ty| fld.fold_ty(ty)), - output: output.map(|ty| fld.fold_ty(ty)), - span: fld.new_span(span) - } -} - -pub fn noop_fold_local(l: P, fld: &mut T) -> P { - l.map(|Local {id, pat, ty, init, span, attrs}| Local { - id: fld.new_id(id), - pat: fld.fold_pat(pat), - ty: ty.map(|t| fld.fold_ty(t)), - init: init.map(|e| fld.fold_expr(e)), - span: fld.new_span(span), - attrs: fold_attrs(attrs.into(), fld).into(), - }) +pub fn noop_visit_angle_bracketed_parameter_data(data: &mut 
AngleBracketedArgs, + vis: &mut T) { + let AngleBracketedArgs { args, bindings, span } = data; + visit_vec(args, |arg| vis.visit_generic_arg(arg)); + visit_vec(bindings, |binding| vis.visit_ty_binding(binding)); + vis.visit_span(span); } -pub fn noop_fold_attribute(attr: Attribute, fld: &mut T) -> Attribute { - Attribute { - id: attr.id, - style: attr.style, - path: fld.fold_path(attr.path), - tokens: fld.fold_tts(attr.tokens), - is_sugared_doc: attr.is_sugared_doc, - span: fld.new_span(attr.span), - } +pub fn noop_visit_parenthesized_parameter_data(args: &mut ParenthesizedArgs, + vis: &mut T) { + let ParenthesizedArgs { inputs, output, span } = args; + visit_vec(inputs, |input| vis.visit_ty(input)); + visit_opt(output, |output| vis.visit_ty(output)); + vis.visit_span(span); } -pub fn noop_fold_mac(Spanned {node, span}: Mac, fld: &mut T) -> Mac { - Spanned { - node: Mac_ { - tts: fld.fold_tts(node.stream()).into(), - path: fld.fold_path(node.path), - delim: node.delim, - }, - span: fld.new_span(span) - } +pub fn noop_visit_local(local: &mut P, vis: &mut T) { + let Local { id, pat, ty, init, span, attrs } = local.deref_mut(); + vis.visit_id(id); + vis.visit_pat(pat); + visit_opt(ty, |ty| vis.visit_ty(ty)); + visit_opt(init, |init| vis.visit_expr(init)); + vis.visit_span(span); + visit_thin_attrs(attrs, vis); } -pub fn noop_fold_macro_def(def: MacroDef, fld: &mut T) -> MacroDef { - MacroDef { - tokens: fld.fold_tts(def.tokens.into()).into(), - legacy: def.legacy, - } +pub fn noop_visit_attribute(attr: &mut Attribute, vis: &mut T) { + let Attribute { id: _, style: _, path, tokens, is_sugared_doc: _, span } = attr; + vis.visit_path(path); + vis.visit_tts(tokens); + vis.visit_span(span); } -pub fn noop_fold_meta_list_item(li: NestedMetaItem, fld: &mut T) - -> NestedMetaItem { - Spanned { - node: match li.node { - NestedMetaItemKind::MetaItem(mi) => { - NestedMetaItemKind::MetaItem(fld.fold_meta_item(mi)) - }, - NestedMetaItemKind::Literal(lit) => 
NestedMetaItemKind::Literal(lit) - }, - span: fld.new_span(li.span) - } +pub fn noop_visit_mac(Spanned { node, span }: &mut Mac, vis: &mut T) { + let Mac_ { path, delim: _, tts } = node; + vis.visit_path(path); + vis.visit_tts(tts); + vis.visit_span(span); } -pub fn noop_fold_meta_item(mi: MetaItem, fld: &mut T) -> MetaItem { - MetaItem { - ident: mi.ident, - node: match mi.node { - MetaItemKind::Word => MetaItemKind::Word, - MetaItemKind::List(mis) => { - MetaItemKind::List(mis.move_map(|e| fld.fold_meta_list_item(e))) - }, - MetaItemKind::NameValue(s) => MetaItemKind::NameValue(s), - }, - span: fld.new_span(mi.span) +pub fn noop_visit_macro_def(macro_def: &mut MacroDef, vis: &mut T) { + let MacroDef { tokens, legacy: _ } = macro_def; + vis.visit_tts(tokens); +} + +pub fn noop_visit_meta_list_item(li: &mut NestedMetaItem, vis: &mut T) { + let Spanned { node, span } = li; + match node { + NestedMetaItemKind::MetaItem(mi) => vis.visit_meta_item(mi), + NestedMetaItemKind::Literal(_lit) => {} } + vis.visit_span(span); } -pub fn noop_fold_arg(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg { - Arg { - id: fld.new_id(id), - pat: fld.fold_pat(pat), - ty: fld.fold_ty(ty) +pub fn noop_visit_meta_item(mi: &mut MetaItem, vis: &mut T) { + let MetaItem { ident: _, node, span } = mi; + match node { + MetaItemKind::Word => {} + MetaItemKind::List(mis) => visit_vec(mis, |mi| vis.visit_meta_list_item(mi)), + MetaItemKind::NameValue(_s) => {} } + vis.visit_span(span); +} + +pub fn noop_visit_arg(Arg { id, pat, ty }: &mut Arg, vis: &mut T) { + vis.visit_id(id); + vis.visit_pat(pat); + vis.visit_ty(ty); } -pub fn noop_fold_tt(tt: TokenTree, fld: &mut T) -> TokenTree { +pub fn noop_visit_tt(tt: &mut TokenTree, vis: &mut T) { match tt { - TokenTree::Token(span, tok) => - TokenTree::Token(fld.new_span(span), fld.fold_token(tok)), - TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited( - DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)), - delim, - 
fld.fold_tts(tts).into(), - ), + TokenTree::Token(span, tok) => { + vis.visit_span(span); + vis.visit_token(tok); + } + TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => { + vis.visit_span(open); + vis.visit_span(close); + vis.visit_tts(tts); + } } } -pub fn noop_fold_tts(tts: TokenStream, fld: &mut T) -> TokenStream { - tts.map(|tt| fld.fold_tt(tt)) +pub fn noop_visit_tts(TokenStream(tts): &mut TokenStream, vis: &mut T) { + visit_opt(tts, |tts| { + let tts = Lrc::make_mut(tts); + visit_vec(tts, |(tree, _is_joint)| vis.visit_tt(tree)); + }) } -// apply ident folder if it's an ident, apply other folds to interpolated nodes -pub fn noop_fold_token(t: token::Token, fld: &mut T) -> token::Token { +// apply ident visitor if it's an ident, apply other visits to interpolated nodes +pub fn noop_visit_token(t: &mut Token, vis: &mut T) { match t { - token::Ident(id, is_raw) => token::Ident(fld.fold_ident(id), is_raw), - token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)), + token::Ident(id, _is_raw) => vis.visit_ident(id), + token::Lifetime(id) => vis.visit_ident(id), token::Interpolated(nt) => { - let nt = match Lrc::try_unwrap(nt) { - Ok(nt) => nt, - Err(nt) => (*nt).clone(), - }; - Token::interpolated(fld.fold_interpolated(nt.0)) + let nt = Lrc::make_mut(nt); + vis.visit_interpolated(&mut nt.0); + nt.1 = token::LazyTokenStream::new(); } - _ => t + _ => {} } } -/// apply folder to elements of interpolated nodes +/// Apply visitor to elements of interpolated nodes. // -// N.B., this can occur only when applying a fold to partially expanded code, where -// parsed pieces have gotten implanted ito *other* macro invocations. This is relevant -// for macro hygiene, but possibly not elsewhere. +// N.B., this can occur only when applying a visitor to partially expanded +// code, where parsed pieces have gotten implanted ito *other* macro +// invocations. This is relevant for macro hygiene, but possibly not elsewhere. 
// -// One problem here occurs because the types for fold_item, fold_stmt, etc. allow the -// folder to return *multiple* items; this is a problem for the nodes here, because -// they insist on having exactly one piece. One solution would be to mangle the fold -// trait to include one-to-many and one-to-one versions of these entry points, but that -// would probably confuse a lot of people and help very few. Instead, I'm just going -// to put in dynamic checks. I think the performance impact of this will be pretty much -// nonexistent. The danger is that someone will apply a fold to a partially expanded -// node, and will be confused by the fact that their "fold_item" or "fold_stmt" isn't -// getting called on NtItem or NtStmt nodes. Hopefully they'll wind up reading this -// comment, and doing something appropriate. +// One problem here occurs because the types for flat_map_item, flat_map_stmt, +// etc. allow the visitor to return *multiple* items; this is a problem for the +// nodes here, because they insist on having exactly one piece. One solution +// would be to mangle the MutVisitor trait to include one-to-many and +// one-to-one versions of these entry points, but that would probably confuse a +// lot of people and help very few. Instead, I'm just going to put in dynamic +// checks. I think the performance impact of this will be pretty much +// nonexistent. The danger is that someone will apply a MutVisitor to a +// partially expanded node, and will be confused by the fact that their +// "flat_map_item" or "flat_map_stmt" isn't getting called on NtItem or NtStmt +// nodes. Hopefully they'll wind up reading this comment, and doing something +// appropriate. // -// BTW, design choice: I considered just changing the type of, e.g., NtItem to contain -// multiple items, but decided against it when I looked at parse_item_or_view_item and -// tried to figure out what I would do with multiple items there.... 
-pub fn noop_fold_interpolated(nt: token::Nonterminal, fld: &mut T) - -> token::Nonterminal { +// BTW, design choice: I considered just changing the type of, e.g., NtItem to +// contain multiple items, but decided against it when I looked at +// parse_item_or_view_item and tried to figure out what I would do with +// multiple items there.... +pub fn noop_visit_interpolated(nt: &mut token::Nonterminal, vis: &mut T) { match nt { token::NtItem(item) => - token::NtItem(fld.fold_item(item) - // this is probably okay, because the only folds likely - // to peek inside interpolated nodes will be renamings/markings, - // which map single items to single items - .expect_one("expected fold to produce exactly one item")), - token::NtBlock(block) => token::NtBlock(fld.fold_block(block)), + visit_clobber(item, |item| { + // This is probably okay, because the only visitors likely to + // peek inside interpolated nodes will be renamings/markings, + // which map single items to single items. + vis.flat_map_item(item).expect_one("expected visitor to produce exactly one item") + }), + token::NtBlock(block) => vis.visit_block(block), token::NtStmt(stmt) => - token::NtStmt(fld.fold_stmt(stmt) - // this is probably okay, because the only folds likely - // to peek inside interpolated nodes will be renamings/markings, - // which map single items to single items - .expect_one("expected fold to produce exactly one statement")), - token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)), - token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)), - token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)), - token::NtIdent(ident, is_raw) => token::NtIdent(fld.fold_ident(ident), is_raw), - token::NtLifetime(ident) => token::NtLifetime(fld.fold_ident(ident)), - token::NtLiteral(expr) => token::NtLiteral(fld.fold_expr(expr)), - token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)), - token::NtPath(path) => token::NtPath(fld.fold_path(path)), - token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)), - 
token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)), + visit_clobber(stmt, |stmt| { + // See reasoning above. + vis.flat_map_stmt(stmt).expect_one("expected visitor to produce exactly one item") + }), + token::NtPat(pat) => vis.visit_pat(pat), + token::NtExpr(expr) => vis.visit_expr(expr), + token::NtTy(ty) => vis.visit_ty(ty), + token::NtIdent(ident, _is_raw) => vis.visit_ident(ident), + token::NtLifetime(ident) => vis.visit_ident(ident), + token::NtLiteral(expr) => vis.visit_expr(expr), + token::NtMeta(meta) => vis.visit_meta_item(meta), + token::NtPath(path) => vis.visit_path(path), + token::NtTT(tt) => vis.visit_tt(tt), + token::NtArm(arm) => vis.visit_arm(arm), token::NtImplItem(item) => - token::NtImplItem(fld.fold_impl_item(item) - .expect_one("expected fold to produce exactly one item")), + visit_clobber(item, |item| { + // See reasoning above. + vis.flat_map_impl_item(item) + .expect_one("expected visitor to produce exactly one item") + }), token::NtTraitItem(item) => - token::NtTraitItem(fld.fold_trait_item(item) - .expect_one("expected fold to produce exactly one item")), - token::NtGenerics(generics) => token::NtGenerics(fld.fold_generics(generics)), - token::NtWhereClause(where_clause) => - token::NtWhereClause(fld.fold_where_clause(where_clause)), - token::NtArg(arg) => token::NtArg(fld.fold_arg(arg)), - token::NtVis(vis) => token::NtVis(fld.fold_vis(vis)), - token::NtForeignItem(ni) => - token::NtForeignItem(fld.fold_foreign_item(ni) - // see reasoning above - .expect_one("expected fold to produce exactly one item")), - } -} - -pub fn noop_fold_asyncness(asyncness: IsAsync, fld: &mut T) -> IsAsync { + visit_clobber(item, |item| { + // See reasoning above. 
+ vis.flat_map_trait_item(item) + .expect_one("expected visitor to produce exactly one item") + }), + token::NtGenerics(generics) => vis.visit_generics(generics), + token::NtWhereClause(where_clause) => vis.visit_where_clause(where_clause), + token::NtArg(arg) => vis.visit_arg(arg), + token::NtVis(visib) => vis.visit_vis(visib), + token::NtForeignItem(item) => + visit_clobber(item, |item| { + // See reasoning above. + vis.flat_map_foreign_item(item) + .expect_one("expected visitor to produce exactly one item") + }), + } +} + +pub fn noop_visit_asyncness(asyncness: &mut IsAsync, vis: &mut T) { match asyncness { - IsAsync::Async { closure_id, return_impl_trait_id } => IsAsync::Async { - closure_id: fld.new_id(closure_id), - return_impl_trait_id: fld.new_id(return_impl_trait_id), - }, - IsAsync::NotAsync => IsAsync::NotAsync, + IsAsync::Async { closure_id, return_impl_trait_id } => { + vis.visit_id(closure_id); + vis.visit_id(return_impl_trait_id); + } + IsAsync::NotAsync => {} } } -pub fn noop_fold_fn_decl(decl: P, fld: &mut T) -> P { - decl.map(|FnDecl {inputs, output, variadic}| FnDecl { - inputs: inputs.move_map(|x| fld.fold_arg(x)), - output: match output { - FunctionRetTy::Ty(ty) => FunctionRetTy::Ty(fld.fold_ty(ty)), - FunctionRetTy::Default(span) => FunctionRetTy::Default(fld.new_span(span)), - }, - variadic, - }) +pub fn noop_visit_fn_decl(decl: &mut P, vis: &mut T) { + let FnDecl { inputs, output, variadic: _ } = decl.deref_mut(); + visit_vec(inputs, |input| vis.visit_arg(input)); + match output { + FunctionRetTy::Default(span) => vis.visit_span(span), + FunctionRetTy::Ty(ty) => vis.visit_ty(ty), + } } -pub fn noop_fold_param_bound(pb: GenericBound, fld: &mut T) -> GenericBound where T: Folder { +pub fn noop_visit_param_bound(pb: &mut GenericBound, vis: &mut T) { match pb { - GenericBound::Trait(ty, modifier) => { - GenericBound::Trait(fld.fold_poly_trait_ref(ty), modifier) - } - GenericBound::Outlives(lifetime) => { - 
GenericBound::Outlives(noop_fold_lifetime(lifetime, fld)) - } + GenericBound::Trait(ty, _modifier) => vis.visit_poly_trait_ref(ty), + GenericBound::Outlives(lifetime) => noop_visit_lifetime(lifetime, vis), } } -pub fn noop_fold_generic_param(param: GenericParam, fld: &mut T) -> GenericParam { - GenericParam { - ident: fld.fold_ident(param.ident), - id: fld.new_id(param.id), - attrs: fold_thin_attrs(param.attrs, fld), - bounds: param.bounds.move_map(|l| noop_fold_param_bound(l, fld)), - kind: match param.kind { - GenericParamKind::Lifetime => GenericParamKind::Lifetime, - GenericParamKind::Type { default } => GenericParamKind::Type { - default: default.map(|ty| fld.fold_ty(ty)) - } +pub fn noop_visit_generic_param(param: &mut GenericParam, vis: &mut T) { + let GenericParam { id, ident, attrs, bounds, kind } = param; + vis.visit_id(id); + vis.visit_ident(ident); + visit_thin_attrs(attrs, vis); + visit_vec(bounds, |bound| noop_visit_param_bound(bound, vis)); + match kind { + GenericParamKind::Lifetime => {} + GenericParamKind::Type { default } => { + visit_opt(default, |default| vis.visit_ty(default)); } } } -pub fn noop_fold_generic_params( - params: Vec, - fld: &mut T -) -> Vec { - params.move_map(|p| fld.fold_generic_param(p)) +pub fn noop_visit_generic_params(params: &mut Vec, vis: &mut T){ + visit_vec(params, |param| vis.visit_generic_param(param)); } -pub fn noop_fold_label(label: Label, fld: &mut T) -> Label { - Label { - ident: fld.fold_ident(label.ident), - } +pub fn noop_visit_label(Label { ident }: &mut Label, vis: &mut T) { + vis.visit_ident(ident); } -fn noop_fold_lifetime(l: Lifetime, fld: &mut T) -> Lifetime { - Lifetime { - id: fld.new_id(l.id), - ident: fld.fold_ident(l.ident), - } +fn noop_visit_lifetime(Lifetime { id, ident }: &mut Lifetime, vis: &mut T) { + vis.visit_id(id); + vis.visit_ident(ident); } -pub fn noop_fold_generics(Generics { params, where_clause, span }: Generics, - fld: &mut T) -> Generics { - Generics { - params: 
fld.fold_generic_params(params), - where_clause: fld.fold_where_clause(where_clause), - span: fld.new_span(span), - } +pub fn noop_visit_generics(generics: &mut Generics, vis: &mut T) { + let Generics { params, where_clause, span } = generics; + vis.visit_generic_params(params); + vis.visit_where_clause(where_clause); + vis.visit_span(span); } -pub fn noop_fold_where_clause( - WhereClause {id, predicates, span}: WhereClause, - fld: &mut T) - -> WhereClause { - WhereClause { - id: fld.new_id(id), - predicates: predicates.move_map(|predicate| { - fld.fold_where_predicate(predicate) - }), - span: fld.new_span(span), - } +pub fn noop_visit_where_clause(wc: &mut WhereClause, vis: &mut T) { + let WhereClause { id, predicates, span } = wc; + vis.visit_id(id); + visit_vec(predicates, |predicate| vis.visit_where_predicate(predicate)); + vis.visit_span(span); } -pub fn noop_fold_where_predicate( - pred: WherePredicate, - fld: &mut T) - -> WherePredicate { +pub fn noop_visit_where_predicate(pred: &mut WherePredicate, vis: &mut T) { match pred { - WherePredicate::BoundPredicate(WhereBoundPredicate { bound_generic_params, - bounded_ty, - bounds, - span }) => { - WherePredicate::BoundPredicate(WhereBoundPredicate { - bound_generic_params: fld.fold_generic_params(bound_generic_params), - bounded_ty: fld.fold_ty(bounded_ty), - bounds: bounds.move_map(|x| fld.fold_param_bound(x)), - span: fld.new_span(span) - }) - } - WherePredicate::RegionPredicate(WhereRegionPredicate { lifetime, bounds, span }) => { - WherePredicate::RegionPredicate(WhereRegionPredicate { - span: fld.new_span(span), - lifetime: noop_fold_lifetime(lifetime, fld), - bounds: bounds.move_map(|bound| noop_fold_param_bound(bound, fld)) - }) - } - WherePredicate::EqPredicate(WhereEqPredicate { id, lhs_ty, rhs_ty, span }) => { - WherePredicate::EqPredicate(WhereEqPredicate{ - id: fld.new_id(id), - lhs_ty: fld.fold_ty(lhs_ty), - rhs_ty: fld.fold_ty(rhs_ty), - span: fld.new_span(span) - }) - } - } -} - -pub fn 
noop_fold_variant_data(vdata: VariantData, fld: &mut T) -> VariantData { - match vdata { - VariantData::Struct(fields, id) => { - VariantData::Struct(fields.move_map(|f| fld.fold_struct_field(f)), fld.new_id(id)) + WherePredicate::BoundPredicate(bp) => { + let WhereBoundPredicate { span, bound_generic_params, bounded_ty, bounds } = bp; + vis.visit_span(span); + vis.visit_generic_params(bound_generic_params); + vis.visit_ty(bounded_ty); + visit_vec(bounds, |bound| vis.visit_param_bound(bound)); } - VariantData::Tuple(fields, id) => { - VariantData::Tuple(fields.move_map(|f| fld.fold_struct_field(f)), fld.new_id(id)) + WherePredicate::RegionPredicate(rp) => { + let WhereRegionPredicate { span, lifetime, bounds } = rp; + vis.visit_span(span); + noop_visit_lifetime(lifetime, vis); + visit_vec(bounds, |bound| noop_visit_param_bound(bound, vis)); + } + WherePredicate::EqPredicate(ep) => { + let WhereEqPredicate { id, span, lhs_ty, rhs_ty } = ep; + vis.visit_id(id); + vis.visit_span(span); + vis.visit_ty(lhs_ty); + vis.visit_ty(rhs_ty); } - VariantData::Unit(id) => VariantData::Unit(fld.new_id(id)) } } -pub fn noop_fold_trait_ref(p: TraitRef, fld: &mut T) -> TraitRef { - let id = fld.new_id(p.ref_id); - let TraitRef { - path, - ref_id: _, - } = p; - TraitRef { - path: fld.fold_path(path), - ref_id: id, +pub fn noop_visit_variant_data(vdata: &mut VariantData, vis: &mut T) { + match vdata { + VariantData::Struct(fields, id) | + VariantData::Tuple(fields, id) => { + visit_vec(fields, |field| vis.visit_struct_field(field)); + vis.visit_id(id); + } + VariantData::Unit(id) => vis.visit_id(id), } } -pub fn noop_fold_poly_trait_ref(p: PolyTraitRef, fld: &mut T) -> PolyTraitRef { - PolyTraitRef { - bound_generic_params: fld.fold_generic_params(p.bound_generic_params), - trait_ref: fld.fold_trait_ref(p.trait_ref), - span: fld.new_span(p.span), - } +pub fn noop_visit_trait_ref(TraitRef { path, ref_id }: &mut TraitRef, vis: &mut T) { + vis.visit_path(path); + vis.visit_id(ref_id); } 
-pub fn noop_fold_struct_field(f: StructField, fld: &mut T) -> StructField { - StructField { - span: fld.new_span(f.span), - id: fld.new_id(f.id), - ident: f.ident.map(|ident| fld.fold_ident(ident)), - vis: fld.fold_vis(f.vis), - ty: fld.fold_ty(f.ty), - attrs: fold_attrs(f.attrs, fld), - } +pub fn noop_visit_poly_trait_ref(p: &mut PolyTraitRef, vis: &mut T) { + let PolyTraitRef { bound_generic_params, trait_ref, span } = p; + vis.visit_generic_params(bound_generic_params); + vis.visit_trait_ref(trait_ref); + vis.visit_span(span); } -pub fn noop_fold_field(f: Field, folder: &mut T) -> Field { - Field { - ident: folder.fold_ident(f.ident), - expr: folder.fold_expr(f.expr), - span: folder.new_span(f.span), - is_shorthand: f.is_shorthand, - attrs: fold_thin_attrs(f.attrs, folder), - } +pub fn noop_visit_struct_field(f: &mut StructField, visitor: &mut T) { + let StructField { span, ident, vis, id, ty, attrs } = f; + visitor.visit_span(span); + visit_opt(ident, |ident| visitor.visit_ident(ident)); + visitor.visit_vis(vis); + visitor.visit_id(id); + visitor.visit_ty(ty); + visit_attrs(attrs, visitor); } -pub fn noop_fold_mt(MutTy {ty, mutbl}: MutTy, folder: &mut T) -> MutTy { - MutTy { - ty: folder.fold_ty(ty), - mutbl, - } +pub fn noop_visit_field(f: &mut Field, vis: &mut T) { + let Field { ident, expr, span, is_shorthand: _, attrs } = f; + vis.visit_ident(ident); + vis.visit_expr(expr); + vis.visit_span(span); + visit_thin_attrs(attrs, vis); } -pub fn noop_fold_block(b: P, folder: &mut T) -> P { - b.map(|Block {id, stmts, rules, span}| Block { - id: folder.new_id(id), - stmts: stmts.move_flat_map(|s| folder.fold_stmt(s).into_iter()), - rules, - span: folder.new_span(span), - }) +pub fn noop_visit_mt(MutTy { ty, mutbl: _ }: &mut MutTy, vis: &mut T) { + vis.visit_ty(ty); } -pub fn noop_fold_item_kind(i: ItemKind, folder: &mut T) -> ItemKind { - match i { - ItemKind::ExternCrate(orig_name) => ItemKind::ExternCrate(orig_name), - ItemKind::Use(use_tree) => { - 
ItemKind::Use(use_tree.map(|tree| folder.fold_use_tree(tree))) - } - ItemKind::Static(t, m, e) => { - ItemKind::Static(folder.fold_ty(t), m, folder.fold_expr(e)) +pub fn noop_visit_block(block: &mut P, vis: &mut T) { + let Block { id, stmts, rules: _, span } = block.deref_mut(); + vis.visit_id(id); + stmts.flat_map_in_place(|stmt| vis.flat_map_stmt(stmt)); + vis.visit_span(span); +} + +pub fn noop_visit_item_kind(kind: &mut ItemKind, vis: &mut T) { + match kind { + ItemKind::ExternCrate(_orig_name) => {} + ItemKind::Use(use_tree) => vis.visit_use_tree(use_tree), + ItemKind::Static(ty, _mut, expr) => { + vis.visit_ty(ty); + vis.visit_expr(expr); } - ItemKind::Const(t, e) => { - ItemKind::Const(folder.fold_ty(t), folder.fold_expr(e)) + ItemKind::Const(ty, expr) => { + vis.visit_ty(ty); + vis.visit_expr(expr); } ItemKind::Fn(decl, header, generics, body) => { - let generics = folder.fold_generics(generics); - let header = folder.fold_fn_header(header); - let decl = folder.fold_fn_decl(decl); - let body = folder.fold_block(body); - ItemKind::Fn(decl, header, generics, body) - } - ItemKind::Mod(m) => ItemKind::Mod(folder.fold_mod(m)), - ItemKind::ForeignMod(nm) => ItemKind::ForeignMod(folder.fold_foreign_mod(nm)), - ItemKind::GlobalAsm(ga) => ItemKind::GlobalAsm(ga), - ItemKind::Ty(t, generics) => { - ItemKind::Ty(folder.fold_ty(t), folder.fold_generics(generics)) - } - ItemKind::Existential(bounds, generics) => ItemKind::Existential( - fold_bounds(bounds, folder), - folder.fold_generics(generics), - ), - ItemKind::Enum(enum_definition, generics) => { - let generics = folder.fold_generics(generics); - let variants = enum_definition.variants.move_map(|x| folder.fold_variant(x)); - ItemKind::Enum(EnumDef { variants }, generics) - } - ItemKind::Struct(struct_def, generics) => { - let generics = folder.fold_generics(generics); - ItemKind::Struct(folder.fold_variant_data(struct_def), generics) - } - ItemKind::Union(struct_def, generics) => { - let generics = 
folder.fold_generics(generics); - ItemKind::Union(folder.fold_variant_data(struct_def), generics) - } - ItemKind::Impl(unsafety, - polarity, - defaultness, - generics, - ifce, - ty, - impl_items) => ItemKind::Impl( - unsafety, - polarity, - defaultness, - folder.fold_generics(generics), - ifce.map(|trait_ref| folder.fold_trait_ref(trait_ref)), - folder.fold_ty(ty), - impl_items.move_flat_map(|item| folder.fold_impl_item(item)), - ), - ItemKind::Trait(is_auto, unsafety, generics, bounds, items) => ItemKind::Trait( - is_auto, - unsafety, - folder.fold_generics(generics), - fold_bounds(bounds, folder), - items.move_flat_map(|item| folder.fold_trait_item(item)), - ), - ItemKind::TraitAlias(generics, bounds) => ItemKind::TraitAlias( - folder.fold_generics(generics), - fold_bounds(bounds, folder)), - ItemKind::Mac(m) => ItemKind::Mac(folder.fold_mac(m)), - ItemKind::MacroDef(def) => ItemKind::MacroDef(folder.fold_macro_def(def)), - } -} - -pub fn noop_fold_trait_item(i: TraitItem, folder: &mut T) -> SmallVec<[TraitItem; 1]> { - smallvec![TraitItem { - id: folder.new_id(i.id), - ident: folder.fold_ident(i.ident), - attrs: fold_attrs(i.attrs, folder), - generics: folder.fold_generics(i.generics), - node: match i.node { - TraitItemKind::Const(ty, default) => { - TraitItemKind::Const(folder.fold_ty(ty), - default.map(|x| folder.fold_expr(x))) - } - TraitItemKind::Method(sig, body) => { - TraitItemKind::Method(fold_method_sig(sig, folder), - body.map(|x| folder.fold_block(x))) - } - TraitItemKind::Type(bounds, default) => { - TraitItemKind::Type(fold_bounds(bounds, folder), - default.map(|x| folder.fold_ty(x))) - } - TraitItemKind::Macro(mac) => { - TraitItemKind::Macro(folder.fold_mac(mac)) - } - }, - span: folder.new_span(i.span), - tokens: i.tokens, - }] -} - -pub fn noop_fold_impl_item(i: ImplItem, folder: &mut T)-> SmallVec<[ImplItem; 1]> { - smallvec![ImplItem { - id: folder.new_id(i.id), - vis: folder.fold_vis(i.vis), - ident: folder.fold_ident(i.ident), - attrs: 
fold_attrs(i.attrs, folder), - generics: folder.fold_generics(i.generics), - defaultness: i.defaultness, - node: match i.node { - ImplItemKind::Const(ty, expr) => { - ImplItemKind::Const(folder.fold_ty(ty), folder.fold_expr(expr)) - } - ImplItemKind::Method(sig, body) => { - ImplItemKind::Method(fold_method_sig(sig, folder), - folder.fold_block(body)) - } - ImplItemKind::Type(ty) => ImplItemKind::Type(folder.fold_ty(ty)), - ImplItemKind::Existential(bounds) => { - ImplItemKind::Existential(fold_bounds(bounds, folder)) - }, - ImplItemKind::Macro(mac) => ImplItemKind::Macro(folder.fold_mac(mac)) - }, - span: folder.new_span(i.span), - tokens: i.tokens, - }] -} - -pub fn noop_fold_fn_header(mut header: FnHeader, folder: &mut T) -> FnHeader { - header.asyncness = folder.fold_asyncness(header.asyncness); - header + vis.visit_fn_decl(decl); + vis.visit_fn_header(header); + vis.visit_generics(generics); + vis.visit_block(body); + } + ItemKind::Mod(m) => vis.visit_mod(m), + ItemKind::ForeignMod(nm) => vis.visit_foreign_mod(nm), + ItemKind::GlobalAsm(_ga) => {} + ItemKind::Ty(ty, generics) => { + vis.visit_ty(ty); + vis.visit_generics(generics); + } + ItemKind::Existential(bounds, generics) => { + visit_bounds(bounds, vis); + vis.visit_generics(generics); + } + ItemKind::Enum(EnumDef { variants }, generics) => { + visit_vec(variants, |variant| vis.visit_variant(variant)); + vis.visit_generics(generics); + } + ItemKind::Struct(variant_data, generics) | + ItemKind::Union(variant_data, generics) => { + vis.visit_variant_data(variant_data); + vis.visit_generics(generics); + } + ItemKind::Impl(_unsafety, _polarity, _defaultness, generics, trait_ref, ty, items) => { + vis.visit_generics(generics); + visit_opt(trait_ref, |trait_ref| vis.visit_trait_ref(trait_ref)); + vis.visit_ty(ty); + items.flat_map_in_place(|item| vis.flat_map_impl_item(item)); + } + ItemKind::Trait(_is_auto, _unsafety, generics, bounds, items) => { + vis.visit_generics(generics); + visit_bounds(bounds, vis); + 
items.flat_map_in_place(|item| vis.flat_map_trait_item(item)); + } + ItemKind::TraitAlias(generics, bounds) => { + vis.visit_generics(generics); + visit_bounds(bounds, vis); + } + ItemKind::Mac(m) => vis.visit_mac(m), + ItemKind::MacroDef(def) => vis.visit_macro_def(def), + } } -pub fn noop_fold_mod(Mod {inner, items, inline}: Mod, folder: &mut T) -> Mod { - Mod { - inner: folder.new_span(inner), - items: items.move_flat_map(|x| folder.fold_item(x)), - inline: inline, +pub fn noop_flat_map_trait_item(mut item: TraitItem, vis: &mut T) + -> SmallVec<[TraitItem; 1]> +{ + let TraitItem { id, ident, attrs, generics, node, span, tokens: _ } = &mut item; + vis.visit_id(id); + vis.visit_ident(ident); + visit_attrs(attrs, vis); + vis.visit_generics(generics); + match node { + TraitItemKind::Const(ty, default) => { + vis.visit_ty(ty); + visit_opt(default, |default| vis.visit_expr(default)); + } + TraitItemKind::Method(sig, body) => { + visit_method_sig(sig, vis); + visit_opt(body, |body| vis.visit_block(body)); + } + TraitItemKind::Type(bounds, default) => { + visit_bounds(bounds, vis); + visit_opt(default, |default| vis.visit_ty(default)); + } + TraitItemKind::Macro(mac) => { + vis.visit_mac(mac); + } } -} + vis.visit_span(span); -pub fn noop_fold_crate(Crate {module, attrs, span}: Crate, - folder: &mut T) -> Crate { - let item = P(Item { - ident: keywords::Invalid.ident(), - attrs, - id: DUMMY_NODE_ID, - vis: respan(span.shrink_to_lo(), VisibilityKind::Public), - span, - node: ItemKind::Mod(module), - tokens: None, - }); - let items = folder.fold_item(item); + smallvec![item] +} - let len = items.len(); - if len == 0 { - let module = Mod { inner: span, items: vec![], inline: true }; - Crate { module, attrs: vec![], span } - } else if len == 1 { - let Item { attrs, span, node, .. 
} = items.into_iter().next().unwrap().into_inner(); - match node { - ItemKind::Mod(module) => Crate { module, attrs, span }, - _ => panic!("fold converted a module to not a module"), +pub fn noop_flat_map_impl_item(mut item: ImplItem, visitor: &mut T) + -> SmallVec<[ImplItem; 1]> +{ + let ImplItem { id, ident, vis, defaultness: _, attrs, generics, node, span, tokens: _ } = + &mut item; + visitor.visit_id(id); + visitor.visit_ident(ident); + visitor.visit_vis(vis); + visit_attrs(attrs, visitor); + visitor.visit_generics(generics); + match node { + ImplItemKind::Const(ty, expr) => { + visitor.visit_ty(ty); + visitor.visit_expr(expr); } - } else { - panic!("a crate cannot expand to more than one item"); - } + ImplItemKind::Method(sig, body) => { + visit_method_sig(sig, visitor); + visitor.visit_block(body); + } + ImplItemKind::Type(ty) => visitor.visit_ty(ty), + ImplItemKind::Existential(bounds) => visit_bounds(bounds, visitor), + ImplItemKind::Macro(mac) => visitor.visit_mac(mac), + } + visitor.visit_span(span); + + smallvec![item] +} + +pub fn noop_visit_fn_header(header: &mut FnHeader, vis: &mut T) { + let FnHeader { unsafety: _, asyncness, constness: _, abi: _ } = header; + vis.visit_asyncness(asyncness); +} + +pub fn noop_visit_mod(Mod { inner, items, inline: _ }: &mut Mod, vis: &mut T) { + vis.visit_span(inner); + items.flat_map_in_place(|item| vis.flat_map_item(item)); +} + +pub fn noop_visit_crate(krate: &mut Crate, vis: &mut T) { + visit_clobber(krate, |Crate { module, attrs, span }| { + let item = P(Item { + ident: keywords::Invalid.ident(), + attrs, + id: DUMMY_NODE_ID, + vis: respan(span.shrink_to_lo(), VisibilityKind::Public), + span, + node: ItemKind::Mod(module), + tokens: None, + }); + let items = vis.flat_map_item(item); + + let len = items.len(); + if len == 0 { + let module = Mod { inner: span, items: vec![], inline: true }; + Crate { module, attrs: vec![], span } + } else if len == 1 { + let Item { attrs, span, node, .. 
} = items.into_iter().next().unwrap().into_inner(); + match node { + ItemKind::Mod(module) => Crate { module, attrs, span }, + _ => panic!("visitor converted a module to not a module"), + } + } else { + panic!("a crate cannot expand to more than one item"); + } + }); } -// fold one item into possibly many items -pub fn noop_fold_item(i: P, folder: &mut T) -> SmallVec<[P; 1]> { - smallvec![i.map(|i| { - let Item {id, ident, attrs, node, vis, span, tokens} = i; - Item { - id: folder.new_id(id), - vis: folder.fold_vis(vis), - ident: folder.fold_ident(ident), - attrs: fold_attrs(attrs, folder), - node: folder.fold_item_kind(node), - span: folder.new_span(span), +// Mutate one item into possibly many items. +pub fn noop_flat_map_item(mut item: P, visitor: &mut T) + -> SmallVec<[P; 1]> { + let Item { ident, attrs, id, node, vis, span, tokens: _ } = item.deref_mut(); + visitor.visit_ident(ident); + visit_attrs(attrs, visitor); + visitor.visit_id(id); + visitor.visit_item_kind(node); + visitor.visit_vis(vis); + visitor.visit_span(span); - // FIXME: if this is replaced with a call to `folder.fold_tts` it causes - // an ICE during resolve... odd! - tokens, - } - })] + // FIXME: if `tokens` is modified with a call to `vis.visit_tts` it causes + // an ICE during resolve... odd! 
+ + smallvec![item] } -pub fn noop_fold_foreign_item(ni: ForeignItem, folder: &mut T) +pub fn noop_flat_map_foreign_item(mut item: ForeignItem, visitor: &mut T) -> SmallVec<[ForeignItem; 1]> { - smallvec![ForeignItem { - id: folder.new_id(ni.id), - vis: folder.fold_vis(ni.vis), - ident: folder.fold_ident(ni.ident), - attrs: fold_attrs(ni.attrs, folder), - node: match ni.node { - ForeignItemKind::Fn(fdec, generics) => { - ForeignItemKind::Fn(folder.fold_fn_decl(fdec), folder.fold_generics(generics)) - } - ForeignItemKind::Static(t, m) => { - ForeignItemKind::Static(folder.fold_ty(t), m) - } - ForeignItemKind::Ty => ForeignItemKind::Ty, - ForeignItemKind::Macro(mac) => ForeignItemKind::Macro(folder.fold_mac(mac)), - }, - span: folder.new_span(ni.span) - }] -} - -pub fn noop_fold_pat(p: P, folder: &mut T) -> P { - p.map(|Pat {id, node, span}| Pat { - id: folder.new_id(id), - node: match node { - PatKind::Wild => PatKind::Wild, - PatKind::Ident(binding_mode, ident, sub) => { - PatKind::Ident(binding_mode, - folder.fold_ident(ident), - sub.map(|x| folder.fold_pat(x))) - } - PatKind::Lit(e) => PatKind::Lit(folder.fold_expr(e)), - PatKind::TupleStruct(pth, pats, ddpos) => { - PatKind::TupleStruct(folder.fold_path(pth), - pats.move_map(|x| folder.fold_pat(x)), ddpos) - } - PatKind::Path(qself, pth) => { - PatKind::Path(folder.fold_qself(qself), folder.fold_path(pth)) - } - PatKind::Struct(pth, fields, etc) => { - let pth = folder.fold_path(pth); - let fs = fields.move_map(|f| { - Spanned { span: folder.new_span(f.span), - node: FieldPat { - ident: folder.fold_ident(f.node.ident), - pat: folder.fold_pat(f.node.pat), - is_shorthand: f.node.is_shorthand, - attrs: fold_attrs(f.node.attrs.into(), folder).into() - }} - }); - PatKind::Struct(pth, fs, etc) - } - PatKind::Tuple(elts, ddpos) => { - PatKind::Tuple(elts.move_map(|x| folder.fold_pat(x)), ddpos) - } - PatKind::Box(inner) => PatKind::Box(folder.fold_pat(inner)), - PatKind::Ref(inner, mutbl) => 
PatKind::Ref(folder.fold_pat(inner), mutbl), - PatKind::Range(e1, e2, Spanned { span, node }) => { - PatKind::Range(folder.fold_expr(e1), - folder.fold_expr(e2), - Spanned { node, span: folder.new_span(span) }) - }, - PatKind::Slice(before, slice, after) => { - PatKind::Slice(before.move_map(|x| folder.fold_pat(x)), - slice.map(|x| folder.fold_pat(x)), - after.move_map(|x| folder.fold_pat(x))) - } - PatKind::Paren(inner) => PatKind::Paren(folder.fold_pat(inner)), - PatKind::Mac(mac) => PatKind::Mac(folder.fold_mac(mac)) - }, - span: folder.new_span(span) - }) + let ForeignItem { ident, attrs, node, id, span, vis } = &mut item; + visitor.visit_ident(ident); + visit_attrs(attrs, visitor); + match node { + ForeignItemKind::Fn(fdec, generics) => { + visitor.visit_fn_decl(fdec); + visitor.visit_generics(generics); + } + ForeignItemKind::Static(t, _m) => visitor.visit_ty(t), + ForeignItemKind::Ty => {} + ForeignItemKind::Macro(mac) => visitor.visit_mac(mac), + } + visitor.visit_id(id); + visitor.visit_span(span); + visitor.visit_vis(vis); + + smallvec![item] } -pub fn noop_fold_anon_const(constant: AnonConst, folder: &mut T) -> AnonConst { - let AnonConst {id, value} = constant; - AnonConst { - id: folder.new_id(id), - value: folder.fold_expr(value), +pub fn noop_visit_pat(pat: &mut P, vis: &mut T) { + let Pat { id, node, span } = pat.deref_mut(); + vis.visit_id(id); + match node { + PatKind::Wild => {} + PatKind::Ident(_binding_mode, ident, sub) => { + vis.visit_ident(ident); + visit_opt(sub, |sub| vis.visit_pat(sub)); + } + PatKind::Lit(e) => vis.visit_expr(e), + PatKind::TupleStruct(path, pats, _ddpos) => { + vis.visit_path(path); + visit_vec(pats, |pat| vis.visit_pat(pat)); + } + PatKind::Path(qself, path) => { + vis.visit_qself(qself); + vis.visit_path(path); + } + PatKind::Struct(path, fields, _etc) => { + vis.visit_path(path); + for Spanned { node: FieldPat { ident, pat, is_shorthand: _, attrs }, span } in fields { + vis.visit_ident(ident); + vis.visit_pat(pat); + 
visit_thin_attrs(attrs, vis); + vis.visit_span(span); + }; + } + PatKind::Tuple(elts, _ddpos) => visit_vec(elts, |elt| vis.visit_pat(elt)), + PatKind::Box(inner) => vis.visit_pat(inner), + PatKind::Ref(inner, _mutbl) => vis.visit_pat(inner), + PatKind::Range(e1, e2, Spanned { span: _, node: _ }) => { + vis.visit_expr(e1); + vis.visit_expr(e2); + vis.visit_span(span); + }, + PatKind::Slice(before, slice, after) => { + visit_vec(before, |pat| vis.visit_pat(pat)); + visit_opt(slice, |slice| vis.visit_pat(slice)); + visit_vec(after, |pat| vis.visit_pat(pat)); + } + PatKind::Paren(inner) => vis.visit_pat(inner), + PatKind::Mac(mac) => vis.visit_mac(mac), } + vis.visit_span(span); } -pub fn noop_fold_expr(Expr {id, node, span, attrs}: Expr, folder: &mut T) -> Expr { - Expr { - node: match node { - ExprKind::Box(e) => { - ExprKind::Box(folder.fold_expr(e)) - } - ExprKind::ObsoleteInPlace(a, b) => { - ExprKind::ObsoleteInPlace(folder.fold_expr(a), folder.fold_expr(b)) - } - ExprKind::Array(exprs) => { - ExprKind::Array(fold_exprs(exprs, folder)) - } - ExprKind::Repeat(expr, count) => { - ExprKind::Repeat(folder.fold_expr(expr), folder.fold_anon_const(count)) - } - ExprKind::Tup(exprs) => ExprKind::Tup(fold_exprs(exprs, folder)), - ExprKind::Call(f, args) => { - ExprKind::Call(folder.fold_expr(f), - fold_exprs(args, folder)) - } - ExprKind::MethodCall(seg, args) => { - ExprKind::MethodCall( - PathSegment { - ident: folder.fold_ident(seg.ident), - id: folder.new_id(seg.id), - args: seg.args.map(|args| { - args.map(|args| folder.fold_generic_args(args)) - }), - }, - fold_exprs(args, folder)) - } - ExprKind::Binary(binop, lhs, rhs) => { - ExprKind::Binary(binop, - folder.fold_expr(lhs), - folder.fold_expr(rhs)) - } - ExprKind::Unary(binop, ohs) => { - ExprKind::Unary(binop, folder.fold_expr(ohs)) - } - ExprKind::Lit(l) => ExprKind::Lit(l), - ExprKind::Cast(expr, ty) => { - ExprKind::Cast(folder.fold_expr(expr), folder.fold_ty(ty)) - } - ExprKind::Type(expr, ty) => { - 
ExprKind::Type(folder.fold_expr(expr), folder.fold_ty(ty)) - } - ExprKind::AddrOf(m, ohs) => ExprKind::AddrOf(m, folder.fold_expr(ohs)), - ExprKind::If(cond, tr, fl) => { - ExprKind::If(folder.fold_expr(cond), - folder.fold_block(tr), - fl.map(|x| folder.fold_expr(x))) - } - ExprKind::IfLet(pats, expr, tr, fl) => { - ExprKind::IfLet(pats.move_map(|pat| folder.fold_pat(pat)), - folder.fold_expr(expr), - folder.fold_block(tr), - fl.map(|x| folder.fold_expr(x))) - } - ExprKind::While(cond, body, opt_label) => { - ExprKind::While(folder.fold_expr(cond), - folder.fold_block(body), - opt_label.map(|label| folder.fold_label(label))) - } - ExprKind::WhileLet(pats, expr, body, opt_label) => { - ExprKind::WhileLet(pats.move_map(|pat| folder.fold_pat(pat)), - folder.fold_expr(expr), - folder.fold_block(body), - opt_label.map(|label| folder.fold_label(label))) - } - ExprKind::ForLoop(pat, iter, body, opt_label) => { - ExprKind::ForLoop(folder.fold_pat(pat), - folder.fold_expr(iter), - folder.fold_block(body), - opt_label.map(|label| folder.fold_label(label))) - } - ExprKind::Loop(body, opt_label) => { - ExprKind::Loop(folder.fold_block(body), - opt_label.map(|label| folder.fold_label(label))) - } - ExprKind::Match(expr, arms) => { - ExprKind::Match(folder.fold_expr(expr), - arms.move_map(|x| folder.fold_arm(x))) - } - ExprKind::Closure(capture_clause, asyncness, movability, decl, body, span) => { - ExprKind::Closure(capture_clause, - folder.fold_asyncness(asyncness), - movability, - folder.fold_fn_decl(decl), - folder.fold_expr(body), - folder.new_span(span)) - } - ExprKind::Block(blk, opt_label) => { - ExprKind::Block(folder.fold_block(blk), - opt_label.map(|label| folder.fold_label(label))) - } - ExprKind::Async(capture_clause, node_id, body) => { - ExprKind::Async( - capture_clause, - folder.new_id(node_id), - folder.fold_block(body), - ) - } - ExprKind::Assign(el, er) => { - ExprKind::Assign(folder.fold_expr(el), folder.fold_expr(er)) - } - ExprKind::AssignOp(op, el, er) 
=> { - ExprKind::AssignOp(op, - folder.fold_expr(el), - folder.fold_expr(er)) - } - ExprKind::Field(el, ident) => { - ExprKind::Field(folder.fold_expr(el), folder.fold_ident(ident)) - } - ExprKind::Index(el, er) => { - ExprKind::Index(folder.fold_expr(el), folder.fold_expr(er)) - } - ExprKind::Range(e1, e2, lim) => { - ExprKind::Range(e1.map(|x| folder.fold_expr(x)), - e2.map(|x| folder.fold_expr(x)), - lim) - } - ExprKind::Path(qself, path) => { - ExprKind::Path(folder.fold_qself(qself), folder.fold_path(path)) - } - ExprKind::Break(opt_label, opt_expr) => { - ExprKind::Break(opt_label.map(|label| folder.fold_label(label)), - opt_expr.map(|e| folder.fold_expr(e))) - } - ExprKind::Continue(opt_label) => { - ExprKind::Continue(opt_label.map(|label| folder.fold_label(label))) - } - ExprKind::Ret(e) => ExprKind::Ret(e.map(|x| folder.fold_expr(x))), - ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm.map(|asm| { - InlineAsm { - inputs: asm.inputs.move_map(|(c, input)| { - (c, folder.fold_expr(input)) - }), - outputs: asm.outputs.move_map(|out| { - InlineAsmOutput { - constraint: out.constraint, - expr: folder.fold_expr(out.expr), - is_rw: out.is_rw, - is_indirect: out.is_indirect, - } - }), - ..asm - } - })), - ExprKind::Mac(mac) => ExprKind::Mac(folder.fold_mac(mac)), - ExprKind::Struct(path, fields, maybe_expr) => { - ExprKind::Struct(folder.fold_path(path), - fields.move_map(|x| folder.fold_field(x)), - maybe_expr.map(|x| folder.fold_expr(x))) - }, - ExprKind::Paren(ex) => { - let sub_expr = folder.fold_expr(ex); - return Expr { - // Nodes that are equal modulo `Paren` sugar no-ops should have the same ids. 
- id: sub_expr.id, - node: ExprKind::Paren(sub_expr), - span: folder.new_span(span), - attrs: fold_attrs(attrs.into(), folder).into(), - }; +pub fn noop_visit_anon_const(AnonConst { id, value }: &mut AnonConst, vis: &mut T) { + vis.visit_id(id); + vis.visit_expr(value); +} + +pub fn noop_visit_expr(Expr { node, id, span, attrs }: &mut Expr, vis: &mut T) { + match node { + ExprKind::Box(expr) => vis.visit_expr(expr), + ExprKind::ObsoleteInPlace(a, b) => { + vis.visit_expr(a); + vis.visit_expr(b); + } + ExprKind::Array(exprs) => visit_exprs(exprs, vis), + ExprKind::Repeat(expr, count) => { + vis.visit_expr(expr); + vis.visit_anon_const(count); + } + ExprKind::Tup(exprs) => visit_exprs(exprs, vis), + ExprKind::Call(f, args) => { + vis.visit_expr(f); + visit_exprs(args, vis); + } + ExprKind::MethodCall(PathSegment { ident, id, args }, exprs) => { + vis.visit_ident(ident); + vis.visit_id(id); + visit_opt(args, |args| vis.visit_generic_args(args)); + visit_exprs(exprs, vis); + } + ExprKind::Binary(_binop, lhs, rhs) => { + vis.visit_expr(lhs); + vis.visit_expr(rhs); + } + ExprKind::Unary(_unop, ohs) => vis.visit_expr(ohs), + ExprKind::Lit(_lit) => {} + ExprKind::Cast(expr, ty) => { + vis.visit_expr(expr); + vis.visit_ty(ty); + } + ExprKind::Type(expr, ty) => { + vis.visit_expr(expr); + vis.visit_ty(ty); + } + ExprKind::AddrOf(_m, ohs) => vis.visit_expr(ohs), + ExprKind::If(cond, tr, fl) => { + vis.visit_expr(cond); + vis.visit_block(tr); + visit_opt(fl, |fl| vis.visit_expr(fl)); + } + ExprKind::IfLet(pats, expr, tr, fl) => { + visit_vec(pats, |pat| vis.visit_pat(pat)); + vis.visit_expr(expr); + vis.visit_block(tr); + visit_opt(fl, |fl| vis.visit_expr(fl)); + } + ExprKind::While(cond, body, label) => { + vis.visit_expr(cond); + vis.visit_block(body); + visit_opt(label, |label| vis.visit_label(label)); + } + ExprKind::WhileLet(pats, expr, body, label) => { + visit_vec(pats, |pat| vis.visit_pat(pat)); + vis.visit_expr(expr); + vis.visit_block(body); + visit_opt(label, 
|label| vis.visit_label(label)); + } + ExprKind::ForLoop(pat, iter, body, label) => { + vis.visit_pat(pat); + vis.visit_expr(iter); + vis.visit_block(body); + visit_opt(label, |label| vis.visit_label(label)); + } + ExprKind::Loop(body, label) => { + vis.visit_block(body); + visit_opt(label, |label| vis.visit_label(label)); + } + ExprKind::Match(expr, arms) => { + vis.visit_expr(expr); + visit_vec(arms, |arm| vis.visit_arm(arm)); + } + ExprKind::Closure(_capture_by, asyncness, _movability, decl, body, span) => { + vis.visit_asyncness(asyncness); + vis.visit_fn_decl(decl); + vis.visit_expr(body); + vis.visit_span(span); + } + ExprKind::Block(blk, label) => { + vis.visit_block(blk); + visit_opt(label, |label| vis.visit_label(label)); + } + ExprKind::Async(_capture_by, node_id, body) => { + vis.visit_id(node_id); + vis.visit_block(body); + } + ExprKind::Assign(el, er) => { + vis.visit_expr(el); + vis.visit_expr(er); + } + ExprKind::AssignOp(_op, el, er) => { + vis.visit_expr(el); + vis.visit_expr(er); + } + ExprKind::Field(el, ident) => { + vis.visit_expr(el); + vis.visit_ident(ident); + } + ExprKind::Index(el, er) => { + vis.visit_expr(el); + vis.visit_expr(er); + } + ExprKind::Range(e1, e2, _lim) => { + visit_opt(e1, |e1| vis.visit_expr(e1)); + visit_opt(e2, |e2| vis.visit_expr(e2)); + } + ExprKind::Path(qself, path) => { + vis.visit_qself(qself); + vis.visit_path(path); + } + ExprKind::Break(label, expr) => { + visit_opt(label, |label| vis.visit_label(label)); + visit_opt(expr, |expr| vis.visit_expr(expr)); + } + ExprKind::Continue(label) => { + visit_opt(label, |label| vis.visit_label(label)); + } + ExprKind::Ret(expr) => { + visit_opt(expr, |expr| vis.visit_expr(expr)); + } + ExprKind::InlineAsm(asm) => { + let InlineAsm { asm: _, asm_str_style: _, outputs, inputs, clobbers: _, volatile: _, + alignstack: _, dialect: _, ctxt: _ } = asm.deref_mut(); + for out in outputs { + let InlineAsmOutput { constraint: _, expr, is_rw: _, is_indirect: _ } = out; + 
vis.visit_expr(expr); } - ExprKind::Yield(ex) => ExprKind::Yield(ex.map(|x| folder.fold_expr(x))), - ExprKind::Try(ex) => ExprKind::Try(folder.fold_expr(ex)), - ExprKind::TryBlock(body) => ExprKind::TryBlock(folder.fold_block(body)), - ExprKind::Err => ExprKind::Err, + visit_vec(inputs, |(_c, expr)| vis.visit_expr(expr)); + } + ExprKind::Mac(mac) => vis.visit_mac(mac), + ExprKind::Struct(path, fields, expr) => { + vis.visit_path(path); + visit_vec(fields, |field| vis.visit_field(field)); + visit_opt(expr, |expr| vis.visit_expr(expr)); }, - id: folder.new_id(id), - span: folder.new_span(span), - attrs: fold_attrs(attrs.into(), folder).into(), + ExprKind::Paren(expr) => { + vis.visit_expr(expr); + + // Nodes that are equal modulo `Paren` sugar no-ops should have the same ids. + *id = expr.id; + vis.visit_span(span); + visit_thin_attrs(attrs, vis); + return; + } + ExprKind::Yield(expr) => { + visit_opt(expr, |expr| vis.visit_expr(expr)); + } + ExprKind::Try(expr) => vis.visit_expr(expr), + ExprKind::TryBlock(body) => vis.visit_block(body), + ExprKind::Err => {} } + vis.visit_id(id); + vis.visit_span(span); + visit_thin_attrs(attrs, vis); } -pub fn noop_fold_opt_expr(e: P, folder: &mut T) -> Option> { - Some(folder.fold_expr(e)) +pub fn noop_filter_map_expr(mut e: P, vis: &mut T) -> Option> { + Some({ vis.visit_expr(&mut e); e }) } -pub fn noop_fold_stmt(Stmt {node, span, id}: Stmt, folder: &mut T) -> SmallVec<[Stmt; 1]> +pub fn noop_flat_map_stmt(Stmt { node, mut span, mut id }: Stmt, vis: &mut T) + -> SmallVec<[Stmt; 1]> { - let id = folder.new_id(id); - let span = folder.new_span(span); - noop_fold_stmt_kind(node, folder).into_iter().map(|node| { - Stmt { id: id, node: node, span: span } + vis.visit_id(&mut id); + vis.visit_span(&mut span); + noop_flat_map_stmt_kind(node, vis).into_iter().map(|node| { + Stmt { id, node, span } }).collect() } -pub fn noop_fold_stmt_kind(node: StmtKind, folder: &mut T) -> SmallVec<[StmtKind; 1]> { +pub fn noop_flat_map_stmt_kind(node: 
StmtKind, vis: &mut T) + -> SmallVec<[StmtKind; 1]> { match node { - StmtKind::Local(local) => smallvec![StmtKind::Local(folder.fold_local(local))], - StmtKind::Item(item) => folder.fold_item(item).into_iter().map(StmtKind::Item).collect(), + StmtKind::Local(mut local) => + smallvec![StmtKind::Local({ vis.visit_local(&mut local); local })], + StmtKind::Item(item) => vis.flat_map_item(item).into_iter().map(StmtKind::Item).collect(), StmtKind::Expr(expr) => { - folder.fold_opt_expr(expr).into_iter().map(StmtKind::Expr).collect() + vis.filter_map_expr(expr).into_iter().map(StmtKind::Expr).collect() } StmtKind::Semi(expr) => { - folder.fold_opt_expr(expr).into_iter().map(StmtKind::Semi).collect() + vis.filter_map_expr(expr).into_iter().map(StmtKind::Semi).collect() + } + StmtKind::Mac(mut mac) => { + let (mac_, _semi, attrs) = mac.deref_mut(); + vis.visit_mac(mac_); + visit_thin_attrs(attrs, vis); + smallvec![StmtKind::Mac(mac)] } - StmtKind::Mac(mac) => smallvec![StmtKind::Mac(mac.map(|(mac, semi, attrs)| { - (folder.fold_mac(mac), semi, fold_attrs(attrs.into(), folder).into()) - }))], } } -pub fn noop_fold_vis(Spanned { node, span }: Visibility, folder: &mut T) -> Visibility { - Visibility { - node: match node { - VisibilityKind::Public => VisibilityKind::Public, - VisibilityKind::Crate(sugar) => VisibilityKind::Crate(sugar), - VisibilityKind::Restricted { path, id } => { - VisibilityKind::Restricted { - path: path.map(|path| folder.fold_path(path)), - id: folder.new_id(id), - } - } - VisibilityKind::Inherited => VisibilityKind::Inherited, - }, - span: folder.new_span(span), +pub fn noop_visit_vis(Spanned { node, span }: &mut Visibility, vis: &mut T) { + match node { + VisibilityKind::Public | VisibilityKind::Crate(_) | VisibilityKind::Inherited => {} + VisibilityKind::Restricted { path, id } => { + vis.visit_path(path); + vis.visit_id(id); + } } + vis.visit_span(span); } #[cfg(test)] @@ -1342,7 +1259,7 @@ mod tests { use ast::{self, Ident}; use 
util::parser_testing::{string_to_crate, matches_codepattern}; use print::pprust; - use fold; + use mut_visit; use with_globals; use super::*; @@ -1353,14 +1270,14 @@ mod tests { } // change every identifier to "zz" - struct ToZzIdentFolder; + struct ToZzIdentMutVisitor; - impl Folder for ToZzIdentFolder { - fn fold_ident(&mut self, _: ast::Ident) -> ast::Ident { - Ident::from_str("zz") + impl MutVisitor for ToZzIdentMutVisitor { + fn visit_ident(&mut self, ident: &mut ast::Ident) { + *ident = Ident::from_str("zz"); } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - fold::noop_fold_mac(mac, self) + fn visit_mac(&mut self, mac: &mut ast::Mac) { + mut_visit::noop_visit_mac(mac, self) } } @@ -1382,14 +1299,14 @@ mod tests { // make sure idents get transformed everywhere #[test] fn ident_transformation () { with_globals(|| { - let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate( + let mut zz_visitor = ToZzIdentMutVisitor; + let mut krate = string_to_crate( "#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string()); - let folded_crate = zz_fold.fold_crate(ast); + zz_visitor.visit_crate(&mut krate); assert_pred!( matches_codepattern, "matches_codepattern", - pprust::to_string(|s| fake_print_crate(s, &folded_crate)), + pprust::to_string(|s| fake_print_crate(s, &krate)), "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); }) } @@ -1397,16 +1314,17 @@ mod tests { // even inside macro defs.... #[test] fn ident_transformation_in_defs () { with_globals(|| { - let mut zz_fold = ToZzIdentFolder; - let ast = string_to_crate( + let mut zz_visitor = ToZzIdentMutVisitor; + let mut krate = string_to_crate( "macro_rules! 
a {(b $c:expr $(d $e:token)f+ => \ (g $(d $d $e)+))} ".to_string()); - let folded_crate = zz_fold.fold_crate(ast); + zz_visitor.visit_crate(&mut krate); assert_pred!( matches_codepattern, "matches_codepattern", - pprust::to_string(|s| fake_print_crate(s, &folded_crate)), + pprust::to_string(|s| fake_print_crate(s, &krate)), "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string()); }) } } + diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index b2a3ae7f9d9..dacb0d811ce 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -133,7 +133,7 @@ pub mod util { pub mod parser; #[cfg(test)] pub mod parser_testing; - pub mod move_map; + pub mod map_in_place; } pub mod json; @@ -151,7 +151,7 @@ pub mod source_map; pub mod config; pub mod entry; pub mod feature_gate; -pub mod fold; +#[path="fold.rs"] pub mod mut_visit; // temporary pub mod parse; pub mod ptr; pub mod show_span; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 514b2952c50..a9f3acecc81 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -7046,7 +7046,8 @@ impl<'a> Parser<'a> { sess: self.sess, features: None, // don't perform gated feature checking }; - let outer_attrs = strip_unconfigured.process_cfg_attrs(outer_attrs.to_owned()); + let mut outer_attrs = outer_attrs.to_owned(); + strip_unconfigured.process_cfg_attrs(&mut outer_attrs); (!self.cfg_mods || strip_unconfigured.in_cfg(&outer_attrs), outer_attrs) }; diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index f06e975a6d9..5181bb8f34e 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -735,7 +735,7 @@ impl fmt::Debug for LazyTokenStream { } impl LazyTokenStream { - fn new() -> Self { + pub fn new() -> Self { LazyTokenStream(Lock::new(None)) } diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index b352486e39a..12f82a01dcf 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -20,10 +20,9 
@@ use ext::base::{ExtCtxt, Resolver}; use ext::build::AstBuilder; use ext::expand::ExpansionConfig; use ext::hygiene::{self, Mark, SyntaxContext}; -use fold::Folder; +use mut_visit::{*, ExpectOne}; use feature_gate::Features; -use util::move_map::MoveMap; -use fold::{self, ExpectOne}; +use util::map_in_place::MapInPlace; use parse::{token, ParseSess}; use print::pprust; use ast::{self, Ident}; @@ -57,9 +56,9 @@ struct TestCtxt<'a> { pub fn modify_for_testing(sess: &ParseSess, resolver: &mut dyn Resolver, should_test: bool, - krate: ast::Crate, + krate: &mut ast::Crate, span_diagnostic: &errors::Handler, - features: &Features) -> ast::Crate { + features: &Features) { // Check for #[reexport_test_harness_main = "some_name"] which // creates a `use __test::main as some_name;`. This needs to be // unconditional, so that the attribute is still marked as used in @@ -75,8 +74,6 @@ pub fn modify_for_testing(sess: &ParseSess, if should_test { generate_test_harness(sess, resolver, reexport_test_harness_main, krate, span_diagnostic, features, test_runner) - } else { - krate } } @@ -88,21 +85,20 @@ struct TestHarnessGenerator<'a> { tested_submods: Vec<(Ident, Ident)>, } -impl<'a> fold::Folder for TestHarnessGenerator<'a> { - fn fold_crate(&mut self, c: ast::Crate) -> ast::Crate { - let mut folded = fold::noop_fold_crate(c, self); +impl<'a> MutVisitor for TestHarnessGenerator<'a> { + fn visit_crate(&mut self, c: &mut ast::Crate) { + noop_visit_crate(c, self); // Create a main function to run our tests let test_main = { let unresolved = mk_main(&mut self.cx); - self.cx.ext_cx.monotonic_expander().fold_item(unresolved).pop().unwrap() + self.cx.ext_cx.monotonic_expander().flat_map_item(unresolved).pop().unwrap() }; - folded.module.items.push(test_main); - folded + c.module.items.push(test_main); } - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { let ident = i.ident; if ident.name != keywords::Invalid.name() { 
self.cx.path.push(ident); @@ -123,16 +119,16 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { // We don't want to recurse into anything other than mods, since // mods or tests inside of functions will break things - if let ast::ItemKind::Mod(module) = item.node { + if let ast::ItemKind::Mod(mut module) = item.node { let tests = mem::replace(&mut self.tests, Vec::new()); let tested_submods = mem::replace(&mut self.tested_submods, Vec::new()); - let mut mod_folded = fold::noop_fold_mod(module, self); + noop_visit_mod(&mut module, self); let tests = mem::replace(&mut self.tests, tests); let tested_submods = mem::replace(&mut self.tested_submods, tested_submods); if !tests.is_empty() || !tested_submods.is_empty() { let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods); - mod_folded.items.push(it); + module.items.push(it); if !self.cx.path.is_empty() { self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym)); @@ -141,7 +137,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { self.cx.toplevel_reexport = Some(sym); } } - item.node = ast::ItemKind::Mod(mod_folded); + item.node = ast::ItemKind::Mod(module); } if ident.name != keywords::Invalid.name() { self.cx.path.pop(); @@ -149,7 +145,9 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { smallvec![P(item)] } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } + fn visit_mac(&mut self, _mac: &mut ast::Mac) { + // Do nothing. 
+ } } /// A folder used to remove any entry points (like fn main) because the harness @@ -159,20 +157,20 @@ struct EntryPointCleaner { depth: usize, } -impl fold::Folder for EntryPointCleaner { - fn fold_item(&mut self, i: P) -> SmallVec<[P; 1]> { +impl MutVisitor for EntryPointCleaner { + fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { self.depth += 1; - let folded = fold::noop_fold_item(i, self).expect_one("noop did something"); + let item = noop_flat_map_item(i, self).expect_one("noop did something"); self.depth -= 1; // Remove any #[main] or #[start] from the AST so it doesn't // clash with the one we're going to add, but mark it as // #[allow(dead_code)] to avoid printing warnings. - let folded = match entry::entry_point_type(&folded, self.depth) { + let item = match entry::entry_point_type(&item, self.depth) { EntryPointType::MainNamed | EntryPointType::MainAttr | EntryPointType::Start => - folded.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { + item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { let allow_ident = Ident::from_str("allow"); let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code")); let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident, @@ -197,13 +195,15 @@ impl fold::Folder for EntryPointCleaner { } }), EntryPointType::None | - EntryPointType::OtherMain => folded, + EntryPointType::OtherMain => item, }; - smallvec![folded] + smallvec![item] } - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } + fn visit_mac(&mut self, _mac: &mut ast::Mac) { + // Do nothing. + } } /// Creates an item (specifically a module) that "pub use"s the tests passed in. 
@@ -235,7 +235,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt, let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports")); let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent); - let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item { + let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item { ident: sym, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, @@ -252,13 +252,13 @@ fn mk_reexport_mod(cx: &mut TestCtxt, fn generate_test_harness(sess: &ParseSess, resolver: &mut dyn Resolver, reexport_test_harness_main: Option, - krate: ast::Crate, + krate: &mut ast::Crate, sd: &errors::Handler, features: &Features, - test_runner: Option) -> ast::Crate { + test_runner: Option) { // Remove the entry points let mut cleaner = EntryPointCleaner { depth: 0 }; - let krate = cleaner.fold_crate(krate); + cleaner.visit_crate(krate); let mark = Mark::fresh(Mark::root()); @@ -293,7 +293,7 @@ fn generate_test_harness(sess: &ParseSess, cx, tests: Vec::new(), tested_submods: Vec::new(), - }.fold_crate(krate) + }.visit_crate(krate); } /// Craft a span that will be ignored by the stability lint's diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index f5d2d6f18ee..ff5978a7ee5 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -147,7 +147,7 @@ impl TokenTree { /// empty stream is represented with `None`; it may be represented as a `Some` /// around an empty `Vec`. 
#[derive(Clone, Debug)] -pub struct TokenStream(Option>>); +pub struct TokenStream(pub Option>>); pub type TreeAndJoint = (TokenTree, IsJoint); diff --git a/src/libsyntax/util/map_in_place.rs b/src/libsyntax/util/map_in_place.rs new file mode 100644 index 00000000000..5724b540a0d --- /dev/null +++ b/src/libsyntax/util/map_in_place.rs @@ -0,0 +1,102 @@ +use std::ptr; +use smallvec::{Array, SmallVec}; + +pub trait MapInPlace: Sized { + fn map_in_place(&mut self, mut f: F) where F: FnMut(T) -> T { + self.flat_map_in_place(|e| Some(f(e))) + } + + fn flat_map_in_place(&mut self, f: F) + where F: FnMut(T) -> I, + I: IntoIterator; +} + +impl MapInPlace for Vec { + fn flat_map_in_place(&mut self, mut f: F) + where F: FnMut(T) -> I, + I: IntoIterator + { + let mut read_i = 0; + let mut write_i = 0; + unsafe { + let mut old_len = self.len(); + self.set_len(0); // make sure we just leak elements in case of panic + + while read_i < old_len { + // move the read_i'th item out of the vector and map it + // to an iterator + let e = ptr::read(self.get_unchecked(read_i)); + let iter = f(e).into_iter(); + read_i += 1; + + for e in iter { + if write_i < read_i { + ptr::write(self.get_unchecked_mut(write_i), e); + write_i += 1; + } else { + // If this is reached we ran out of space + // in the middle of the vector. + // However, the vector is in a valid state here, + // so we just do a somewhat inefficient insert. + self.set_len(old_len); + self.insert(write_i, e); + + old_len = self.len(); + self.set_len(0); + + read_i += 1; + write_i += 1; + } + } + } + + // write_i tracks the number of actually written new items. 
+ self.set_len(write_i); + } + } +} + +impl> MapInPlace for SmallVec { + fn flat_map_in_place(&mut self, mut f: F) + where F: FnMut(T) -> I, + I: IntoIterator + { + let mut read_i = 0; + let mut write_i = 0; + unsafe { + let mut old_len = self.len(); + self.set_len(0); // make sure we just leak elements in case of panic + + while read_i < old_len { + // move the read_i'th item out of the vector and map it + // to an iterator + let e = ptr::read(self.get_unchecked(read_i)); + let iter = f(e).into_iter(); + read_i += 1; + + for e in iter { + if write_i < read_i { + ptr::write(self.get_unchecked_mut(write_i), e); + write_i += 1; + } else { + // If this is reached we ran out of space + // in the middle of the vector. + // However, the vector is in a valid state here, + // so we just do a somewhat inefficient insert. + self.set_len(old_len); + self.insert(write_i, e); + + old_len = self.len(); + self.set_len(0); + + read_i += 1; + write_i += 1; + } + } + } + + // write_i tracks the number of actually written new items. 
+ self.set_len(write_i); + } + } +} diff --git a/src/libsyntax/util/move_map.rs b/src/libsyntax/util/move_map.rs deleted file mode 100644 index a0f9d39ce89..00000000000 --- a/src/libsyntax/util/move_map.rs +++ /dev/null @@ -1,115 +0,0 @@ -use std::ptr; -use smallvec::{Array, SmallVec}; - -pub trait MoveMap: Sized { - fn move_map(self, mut f: F) -> Self where F: FnMut(T) -> T { - self.move_flat_map(|e| Some(f(e))) - } - - fn move_flat_map(self, f: F) -> Self - where F: FnMut(T) -> I, - I: IntoIterator; -} - -impl MoveMap for Vec { - fn move_flat_map(mut self, mut f: F) -> Self - where F: FnMut(T) -> I, - I: IntoIterator - { - let mut read_i = 0; - let mut write_i = 0; - unsafe { - let mut old_len = self.len(); - self.set_len(0); // make sure we just leak elements in case of panic - - while read_i < old_len { - // move the read_i'th item out of the vector and map it - // to an iterator - let e = ptr::read(self.get_unchecked(read_i)); - let iter = f(e).into_iter(); - read_i += 1; - - for e in iter { - if write_i < read_i { - ptr::write(self.get_unchecked_mut(write_i), e); - write_i += 1; - } else { - // If this is reached we ran out of space - // in the middle of the vector. - // However, the vector is in a valid state here, - // so we just do a somewhat inefficient insert. - self.set_len(old_len); - self.insert(write_i, e); - - old_len = self.len(); - self.set_len(0); - - read_i += 1; - write_i += 1; - } - } - } - - // write_i tracks the number of actually written new items. 
- self.set_len(write_i); - } - - self - } -} - -impl MoveMap for ::ptr::P<[T]> { - fn move_flat_map(self, f: F) -> Self - where F: FnMut(T) -> I, - I: IntoIterator - { - ::ptr::P::from_vec(self.into_vec().move_flat_map(f)) - } -} - -impl> MoveMap for SmallVec { - fn move_flat_map(mut self, mut f: F) -> Self - where F: FnMut(T) -> I, - I: IntoIterator - { - let mut read_i = 0; - let mut write_i = 0; - unsafe { - let mut old_len = self.len(); - self.set_len(0); // make sure we just leak elements in case of panic - - while read_i < old_len { - // move the read_i'th item out of the vector and map it - // to an iterator - let e = ptr::read(self.get_unchecked(read_i)); - let iter = f(e).into_iter(); - read_i += 1; - - for e in iter { - if write_i < read_i { - ptr::write(self.get_unchecked_mut(write_i), e); - write_i += 1; - } else { - // If this is reached we ran out of space - // in the middle of the vector. - // However, the vector is in a valid state here, - // so we just do a somewhat inefficient insert. - self.set_len(old_len); - self.insert(write_i, e); - - old_len = self.len(); - self.set_len(0); - - read_i += 1; - write_i += 1; - } - } - } - - // write_i tracks the number of actually written new items. 
- self.set_len(write_i); - } - - self - } -} diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 22643db5016..ec2c3113fab 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -189,7 +189,7 @@ use syntax::attr; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; use syntax::source_map::{self, respan}; -use syntax::util::move_map::MoveMap; +use syntax::util::map_in_place::MapInPlace; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; use syntax::parse::ParseSess; @@ -1184,7 +1184,7 @@ impl<'a> MethodDef<'a> { enum_def: &'b EnumDef, type_attrs: &[ast::Attribute], type_ident: Ident, - self_args: Vec>, + mut self_args: Vec>, nonself_args: &[P]) -> P { let sp = trait_.span; @@ -1417,8 +1417,8 @@ impl<'a> MethodDef<'a> { // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. - let borrowed_self_args = self_args.move_map(|self_arg| cx.expr_addr_of(sp, self_arg)); - let match_arg = cx.expr(sp, ast::ExprKind::Tup(borrowed_self_args)); + self_args.map_in_place(|self_arg| cx.expr_addr_of(sp, self_arg)); + let match_arg = cx.expr(sp, ast::ExprKind::Tup(self_args)); // Lastly we create an expression which branches on all discriminants being equal // if discriminant_test { @@ -1494,8 +1494,8 @@ impl<'a> MethodDef<'a> { // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. 
- let borrowed_self_args = self_args.move_map(|self_arg| cx.expr_addr_of(sp, self_arg)); - let match_arg = cx.expr(sp, ast::ExprKind::Tup(borrowed_self_args)); + self_args.map_in_place(|self_arg| cx.expr_addr_of(sp, self_arg)); + let match_arg = cx.expr(sp, ast::ExprKind::Tup(self_args)); cx.expr_match(sp, match_arg, match_arms) } } diff --git a/src/libsyntax_ext/proc_macro_decls.rs b/src/libsyntax_ext/proc_macro_decls.rs index 46c502965ee..663fb12242c 100644 --- a/src/libsyntax_ext/proc_macro_decls.rs +++ b/src/libsyntax_ext/proc_macro_decls.rs @@ -9,7 +9,7 @@ use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; use syntax::ext::hygiene::Mark; -use syntax::fold::Folder; +use syntax::mut_visit::MutVisitor; use syntax::parse::ParseSess; use syntax::ptr::P; use syntax::symbol::Symbol; @@ -412,5 +412,5 @@ fn mk_decls( i }); - cx.monotonic_expander().fold_item(module).pop().unwrap() + cx.monotonic_expander().flat_map_item(module).pop().unwrap() } diff --git a/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs b/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs index ce3b03efd26..ee4ecde44f2 100644 --- a/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs +++ b/src/test/run-pass-fulldeps/pprust-expr-roundtrip.rs @@ -27,7 +27,7 @@ use rustc_data_structures::thin_vec::ThinVec; use syntax::ast::*; use syntax::source_map::{Spanned, DUMMY_SP, FileName}; use syntax::source_map::FilePathMapping; -use syntax::fold::{self, Folder}; +use syntax::mut_visit::{self, MutVisitor, visit_clobber}; use syntax::parse::{self, ParseSess}; use syntax::print::pprust; use syntax::ptr::P; @@ -157,32 +157,34 @@ fn iter_exprs(depth: usize, f: &mut FnMut(P)) { // Folders for manipulating the placement of `Paren` nodes. See below for why this is needed. -/// Folder that removes all `ExprKind::Paren` nodes. +/// MutVisitor that removes all `ExprKind::Paren` nodes. 
struct RemoveParens; -impl Folder for RemoveParens { - fn fold_expr(&mut self, e: P) -> P { - let e = match e.node { - ExprKind::Paren(ref inner) => inner.clone(), - _ => e.clone(), +impl MutVisitor for RemoveParens { + fn visit_expr(&mut self, e: &mut P) { + match e.node.clone() { + ExprKind::Paren(inner) => *e = inner, + _ => {} }; - e.map(|e| fold::noop_fold_expr(e, self)) + mut_visit::noop_visit_expr(e, self); } } -/// Folder that inserts `ExprKind::Paren` nodes around every `Expr`. +/// MutVisitor that inserts `ExprKind::Paren` nodes around every `Expr`. struct AddParens; -impl Folder for AddParens { - fn fold_expr(&mut self, e: P) -> P { - let e = e.map(|e| fold::noop_fold_expr(e, self)); - P(Expr { - id: DUMMY_NODE_ID, - node: ExprKind::Paren(e), - span: DUMMY_SP, - attrs: ThinVec::new(), - }) +impl MutVisitor for AddParens { + fn visit_expr(&mut self, e: &mut P) { + mut_visit::noop_visit_expr(e, self); + visit_clobber(e, |e| { + P(Expr { + id: DUMMY_NODE_ID, + node: ExprKind::Paren(e), + span: DUMMY_SP, + attrs: ThinVec::new(), + }) + }); } } @@ -193,13 +195,13 @@ fn main() { fn run() { let ps = ParseSess::new(FilePathMapping::empty()); - iter_exprs(2, &mut |e| { + iter_exprs(2, &mut |mut e| { // If the pretty printer is correct, then `parse(print(e))` should be identical to `e`, // modulo placement of `Paren` nodes. let printed = pprust::expr_to_string(&e); println!("printed: {}", printed); - let parsed = parse_expr(&ps, &printed); + let mut parsed = parse_expr(&ps, &printed); // We want to know if `parsed` is structurally identical to `e`, ignoring trivial // differences like placement of `Paren`s or the exact ranges of node spans. @@ -207,10 +209,12 @@ fn run() { // everywhere we can, then pretty-print. This should give an unambiguous representation of // each `Expr`, and it bypasses nearly all of the parenthesization logic, so we aren't // relying on the correctness of the very thing we're testing. 
- let e1 = AddParens.fold_expr(RemoveParens.fold_expr(e)); - let text1 = pprust::expr_to_string(&e1); - let e2 = AddParens.fold_expr(RemoveParens.fold_expr(parsed)); - let text2 = pprust::expr_to_string(&e2); + RemoveParens.visit_expr(&mut e); + AddParens.visit_expr(&mut e); + let text1 = pprust::expr_to_string(&e); + RemoveParens.visit_expr(&mut parsed); + AddParens.visit_expr(&mut parsed); + let text2 = pprust::expr_to_string(&parsed); assert!(text1 == text2, "exprs are not equal:\n e = {:?}\n parsed = {:?}", text1, text2); diff --git a/src/test/ui/issues/issue-49934.rs b/src/test/ui/issues/issue-49934.rs index 59ca6cc292d..ad410f30c04 100644 --- a/src/test/ui/issues/issue-49934.rs +++ b/src/test/ui/issues/issue-49934.rs @@ -30,12 +30,12 @@ fn main() { #[derive(Debug)] //~ WARN unused attribute let _ = "Hello, world!"; - // fold_expr + // visit_expr let _ = #[derive(Debug)] "Hello, world!"; //~^ WARN unused attribute let _ = [ - // fold_opt_expr + // filter_map_expr #[derive(Debug)] //~ WARN unused attribute "Hello, world!" ]; -- cgit 1.4.1-3-g733a5 From 7451cd8dc0b20c012fad1886b321c3dee2052577 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Sun, 27 Jan 2019 21:04:50 -0800 Subject: Deduplicate mismatched delimiter errors Delay unmatched delimiter errors until after the parser has run to deduplicate them when parsing and attempt recovering intelligently. 
--- src/librustc_errors/emitter.rs | 4 +- src/librustc_metadata/cstore_impl.rs | 8 +- src/libsyntax/parse/lexer/mod.rs | 11 + src/libsyntax/parse/lexer/tokentrees.rs | 30 +-- src/libsyntax/parse/mod.rs | 57 +++-- src/libsyntax/parse/parser.rs | 261 ++++++++++++++++----- src/libsyntax/parse/token.rs | 30 ++- src/libsyntax_ext/proc_macro_server.rs | 2 +- src/test/ui/parser-recovery-2.stderr | 12 +- src/test/ui/parser/issue-10636-2.rs | 2 +- src/test/ui/parser/issue-10636-2.stderr | 20 +- src/test/ui/parser/issue-2354.rs | 3 +- src/test/ui/parser/issue-2354.stderr | 7 +- .../macro-mismatched-delim-paren-brace.stderr | 12 +- src/test/ui/resolve/token-error-correct-3.rs | 2 +- src/test/ui/resolve/token-error-correct-3.stderr | 20 +- src/test/ui/resolve/token-error-correct.stderr | 12 +- 17 files changed, 335 insertions(+), 158 deletions(-) (limited to 'src/libsyntax_ext') diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 25d09a33c15..f04bd7b8f02 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -674,8 +674,8 @@ impl EmitterWriter { // | | something about `foo` // | something about `fn foo()` annotations_position.sort_by(|a, b| { - // Decreasing order - a.1.len().cmp(&b.1.len()).reverse() + // Decreasing order. When `a` and `b` are the same length, prefer `Primary`. + (a.1.len(), !a.1.is_primary).cmp(&(b.1.len(), !b.1.is_primary)).reverse() }); // Write the underlines. 
diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index e61229db86d..ecbc900215f 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -436,7 +436,13 @@ impl cstore::CStore { let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body); let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION); - let body = source_file_to_stream(&sess.parse_sess, source_file, None); + let (body, errors) = source_file_to_stream(&sess.parse_sess, source_file, None); + for err in errors { + sess.struct_span_err( + err.found_span, + "unclosed delimiter cstore", + ).emit(); + } // Mark the attrs as used let attrs = data.get_item_attrs(id.index, sess); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 2e3233c8ed8..d3fc1c03634 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -33,6 +33,15 @@ impl Default for TokenAndSpan { } } +#[derive(Clone, Debug)] +pub struct UnmatchedBrace { + pub expected_delim: token::DelimToken, + pub found_delim: token::DelimToken, + pub found_span: Span, + pub unclosed_span: Option, + pub candidate_span: Option, +} + pub struct StringReader<'a> { pub sess: &'a ParseSess, /// The absolute offset within the source_map of the next character to read @@ -58,6 +67,7 @@ pub struct StringReader<'a> { span_src_raw: Span, /// Stack of open delimiters and their spans. Used for error message. open_braces: Vec<(token::DelimToken, Span)>, + crate unmatched_braces: Vec, /// The type and spans for all braces /// /// Used only for error recovery when arriving to EOF with mismatched braces. 
@@ -222,6 +232,7 @@ impl<'a> StringReader<'a> { span: syntax_pos::DUMMY_SP, span_src_raw: syntax_pos::DUMMY_SP, open_braces: Vec::new(), + unmatched_braces: Vec::new(), matching_delim_spans: Vec::new(), override_span, last_unclosed_found_span: None, diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 7699d9eab22..0db36c84cdf 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -1,5 +1,5 @@ use crate::print::pprust::token_to_string; -use crate::parse::lexer::StringReader; +use crate::parse::lexer::{StringReader, UnmatchedBrace}; use crate::parse::{token, PResult}; use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint}; @@ -101,38 +101,38 @@ impl<'a> StringReader<'a> { } // Incorrect delimiter. token::CloseDelim(other) => { - let token_str = token_to_string(&self.token); + let mut unclosed_delimiter = None; + let mut candidate = None; if self.last_unclosed_found_span != Some(self.span) { // do not complain about the same unclosed delimiter multiple times self.last_unclosed_found_span = Some(self.span); - let msg = format!("incorrect close delimiter: `{}`", token_str); - let mut err = self.sess.span_diagnostic.struct_span_err( - self.span, - &msg, - ); - err.span_label(self.span, "incorrect close delimiter"); // This is a conservative error: only report the last unclosed // delimiter. The previous unclosed delimiters could actually be // closed! The parser just hasn't gotten to them yet. 
if let Some(&(_, sp)) = self.open_braces.last() { - err.span_label(sp, "un-closed delimiter"); + unclosed_delimiter = Some(sp); }; if let Some(current_padding) = sm.span_to_margin(self.span) { for (brace, brace_span) in &self.open_braces { if let Some(padding) = sm.span_to_margin(*brace_span) { // high likelihood of these two corresponding if current_padding == padding && brace == &other { - err.span_label( - *brace_span, - "close delimiter possibly meant for this", - ); + candidate = Some(*brace_span); } } } } - err.emit(); + let (tok, _) = self.open_braces.pop().unwrap(); + self.unmatched_braces.push(UnmatchedBrace { + expected_delim: tok, + found_delim: other, + found_span: self.span, + unclosed_span: unclosed_delimiter, + candidate_span: candidate, + }); + } else { + self.open_braces.pop(); } - self.open_braces.pop().unwrap(); // If the incorrect delimiter matches an earlier opening // delimiter, then don't consume it (it can be used to diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index c723d591f2f..52c7e774ab6 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -9,6 +9,7 @@ use crate::parse::parser::Parser; use crate::symbol::Symbol; use crate::tokenstream::{TokenStream, TokenTree}; use crate::diagnostics::plugin::ErrorMap; +use crate::print::pprust::token_to_string; use rustc_data_structures::sync::{Lrc, Lock}; use syntax_pos::{Span, SourceFile, FileName, MultiSpan}; @@ -136,15 +137,17 @@ pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: & new_parser_from_source_str(sess, name, source).parse_inner_attributes() } -pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess, - override_span: Option) - -> TokenStream { +pub fn parse_stream_from_source_str( + name: FileName, + source: String, + sess: &ParseSess, + override_span: Option, +) -> (TokenStream, Vec) { source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span) } /// 
Create a new parser from a source string -pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) - -> Parser<'_> { +pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> { panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source)) } @@ -195,12 +198,14 @@ fn source_file_to_parser(sess: &ParseSess, source_file: Lrc) -> Pars /// Given a source_file and config, return a parser. Returns any buffered errors from lexing the /// initial token stream. -fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc) - -> Result, Vec> -{ +fn maybe_source_file_to_parser( + sess: &ParseSess, + source_file: Lrc, +) -> Result, Vec> { let end_pos = source_file.end_pos; - let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?); - + let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?; + let mut parser = stream_to_parser(sess, stream); + parser.unclosed_delims = unclosed_delims; if parser.token == token::Eof && parser.span.is_dummy() { parser.span = Span::new(end_pos, end_pos, parser.span.ctxt()); } @@ -247,25 +252,43 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option) } /// Given a source_file, produce a sequence of token-trees -pub fn source_file_to_stream(sess: &ParseSess, - source_file: Lrc, - override_span: Option) -> TokenStream { +pub fn source_file_to_stream( + sess: &ParseSess, + source_file: Lrc, + override_span: Option, +) -> (TokenStream, Vec) { panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span)) } /// Given a source file, produce a sequence of token-trees. Returns any buffered errors from /// parsing the token tream. 
-pub fn maybe_file_to_stream(sess: &ParseSess, - source_file: Lrc, - override_span: Option) -> Result> { +pub fn maybe_file_to_stream( + sess: &ParseSess, + source_file: Lrc, + override_span: Option, +) -> Result<(TokenStream, Vec), Vec> { let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?; srdr.real_token(); match srdr.parse_all_token_trees() { - Ok(stream) => Ok(stream), + Ok(stream) => Ok((stream, srdr.unmatched_braces)), Err(err) => { let mut buffer = Vec::with_capacity(1); err.buffer(&mut buffer); + for unmatched in srdr.unmatched_braces { + let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( + "incorrect close delimiter: `{}`", + token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + )); + db.span_label(unmatched.found_span, "incorrect close delimiter"); + if let Some(sp) = unmatched.candidate_span { + db.span_label(sp, "close delimiter possibly meant for this"); + } + if let Some(sp) = unmatched.unclosed_span { + db.span_label(sp, "un-closed delimiter"); + } + db.buffer(&mut buffer); + } Err(buffer) } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index cacdab980fa..2e605ab6544 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -35,7 +35,7 @@ use crate::ext::base::DummyResult; use crate::source_map::{self, SourceMap, Spanned, respan}; use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId}; use crate::parse::{self, SeqSep, classify, token}; -use crate::parse::lexer::TokenAndSpan; +use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace}; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use crate::parse::token::DelimToken; use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; @@ -251,6 +251,8 @@ pub struct Parser<'a> { /// /// See the comments in the `parse_path_segment` function for more details. 
crate unmatched_angle_bracket_count: u32, + crate max_angle_bracket_count: u32, + crate unclosed_delims: Vec, } @@ -573,6 +575,8 @@ impl<'a> Parser<'a> { desugar_doc_comments, cfg_mods: true, unmatched_angle_bracket_count: 0, + max_angle_bracket_count: 0, + unclosed_delims: Vec::new(), }; let tok = parser.next_tok(); @@ -642,11 +646,11 @@ impl<'a> Parser<'a> { /// Expect and consume the token t. Signal an error if /// the next token is not t. - pub fn expect(&mut self, t: &token::Token) -> PResult<'a, ()> { + pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); - Ok(()) + Ok(false) } else { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_descr(); @@ -661,6 +665,12 @@ impl<'a> Parser<'a> { self.sess.source_map().next_point(self.prev_span) }; let label_exp = format!("expected `{}`", token_str); + match self.recover_closing_delimiter(&[t.clone()], err) { + Err(e) => err = e, + Ok(recovered) => { + return Ok(recovered); + } + } let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line == b.line => { @@ -680,12 +690,62 @@ impl<'a> Parser<'a> { } } + fn recover_closing_delimiter( + &mut self, + tokens: &[token::Token], + mut err: DiagnosticBuilder<'a>, + ) -> PResult<'a, bool> { + let mut pos = None; + // we want to use the last closing delim that would apply + for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() { + if tokens.contains(&token::CloseDelim(unmatched.expected_delim)) + && Some(self.span) > unmatched.unclosed_span + { + pos = Some(i); + } + } + match pos { + Some(pos) => { + // Recover and assume that the detected unclosed delimiter was meant for + // this location. Emit the diagnostic and act as if the delimiter was + // present for the parser's sake. + + // Don't attempt to recover from this unclosed delimiter more than once. 
+ let unmatched = self.unclosed_delims.remove(pos); + let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim)); + + // We want to suggest the inclusion of the closing delimiter where it makes + // the most sense, which is immediately after the last token: + // + // {foo(bar {}} + // - ^ help: `)` may belong here + // | + // in order to close this... + if let Some(sp) = unmatched.unclosed_span { + err.span_label(sp, "in order to close this..."); + } + err.span_suggestion_short_with_applicability( + self.sess.source_map().next_point(self.prev_span), + &format!("{} may belong here", delim.to_string()), + delim.to_string(), + Applicability::MaybeIncorrect, + ); + err.emit(); + // self.expected_tokens.clear(); // reduce errors + Ok(true) + } + _ => Err(err), + } + } + /// Expect next token to be edible or inedible token. If edible, /// then consume it; if inedible, then return without consuming /// anything. Signal a fatal error if next token is unexpected. - pub fn expect_one_of(&mut self, - edible: &[token::Token], - inedible: &[token::Token]) -> PResult<'a, ()>{ + pub fn expect_one_of( + &mut self, + edible: &[token::Token], + inedible: &[token::Token], + ) -> PResult<'a, bool /* recovered */> { fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = tokens.iter(); // This might be a sign we need a connect method on Iterator. 
@@ -705,10 +765,10 @@ impl<'a> Parser<'a> { } if edible.contains(&self.token) { self.bump(); - Ok(()) + Ok(false) } else if inedible.contains(&self.token) { // leave it in the input - Ok(()) + Ok(false) } else { let mut expected = edible.iter() .map(|x| TokenType::Token(x.clone())) @@ -759,6 +819,15 @@ impl<'a> Parser<'a> { } else { label_sp }; + match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt { + TokenType::Token(t) => Some(t.clone()), + _ => None, + }).collect::>(), err) { + Err(e) => err = e, + Ok(recovered) => { + return Ok(recovered); + } + } let cm = self.sess.source_map(); match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { @@ -1053,6 +1122,7 @@ impl<'a> Parser<'a> { if ate { // See doc comment for `unmatched_angle_bracket_count`. self.unmatched_angle_bracket_count += 1; + self.max_angle_bracket_count += 1; debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count); } @@ -1093,14 +1163,30 @@ impl<'a> Parser<'a> { }; match ate { - Some(x) => { + Some(_) => { // See doc comment for `unmatched_angle_bracket_count`. 
self.unmatched_angle_bracket_count -= 1; debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count); - Ok(x) + Ok(()) }, - None => self.unexpected(), + None => { + match ( + &self.token, + self.unmatched_angle_bracket_count, + self.max_angle_bracket_count > 1, + ) { + // (token::OpenDelim(_), 1, true) | (token::Semi, 1, true) => { + // self.struct_span_err( + // self.span, + // &format!("expected `>`, found `{}`", self.this_token_to_string()), + // // ).span_suggestion_short_with_applicability( + // ).emit(); + // Ok(()) + // } + _ => self.unexpected(), + } + } } } @@ -1127,19 +1213,22 @@ impl<'a> Parser<'a> { -> PResult<'a, Vec> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { - let val = self.parse_seq_to_before_end(ket, sep, f)?; - self.bump(); + let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; + if !recovered { + self.bump(); + } Ok(val) } /// Parse a sequence, not including the closing delimiter. The function /// f must consume tokens until reaching the next separator or /// closing bracket. 
- pub fn parse_seq_to_before_end(&mut self, - ket: &token::Token, - sep: SeqSep, - f: F) - -> PResult<'a, Vec> + pub fn parse_seq_to_before_end( + &mut self, + ket: &token::Token, + sep: SeqSep, + f: F, + ) -> PResult<'a, (Vec, bool)> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) @@ -1151,10 +1240,11 @@ impl<'a> Parser<'a> { sep: SeqSep, expect: TokenExpectType, mut f: F, - ) -> PResult<'a, Vec> + ) -> PResult<'a, (Vec, bool /* recovered */)> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T> { - let mut first: bool = true; + let mut first = true; + let mut recovered = false; let mut v = vec![]; while !kets.iter().any(|k| { match expect { @@ -1170,23 +1260,30 @@ impl<'a> Parser<'a> { if first { first = false; } else { - if let Err(mut e) = self.expect(t) { - // Attempt to keep parsing if it was a similar separator - if let Some(ref tokens) = t.similar_tokens() { - if tokens.contains(&self.token) { - self.bump(); - } + match self.expect(t) { + Ok(false) => {} + Ok(true) => { + recovered = true; + break; } - e.emit(); - // Attempt to keep parsing if it was an omitted separator - match f(self) { - Ok(t) => { - v.push(t); - continue; - }, - Err(mut e) => { - e.cancel(); - break; + Err(mut e) => { + // Attempt to keep parsing if it was a similar separator + if let Some(ref tokens) = t.similar_tokens() { + if tokens.contains(&self.token) { + self.bump(); + } + } + e.emit(); + // Attempt to keep parsing if it was an omitted separator + match f(self) { + Ok(t) => { + v.push(t); + continue; + }, + Err(mut e) => { + e.cancel(); + break; + } } } } @@ -1205,23 +1302,26 @@ impl<'a> Parser<'a> { v.push(t); } - Ok(v) + Ok((v, recovered)) } /// Parse a sequence, including the closing delimiter. The function /// f must consume tokens until reaching the next separator or /// closing bracket. 
- fn parse_unspanned_seq(&mut self, - bra: &token::Token, - ket: &token::Token, - sep: SeqSep, - f: F) - -> PResult<'a, Vec> where + fn parse_unspanned_seq( + &mut self, + bra: &token::Token, + ket: &token::Token, + sep: SeqSep, + f: F, + ) -> PResult<'a, Vec> where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, { self.expect(bra)?; - let result = self.parse_seq_to_before_end(ket, sep, f)?; - self.eat(ket); + let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; + if !recovered { + self.eat(ket); + } Ok(result) } @@ -2273,7 +2373,10 @@ impl<'a> Parser<'a> { // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If // it isn't, then we reset the unmatched angle bracket count as we're about to start // parsing a new path. - if style == PathStyle::Expr { self.unmatched_angle_bracket_count = 0; } + if style == PathStyle::Expr { + self.unmatched_angle_bracket_count = 0; + self.max_angle_bracket_count = 0; + } let args = if self.eat_lt() { // `<'a, T, A = U>` @@ -2285,12 +2388,14 @@ impl<'a> Parser<'a> { } else { // `(T, U) -> R` self.bump(); // `(` - let inputs = self.parse_seq_to_before_tokens( + let (inputs, recovered) = self.parse_seq_to_before_tokens( &[&token::CloseDelim(token::Paren)], SeqSep::trailing_allowed(token::Comma), TokenExpectType::Expect, |p| p.parse_ty())?; - self.bump(); // `)` + if !recovered { + self.bump(); // `)` + } let span = lo.to(self.prev_span); let output = if self.eat(&token::RArrow) { Some(self.parse_ty_common(false, false)?) 
@@ -2496,9 +2601,13 @@ impl<'a> Parser<'a> { // (e,) is a tuple with only one field, e let mut es = vec![]; let mut trailing_comma = false; + let mut recovered = false; while self.token != token::CloseDelim(token::Paren) { es.push(self.parse_expr()?); - self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?; + recovered = self.expect_one_of( + &[], + &[token::Comma, token::CloseDelim(token::Paren)], + )?; if self.eat(&token::Comma) { trailing_comma = true; } else { @@ -2506,7 +2615,9 @@ impl<'a> Parser<'a> { break; } } - self.bump(); + if !recovered { + self.bump(); + } hi = self.prev_span; ex = if es.len() == 1 && !trailing_comma { @@ -2802,7 +2913,7 @@ impl<'a> Parser<'a> { match self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]) { - Ok(()) => if let Some(f) = parsed_field.or(recovery_field) { + Ok(_) => if let Some(f) = parsed_field.or(recovery_field) { // only include the field if there's no parse error for the field name fields.push(f); } @@ -6011,7 +6122,7 @@ impl<'a> Parser<'a> { let sp = self.span; let mut variadic = false; - let args: Vec> = + let (args, recovered): (Vec>, bool) = self.parse_seq_to_before_end( &token::CloseDelim(token::Paren), SeqSep::trailing_allowed(token::Comma), @@ -6059,7 +6170,9 @@ impl<'a> Parser<'a> { } )?; - self.eat(&token::CloseDelim(token::Paren)); + if !recovered { + self.eat(&token::CloseDelim(token::Paren)); + } let args: Vec<_> = args.into_iter().filter_map(|x| x).collect(); @@ -6204,15 +6317,15 @@ impl<'a> Parser<'a> { // Parse the rest of the function parameter list. 
let sep = SeqSep::trailing_allowed(token::Comma); - let fn_inputs = if let Some(self_arg) = self_arg { + let (fn_inputs, recovered) = if let Some(self_arg) = self_arg { if self.check(&token::CloseDelim(token::Paren)) { - vec![self_arg] + (vec![self_arg], false) } else if self.eat(&token::Comma) { let mut fn_inputs = vec![self_arg]; - fn_inputs.append(&mut self.parse_seq_to_before_end( - &token::CloseDelim(token::Paren), sep, parse_arg_fn)? - ); - fn_inputs + let (mut input, recovered) = self.parse_seq_to_before_end( + &token::CloseDelim(token::Paren), sep, parse_arg_fn)?; + fn_inputs.append(&mut input); + (fn_inputs, recovered) } else { return self.unexpected(); } @@ -6220,8 +6333,10 @@ impl<'a> Parser<'a> { self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)? }; - // Parse closing paren and return type. - self.expect(&token::CloseDelim(token::Paren))?; + if !recovered { + // Parse closing paren and return type. + self.expect(&token::CloseDelim(token::Paren))?; + } Ok(P(FnDecl { inputs: fn_inputs, output: self.parse_ret_ty(true)?, @@ -6241,7 +6356,7 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), TokenExpectType::NoExpect, |p| p.parse_fn_block_arg() - )?; + )?.0; self.expect_or()?; args } @@ -8238,7 +8353,7 @@ impl<'a> Parser<'a> { // eat a matched-delimiter token tree: let (delim, tts) = self.expect_delimited_token_tree()?; if delim != MacDelimiter::Brace { - self.expect(&token::Semi)? + self.expect(&token::Semi)?; } Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim }))) @@ -8383,11 +8498,27 @@ impl<'a> Parser<'a> { /// entry point for the parser. 
pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { let lo = self.span; - Ok(ast::Crate { + let krate = Ok(ast::Crate { attrs: self.parse_inner_attributes()?, module: self.parse_mod_items(&token::Eof, lo)?, span: lo.to(self.span), - }) + }); + for unmatched in &self.unclosed_delims { + let mut err = self.struct_span_err(unmatched.found_span, &format!( + "incorrect close delimiter: `{}`", + pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + )); + err.span_label(unmatched.found_span, "incorrect close delimiter"); + if let Some(sp) = unmatched.candidate_span { + err.span_label(sp, "close delimiter possibly meant for this"); + } + if let Some(sp) = unmatched.unclosed_span { + err.span_label(sp, "un-closed delimiter"); + } + err.emit(); + } + self.unclosed_delims.clear(); + krate } pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option)> { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 3b1fa5ea01f..a0f3113a1cb 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -487,8 +487,8 @@ impl Token { /// Enables better error recovery when the wrong token is found. 
crate fn similar_tokens(&self) -> Option> { match *self { - Comma => Some(vec![Dot, Lt]), - Semi => Some(vec![Colon]), + Comma => Some(vec![Dot, Lt, Semi]), + Semi => Some(vec![Colon, Comma]), _ => None } } @@ -545,7 +545,15 @@ impl Token { // FIXME(#43081): Avoid this pretty-print + reparse hack let source = pprust::token_to_string(self); let filename = FileName::macro_expansion_source_code(&source); - parse_stream_from_source_str(filename, source, sess, Some(span)) + let (tokens, errors) = parse_stream_from_source_str( + filename, source, sess, Some(span)); + for err in errors { + sess.span_diagnostic.struct_span_err( + err.found_span, + "unclosed delimiter for_real", + ).emit(); + } + tokens }); // During early phases of the compiler the AST could get modified @@ -786,12 +794,18 @@ fn prepend_attrs(sess: &ParseSess, let source = pprust::attr_to_string(attr); let macro_filename = FileName::macro_expansion_source_code(&source); if attr.is_sugared_doc { - let stream = parse_stream_from_source_str( + let (stream, errors) = parse_stream_from_source_str( macro_filename, source, sess, Some(span), ); + for err in errors { + sess.span_diagnostic.struct_span_err( + err.found_span, + "unclosed delimiter attrs", + ).emit(); + } builder.push(stream); continue } @@ -808,12 +822,18 @@ fn prepend_attrs(sess: &ParseSess, // ... and for more complicated paths, fall back to a reparse hack that // should eventually be removed. 
} else { - let stream = parse_stream_from_source_str( + let (stream, errors) = parse_stream_from_source_str( macro_filename, source, sess, Some(span), ); + for err in errors { + sess.span_diagnostic.struct_span_err( + err.found_span, + "unclosed delimiter attrs 2", + ).emit(); + } brackets.push(stream); } diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index 7de9b9343a8..ef291e2102b 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -413,7 +413,7 @@ impl server::TokenStream for Rustc<'_> { src.to_string(), self.sess, Some(self.call_site), - ) + ).0 } fn to_string(&mut self, stream: &Self::TokenStream) -> String { stream.to_string() diff --git a/src/test/ui/parser-recovery-2.stderr b/src/test/ui/parser-recovery-2.stderr index 92d8cbc100a..76f7af38e77 100644 --- a/src/test/ui/parser-recovery-2.stderr +++ b/src/test/ui/parser-recovery-2.stderr @@ -1,3 +1,9 @@ +error: unexpected token: `;` + --> $DIR/parser-recovery-2.rs:12:15 + | +LL | let x = y.; //~ ERROR unexpected token + | ^ + error: incorrect close delimiter: `)` --> $DIR/parser-recovery-2.rs:8:5 | @@ -7,12 +13,6 @@ LL | let x = foo(); //~ ERROR cannot find function `foo` in this scope LL | ) //~ ERROR incorrect close delimiter: `)` | ^ incorrect close delimiter -error: unexpected token: `;` - --> $DIR/parser-recovery-2.rs:12:15 - | -LL | let x = y.; //~ ERROR unexpected token - | ^ - error[E0425]: cannot find function `foo` in this scope --> $DIR/parser-recovery-2.rs:7:17 | diff --git a/src/test/ui/parser/issue-10636-2.rs b/src/test/ui/parser/issue-10636-2.rs index a02fd41b349..6fb63639d5f 100644 --- a/src/test/ui/parser/issue-10636-2.rs +++ b/src/test/ui/parser/issue-10636-2.rs @@ -5,7 +5,7 @@ pub fn trace_option(option: Option) { option.map(|some| 42; //~^ ERROR: expected one of -} //~ ERROR: incorrect close delimiter +} //~^ ERROR: expected expression, found `)` fn main() {} diff --git 
a/src/test/ui/parser/issue-10636-2.stderr b/src/test/ui/parser/issue-10636-2.stderr index 9b3115cb3f4..670a116eb51 100644 --- a/src/test/ui/parser/issue-10636-2.stderr +++ b/src/test/ui/parser/issue-10636-2.stderr @@ -1,25 +1,17 @@ -error: incorrect close delimiter: `}` - --> $DIR/issue-10636-2.rs:8:1 - | -LL | pub fn trace_option(option: Option) { - | - close delimiter possibly meant for this -LL | option.map(|some| 42; - | - un-closed delimiter -... -LL | } //~ ERROR: incorrect close delimiter - | ^ incorrect close delimiter - error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;` --> $DIR/issue-10636-2.rs:5:25 | LL | option.map(|some| 42; - | ^ expected one of `)`, `,`, `.`, `?`, or an operator here + | - ^ + | | | + | | help: `)` may belong here + | in order to close this... error: expected expression, found `)` --> $DIR/issue-10636-2.rs:8:1 | -LL | } //~ ERROR: incorrect close delimiter +LL | } | ^ expected expression -error: aborting due to 3 previous errors +error: aborting due to 2 previous errors diff --git a/src/test/ui/parser/issue-2354.rs b/src/test/ui/parser/issue-2354.rs index 565f84822f7..b383bc00f91 100644 --- a/src/test/ui/parser/issue-2354.rs +++ b/src/test/ui/parser/issue-2354.rs @@ -1,4 +1,5 @@ -fn foo() { //~ NOTE un-closed delimiter +fn foo() { + //~^ NOTE un-closed delimiter match Some(10) { //~^ NOTE this delimiter might not be properly closed... Some(y) => { panic!(); } diff --git a/src/test/ui/parser/issue-2354.stderr b/src/test/ui/parser/issue-2354.stderr index 0f4cd5724ce..f1b0905d866 100644 --- a/src/test/ui/parser/issue-2354.stderr +++ b/src/test/ui/parser/issue-2354.stderr @@ -1,8 +1,9 @@ error: this file contains an un-closed delimiter - --> $DIR/issue-2354.rs:15:66 + --> $DIR/issue-2354.rs:16:66 | -LL | fn foo() { //~ NOTE un-closed delimiter +LL | fn foo() { | - un-closed delimiter +LL | //~^ NOTE un-closed delimiter LL | match Some(10) { | - this delimiter might not be properly closed... ... 
@@ -16,7 +17,7 @@ error[E0601]: `main` function not found in crate `issue_2354` | = note: the main function must be defined at the crate level but you have one or more functions named 'main' that are not defined at the crate level. Either move the definition or attach the `#[main]` attribute to override this behavior. note: here is a function named 'main' - --> $DIR/issue-2354.rs:14:1 + --> $DIR/issue-2354.rs:15:1 | LL | fn main() {} //~ NOTE here is a function named 'main' | ^^^^^^^^^^^^ diff --git a/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr b/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr index 805ba8b6baa..abb08209795 100644 --- a/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr +++ b/src/test/ui/parser/macro-mismatched-delim-paren-brace.stderr @@ -1,3 +1,9 @@ +error: unexpected close delimiter: `}` + --> $DIR/macro-mismatched-delim-paren-brace.rs:5:1 + | +LL | } //~ ERROR unexpected close delimiter: `}` + | ^ unexpected close delimiter + error: incorrect close delimiter: `}` --> $DIR/macro-mismatched-delim-paren-brace.rs:4:5 | @@ -7,11 +13,5 @@ LL | bar, "baz", 1, 2.0 LL | } //~ ERROR incorrect close delimiter | ^ incorrect close delimiter -error: unexpected close delimiter: `}` - --> $DIR/macro-mismatched-delim-paren-brace.rs:5:1 - | -LL | } //~ ERROR unexpected close delimiter: `}` - | ^ unexpected close delimiter - error: aborting due to 2 previous errors diff --git a/src/test/ui/resolve/token-error-correct-3.rs b/src/test/ui/resolve/token-error-correct-3.rs index 86cf71117a6..b1ca0bbfc57 100644 --- a/src/test/ui/resolve/token-error-correct-3.rs +++ b/src/test/ui/resolve/token-error-correct-3.rs @@ -17,7 +17,7 @@ pub mod raw { //~| expected type `()` //~| found type `std::result::Result` //~| expected one of - } else { //~ ERROR: incorrect close delimiter: `}` + } else { //~^ ERROR: expected one of //~| unexpected token Ok(false); diff --git a/src/test/ui/resolve/token-error-correct-3.stderr 
b/src/test/ui/resolve/token-error-correct-3.stderr index 2164d27a051..fcc1c34d1fc 100644 --- a/src/test/ui/resolve/token-error-correct-3.stderr +++ b/src/test/ui/resolve/token-error-correct-3.stderr @@ -1,19 +1,11 @@ -error: incorrect close delimiter: `}` - --> $DIR/token-error-correct-3.rs:20:9 - | -LL | if !is_directory(path.as_ref()) { //~ ERROR: cannot find function `is_directory` - | - close delimiter possibly meant for this -LL | callback(path.as_ref(); //~ ERROR expected one of - | - un-closed delimiter -... -LL | } else { //~ ERROR: incorrect close delimiter: `}` - | ^ incorrect close delimiter - error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;` --> $DIR/token-error-correct-3.rs:14:35 | LL | callback(path.as_ref(); //~ ERROR expected one of - | ^ expected one of `)`, `,`, `.`, `?`, or an operator here + | - ^ + | | | + | | help: `)` may belong here + | in order to close this... error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)` --> $DIR/token-error-correct-3.rs:20:9 @@ -21,7 +13,7 @@ error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)` LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types | - expected one of `.`, `;`, `?`, `}`, or an operator here ... -LL | } else { //~ ERROR: incorrect close delimiter: `}` +LL | } else { | ^ unexpected token error[E0425]: cannot find function `is_directory` in this scope @@ -41,7 +33,7 @@ LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mis = note: expected type `()` found type `std::result::Result` -error: aborting due to 5 previous errors +error: aborting due to 4 previous errors Some errors occurred: E0308, E0425. For more information about an error, try `rustc --explain E0308`. 
diff --git a/src/test/ui/resolve/token-error-correct.stderr b/src/test/ui/resolve/token-error-correct.stderr index 0a4590461b5..48a247a5898 100644 --- a/src/test/ui/resolve/token-error-correct.stderr +++ b/src/test/ui/resolve/token-error-correct.stderr @@ -1,3 +1,9 @@ +error: expected expression, found `;` + --> $DIR/token-error-correct.rs:4:13 + | +LL | foo(bar(; + | ^ expected expression + error: incorrect close delimiter: `}` --> $DIR/token-error-correct.rs:6:1 | @@ -9,11 +15,5 @@ LL | //~^ ERROR: expected expression, found `;` LL | } | ^ incorrect close delimiter -error: expected expression, found `;` - --> $DIR/token-error-correct.rs:4:13 - | -LL | foo(bar(; - | ^ expected expression - error: aborting due to 2 previous errors -- cgit 1.4.1-3-g733a5 From 99be87aac3b9af941e74b8681643e1963ce75671 Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Tue, 5 Feb 2019 01:35:25 -0800 Subject: unify error handling to single method --- src/librustc_metadata/cstore_impl.rs | 8 ++------ src/libsyntax/parse/mod.rs | 1 + src/libsyntax/parse/parser.rs | 36 +++++++++++++++++++--------------- src/libsyntax/parse/token.rs | 22 ++++----------------- src/libsyntax_ext/proc_macro_server.rs | 7 +++++-- 5 files changed, 32 insertions(+), 42 deletions(-) (limited to 'src/libsyntax_ext') diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index ecbc900215f..fd4089dfdb9 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -29,6 +29,7 @@ use syntax::attr; use syntax::source_map; use syntax::edition::Edition; use syntax::parse::source_file_to_stream; +use syntax::parse::parser::emit_unclosed_delims; use syntax::symbol::Symbol; use syntax_pos::{Span, NO_EXPANSION, FileName}; use rustc_data_structures::bit_set::BitSet; @@ -437,12 +438,7 @@ impl cstore::CStore { let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body); let local_span = Span::new(source_file.start_pos, 
source_file.end_pos, NO_EXPANSION); let (body, errors) = source_file_to_stream(&sess.parse_sess, source_file, None); - for err in errors { - sess.struct_span_err( - err.found_span, - "unclosed delimiter cstore", - ).emit(); - } + emit_unclosed_delims(&errors, &sess.diagnostic()); // Mark the attrs as used let attrs = data.get_item_attrs(id.index, sess); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 52c7e774ab6..317d6933207 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -275,6 +275,7 @@ pub fn maybe_file_to_stream( Err(err) => { let mut buffer = Vec::with_capacity(1); err.buffer(&mut buffer); + // Not using `emit_unclosed_delims` to use `db.buffer` for unmatched in srdr.unmatched_braces { let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( "incorrect close delimiter: `{}`", diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 2e605ab6544..4a932ab7bd1 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -724,7 +724,7 @@ impl<'a> Parser<'a> { if let Some(sp) = unmatched.unclosed_span { err.span_label(sp, "in order to close this..."); } - err.span_suggestion_short_with_applicability( + err.span_suggestion_short( self.sess.source_map().next_point(self.prev_span), &format!("{} may belong here", delim.to_string()), delim.to_string(), @@ -1180,7 +1180,7 @@ impl<'a> Parser<'a> { // self.struct_span_err( // self.span, // &format!("expected `>`, found `{}`", self.this_token_to_string()), - // // ).span_suggestion_short_with_applicability( + // // ).span_suggestion_short( // ).emit(); // Ok(()) // } @@ -8503,20 +8503,7 @@ impl<'a> Parser<'a> { module: self.parse_mod_items(&token::Eof, lo)?, span: lo.to(self.span), }); - for unmatched in &self.unclosed_delims { - let mut err = self.struct_span_err(unmatched.found_span, &format!( - "incorrect close delimiter: `{}`", - 
pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), - )); - err.span_label(unmatched.found_span, "incorrect close delimiter"); - if let Some(sp) = unmatched.candidate_span { - err.span_label(sp, "close delimiter possibly meant for this"); - } - if let Some(sp) = unmatched.unclosed_span { - err.span_label(sp, "un-closed delimiter"); - } - err.emit(); - } + emit_unclosed_delims(&self.unclosed_delims, self.diagnostic()); self.unclosed_delims.clear(); krate } @@ -8547,3 +8534,20 @@ impl<'a> Parser<'a> { } } } + +pub fn emit_unclosed_delims(unclosed_delims: &[UnmatchedBrace], handler: &errors::Handler) { + for unmatched in unclosed_delims { + let mut err = handler.struct_span_err(unmatched.found_span, &format!( + "incorrect close delimiter: `{}`", + pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)), + )); + err.span_label(unmatched.found_span, "incorrect close delimiter"); + if let Some(sp) = unmatched.candidate_span { + err.span_label(sp, "close delimiter possibly meant for this"); + } + if let Some(sp) = unmatched.unclosed_span { + err.span_label(sp, "un-closed delimiter"); + } + err.emit(); + } +} \ No newline at end of file diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index a0f3113a1cb..d144223d1b8 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -10,6 +10,7 @@ use crate::print::pprust; use crate::ptr::P; use crate::symbol::keywords; use crate::syntax::parse::parse_stream_from_source_str; +use crate::syntax::parse::parser::emit_unclosed_delims; use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree}; use serialize::{Decodable, Decoder, Encodable, Encoder}; @@ -547,12 +548,7 @@ impl Token { let filename = FileName::macro_expansion_source_code(&source); let (tokens, errors) = parse_stream_from_source_str( filename, source, sess, Some(span)); - for err in errors { - sess.span_diagnostic.struct_span_err( - err.found_span, - "unclosed delimiter for_real", - 
).emit(); - } + emit_unclosed_delims(&errors, &sess.span_diagnostic); tokens }); @@ -800,12 +796,7 @@ fn prepend_attrs(sess: &ParseSess, sess, Some(span), ); - for err in errors { - sess.span_diagnostic.struct_span_err( - err.found_span, - "unclosed delimiter attrs", - ).emit(); - } + emit_unclosed_delims(&errors, &sess.span_diagnostic); builder.push(stream); continue } @@ -828,12 +819,7 @@ fn prepend_attrs(sess: &ParseSess, sess, Some(span), ); - for err in errors { - sess.span_diagnostic.struct_span_err( - err.found_span, - "unclosed delimiter attrs 2", - ).emit(); - } + emit_unclosed_delims(&errors, &sess.span_diagnostic); brackets.push(stream); } diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index ef291e2102b..38d12db13ef 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -11,6 +11,7 @@ use syntax::ast; use syntax::ext::base::ExtCtxt; use syntax::parse::lexer::comments; use syntax::parse::{self, token, ParseSess}; +use syntax::parse::parser::emit_unclosed_delims; use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint}; use syntax_pos::hygiene::{SyntaxContext, Transparency}; use syntax_pos::symbol::{keywords, Symbol}; @@ -408,12 +409,14 @@ impl server::TokenStream for Rustc<'_> { stream.is_empty() } fn from_str(&mut self, src: &str) -> Self::TokenStream { - parse::parse_stream_from_source_str( + let (tokens, errors) = parse::parse_stream_from_source_str( FileName::proc_macro_source_code(src.clone()), src.to_string(), self.sess, Some(self.call_site), - ).0 + ); + emit_unclosed_delims(&errors, &self.sess.span_diagnostic); + tokens } fn to_string(&mut self, stream: &Self::TokenStream) -> String { stream.to_string() -- cgit 1.4.1-3-g733a5 From 8b886e07f52d1523421a3cf0c484a4898d13b432 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Tue, 5 Feb 2019 14:37:15 +0100 Subject: Remove images' url to make it work even without internet connection --- 
src/liballoc/lib.rs | 4 +--- src/libarena/lib.rs | 4 +--- src/libcore/lib.rs | 4 +--- src/libfmt_macros/lib.rs | 4 +--- src/libgraphviz/lib.rs | 4 +--- src/libpanic_abort/lib.rs | 4 +--- src/libpanic_unwind/lib.rs | 4 +--- src/libproc_macro/lib.rs | 4 +--- src/librustc/lib.rs | 4 +--- src/librustc_apfloat/lib.rs | 4 +--- src/librustc_borrowck/lib.rs | 4 +--- src/librustc_codegen_llvm/lib.rs | 4 +--- src/librustc_codegen_ssa/lib.rs | 4 +--- src/librustc_codegen_utils/codegen_backend.rs | 4 +--- src/librustc_codegen_utils/lib.rs | 4 +--- src/librustc_data_structures/lib.rs | 4 +--- src/librustc_driver/lib.rs | 4 +--- src/librustc_errors/lib.rs | 4 +--- src/librustc_incremental/lib.rs | 4 +--- src/librustc_lint/lib.rs | 4 +--- src/librustc_llvm/lib.rs | 4 +--- src/librustc_metadata/lib.rs | 4 +--- src/librustc_passes/lib.rs | 4 +--- src/librustc_plugin/lib.rs | 4 +--- src/librustc_privacy/lib.rs | 4 +--- src/librustc_resolve/lib.rs | 4 +--- src/librustc_save_analysis/lib.rs | 4 +--- src/librustc_target/lib.rs | 4 +--- src/librustc_typeck/lib.rs | 4 +--- src/librustdoc/lib.rs | 4 +--- src/libserialize/lib.rs | 4 +--- src/libstd/lib.rs | 4 +--- src/libsyntax/lib.rs | 4 +--- src/libsyntax_ext/lib.rs | 4 +--- src/libsyntax_pos/lib.rs | 4 +--- src/libterm/lib.rs | 4 +--- src/libtest/lib.rs | 7 +------ 37 files changed, 37 insertions(+), 114 deletions(-) (limited to 'src/libsyntax_ext') diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 80097a128a5..189ba84eeed 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -55,9 +55,7 @@ reason = "this library is unlikely to be stabilized in its current \ form or name", issue = "27783")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", issue_tracker_base_url = 
"https://github.com/rust-lang/rust/issues/", test(no_crate_inject, attr(allow(unused_variables), deny(warnings))))] #![no_std] diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 9f9ded51e37..aa522d86dcf 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -8,9 +8,7 @@ //! This crate implements `TypedArena`, a simple arena that can only hold //! objects of a single type. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", test(no_crate_inject, attr(deny(warnings))))] #![feature(alloc)] diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index 7180a813a3e..78f1c3c0dff 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -51,9 +51,7 @@ #![cfg(not(test))] #![stable(feature = "core", since = "1.6.0")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/", issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", test(no_crate_inject, attr(deny(warnings))), diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 7bfe2377cea..ea67c01dfc9 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -4,9 +4,7 @@ //! Parsing does not happen at runtime: structures of `std::fmt::rt` are //! generated instead. 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/", test(attr(deny(warnings))))] diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index f05f6e6651f..8ce0f755df0 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -271,9 +271,7 @@ //! //! * [DOT language](http://www.graphviz.org/doc/info/lang.html) -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", test(attr(allow(unused_variables), deny(warnings))))] #![deny(rust_2018_idioms)] diff --git a/src/libpanic_abort/lib.rs b/src/libpanic_abort/lib.rs index daa1998d29d..7c6f36ece3c 100644 --- a/src/libpanic_abort/lib.rs +++ b/src/libpanic_abort/lib.rs @@ -5,9 +5,7 @@ #![no_std] #![unstable(feature = "panic_abort", issue = "32837")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")] #![panic_runtime] diff --git a/src/libpanic_unwind/lib.rs b/src/libpanic_unwind/lib.rs index 98f174710d2..fa7a0916d42 100644 --- a/src/libpanic_unwind/lib.rs +++ b/src/libpanic_unwind/lib.rs @@ -14,9 +14,7 @@ #![no_std] #![unstable(feature = "panic_unwind", issue = "32837")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = 
"https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")] #![feature(allocator_api)] diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index bb6f5e234f7..2cdc5a48a53 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -9,9 +9,7 @@ #![stable(feature = "proc_macro_lib", since = "1.15.0")] #![deny(missing_docs)] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/", issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", test(no_crate_inject, attr(deny(warnings))), diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index d1951351520..be147556477 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -26,9 +26,7 @@ //! //! This API is completely unstable and subject to change. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![deny(rust_2018_idioms)] #![allow(explicit_outlives_requirements)] diff --git a/src/librustc_apfloat/lib.rs b/src/librustc_apfloat/lib.rs index 17311f0688f..f79d448edce 100644 --- a/src/librustc_apfloat/lib.rs +++ b/src/librustc_apfloat/lib.rs @@ -30,9 +30,7 @@ //! //! This API is completely unstable and subject to change. 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![forbid(unsafe_code)] #![deny(rust_2018_idioms)] diff --git a/src/librustc_borrowck/lib.rs b/src/librustc_borrowck/lib.rs index 8bdc4e1d5c1..49890330a4e 100644 --- a/src/librustc_borrowck/lib.rs +++ b/src/librustc_borrowck/lib.rs @@ -1,6 +1,4 @@ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![allow(non_camel_case_types)] diff --git a/src/librustc_codegen_llvm/lib.rs b/src/librustc_codegen_llvm/lib.rs index ab2fb67d549..ad8db25ee95 100644 --- a/src/librustc_codegen_llvm/lib.rs +++ b/src/librustc_codegen_llvm/lib.rs @@ -4,9 +4,7 @@ //! //! This API is completely unstable and subject to change. 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(box_patterns)] #![feature(box_syntax)] diff --git a/src/librustc_codegen_ssa/lib.rs b/src/librustc_codegen_ssa/lib.rs index 1accbeb2aa8..58b3f0434a6 100644 --- a/src/librustc_codegen_ssa/lib.rs +++ b/src/librustc_codegen_ssa/lib.rs @@ -1,6 +1,4 @@ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(box_patterns)] #![feature(box_syntax)] diff --git a/src/librustc_codegen_utils/codegen_backend.rs b/src/librustc_codegen_utils/codegen_backend.rs index 8981c542961..a87b02d33de 100644 --- a/src/librustc_codegen_utils/codegen_backend.rs +++ b/src/librustc_codegen_utils/codegen_backend.rs @@ -4,9 +4,7 @@ //! //! This API is completely unstable and subject to change. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![deny(warnings)] #![feature(box_syntax)] diff --git a/src/librustc_codegen_utils/lib.rs b/src/librustc_codegen_utils/lib.rs index 8e96f985401..d6ef555144d 100644 --- a/src/librustc_codegen_utils/lib.rs +++ b/src/librustc_codegen_utils/lib.rs @@ -2,9 +2,7 @@ //! //! This API is completely unstable and subject to change. 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(box_patterns)] #![feature(box_syntax)] diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index ec71f515894..a46f8aed324 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -6,9 +6,7 @@ //! //! This API is completely unstable and subject to change. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://www.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(in_band_lifetimes)] #![feature(unboxed_closures)] diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index d0dc7799c7b..189869c1155 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -4,9 +4,7 @@ //! //! This API is completely unstable and subject to change. 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(box_syntax)] #![cfg_attr(unix, feature(libc))] diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index 831415ed0bb..ea530fa1bfb 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -1,6 +1,4 @@ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(custom_attribute)] #![allow(unused_attributes)] diff --git a/src/librustc_incremental/lib.rs b/src/librustc_incremental/lib.rs index ae2e6e0b94c..f69a1cfa3a9 100644 --- a/src/librustc_incremental/lib.rs +++ b/src/librustc_incremental/lib.rs @@ -1,8 +1,6 @@ //! Support for serializing the dep-graph and reloading it. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(nll)] #![feature(specialization)] diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 6607951d2cd..fd5e68d5ae6 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -9,9 +9,7 @@ //! //! This API is completely unstable and subject to change. 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![cfg_attr(test, feature(test))] #![feature(box_patterns)] diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs index 99c5e2493ed..3fcb20a29dd 100644 --- a/src/librustc_llvm/lib.rs +++ b/src/librustc_llvm/lib.rs @@ -1,9 +1,7 @@ #![deny(rust_2018_idioms)] #![feature(static_nobundle)] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] // See librustc_cratesio_shim/Cargo.toml for a comment explaining this. #[allow(unused_extern_crates)] diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index 1a661421240..5dc736bfbd3 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -1,6 +1,4 @@ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(box_patterns)] #![feature(libc)] diff --git a/src/librustc_passes/lib.rs b/src/librustc_passes/lib.rs index 76605c58a78..625f2fcb249 100644 --- a/src/librustc_passes/lib.rs +++ b/src/librustc_passes/lib.rs @@ -4,9 +4,7 @@ //! //! This API is completely unstable and subject to change. 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(nll)] #![feature(rustc_diagnostic_macros)] diff --git a/src/librustc_plugin/lib.rs b/src/librustc_plugin/lib.rs index 9a31bddc1ed..32e003ff107 100644 --- a/src/librustc_plugin/lib.rs +++ b/src/librustc_plugin/lib.rs @@ -50,9 +50,7 @@ //! See the [`plugin` feature](../unstable-book/language-features/plugin.html) of //! the Unstable Book for more examples. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(rustc_diagnostic_macros)] diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 000c6bb275b..14a0922c477 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -1,6 +1,4 @@ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![deny(rust_2018_idioms)] diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index b166b1be02f..270f2424197 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -1,6 +1,4 @@ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(crate_visibility_modifier)] #![feature(label_break_value)] diff --git 
a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 73eb5de5c76..b9d195d5715 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -1,6 +1,4 @@ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(custom_attribute)] #![feature(nll)] #![allow(unused_attributes)] diff --git a/src/librustc_target/lib.rs b/src/librustc_target/lib.rs index 0df0027c171..8e9d0851af1 100644 --- a/src/librustc_target/lib.rs +++ b/src/librustc_target/lib.rs @@ -7,9 +7,7 @@ //! more 'stuff' here in the future. It does not have a dependency on //! LLVM. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(box_syntax)] #![feature(nll)] diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index d5e870bb28d..8d77310f3d4 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -55,9 +55,7 @@ This API is completely unstable and subject to change. 
*/ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![allow(non_camel_case_types)] diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index f4149b5f357..ddb730672d2 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -1,6 +1,4 @@ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/")] #![feature(bind_by_move_pattern_guards)] diff --git a/src/libserialize/lib.rs b/src/libserialize/lib.rs index e8d185a9cc0..fe93a2dfb29 100644 --- a/src/libserialize/lib.rs +++ b/src/libserialize/lib.rs @@ -4,9 +4,7 @@ Core encoding and decoding interfaces. */ -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/", test(attr(allow(unused_variables), deny(warnings))))] diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index 244caf28ec7..8ecba3ecd68 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -196,9 +196,7 @@ //! 
[primitive types]: ../book/ch03-02-data-types.html #![stable(feature = "rust1", since = "1.0.0")] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/", issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", test(no_crate_inject, attr(deny(warnings))), diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index c04391b34ee..878d06c0f14 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -4,9 +4,7 @@ //! //! This API is completely unstable and subject to change. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", test(attr(deny(warnings))))] #![deny(rust_2018_idioms)] diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 9308cfb3a4f..670d71fe25b 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -1,8 +1,6 @@ //! Syntax extensions in the Rust compiler. -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![deny(rust_2018_idioms)] diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 13e7307570a..70c45f7f9a7 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -4,9 +4,7 @@ //! //! This API is completely unstable and subject to change. 
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![deny(rust_2018_idioms)] diff --git a/src/libterm/lib.rs b/src/libterm/lib.rs index 115dffa5799..4d3126212dc 100644 --- a/src/libterm/lib.rs +++ b/src/libterm/lib.rs @@ -30,9 +30,7 @@ //! [win]: http://msdn.microsoft.com/en-us/library/windows/desktop/ms682010%28v=vs.85%29.aspx //! [ti]: https://en.wikipedia.org/wiki/Terminfo -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", html_playground_url = "https://play.rust-lang.org/", test(attr(deny(warnings))))] #![deny(missing_docs)] diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index cced66f4a22..ae046f6d614 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -20,12 +20,7 @@ #![deny(rust_2018_idioms)] #![crate_name = "test"] #![unstable(feature = "test", issue = "27812")] -#![doc( - html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - test(attr(deny(warnings))) -)] +#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", test(attr(deny(warnings))))] #![feature(asm)] #![feature(fnbox)] #![cfg_attr(any(unix, target_os = "cloudabi"), feature(libc, rustc_private))] -- cgit 1.4.1-3-g733a5 From d7695abb7667d2e8b7a010282ad5f44abbc58a58 Mon Sep 17 00:00:00 2001 From: varkor Date: Tue, 5 Feb 2019 16:50:00 +0100 Subject: Support const generics in derive Co-Authored-By: Gabriel Smith --- src/libsyntax/ext/build.rs | 20 +++++++++++++++++++- src/libsyntax_ext/deriving/generic/mod.rs | 4 
++++ src/libsyntax_ext/deriving/generic/ty.rs | 12 +++++++++++- 3 files changed, 34 insertions(+), 2 deletions(-) (limited to 'src/libsyntax_ext') diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 1e83f6c03ec..6708e3c12a0 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -38,12 +38,14 @@ pub trait AstBuilder { bindings: Vec) -> (ast::QSelf, ast::Path); - // types + // types and consts fn ty_mt(&self, ty: P, mutbl: ast::Mutability) -> ast::MutTy; fn ty(&self, span: Span, ty: ast::TyKind) -> P; fn ty_path(&self, path: ast::Path) -> P; fn ty_ident(&self, span: Span, idents: ast::Ident) -> P; + fn anon_const(&self, span: Span, expr: ast::ExprKind) -> ast::AnonConst; + fn const_ident(&self, span: Span, idents: ast::Ident) -> ast::AnonConst; fn ty_rptr(&self, span: Span, ty: P, @@ -394,6 +396,22 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.ty_path(self.path_ident(span, ident)) } + fn anon_const(&self, span: Span, expr: ast::ExprKind) -> ast::AnonConst { + ast::AnonConst { + id: ast::DUMMY_NODE_ID, + value: P(ast::Expr { + id: ast::DUMMY_NODE_ID, + node: expr, + span, + attrs: ThinVec::new(), + }) + } + } + + fn const_ident(&self, span: Span, ident: ast::Ident) -> ast::AnonConst { + self.anon_const(span, ast::ExprKind::Path(None, self.path_ident(span, ident))) + } + fn ty_rptr(&self, span: Span, ty: P, diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index e6fe125da9f..4678c752045 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -560,6 +560,7 @@ impl<'a> TraitDef<'a> { cx.typaram(self.span, param.ident, vec![], bounds, None) } + GenericParamKind::Const { .. } => param.clone(), })); // and similarly for where clauses @@ -657,6 +658,9 @@ impl<'a> TraitDef<'a> { GenericParamKind::Type { .. } => { GenericArg::Type(cx.ty_ident(self.span, param.ident)) } + GenericParamKind::Const { .. 
} => { + GenericArg::Const(cx.const_ident(self.span, param.ident)) + } }).collect(); // Create the type of `self`. diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs index ea6e07922b2..100ec0057ee 100644 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ b/src/libsyntax_ext/deriving/generic/ty.rs @@ -94,7 +94,7 @@ impl<'a> Path<'a> { } } -/// A type. Supports pointers, Self, and literals +/// A type. Supports pointers, Self, and literals. #[derive(Clone)] pub enum Ty<'a> { Self_, @@ -107,6 +107,13 @@ pub enum Ty<'a> { Tuple(Vec>), } +/// A const expression. Supports literals and blocks. +#[derive(Clone, Eq, PartialEq)] +pub enum Const { + Literal, + Block, +} + pub fn borrowed_ptrty<'r>() -> PtrTy<'r> { Borrowed(None, ast::Mutability::Immutable) } @@ -180,6 +187,9 @@ impl<'a> Ty<'a> { GenericParamKind::Type { .. } => { GenericArg::Type(cx.ty_ident(span, param.ident)) } + GenericParamKind::Const { .. } => { + GenericArg::Const(cx.const_ident(span, param.ident)) + } }).collect(); cx.path_all(span, false, vec![self_ty], params, vec![]) -- cgit 1.4.1-3-g733a5 From 2be0993c4e219994b355a06e82394c966a2cfa5d Mon Sep 17 00:00:00 2001 From: Taiki Endo Date: Sun, 10 Feb 2019 16:13:30 +0900 Subject: Revert removed #![feature(nll)] --- src/libfmt_macros/lib.rs | 1 + src/libgraphviz/lib.rs | 1 + src/libpanic_abort/lib.rs | 1 + src/libproc_macro/lib.rs | 1 + src/libprofiler_builtins/lib.rs | 1 + src/librustc_allocator/lib.rs | 1 + src/librustc_apfloat/lib.rs | 1 + src/librustc_asan/lib.rs | 1 + src/librustc_errors/lib.rs | 1 + src/librustc_llvm/lib.rs | 1 + src/librustc_lsan/lib.rs | 1 + src/librustc_msan/lib.rs | 1 + src/librustc_plugin/lib.rs | 1 + src/librustc_privacy/lib.rs | 1 + src/librustc_resolve/lib.rs | 1 + src/librustc_save_analysis/lib.rs | 1 + src/librustc_tsan/lib.rs | 1 + src/libsyntax/lib.rs | 1 + src/libsyntax_ext/lib.rs | 1 + src/libsyntax_pos/lib.rs | 1 + src/libunwind/lib.rs | 1 + 21 files changed, 21 insertions(+) 
(limited to 'src/libsyntax_ext') diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index ea67c01dfc9..aacd6cec565 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -10,6 +10,7 @@ #![deny(rust_2018_idioms)] +#![feature(nll)] #![feature(rustc_private)] pub use Piece::*; diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 8ce0f755df0..fadcfaec4b2 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -276,6 +276,7 @@ #![deny(rust_2018_idioms)] +#![feature(nll)] #![feature(str_escape)] use LabelText::*; diff --git a/src/libpanic_abort/lib.rs b/src/libpanic_abort/lib.rs index 7c6f36ece3c..edc97cd28a5 100644 --- a/src/libpanic_abort/lib.rs +++ b/src/libpanic_abort/lib.rs @@ -14,6 +14,7 @@ #![feature(core_intrinsics)] #![feature(libc)] +#![feature(nll)] #![feature(panic_runtime)] #![feature(staged_api)] #![feature(rustc_attrs)] diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 2cdc5a48a53..09a4a964abf 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -17,6 +17,7 @@ #![deny(rust_2018_idioms)] +#![feature(nll)] #![feature(staged_api)] #![feature(const_fn)] #![feature(extern_types)] diff --git a/src/libprofiler_builtins/lib.rs b/src/libprofiler_builtins/lib.rs index 9c8d3a13b08..2ce1a110b44 100644 --- a/src/libprofiler_builtins/lib.rs +++ b/src/libprofiler_builtins/lib.rs @@ -5,5 +5,6 @@ reason = "internal implementation detail of rustc right now", issue = "0")] #![allow(unused_features)] +#![feature(nll)] #![feature(staged_api)] #![deny(rust_2018_idioms)] diff --git a/src/librustc_allocator/lib.rs b/src/librustc_allocator/lib.rs index 16b9ccfda80..9d6e728e135 100644 --- a/src/librustc_allocator/lib.rs +++ b/src/librustc_allocator/lib.rs @@ -1,3 +1,4 @@ +#![feature(nll)] #![feature(rustc_private)] #![deny(rust_2018_idioms)] diff --git a/src/librustc_apfloat/lib.rs b/src/librustc_apfloat/lib.rs index f79d448edce..6653df8ffe9 100644 --- a/src/librustc_apfloat/lib.rs +++ 
b/src/librustc_apfloat/lib.rs @@ -34,6 +34,7 @@ #![forbid(unsafe_code)] #![deny(rust_2018_idioms)] +#![feature(nll)] #![feature(try_from)] // See librustc_cratesio_shim/Cargo.toml for a comment explaining this. #[allow(unused_extern_crates)] diff --git a/src/librustc_asan/lib.rs b/src/librustc_asan/lib.rs index 568bb540c47..3bdb86d313d 100644 --- a/src/librustc_asan/lib.rs +++ b/src/librustc_asan/lib.rs @@ -1,4 +1,5 @@ #![sanitizer_runtime] +#![feature(nll)] #![feature(sanitizer_runtime)] #![feature(staged_api)] #![no_std] diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index ea530fa1bfb..0fc7b59ff15 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -4,6 +4,7 @@ #![allow(unused_attributes)] #![feature(range_contains)] #![cfg_attr(unix, feature(libc))] +#![feature(nll)] #![feature(optin_builtin_traits)] #![deny(rust_2018_idioms)] diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs index 3fcb20a29dd..292ce8b0a01 100644 --- a/src/librustc_llvm/lib.rs +++ b/src/librustc_llvm/lib.rs @@ -1,4 +1,5 @@ #![deny(rust_2018_idioms)] +#![feature(nll)] #![feature(static_nobundle)] #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] diff --git a/src/librustc_lsan/lib.rs b/src/librustc_lsan/lib.rs index 568bb540c47..3bdb86d313d 100644 --- a/src/librustc_lsan/lib.rs +++ b/src/librustc_lsan/lib.rs @@ -1,4 +1,5 @@ #![sanitizer_runtime] +#![feature(nll)] #![feature(sanitizer_runtime)] #![feature(staged_api)] #![no_std] diff --git a/src/librustc_msan/lib.rs b/src/librustc_msan/lib.rs index 568bb540c47..3bdb86d313d 100644 --- a/src/librustc_msan/lib.rs +++ b/src/librustc_msan/lib.rs @@ -1,4 +1,5 @@ #![sanitizer_runtime] +#![feature(nll)] #![feature(sanitizer_runtime)] #![feature(staged_api)] #![no_std] diff --git a/src/librustc_plugin/lib.rs b/src/librustc_plugin/lib.rs index 32e003ff107..0ea1634c0b4 100644 --- a/src/librustc_plugin/lib.rs +++ b/src/librustc_plugin/lib.rs @@ -52,6 +52,7 @@ #![doc(html_root_url = 
"https://doc.rust-lang.org/nightly/")] +#![feature(nll)] #![feature(rustc_diagnostic_macros)] #![recursion_limit="256"] diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 14a0922c477..d31dadd3402 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -2,6 +2,7 @@ #![deny(rust_2018_idioms)] +#![feature(nll)] #![feature(rustc_diagnostic_macros)] #![recursion_limit="256"] diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index ecbfcec3c5e..ad73b30ae3f 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -2,6 +2,7 @@ #![feature(crate_visibility_modifier)] #![feature(label_break_value)] +#![feature(nll)] #![feature(rustc_diagnostic_macros)] #![feature(slice_sort_by_cached_key)] diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index c4a2ebeba65..1f7b6d77333 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -1,5 +1,6 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(custom_attribute)] +#![feature(nll)] #![deny(rust_2018_idioms)] #![allow(unused_attributes)] diff --git a/src/librustc_tsan/lib.rs b/src/librustc_tsan/lib.rs index 568bb540c47..3bdb86d313d 100644 --- a/src/librustc_tsan/lib.rs +++ b/src/librustc_tsan/lib.rs @@ -1,4 +1,5 @@ #![sanitizer_runtime] +#![feature(nll)] #![feature(sanitizer_runtime)] #![feature(staged_api)] #![no_std] diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 878d06c0f14..c844f9e2a91 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -11,6 +11,7 @@ #![feature(crate_visibility_modifier)] #![feature(label_break_value)] +#![feature(nll)] #![feature(rustc_attrs)] #![feature(rustc_diagnostic_macros)] #![feature(slice_sort_by_cached_key)] diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 670d71fe25b..7d7fd030859 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -9,6 +9,7 @@ 
#![feature(proc_macro_internals)] #![feature(proc_macro_span)] #![feature(decl_macro)] +#![feature(nll)] #![feature(str_escape)] #![feature(rustc_diagnostic_macros)] diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 70c45f7f9a7..dbb4f8f8159 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -11,6 +11,7 @@ #![feature(const_fn)] #![feature(crate_visibility_modifier)] #![feature(custom_attribute)] +#![feature(nll)] #![feature(non_exhaustive)] #![feature(optin_builtin_traits)] #![feature(rustc_attrs)] diff --git a/src/libunwind/lib.rs b/src/libunwind/lib.rs index b9a9929ef8b..0ccffea3170 100644 --- a/src/libunwind/lib.rs +++ b/src/libunwind/lib.rs @@ -4,6 +4,7 @@ #![deny(rust_2018_idioms)] #![feature(link_cfg)] +#![feature(nll)] #![feature(staged_api)] #![feature(unwind_attributes)] #![feature(static_nobundle)] -- cgit 1.4.1-3-g733a5 From c3e182cf43aea2c010a1915eb37293a458df2228 Mon Sep 17 00:00:00 2001 From: Alexander Regueiro Date: Fri, 8 Feb 2019 14:53:55 +0100 Subject: rustc: doc comments --- src/bootstrap/builder.rs | 10 +- src/bootstrap/cache.rs | 4 +- src/bootstrap/check.rs | 2 +- src/bootstrap/clean.rs | 2 +- src/bootstrap/compile.rs | 8 +- src/bootstrap/dist.rs | 2 +- src/bootstrap/doc.rs | 8 +- src/bootstrap/lib.rs | 18 +- src/bootstrap/test.rs | 16 +- src/bootstrap/tool.rs | 4 +- src/bootstrap/util.rs | 2 +- src/build_helper/lib.rs | 6 +- src/libcore/str/mod.rs | 4 +- src/libcore/str/pattern.rs | 4 +- src/libgraphviz/lib.rs | 4 +- src/libpanic_unwind/dummy.rs | 4 +- src/libpanic_unwind/dwarf/eh.rs | 2 +- src/libpanic_unwind/dwarf/mod.rs | 2 +- src/libpanic_unwind/emcc.rs | 6 +- src/libpanic_unwind/gcc.rs | 10 +- src/librustc/dep_graph/debug.rs | 2 +- src/librustc/dep_graph/dep_node.rs | 6 +- src/librustc/dep_graph/dep_tracking_map.rs | 2 +- src/librustc/dep_graph/graph.rs | 46 +-- src/librustc/hir/check_attr.rs | 10 +- src/librustc/hir/def.rs | 2 +- src/librustc/hir/def_id.rs | 2 +- src/librustc/hir/intravisit.rs | 
16 +- src/librustc/hir/lowering.rs | 26 +- src/librustc/hir/map/blocks.rs | 6 +- src/librustc/hir/map/def_collector.rs | 2 +- src/librustc/hir/map/definitions.rs | 52 ++-- src/librustc/hir/map/mod.rs | 24 +- src/librustc/hir/mod.rs | 331 +++++++++++---------- src/librustc/hir/pat_util.rs | 10 +- src/librustc/infer/at.rs | 24 +- src/librustc/infer/canonical/canonicalizer.rs | 10 +- src/librustc/infer/canonical/mod.rs | 4 +- src/librustc/infer/canonical/query_response.rs | 6 +- src/librustc/infer/combine.rs | 10 +- src/librustc/infer/error_reporting/mod.rs | 4 +- .../nice_region_error/different_lifetimes.rs | 2 +- src/librustc/infer/fudge.rs | 4 +- src/librustc/infer/higher_ranked/mod.rs | 2 +- src/librustc/infer/lattice.rs | 4 +- src/librustc/infer/lexical_region_resolve/mod.rs | 10 +- src/librustc/infer/mod.rs | 38 +-- src/librustc/infer/nll_relate/mod.rs | 16 +- src/librustc/infer/opaque_types/mod.rs | 23 +- src/librustc/infer/outlives/env.rs | 2 +- src/librustc/infer/outlives/free_region_map.rs | 2 +- src/librustc/infer/outlives/obligations.rs | 2 +- src/librustc/infer/outlives/verify.rs | 8 +- src/librustc/infer/region_constraints/mod.rs | 80 ++--- src/librustc/infer/type_variable.rs | 8 +- src/librustc/lint/context.rs | 10 +- src/librustc/lint/mod.rs | 18 +- src/librustc/middle/expr_use_visitor.rs | 6 +- src/librustc/middle/free_region.rs | 22 +- src/librustc/middle/liveness.rs | 56 ++-- src/librustc/middle/mem_categorization.rs | 16 +- src/librustc/middle/region.rs | 46 +-- src/librustc/middle/resolve_lifetime.rs | 22 +- src/librustc/middle/stability.rs | 4 +- src/librustc/middle/weak_lang_items.rs | 2 +- src/librustc/mir/interpret/allocation.rs | 20 +- src/librustc/mir/interpret/error.rs | 4 +- src/librustc/mir/interpret/mod.rs | 14 +- src/librustc/mir/interpret/value.rs | 15 +- src/librustc/mir/mod.rs | 66 ++-- src/librustc/mir/mono.rs | 2 +- src/librustc/mir/tcx.rs | 2 +- src/librustc/session/config.rs | 10 +- src/librustc/session/mod.rs | 30 +- 
src/librustc/traits/auto_trait.rs | 2 +- src/librustc/traits/codegen/mod.rs | 4 +- src/librustc/traits/coherence.rs | 10 +- src/librustc/traits/error_reporting.rs | 4 +- src/librustc/traits/fulfill.rs | 6 +- src/librustc/traits/mod.rs | 73 +++-- src/librustc/traits/object_safety.rs | 40 +-- src/librustc/traits/project.rs | 23 +- src/librustc/traits/query/dropck_outlives.rs | 2 +- src/librustc/traits/query/normalize.rs | 2 +- .../traits/query/normalize_erasing_regions.rs | 2 +- src/librustc/traits/query/outlives_bounds.rs | 4 +- src/librustc/traits/query/type_op/normalize.rs | 2 +- src/librustc/traits/select.rs | 47 ++- src/librustc/traits/specialize/mod.rs | 8 +- .../traits/specialize/specialization_graph.rs | 4 +- src/librustc/ty/adjustment.rs | 22 +- src/librustc/ty/constness.rs | 2 +- src/librustc/ty/context.rs | 22 +- src/librustc/ty/fold.rs | 46 +-- src/librustc/ty/inhabitedness/def_id_forest.rs | 10 +- src/librustc/ty/instance.rs | 12 +- src/librustc/ty/item_path.rs | 12 +- src/librustc/ty/layout.rs | 6 +- src/librustc/ty/mod.rs | 130 ++++---- src/librustc/ty/query/job.rs | 19 +- src/librustc/ty/query/mod.rs | 38 +-- src/librustc/ty/query/on_disk_cache.rs | 10 +- src/librustc/ty/query/plumbing.rs | 36 +-- src/librustc/ty/relate.rs | 2 +- src/librustc/ty/steal.rs | 6 +- src/librustc/ty/sty.rs | 94 +++--- src/librustc/ty/subst.rs | 24 +- src/librustc/ty/trait_def.rs | 6 +- src/librustc/ty/util.rs | 43 +-- src/librustc/ty/wf.rs | 4 +- src/librustc/util/common.rs | 8 +- src/librustc/util/nodemap.rs | 2 +- src/librustc/util/ppaux.rs | 31 +- src/librustc_apfloat/ieee.rs | 20 +- src/librustc_apfloat/lib.rs | 30 +- src/librustc_borrowck/borrowck/check_loans.rs | 2 +- .../borrowck/gather_loans/lifetime.rs | 2 +- src/librustc_borrowck/borrowck/gather_loans/mod.rs | 2 +- src/librustc_borrowck/borrowck/mod.rs | 4 +- src/librustc_borrowck/borrowck/move_data.rs | 8 +- src/librustc_borrowck/dataflow.rs | 2 +- src/librustc_codegen_llvm/abi.rs | 4 +- 
src/librustc_codegen_llvm/back/archive.rs | 2 +- src/librustc_codegen_llvm/back/link.rs | 2 +- src/librustc_codegen_llvm/back/lto.rs | 2 +- src/librustc_codegen_llvm/back/wasm.rs | 2 +- src/librustc_codegen_llvm/base.rs | 10 +- src/librustc_codegen_llvm/callee.rs | 4 +- src/librustc_codegen_llvm/context.rs | 4 +- .../debuginfo/create_scope_map.rs | 2 +- src/librustc_codegen_llvm/debuginfo/doc.rs | 4 +- src/librustc_codegen_llvm/debuginfo/mod.rs | 2 +- src/librustc_codegen_llvm/debuginfo/utils.rs | 2 +- src/librustc_codegen_llvm/llvm/ffi.rs | 2 +- src/librustc_codegen_llvm/type_of.rs | 2 +- src/librustc_codegen_ssa/back/linker.rs | 4 +- src/librustc_codegen_ssa/back/write.rs | 4 +- src/librustc_codegen_ssa/base.rs | 16 +- src/librustc_codegen_ssa/lib.rs | 4 +- src/librustc_codegen_ssa/mir/block.rs | 2 +- src/librustc_codegen_ssa/mir/mod.rs | 2 +- src/librustc_codegen_ssa/mir/place.rs | 10 +- src/librustc_codegen_ssa/traits/declare.rs | 6 +- src/librustc_codegen_ssa/traits/type_.rs | 4 +- src/librustc_data_structures/base_n.rs | 2 +- src/librustc_data_structures/bit_set.rs | 40 +-- .../graph/implementation/mod.rs | 4 +- src/librustc_data_structures/graph/scc/mod.rs | 2 +- src/librustc_data_structures/indexed_vec.rs | 10 +- .../obligation_forest/graphviz.rs | 4 +- .../obligation_forest/mod.rs | 20 +- src/librustc_data_structures/owning_ref/mod.rs | 14 +- src/librustc_data_structures/sip128.rs | 4 +- src/librustc_data_structures/svh.rs | 2 +- .../transitive_relation.rs | 14 +- src/librustc_data_structures/work_queue.rs | 6 +- src/librustc_driver/driver.rs | 6 +- src/librustc_driver/lib.rs | 8 +- src/librustc_driver/test.rs | 10 +- src/librustc_errors/diagnostic.rs | 2 +- src/librustc_errors/diagnostic_builder.rs | 4 +- src/librustc_errors/emitter.rs | 6 +- src/librustc_errors/lib.rs | 2 +- src/librustc_fs_util/lib.rs | 2 +- src/librustc_incremental/assert_dep_graph.rs | 2 +- src/librustc_incremental/persist/dirty_clean.rs | 23 +- 
src/librustc_incremental/persist/file_format.rs | 6 +- src/librustc_incremental/persist/fs.rs | 4 +- src/librustc_lint/builtin.rs | 29 +- src/librustc_lint/types.rs | 2 +- src/librustc_metadata/creader.rs | 2 +- src/librustc_metadata/cstore.rs | 6 +- src/librustc_metadata/decoder.rs | 2 +- src/librustc_metadata/dynamic_lib.rs | 2 +- src/librustc_metadata/index_builder.rs | 10 +- src/librustc_metadata/locator.rs | 4 +- src/librustc_mir/borrow_check/borrow_set.rs | 12 +- src/librustc_mir/borrow_check/error_reporting.rs | 10 +- src/librustc_mir/borrow_check/mod.rs | 30 +- src/librustc_mir/borrow_check/mutability_errors.rs | 2 +- .../borrow_check/nll/constraints/graph.rs | 4 +- .../borrow_check/nll/constraints/mod.rs | 4 +- .../borrow_check/nll/explain_borrow/mod.rs | 4 +- src/librustc_mir/borrow_check/nll/facts.rs | 2 +- src/librustc_mir/borrow_check/nll/invalidation.rs | 12 +- .../borrow_check/nll/region_infer/dump_mir.rs | 2 +- .../nll/region_infer/error_reporting/mod.rs | 4 +- .../region_infer/error_reporting/region_name.rs | 16 +- .../borrow_check/nll/region_infer/mod.rs | 30 +- .../borrow_check/nll/region_infer/values.rs | 14 +- .../nll/type_check/free_region_relations.rs | 12 +- .../nll/type_check/liveness/liveness_map.rs | 2 +- .../borrow_check/nll/type_check/liveness/mod.rs | 4 +- .../borrow_check/nll/type_check/liveness/trace.rs | 10 +- .../borrow_check/nll/type_check/mod.rs | 4 +- .../borrow_check/nll/type_check/relate_tys.rs | 2 +- .../borrow_check/nll/universal_regions.rs | 32 +- src/librustc_mir/borrow_check/path_utils.rs | 2 +- src/librustc_mir/borrow_check/place_ext.rs | 2 +- src/librustc_mir/borrow_check/places_conflict.rs | 4 +- src/librustc_mir/build/matches/mod.rs | 10 +- src/librustc_mir/build/matches/test.rs | 2 +- src/librustc_mir/build/misc.rs | 2 +- src/librustc_mir/build/mod.rs | 50 ++-- src/librustc_mir/build/scope.rs | 6 +- src/librustc_mir/const_eval.rs | 10 +- src/librustc_mir/dataflow/at_location.rs | 4 +- 
src/librustc_mir/dataflow/drop_flag_effects.rs | 4 +- src/librustc_mir/dataflow/graphviz.rs | 4 +- src/librustc_mir/dataflow/impls/mod.rs | 7 - src/librustc_mir/dataflow/mod.rs | 26 +- src/librustc_mir/dataflow/move_paths/abs_domain.rs | 10 +- src/librustc_mir/hair/cx/mod.rs | 14 +- src/librustc_mir/hair/mod.rs | 2 +- src/librustc_mir/hair/pattern/_match.rs | 22 +- src/librustc_mir/hair/pattern/check_match.rs | 7 +- src/librustc_mir/hair/pattern/mod.rs | 24 +- src/librustc_mir/interpret/eval_context.rs | 10 +- src/librustc_mir/interpret/intrinsics.rs | 6 +- src/librustc_mir/interpret/machine.rs | 24 +- src/librustc_mir/interpret/memory.rs | 14 +- src/librustc_mir/interpret/operand.rs | 8 +- src/librustc_mir/interpret/place.rs | 18 +- src/librustc_mir/interpret/step.rs | 2 +- src/librustc_mir/interpret/traits.rs | 2 +- src/librustc_mir/interpret/validity.rs | 2 +- src/librustc_mir/interpret/visitor.rs | 22 +- src/librustc_mir/monomorphize/item.rs | 2 +- src/librustc_mir/monomorphize/partitioning.rs | 2 +- src/librustc_mir/shim.rs | 4 +- src/librustc_mir/transform/check_unsafety.rs | 4 +- src/librustc_mir/transform/elaborate_drops.rs | 2 +- src/librustc_mir/transform/erase_regions.rs | 4 +- src/librustc_mir/transform/mod.rs | 2 +- src/librustc_mir/transform/promote_consts.rs | 2 +- src/librustc_mir/transform/qualify_consts.rs | 6 +- src/librustc_mir/transform/qualify_min_const_fn.rs | 2 +- .../transform/remove_noop_landing_pads.rs | 2 +- src/librustc_mir/util/alignment.rs | 2 +- src/librustc_mir/util/def_use.rs | 2 +- src/librustc_mir/util/elaborate_drops.rs | 14 +- src/librustc_mir/util/liveness.rs | 29 +- src/librustc_passes/ast_validation.rs | 30 +- src/librustc_passes/rvalue_promotion.rs | 8 +- src/librustc_plugin/build.rs | 2 +- src/librustc_plugin/lib.rs | 2 +- src/librustc_plugin/registry.rs | 2 +- src/librustc_privacy/lib.rs | 8 +- src/librustc_resolve/build_reduced_graph.rs | 6 +- src/librustc_resolve/lib.rs | 96 +++--- src/librustc_resolve/macros.rs | 20 +- 
src/librustc_resolve/resolve_imports.rs | 8 +- src/librustc_save_analysis/dump_visitor.rs | 8 +- src/librustc_save_analysis/lib.rs | 2 +- src/librustc_target/abi/call/mod.rs | 12 +- src/librustc_target/abi/mod.rs | 22 +- src/librustc_target/lib.rs | 2 +- src/librustc_target/spec/mod.rs | 10 +- src/librustc_traits/chalk_context/mod.rs | 12 +- src/librustc_traits/dropck_outlives.rs | 2 +- src/librustc_typeck/astconv.rs | 14 +- src/librustc_typeck/check/callee.rs | 6 +- src/librustc_typeck/check/cast.rs | 4 +- src/librustc_typeck/check/closure.rs | 4 +- src/librustc_typeck/check/coercion.rs | 22 +- src/librustc_typeck/check/compare_method.rs | 8 +- src/librustc_typeck/check/dropck.rs | 11 +- src/librustc_typeck/check/intrinsic.rs | 2 +- src/librustc_typeck/check/method/mod.rs | 14 +- src/librustc_typeck/check/method/probe.rs | 6 +- src/librustc_typeck/check/method/suggest.rs | 4 +- src/librustc_typeck/check/mod.rs | 54 ++-- src/librustc_typeck/check/op.rs | 8 +- src/librustc_typeck/check/regionck.rs | 36 +-- src/librustc_typeck/check/wfcheck.rs | 6 +- src/librustc_typeck/check_unused.rs | 2 +- src/librustc_typeck/coherence/mod.rs | 2 +- src/librustc_typeck/coherence/orphan.rs | 2 +- src/librustc_typeck/collect.rs | 24 +- src/librustc_typeck/constrained_type_params.rs | 8 +- src/librustc_typeck/impl_wf_check.rs | 6 +- src/librustc_typeck/lib.rs | 2 +- src/librustc_typeck/outlives/implicit_infer.rs | 2 +- src/librustc_typeck/variance/constraints.rs | 2 +- src/librustdoc/clean/cfg.rs | 4 +- src/librustdoc/clean/mod.rs | 10 +- src/librustdoc/clean/simplify.rs | 6 +- src/librustdoc/config.rs | 14 +- src/librustdoc/core.rs | 2 +- src/librustdoc/html/escape.rs | 4 +- src/librustdoc/html/format.rs | 6 +- src/librustdoc/html/highlight.rs | 2 +- src/librustdoc/html/markdown.rs | 10 +- src/librustdoc/html/render.rs | 16 +- src/librustdoc/html/toc.rs | 2 +- src/librustdoc/markdown.rs | 2 +- src/librustdoc/passes/collect_intra_doc_links.rs | 4 +- src/librustdoc/passes/mod.rs | 2 
+- src/librustdoc/visit_ast.rs | 2 +- src/libserialize/hex.rs | 2 +- src/libserialize/json.rs | 70 ++--- src/libserialize/serialize.rs | 2 +- src/libsyntax/ast.rs | 29 +- src/libsyntax/attr/builtin.rs | 6 +- src/libsyntax/attr/mod.rs | 6 +- src/libsyntax/config.rs | 8 +- src/libsyntax/diagnostics/metadata.rs | 4 +- src/libsyntax/ext/base.rs | 34 +-- src/libsyntax/ext/build.rs | 2 +- src/libsyntax/ext/expand.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 18 +- src/libsyntax/ext/tt/macro_rules.rs | 8 +- src/libsyntax/ext/tt/quoted.rs | 18 +- src/libsyntax/feature_gate.rs | 7 +- src/libsyntax/json.rs | 2 +- src/libsyntax/parse/lexer/comments.rs | 6 +- src/libsyntax/parse/lexer/mod.rs | 12 +- src/libsyntax/parse/mod.rs | 43 ++- src/libsyntax/parse/parser.rs | 319 ++++++++++---------- src/libsyntax/parse/token.rs | 20 +- src/libsyntax/print/pp.rs | 4 +- src/libsyntax/ptr.rs | 2 +- src/libsyntax/source_map.rs | 18 +- src/libsyntax/tokenstream.rs | 5 +- src/libsyntax/util/lev_distance.rs | 4 +- src/libsyntax/util/parser.rs | 4 +- src/libsyntax/util/parser_testing.rs | 2 +- src/libsyntax/visit.rs | 6 +- src/libsyntax_ext/deriving/decodable.rs | 2 +- src/libsyntax_ext/deriving/encodable.rs | 4 +- src/libsyntax_ext/deriving/generic/mod.rs | 16 +- src/libsyntax_ext/format.rs | 6 +- src/libsyntax_ext/format_foreign.rs | 2 +- src/libsyntax_pos/analyze_source_file.rs | 4 +- src/libsyntax_pos/hygiene.rs | 4 +- src/libsyntax_pos/lib.rs | 42 +-- src/libsyntax_pos/symbol.rs | 12 +- src/libterm/lib.rs | 12 +- src/libterm/terminfo/mod.rs | 8 +- src/libterm/terminfo/parm.rs | 2 +- src/libterm/terminfo/parser/compiled.rs | 2 +- src/libterm/terminfo/searcher.rs | 2 +- src/libterm/win.rs | 5 +- 343 files changed, 2260 insertions(+), 2241 deletions(-) (limited to 'src/libsyntax_ext') diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index f512e1d7a0c..78ba1d376be 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -60,17 +60,17 @@ pub trait Step: 
'static + Clone + Debug + PartialEq + Eq + Hash { /// Run this rule for all hosts without cross compiling. const ONLY_HOSTS: bool = false; - /// Primary function to execute this rule. Can call `builder.ensure(...)` + /// Primary function to execute this rule. Can call `builder.ensure()` /// with other steps to run those. fn run(self, builder: &Builder) -> Self::Output; /// When bootstrap is passed a set of paths, this controls whether this rule /// will execute. However, it does not get called in a "default" context - /// when we are not passed any paths; in that case, make_run is called + /// when we are not passed any paths; in that case, `make_run` is called /// directly. fn should_run(run: ShouldRun) -> ShouldRun; - /// Build up a "root" rule, either as a default rule or from a path passed + /// Builds up a "root" rule, either as a default rule or from a path passed /// to us. /// /// When path is `None`, we are executing in a context where no paths were @@ -648,7 +648,7 @@ impl<'a> Builder<'a> { add_lib_path(vec![self.rustc_libdir(compiler)], cmd); } - /// Get a path to the compiler specified. + /// Gets a path to the compiler specified. pub fn rustc(&self, compiler: Compiler) -> PathBuf { if compiler.is_snapshot(self) { self.initial_rustc.clone() @@ -659,7 +659,7 @@ impl<'a> Builder<'a> { } } - /// Get the paths to all of the compiler's codegen backends. + /// Gets the paths to all of the compiler's codegen backends. fn codegen_backends(&self, compiler: Compiler) -> impl Iterator { fs::read_dir(self.sysroot_codegen_backends(compiler)) .into_iter() diff --git a/src/bootstrap/cache.rs b/src/bootstrap/cache.rs index ea8bc657a57..5f84816789a 100644 --- a/src/bootstrap/cache.rs +++ b/src/bootstrap/cache.rs @@ -227,10 +227,10 @@ lazy_static! 
{ pub static ref INTERNER: Interner = Interner::default(); } -/// This is essentially a HashMap which allows storing any type in its input and +/// This is essentially a `HashMap` which allows storing any type in its input and /// any type in its output. It is a write-once cache; values are never evicted, /// which means that references to the value can safely be returned from the -/// get() method. +/// `get()` method. #[derive(Debug)] pub struct Cache( RefCell String { let mut features = "panic-unwind".to_string(); @@ -521,7 +521,7 @@ impl Build { features } - /// Get the space-separated set of activated features for the compiler. + /// Gets the space-separated set of activated features for the compiler. fn rustc_features(&self) -> String { let mut features = String::new(); if self.config.jemalloc { @@ -609,7 +609,7 @@ impl Build { self.out.join(&*target).join("crate-docs") } - /// Returns true if no custom `llvm-config` is set for the specified target. + /// Returns `true` if no custom `llvm-config` is set for the specified target. /// /// If no custom `llvm-config` was specified then Rust's llvm will be used. fn is_rust_llvm(&self, target: Interned) -> bool { @@ -857,13 +857,13 @@ impl Build { .map(|p| &**p) } - /// Returns true if this is a no-std `target`, if defined + /// Returns `true` if this is a no-std `target`, if defined fn no_std(&self, target: Interned) -> Option { self.config.target_config.get(&target) .map(|t| t.no_std) } - /// Returns whether the target will be tested using the `remote-test-client` + /// Returns `true` if the target will be tested using the `remote-test-client` /// and `remote-test-server` binaries. fn remote_tested(&self, target: Interned) -> bool { self.qemu_rootfs(target).is_some() || target.contains("android") || @@ -1059,7 +1059,7 @@ impl Build { self.rust_info.version(self, channel::CFG_RELEASE_NUM) } - /// Return the full commit hash + /// Returns the full commit hash. 
fn rust_sha(&self) -> Option<&str> { self.rust_info.sha() } @@ -1079,7 +1079,7 @@ impl Build { panic!("failed to find version in {}'s Cargo.toml", package) } - /// Returns whether unstable features should be enabled for the compiler + /// Returns `true` if unstable features should be enabled for the compiler /// we're building. fn unstable_features(&self) -> bool { match &self.config.channel[..] { @@ -1327,7 +1327,7 @@ impl<'a> Compiler { self } - /// Returns whether this is a snapshot compiler for `build`'s configuration + /// Returns `true` if this is a snapshot compiler for `build`'s configuration pub fn is_snapshot(&self, build: &Build) -> bool { self.stage == 0 && self.host == build.build } diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index bb00f6f6251..a882550f734 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -30,9 +30,9 @@ const ADB_TEST_DIR: &str = "/data/tmp/work"; /// The two modes of the test runner; tests or benchmarks. #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)] pub enum TestKind { - /// Run `cargo test` + /// Run `cargo test`. Test, - /// Run `cargo bench` + /// Run `cargo bench`. Bench, } @@ -1288,7 +1288,7 @@ impl Step for DocTest { run.never() } - /// Run `rustdoc --test` for all documentation in `src/doc`. + /// Runs `rustdoc --test` for all documentation in `src/doc`. /// /// This will run all tests in our markdown documentation (e.g., the book) /// located in `src/doc`. The `rustdoc` that's run is the one that sits next to @@ -1408,7 +1408,7 @@ impl Step for ErrorIndex { }); } - /// Run the error index generator tool to execute the tests located in the error + /// Runs the error index generator tool to execute the tests located in the error /// index. 
/// /// The `error_index_generator` tool lives in `src/tools` and is used to @@ -1614,7 +1614,7 @@ impl Step for Crate { } } - /// Run all unit tests plus documentation tests for a given crate defined + /// Runs all unit tests plus documentation tests for a given crate defined /// by a `Cargo.toml` (single manifest) /// /// This is what runs tests for crates like the standard library, compiler, etc. @@ -1833,7 +1833,7 @@ fn envify(s: &str) -> String { /// the standard library and such to the emulator ahead of time. This step /// represents this and is a dependency of all test suites. /// -/// Most of the time this is a noop. For some steps such as shipping data to +/// Most of the time this is a no-op. For some steps such as shipping data to /// QEMU we have to build our own tools so we've got conditional dependencies /// on those programs as well. Note that the remote test client is built for /// the build target (us) and the server is built for the target. @@ -1904,7 +1904,7 @@ impl Step for Distcheck { run.builder.ensure(Distcheck); } - /// Run "distcheck", a 'make check' from a tarball + /// Runs "distcheck", a 'make check' from a tarball fn run(self, builder: &Builder) { builder.info("Distcheck"); let dir = builder.out.join("tmp").join("distcheck"); @@ -1965,7 +1965,7 @@ impl Step for Bootstrap { const DEFAULT: bool = true; const ONLY_HOSTS: bool = true; - /// Test the build system itself + /// Tests the build system itself. fn run(self, builder: &Builder) { let mut cmd = Command::new(&builder.initial_cargo); cmd.arg("test") diff --git a/src/bootstrap/tool.rs b/src/bootstrap/tool.rs index cd3afc59e56..c09e9332895 100644 --- a/src/bootstrap/tool.rs +++ b/src/bootstrap/tool.rs @@ -40,7 +40,7 @@ impl Step for ToolBuild { run.never() } - /// Build a tool in `src/tools` + /// Builds a tool in `src/tools` /// /// This will build the specified tool with the specified `host` compiler in /// `stage` into the normal cargo output directory. 
@@ -621,7 +621,7 @@ tool_extended!((self, builder), ); impl<'a> Builder<'a> { - /// Get a `Command` which is ready to run `tool` in `stage` built for + /// Gets a `Command` which is ready to run `tool` in `stage` built for /// `host`. pub fn tool_cmd(&self, tool: Tool) -> Command { let mut cmd = Command::new(self.tool_exe(tool)); diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index 37c6c040da8..29aa98971fb 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -33,7 +33,7 @@ pub fn exe(name: &str, target: &str) -> String { } } -/// Returns whether the file name given looks like a dynamic library. +/// Returns `true` if the file name given looks like a dynamic library. pub fn is_dylib(name: &str) -> bool { name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll") } diff --git a/src/build_helper/lib.rs b/src/build_helper/lib.rs index 93aa9176812..bd99dc118e6 100644 --- a/src/build_helper/lib.rs +++ b/src/build_helper/lib.rs @@ -163,7 +163,7 @@ pub fn mtime(path: &Path) -> SystemTime { .unwrap_or(UNIX_EPOCH) } -/// Returns whether `dst` is up to date given that the file or files in `src` +/// Returns `true` if `dst` is up to date given that the file or files in `src` /// are used to generate it. /// /// Uses last-modified time checks to verify this. @@ -190,12 +190,12 @@ pub struct NativeLibBoilerplate { } impl NativeLibBoilerplate { - /// On OSX we don't want to ship the exact filename that compiler-rt builds. + /// On macOS we don't want to ship the exact filename that compiler-rt builds. /// This conflicts with the system and ours is likely a wildly different /// version, so they can't be substituted. /// /// As a result, we rename it here but we need to also use - /// `install_name_tool` on OSX to rename the commands listed inside of it to + /// `install_name_tool` on macOS to rename the commands listed inside of it to /// ensure it's linked against correctly. 
pub fn fixup_sanitizer_lib_name(&self, sanitizer_name: &str) { if env::var("TARGET").unwrap() != "x86_64-apple-darwin" { diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index e9190cc3ddf..81c351be305 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -1,6 +1,6 @@ -//! String manipulation +//! String manipulation. //! -//! For more details, see std::str +//! For more details, see the `std::str` module. #![stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libcore/str/pattern.rs b/src/libcore/str/pattern.rs index 55a7ba181e5..e5a75cdbbcc 100644 --- a/src/libcore/str/pattern.rs +++ b/src/libcore/str/pattern.rs @@ -1,7 +1,7 @@ //! The string Pattern API. //! -//! For more details, see the traits `Pattern`, `Searcher`, -//! `ReverseSearcher` and `DoubleEndedSearcher`. +//! For more details, see the traits [`Pattern`], [`Searcher`], +//! [`ReverseSearcher`], and [`DoubleEndedSearcher`]. #![unstable(feature = "pattern", reason = "API not fully fleshed out and ready to be stabilized", diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 8ce0f755df0..a445e70ca99 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -392,7 +392,7 @@ impl<'a> Id<'a> { /// digit (i.e., the regular expression `[a-zA-Z_][a-zA-Z_0-9]*`). /// /// (Note: this format is a strict subset of the `ID` format - /// defined by the DOT language. This function may change in the + /// defined by the DOT language. This function may change in the /// future to accept a broader subset, or the entirety, of DOT's /// `ID` format.) /// @@ -529,7 +529,7 @@ impl<'a> LabelText<'a> { } /// Decomposes content into string suitable for making EscStr that - /// yields same content as self. The result obeys the law + /// yields same content as self. The result obeys the law /// render(`lt`) == render(`EscStr(lt.pre_escaped_content())`) for /// all `lt: LabelText`. 
fn pre_escaped_content(self) -> Cow<'a, str> { diff --git a/src/libpanic_unwind/dummy.rs b/src/libpanic_unwind/dummy.rs index b052f76e2a3..3a00d637665 100644 --- a/src/libpanic_unwind/dummy.rs +++ b/src/libpanic_unwind/dummy.rs @@ -1,6 +1,6 @@ -//! Unwinding for wasm32 +//! Unwinding for *wasm32* target. //! -//! Right now we don't support this, so this is just stubs +//! Right now we don't support this, so this is just stubs. use alloc::boxed::Box; use core::any::Any; diff --git a/src/libpanic_unwind/dwarf/eh.rs b/src/libpanic_unwind/dwarf/eh.rs index ce7fab8584a..ce24406b556 100644 --- a/src/libpanic_unwind/dwarf/eh.rs +++ b/src/libpanic_unwind/dwarf/eh.rs @@ -6,7 +6,7 @@ //! http://www.airs.com/blog/archives/464 //! //! A reference implementation may be found in the GCC source tree -//! (/libgcc/unwind-c.c as of this writing) +//! (`/libgcc/unwind-c.c` as of this writing). #![allow(non_upper_case_globals)] #![allow(unused)] diff --git a/src/libpanic_unwind/dwarf/mod.rs b/src/libpanic_unwind/dwarf/mod.rs index eb5fb81f61b..0360696426d 100644 --- a/src/libpanic_unwind/dwarf/mod.rs +++ b/src/libpanic_unwind/dwarf/mod.rs @@ -1,5 +1,5 @@ //! Utilities for parsing DWARF-encoded data streams. -//! See http://www.dwarfstd.org, +//! See , //! DWARF-4 standard, Section 7 - "Data Representation" // This module is used only by x86_64-pc-windows-gnu for now, but we diff --git a/src/libpanic_unwind/emcc.rs b/src/libpanic_unwind/emcc.rs index 45c9244a46f..1f5ccfb0f12 100644 --- a/src/libpanic_unwind/emcc.rs +++ b/src/libpanic_unwind/emcc.rs @@ -1,9 +1,9 @@ -//! Unwinding for emscripten +//! Unwinding for *emscripten* target. //! //! Whereas Rust's usual unwinding implementation for Unix platforms -//! calls into the libunwind APIs directly, on emscripten we instead +//! calls into the libunwind APIs directly, on Emscripten we instead //! call into the C++ unwinding APIs. This is just an expedience since -//! emscripten's runtime always implements those APIs and does not +//! 
Emscripten's runtime always implements those APIs and does not //! implement libunwind. #![allow(private_no_mangle_fns)] diff --git a/src/libpanic_unwind/gcc.rs b/src/libpanic_unwind/gcc.rs index 065403aba1b..607fe28e3f2 100644 --- a/src/libpanic_unwind/gcc.rs +++ b/src/libpanic_unwind/gcc.rs @@ -1,4 +1,4 @@ -//! Implementation of panics backed by libgcc/libunwind (in some form) +//! Implementation of panics backed by libgcc/libunwind (in some form). //! //! For background on exception handling and stack unwinding please see //! "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and @@ -23,14 +23,14 @@ //! //! In the search phase, the job of a personality routine is to examine //! exception object being thrown, and to decide whether it should be caught at -//! that stack frame. Once the handler frame has been identified, cleanup phase +//! that stack frame. Once the handler frame has been identified, cleanup phase //! begins. //! //! In the cleanup phase, the unwinder invokes each personality routine again. //! This time it decides which (if any) cleanup code needs to be run for -//! the current stack frame. If so, the control is transferred to a special +//! the current stack frame. If so, the control is transferred to a special //! branch in the function body, the "landing pad", which invokes destructors, -//! frees memory, etc. At the end of the landing pad, control is transferred +//! frees memory, etc. At the end of the landing pad, control is transferred //! back to the unwinder and unwinding resumes. //! //! Once stack has been unwound down to the handler frame level, unwinding stops @@ -39,7 +39,7 @@ //! ## `eh_personality` and `eh_unwind_resume` //! //! These language items are used by the compiler when generating unwind info. -//! The first one is the personality routine described above. The second one +//! The first one is the personality routine described above. The second one //! 
allows compilation target to customize the process of resuming unwind at the //! end of the landing pads. `eh_unwind_resume` is used only if //! `custom_unwind_resume` flag in the target options is set. diff --git a/src/librustc/dep_graph/debug.rs b/src/librustc/dep_graph/debug.rs index a9ad22c5e91..f18ee3dced7 100644 --- a/src/librustc/dep_graph/debug.rs +++ b/src/librustc/dep_graph/debug.rs @@ -22,7 +22,7 @@ impl DepNodeFilter { } } - /// True if all nodes always pass the filter. + /// Returns `true` if all nodes always pass the filter. pub fn accepts_all(&self) -> bool { self.text.is_empty() } diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 58087b76266..796739c8721 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -302,7 +302,7 @@ macro_rules! define_dep_nodes { } } - /// Create a new, parameterless DepNode. This method will assert + /// Creates a new, parameterless DepNode. This method will assert /// that the DepNode corresponding to the given DepKind actually /// does not require any parameters. #[inline(always)] @@ -314,7 +314,7 @@ macro_rules! define_dep_nodes { } } - /// Extract the DefId corresponding to this DepNode. This will work + /// Extracts the DefId corresponding to this DepNode. This will work /// if two conditions are met: /// /// 1. The Fingerprint of the DepNode actually is a DefPathHash, and @@ -798,7 +798,7 @@ impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for HirId { } /// A "work product" corresponds to a `.o` (or other) file that we -/// save in between runs. These ids do not have a DefId but rather +/// save in between runs. These IDs do not have a `DefId` but rather /// some independent path or string that persists between runs without /// the need to be mapped or unmapped. (This ensures we can serialize /// them even in the absence of a tcx.) 
diff --git a/src/librustc/dep_graph/dep_tracking_map.rs b/src/librustc/dep_graph/dep_tracking_map.rs index a296a3379c2..94b832bea62 100644 --- a/src/librustc/dep_graph/dep_tracking_map.rs +++ b/src/librustc/dep_graph/dep_tracking_map.rs @@ -43,7 +43,7 @@ impl MemoizationMap for RefCell> { /// /// Here, `[op]` represents whatever nodes `op` reads in the /// course of execution; `Map(key)` represents the node for this - /// map; and `CurrentTask` represents the current task when + /// map, and `CurrentTask` represents the current task when /// `memoize` is invoked. /// /// **Important:** when `op` is invoked, the current task will be diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index e8c1cd36064..59ec459de96 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -61,13 +61,13 @@ struct DepGraphData { colors: DepNodeColorMap, - /// A set of loaded diagnostics which has been emitted. + /// A set of loaded diagnostics that have been emitted. emitted_diagnostics: Mutex>, /// Used to wait for diagnostics to be emitted. emitted_diagnostics_cond_var: Condvar, - /// When we load, there may be `.o` files, cached mir, or other such + /// When we load, there may be `.o` files, cached MIR, or other such /// things available to us. If we find that they are not dirty, we /// load the path to the file storing those work-products here into /// this map. We can later look for and extract that data. @@ -115,7 +115,7 @@ impl DepGraph { } } - /// True if we are actually building the full dep-graph. + /// Returns `true` if we are actually building the full dep-graph, and `false` otherwise. #[inline] pub fn is_fully_enabled(&self) -> bool { self.data.is_some() @@ -320,8 +320,8 @@ impl DepGraph { } } - /// Execute something within an "anonymous" task, that is, a task the - /// DepNode of which is determined by the list of inputs it read from. 
+ /// Executes something within an "anonymous" task, that is, a task the + /// `DepNode` of which is determined by the list of inputs it read from. pub fn with_anon_task(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeIndex) where OP: FnOnce() -> R { @@ -356,8 +356,8 @@ impl DepGraph { } } - /// Execute something within an "eval-always" task which is a task - // that runs whenever anything changes. + /// Executes something within an "eval-always" task which is a task + /// that runs whenever anything changes. pub fn with_eval_always_task<'a, C, A, R>( &self, key: DepNode, @@ -438,7 +438,7 @@ impl DepGraph { self.data.as_ref().unwrap().previous.node_to_index(dep_node) } - /// Check whether a previous work product exists for `v` and, if + /// Checks whether a previous work product exists for `v` and, if /// so, return the path that leads to it. Used to skip doing work. pub fn previous_work_product(&self, v: &WorkProductId) -> Option { self.data @@ -589,7 +589,7 @@ impl DepGraph { } } - /// Try to mark a dep-node which existed in the previous compilation session as green + /// Try to mark a dep-node which existed in the previous compilation session as green. fn try_mark_previous_green<'tcx>( &self, tcx: TyCtxt<'_, 'tcx, 'tcx>, @@ -773,8 +773,8 @@ impl DepGraph { Some(dep_node_index) } - /// Atomically emits some loaded diagnotics assuming that this only gets called with - /// did_allocation set to true on one thread + /// Atomically emits some loaded diagnostics, assuming that this only gets called with + /// `did_allocation` set to `true` on a single thread. #[cold] #[inline(never)] fn emit_diagnostics<'tcx>( @@ -913,7 +913,7 @@ impl DepGraph { #[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub struct WorkProduct { pub cgu_name: String, - /// Saved files associated with this CGU + /// Saved files associated with this CGU.
pub saved_files: Vec<(WorkProductFileKind, String)>, } @@ -937,17 +937,17 @@ pub(super) struct CurrentDepGraph { #[allow(dead_code)] forbidden_edge: Option, - // Anonymous DepNodes are nodes the ID of which we compute from the list of - // their edges. This has the beneficial side-effect that multiple anonymous - // nodes can be coalesced into one without changing the semantics of the - // dependency graph. However, the merging of nodes can lead to a subtle - // problem during red-green marking: The color of an anonymous node from - // the current session might "shadow" the color of the node with the same - // ID from the previous session. In order to side-step this problem, we make - // sure that anon-node IDs allocated in different sessions don't overlap. - // This is implemented by mixing a session-key into the ID fingerprint of - // each anon node. The session-key is just a random number generated when - // the DepGraph is created. + /// Anonymous `DepNode`s are nodes whose IDs we compute from the list of + /// their edges. This has the beneficial side-effect that multiple anonymous + /// nodes can be coalesced into one without changing the semantics of the + /// dependency graph. However, the merging of nodes can lead to a subtle + /// problem during red-green marking: The color of an anonymous node from + /// the current session might "shadow" the color of the node with the same + /// ID from the previous session. In order to side-step this problem, we make + /// sure that anonymous `DepNode` IDs allocated in different sessions don't overlap. + /// This is implemented by mixing a session-key into the ID fingerprint of + /// each anon node. The session-key is just a random number generated when + /// the `DepGraph` is created.
anon_id_seed: Fingerprint, total_read_count: u64, diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index ba340ad251f..ddc1eebe645 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -91,7 +91,7 @@ struct CheckAttrVisitor<'a, 'tcx: 'a> { } impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { - /// Check any attribute. + /// Checks any attribute. fn check_attributes(&self, item: &hir::Item, target: Target) { if target == Target::Fn || target == Target::Const { self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.id)); @@ -115,7 +115,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { self.check_used(item, target); } - /// Check if an `#[inline]` is applied to a function or a closure. + /// Checks if an `#[inline]` is applied to a function or a closure. fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) { if target != Target::Fn && target != Target::Closure { struct_span_err!(self.tcx.sess, @@ -127,7 +127,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { } } - /// Check if the `#[non_exhaustive]` attribute on an `item` is valid. + /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid. fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) { match target { Target::Struct | Target::Enum => { /* Valid */ }, @@ -143,7 +143,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { } } - /// Check if the `#[marker]` attribute on an `item` is valid. + /// Checks if the `#[marker]` attribute on an `item` is valid. fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) { match target { Target::Trait => { /* Valid */ }, @@ -157,7 +157,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> { } } - /// Check if the `#[repr]` attributes on `item` are valid. + /// Checks if the `#[repr]` attributes on `item` are valid. 
fn check_repr(&self, item: &hir::Item, target: Target) { // Extract the names of all repr hints, e.g., [foo, bar, align] for: // ``` diff --git a/src/librustc/hir/def.rs b/src/librustc/hir/def.rs index 15efa765029..b15bea01776 100644 --- a/src/librustc/hir/def.rs +++ b/src/librustc/hir/def.rs @@ -182,7 +182,7 @@ impl ::std::ops::IndexMut for PerNS { } impl PerNS> { - /// Returns whether all the items in this collection are `None`. + /// Returns `true` if all the items in this collection are `None`. pub fn is_empty(&self) -> bool { self.type_ns.is_none() && self.value_ns.is_none() && self.macro_ns.is_none() } diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index e06f09e21cb..ed1c15a73c2 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -229,7 +229,7 @@ impl fmt::Debug for DefId { } impl DefId { - /// Make a local `DefId` with the given index. + /// Makes a local `DefId` from the given `DefIndex`. #[inline] pub fn local(index: DefIndex) -> DefId { DefId { krate: LOCAL_CRATE, index: index } diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index 86c3fb9e4fc..9436c600c9f 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -4,7 +4,7 @@ //! `super::itemlikevisit::ItemLikeVisitor` trait.** //! //! If you have decided to use this visitor, here are some general -//! notes on how to do it: +//! notes on how to do so: //! //! Each overridden visit method has full control over what //! happens with its node, it can do its own traversal of the node's children, @@ -86,7 +86,7 @@ pub enum NestedVisitorMap<'this, 'tcx: 'this> { /// using this setting. OnlyBodies(&'this Map<'tcx>), - /// Visit all nested things, including item-likes. + /// Visits all nested things, including item-likes. /// /// **This is an unusual choice.** It is used when you want to /// process everything within their lexical context. 
Typically you @@ -96,7 +96,7 @@ pub enum NestedVisitorMap<'this, 'tcx: 'this> { impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { /// Returns the map to use for an "intra item-like" thing (if any). - /// e.g., function body. + /// E.g., function body. pub fn intra(self) -> Option<&'this Map<'tcx>> { match self { NestedVisitorMap::None => None, @@ -106,7 +106,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { } /// Returns the map to use for an "item-like" thing (if any). - /// e.g., item, impl-item. + /// E.g., item, impl-item. pub fn inter(self) -> Option<&'this Map<'tcx>> { match self { NestedVisitorMap::None => None, @@ -117,7 +117,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { } /// Each method of the Visitor trait is a hook to be potentially -/// overridden. Each method's default implementation recursively visits +/// overridden. Each method's default implementation recursively visits /// the substructure of the input via the corresponding `walk` method; /// e.g., the `visit_mod` method by default calls `intravisit::walk_mod`. /// @@ -129,7 +129,7 @@ impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { /// on `visit_nested_item` for details on how to visit nested items. /// /// If you want to ensure that your code handles every variant -/// explicitly, you need to override each method. (And you also need +/// explicitly, you need to override each method. (And you also need /// to monitor future changes to `Visitor` in case a new method with a /// new default implementation gets introduced.) pub trait Visitor<'v> : Sized { @@ -203,7 +203,7 @@ pub trait Visitor<'v> : Sized { } } - /// Visit the top-level item and (optionally) nested items / impl items. See + /// Visits the top-level item and (optionally) nested items / impl items. See /// `visit_nested_item` for details. 
fn visit_item(&mut self, i: &'v Item) { walk_item(self, i) } @@ -214,7 +214,7 @@ pub trait Visitor<'v> : Sized { } /// When invoking `visit_all_item_likes()`, you need to supply an - /// item-like visitor. This method converts a "intra-visit" + /// item-like visitor. This method converts an "intra-visit" /// visitor into an item-like visitor that walks the entire tree. /// If you use this, you probably don't want to process the /// contents of nested item-like things, since the outer loop will diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 3de41b1665d..8ce6d140122 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -3,24 +3,24 @@ //! Since the AST and HIR are fairly similar, this is mostly a simple procedure, //! much like a fold. Where lowering involves a bit more work things get more //! interesting and there are some invariants you should know about. These mostly -//! concern spans and ids. +//! concern spans and IDs. //! //! Spans are assigned to AST nodes during parsing and then are modified during //! expansion to indicate the origin of a node and the process it went through -//! being expanded. Ids are assigned to AST nodes just before lowering. +//! being expanded. IDs are assigned to AST nodes just before lowering. //! -//! For the simpler lowering steps, ids and spans should be preserved. Unlike +//! For the simpler lowering steps, IDs and spans should be preserved. Unlike //! expansion we do not preserve the process of lowering in the spans, so spans //! should not be modified here. When creating a new node (as opposed to -//! 'folding' an existing one), then you create a new id using `next_id()`. +//! 'folding' an existing one), then you create a new ID using `next_id()`. //! -//! You must ensure that ids are unique. That means that you should only use the -//! id from an AST node in a single HIR node (you can assume that AST node ids -//! are unique). Every new node must have a unique id.
Avoid cloning HIR nodes. -//! If you do, you must then set the new node's id to a fresh one. +//! You must ensure that IDs are unique. That means that you should only use the +//! ID from an AST node in a single HIR node (you can assume that AST node IDs +//! are unique). Every new node must have a unique ID. Avoid cloning HIR nodes. +//! If you do, you must then set the new node's ID to a fresh one. //! //! Spans are used for error messages and for tools to map semantics back to -//! source code. It is therefore not as important with spans as ids to be strict +//! source code. It is therefore not as important with spans as IDs to be strict //! about use (you can't break the compiler by screwing up a span). Obviously, a //! HIR node can only have a single span. But multiple nodes can have the same //! span and spans don't need to be kept in order, etc. Where code is preserved @@ -144,7 +144,7 @@ pub trait Resolver { is_value: bool, ) -> hir::Path; - /// Obtain the resolution for a node-id. + /// Obtain the resolution for a `NodeId`. fn get_resolution(&mut self, id: NodeId) -> Option; /// Obtain the possible resolutions for the given `use` statement. @@ -273,10 +273,10 @@ enum ParenthesizedGenericArgs { } /// What to do when we encounter an **anonymous** lifetime -/// reference. Anonymous lifetime references come in two flavors. You +/// reference. Anonymous lifetime references come in two flavors. You /// have implicit, or fully elided, references to lifetimes, like the /// one in `&T` or `Ref`, and you have `'_` lifetimes, like `&'_ T` -/// or `Ref<'_, T>`. These often behave the same, but not always: +/// or `Ref<'_, T>`. 
These often behave the same, but not always: /// /// - certain usages of implicit references are deprecated, like /// `Ref`, and we sometimes just give hard errors in those cases @@ -3287,7 +3287,7 @@ impl<'a> LoweringContext<'a> { /// Paths like the visibility path in `pub(super) use foo::{bar, baz}` are repeated /// many times in the HIR tree; for each occurrence, we need to assign distinct - /// node-ids. (See e.g., #56128.) + /// `NodeId`s. (See, e.g., #56128.) fn renumber_segment_ids(&mut self, path: &P) -> P { debug!("renumber_segment_ids(path = {:?})", path); let mut path = path.clone(); diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs index d5fb578d8d4..6919628c767 100644 --- a/src/librustc/hir/map/blocks.rs +++ b/src/librustc/hir/map/blocks.rs @@ -1,9 +1,9 @@ //! This module provides a simplified abstraction for working with -//! code blocks identified by their integer node-id. In particular, +//! code blocks identified by their integer `NodeId`. In particular, //! it captures a common set of attributes that all "function-like -//! things" (represented by `FnLike` instances) share. For example, +//! things" (represented by `FnLike` instances) share. For example, //! all `FnLike` instances have a type signature (be it explicit or -//! inferred). And all `FnLike` instances have a body, i.e., the code +//! inferred). And all `FnLike` instances have a body, i.e., the code //! that is run when the function-like thing it represents is invoked. //! //! With the above abstraction in place, one can treat the program diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 02fb503e752..8fe10a85ef3 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -12,7 +12,7 @@ use syntax_pos::Span; use crate::hir::map::{ITEM_LIKE_SPACE, REGULAR_SPACE}; -/// Creates def ids for nodes in the AST. +/// Creates `DefId`s for nodes in the AST. 
pub struct DefCollector<'a> { definitions: &'a mut Definitions, parent_def: Option, diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 84e9cde6df1..f454d691d41 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -1,5 +1,5 @@ -//! For each definition, we track the following data. A definition -//! here is defined somewhat circularly as "something with a def-id", +//! For each definition, we track the following data. A definition +//! here is defined somewhat circularly as "something with a `DefId`", //! but it generally corresponds to things like structs, enums, etc. //! There are also some rather random cases (like const initializer //! expressions) that are mostly just leftovers. @@ -163,10 +163,10 @@ pub struct Definitions { /// any) with a `DisambiguatedDefPathData`. #[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)] pub struct DefKey { - /// Parent path. + /// The parent path. pub parent: Option, - /// Identifier of this node. + /// The identifier of this node. pub disambiguated_data: DisambiguatedDefPathData, } @@ -207,12 +207,12 @@ impl DefKey { } } -/// Pair of `DefPathData` and an integer disambiguator. The integer is +/// A pair of `DefPathData` and an integer disambiguator. The integer is /// normally 0, but in the event that there are multiple defs with the /// same `parent` and `data`, we use this field to disambiguate /// between them. This introduces some artificial ordering dependency /// but means that if you have (e.g.) two impls for the same type in -/// the same module, they do get distinct def-ids. +/// the same module, they do get distinct `DefId`s. 
#[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)] pub struct DisambiguatedDefPathData { pub data: DefPathData, @@ -221,10 +221,10 @@ pub struct DisambiguatedDefPathData { #[derive(Clone, Debug, Hash, RustcEncodable, RustcDecodable)] pub struct DefPath { - /// the path leading from the crate root to the item + /// The path leading from the crate root to the item. pub data: Vec, - /// what krate root is this path relative to? + /// The crate root this path is relative to. pub krate: CrateNum, } @@ -260,9 +260,9 @@ impl DefPath { DefPath { data: data, krate: krate } } - /// Returns a string representation of the DefPath without + /// Returns a string representation of the `DefPath` without /// the crate-prefix. This method is useful if you don't have - /// a TyCtxt available. + /// a `TyCtxt` available. pub fn to_string_no_crate(&self) -> String { let mut s = String::with_capacity(self.data.len() * 16); @@ -277,7 +277,7 @@ impl DefPath { s } - /// Return filename friendly string of the DefPah with the + /// Returns a filename-friendly string for the `DefPath`, with the /// crate-prefix. pub fn to_string_friendly(&self, crate_imported_name: F) -> String where F: FnOnce(CrateNum) -> Symbol @@ -302,9 +302,9 @@ impl DefPath { s } - /// Return filename friendly string of the DefPah without + /// Returns a filename-friendly string of the `DefPath`, without /// the crate-prefix. This method is useful if you don't have - /// a TyCtxt available. + /// a `TyCtxt` available. pub fn to_filename_friendly_no_crate(&self) -> String { let mut s = String::with_capacity(self.data.len() * 16); @@ -394,18 +394,18 @@ impl Borrow for DefPathHash { } impl Definitions { - /// Create new empty definition map. + /// Creates a new empty definition map. /// - /// The DefIndex returned from a new Definitions are as follows: - /// 1. At DefIndexAddressSpace::Low, + /// The `DefIndex`es returned from a new `Definitions` are as follows: + /// 1.
At `DefIndexAddressSpace::Low`, /// CRATE_ROOT has index 0:0, and then new indexes are allocated in /// ascending order. - /// 2. At DefIndexAddressSpace::High, - /// the first FIRST_FREE_HIGH_DEF_INDEX indexes are reserved for - /// internal use, then 1:FIRST_FREE_HIGH_DEF_INDEX are allocated in + /// 2. At `DefIndexAddressSpace::High`, + /// the first `FIRST_FREE_HIGH_DEF_INDEX` indexes are reserved for + /// internal use, then `1:FIRST_FREE_HIGH_DEF_INDEX` are allocated in /// ascending order. - /// - /// FIXME: there is probably a better place to put this comment. + // + // FIXME: there is probably a better place to put this comment. pub fn new() -> Self { Self::default() } @@ -414,7 +414,7 @@ impl Definitions { &self.table } - /// Get the number of definitions. + /// Gets the number of definitions. pub fn def_index_counts_lo_hi(&self) -> (usize, usize) { (self.table.index_to_key[DefIndexAddressSpace::Low.index()].len(), self.table.index_to_key[DefIndexAddressSpace::High.index()].len()) @@ -497,8 +497,8 @@ impl Definitions { self.node_to_hir_id[node_id] } - /// Retrieve the span of the given `DefId` if `DefId` is in the local crate, the span exists and - /// it's not DUMMY_SP + /// Retrieves the span of the given `DefId` if `DefId` is in the local crate, the span exists + /// and it's not `DUMMY_SP`. #[inline] pub fn opt_span(&self, def_id: DefId) -> Option { if def_id.krate == LOCAL_CRATE { @@ -508,7 +508,7 @@ impl Definitions { } } - /// Add a definition with a parent definition. + /// Adds a root definition (no parent). pub fn create_root_def(&mut self, crate_name: &str, crate_disambiguator: CrateDisambiguator) @@ -606,7 +606,7 @@ impl Definitions { index } - /// Initialize the ast::NodeId to HirId mapping once it has been generated during + /// Initialize the `ast::NodeId` to `HirId` mapping once it has been generated during /// AST to HIR lowering. 
pub fn init_node_id_to_hir_id_mapping(&mut self, mapping: IndexVec) { diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 955f834e403..bf89eada4a5 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -36,7 +36,7 @@ mod hir_id_validator; pub const ITEM_LIKE_SPACE: DefIndexAddressSpace = DefIndexAddressSpace::Low; pub const REGULAR_SPACE: DefIndexAddressSpace = DefIndexAddressSpace::High; -/// Represents an entry and its parent NodeId. +/// Represents an entry and its parent `NodeId`. #[derive(Copy, Clone, Debug)] pub struct Entry<'hir> { parent: NodeId, @@ -162,8 +162,7 @@ impl Forest { } } -/// Represents a mapping from Node IDs to AST elements and their parent -/// Node IDs +/// Represents a mapping from `NodeId`s to AST elements and their parent `NodeId`s. #[derive(Clone)] pub struct Map<'hir> { /// The backing storage for all the AST nodes. @@ -473,7 +472,7 @@ impl<'hir> Map<'hir> { self.local_def_id(self.body_owner(id)) } - /// Given a node id, returns the `BodyId` associated with it, + /// Given a `NodeId`, returns the `BodyId` associated with it, /// if the node is a body owner, otherwise returns `None`. pub fn maybe_body_owned_by(&self, id: NodeId) -> Option { if let Some(entry) = self.find_entry(id) { @@ -558,7 +557,7 @@ impl<'hir> Map<'hir> { self.trait_auto_impl(trait_did).is_some() } - /// Get the attributes on the krate. This is preferable to + /// Gets the attributes on the crate. This is preferable to /// invoking `krate.attrs` because it registers a tighter /// dep-graph access. pub fn krate_attrs(&self) -> &'hir [ast::Attribute] { @@ -653,8 +652,7 @@ impl<'hir> Map<'hir> { self.get_generics(id).map(|generics| generics.span).filter(|sp| *sp != DUMMY_SP) } - /// Retrieve the Node corresponding to `id`, returning None if - /// cannot be found. + /// Retrieves the `Node` corresponding to `id`, returning `None` if it cannot be found.
pub fn find(&self, id: NodeId) -> Option> { let result = self.find_entry(id).and_then(|entry| { if let Node::Crate = entry.node { @@ -683,8 +681,8 @@ impl<'hir> Map<'hir> { /// returns the enclosing item. Note that this might not be the actual parent /// node in the AST - some kinds of nodes are not in the map and these will /// never appear as the parent_node. So you can always walk the `parent_nodes` - /// from a node to the root of the ast (unless you get the same id back here - /// that can happen if the id is not in the map itself or is just weird). + /// from a node to the root of the ast (unless you get the same ID back here + /// that can happen if the ID is not in the map itself or is just weird). pub fn get_parent_node(&self, id: NodeId) -> NodeId { if self.dep_graph.is_fully_enabled() { let hir_id_owner = self.node_to_hir_id(id).owner; @@ -725,7 +723,7 @@ impl<'hir> Map<'hir> { /// If there is some error when walking the parents (e.g., a node does not /// have a parent in the map or a node can't be found), then we return the - /// last good node id we found. Note that reaching the crate root (`id == 0`), + /// last good `NodeId` we found. Note that reaching the crate root (`id == 0`), /// is not an error, since items in the crate module have the crate root as /// parent. fn walk_parent_nodes(&self, @@ -761,7 +759,7 @@ impl<'hir> Map<'hir> { } } - /// Retrieve the `NodeId` for `id`'s enclosing method, unless there's a + /// Retrieves the `NodeId` for `id`'s enclosing method, unless there's a /// `while` or `loop` before reaching it, as block tail returns are not /// available in them. /// @@ -809,7 +807,7 @@ impl<'hir> Map<'hir> { self.walk_parent_nodes(id, match_fn, match_non_returning_block).ok() } - /// Retrieve the `NodeId` for `id`'s parent item, or `id` itself if no + /// Retrieves the `NodeId` for `id`'s parent item, or `id` itself if no /// parent item is in this map. 
The "parent item" is the closest parent node /// in the HIR which is recorded by the map and is an item, either an item /// in a module, trait, or impl. @@ -1122,7 +1120,7 @@ pub struct NodesMatchingSuffix<'a, 'hir:'a> { } impl<'a, 'hir> NodesMatchingSuffix<'a, 'hir> { - /// Returns true only if some suffix of the module path for parent + /// Returns `true` only if some suffix of the module path for parent /// matches `self.in_which`. /// /// In other words: let `[x_0,x_1,...,x_k]` be `self.in_which`; diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 3e7dd1432e1..d9759da9dfc 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -62,14 +62,14 @@ pub mod map; pub mod pat_util; pub mod print; -/// A HirId uniquely identifies a node in the HIR of the current crate. It is -/// composed of the `owner`, which is the DefIndex of the directly enclosing -/// hir::Item, hir::TraitItem, or hir::ImplItem (i.e., the closest "item-like"), +/// Uniquely identifies a node in the HIR of the current crate. It is +/// composed of the `owner`, which is the `DefIndex` of the directly enclosing +/// `hir::Item`, `hir::TraitItem`, or `hir::ImplItem` (i.e., the closest "item-like"), /// and the `local_id` which is unique within the given owner. /// /// This two-level structure makes for more stable values: One can move an item /// around within the source code, or add or remove stuff before it, without -/// the local_id part of the HirId changing, which is a very useful property in +/// the `local_id` part of the `HirId` changing, which is a very useful property in /// incremental compilation where we have to persist things through changes to /// the code base. #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] @@ -130,7 +130,7 @@ mod item_local_id_inner { pub use self::item_local_id_inner::ItemLocalId; -/// The `HirId` corresponding to CRATE_NODE_ID and CRATE_DEF_INDEX +/// The `HirId` corresponding to `CRATE_NODE_ID` and `CRATE_DEF_INDEX`. 
pub const CRATE_HIR_ID: HirId = HirId { owner: CRATE_DEF_INDEX, local_id: ItemLocalId::from_u32_const(0) @@ -149,8 +149,8 @@ pub struct Lifetime { pub hir_id: HirId, pub span: Span, - /// Either "'a", referring to a named lifetime definition, - /// or "" (aka keywords::Invalid), for elision placeholders. + /// Either "`'a`", referring to a named lifetime definition, + /// or "``" (i.e., `keywords::Invalid`), for elision placeholders. /// /// HIR lowering inserts these placeholders in type paths that /// refer to type definitions needing lifetime parameters, @@ -163,8 +163,9 @@ pub enum ParamName { /// Some user-given name like `T` or `'x`. Plain(Ident), - /// Synthetic name generated when user elided a lifetime in an impl header, - /// e.g., the lifetimes in cases like these: + /// Synthetic name generated when user elided a lifetime in an impl header. + /// + /// E.g., the lifetimes in cases like these: /// /// impl Foo for &u32 /// impl Foo<'_> for u32 @@ -180,7 +181,7 @@ pub enum ParamName { /// Indicates an illegal name was given and an error has been /// repored (so we should squelch other derived errors). Occurs - /// when e.g., `'_` is used in the wrong place. + /// when, e.g., `'_` is used in the wrong place. Error, } @@ -205,17 +206,17 @@ pub enum LifetimeName { /// User-given names or fresh (synthetic) names. Param(ParamName), - /// User typed nothing. e.g., the lifetime in `&u32`. + /// User wrote nothing (e.g., the lifetime in `&u32`). Implicit, /// Indicates an error during lowering (usually `'_` in wrong place) /// that was already reported. Error, - /// User typed `'_`. + /// User wrote `'_`. Underscore, - /// User wrote `'static` + /// User wrote `'static`. Static, } @@ -280,7 +281,7 @@ impl Lifetime { } } -/// A "Path" is essentially Rust's notion of a name; for instance: +/// A `Path` is essentially Rust's notion of a name; for instance, /// `std::cmp::PartialEq`.
It's represented as a sequence of identifiers, /// along with a bunch of supporting information. #[derive(Clone, RustcEncodable, RustcDecodable)] @@ -340,7 +341,7 @@ pub struct PathSegment { } impl PathSegment { - /// Convert an identifier to the corresponding segment. + /// Converts an identifier to the corresponding segment. pub fn from_ident(ident: Ident) -> PathSegment { PathSegment { ident, @@ -597,14 +598,14 @@ impl Generics { } } -/// Synthetic Type Parameters are converted to an other form during lowering, this allows -/// to track the original form they had. Useful for error messages. +/// Synthetic type parameters are converted to another form during lowering; this allows +/// us to track the original form they had, and is useful for error messages. #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum SyntheticTyParamKind { ImplTrait } -/// A `where` clause in a definition +/// A where-clause in a definition. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereClause { pub id: NodeId, @@ -624,7 +625,7 @@ impl WhereClause { } } -/// A single predicate in a `where` clause +/// A single predicate in a where-clause. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum WherePredicate { /// A type binding (e.g., `for<'c> Foo: Send + Clone + 'c`). @@ -645,19 +646,19 @@ impl WherePredicate { } } -/// A type bound, eg `for<'c> Foo: Send+Clone+'c` +/// A type bound (e.g., `for<'c> Foo: Send + Clone + 'c`). #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereBoundPredicate { pub span: Span, - /// Any generics from a `for` binding + /// Any generics from a `for` binding. pub bound_generic_params: HirVec, - /// The type being bounded + /// The type being bounded. pub bounded_ty: P, - /// Trait and lifetime bounds (`Clone+Send+'static`) + /// Trait and lifetime bounds (e.g., `Clone + Send + 'static`). 
pub bounds: GenericBounds, } -/// A lifetime predicate, e.g., `'a: 'b+'c` +/// A lifetime predicate (e.g., `'a: 'b + 'c`). #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereRegionPredicate { pub span: Span, @@ -665,7 +666,7 @@ pub struct WhereRegionPredicate { pub bounds: GenericBounds, } -/// An equality predicate (unsupported), e.g., `T=int` +/// An equality predicate (e.g., `T = int`); currently unsupported. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct WhereEqPredicate { pub id: NodeId, @@ -759,7 +760,7 @@ impl Crate { } } - /// A parallel version of visit_all_item_likes + /// A parallel version of `visit_all_item_likes`. pub fn par_visit_all_item_likes<'hir, V>(&'hir self, visitor: &V) where V: itemlikevisit::ParItemLikeVisitor<'hir> + Sync + Send { @@ -800,14 +801,14 @@ pub struct MacroDef { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Block { - /// Statements in a block + /// Statements in a block. pub stmts: HirVec, /// An expression at the end of the block - /// without a semicolon, if any + /// without a semicolon, if any. pub expr: Option>, pub id: NodeId, pub hir_id: HirId, - /// Distinguishes between `unsafe { ... }` and `{ ... }` + /// Distinguishes between `unsafe { ... }` and `{ ... }`. pub rules: BlockCheckMode, pub span: Span, /// If true, then there may exist `break 'a` values that aim to @@ -874,18 +875,18 @@ impl Pat { } } -/// A single field in a struct pattern +/// A single field in a struct pattern. /// /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` /// are treated the same as` x: x, y: ref y, z: ref mut z`, -/// except is_shorthand is true +/// except `is_shorthand` is true. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct FieldPat { pub id: NodeId, pub hir_id: HirId, - /// The identifier for the field + /// The identifier for the field. 
pub ident: Ident, - /// The pattern the field is destructured to + /// The pattern the field is destructured to. pub pat: P, pub is_shorthand: bool, } @@ -922,41 +923,41 @@ pub enum RangeEnd { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum PatKind { - /// Represents a wildcard pattern (`_`) + /// Represents a wildcard pattern (i.e., `_`). Wild, /// A fresh binding `ref mut binding @ OPT_SUBPATTERN`. /// The `NodeId` is the canonical ID for the variable being bound, - /// e.g., in `Ok(x) | Err(x)`, both `x` use the same canonical ID, + /// (e.g., in `Ok(x) | Err(x)`, both `x` use the same canonical ID), /// which is the pattern ID of the first `x`. Binding(BindingAnnotation, NodeId, HirId, Ident, Option>), - /// A struct or struct variant pattern, e.g., `Variant {x, y, ..}`. + /// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`). /// The `bool` is `true` in the presence of a `..`. Struct(QPath, HirVec>, bool), /// A tuple struct/variant pattern `Variant(x, y, .., z)`. /// If the `..` pattern fragment is present, then `Option` denotes its position. - /// 0 <= position <= subpats.len() + /// `0 <= position <= subpats.len()` TupleStruct(QPath, HirVec>, Option), /// A path pattern for an unit struct/variant or a (maybe-associated) constant. Path(QPath), - /// A tuple pattern `(a, b)`. + /// A tuple pattern (e.g., `(a, b)`). /// If the `..` pattern fragment is present, then `Option` denotes its position. - /// 0 <= position <= subpats.len() + /// `0 <= position <= subpats.len()` Tuple(HirVec>, Option), - /// A `box` pattern + /// A `box` pattern. Box(P), - /// A reference pattern, e.g., `&mut (a, b)` + /// A reference pattern (e.g., `&mut (a, b)`). Ref(P, Mutability), - /// A literal + /// A literal. Lit(P), - /// A range pattern, e.g., `1...2` or `1..2` + /// A range pattern (e.g., `1...2` or `1..2`). 
Range(P, P, RangeEnd), /// `[a, b, ..i, y, z]` is represented as: - /// `PatKind::Slice(box [a, b], Some(i), box [y, z])` + /// `PatKind::Slice(box [a, b], Some(i), box [y, z])`. Slice(HirVec>, Option>, HirVec>), } @@ -967,7 +968,7 @@ pub enum Mutability { } impl Mutability { - /// Return MutMutable only if both arguments are mutable. + /// Returns `MutMutable` only if both arguments are mutable. pub fn and(self, other: Self) -> Self { match self { MutMutable => other, @@ -978,41 +979,41 @@ impl Mutability { #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy, Hash)] pub enum BinOpKind { - /// The `+` operator (addition) + /// The `+` operator (addition). Add, - /// The `-` operator (subtraction) + /// The `-` operator (subtraction). Sub, - /// The `*` operator (multiplication) + /// The `*` operator (multiplication). Mul, - /// The `/` operator (division) + /// The `/` operator (division). Div, - /// The `%` operator (modulus) + /// The `%` operator (modulus). Rem, - /// The `&&` operator (logical and) + /// The `&&` operator (logical and). And, - /// The `||` operator (logical or) + /// The `||` operator (logical or). Or, - /// The `^` operator (bitwise xor) + /// The `^` operator (bitwise xor). BitXor, - /// The `&` operator (bitwise and) + /// The `&` operator (bitwise and). BitAnd, - /// The `|` operator (bitwise or) + /// The `|` operator (bitwise or). BitOr, - /// The `<<` operator (shift left) + /// The `<<` operator (shift left). Shl, - /// The `>>` operator (shift right) + /// The `>>` operator (shift right). Shr, - /// The `==` operator (equality) + /// The `==` operator (equality). Eq, - /// The `<` operator (less than) + /// The `<` operator (less than). Lt, - /// The `<=` operator (less than or equal to) + /// The `<=` operator (less than or equal to). Le, - /// The `!=` operator (not equal to) + /// The `!=` operator (not equal to). 
Ne, - /// The `>=` operator (greater than or equal to) + /// The `>=` operator (greater than or equal to). Ge, - /// The `>` operator (greater than) + /// The `>` operator (greater than). Gt, } @@ -1077,7 +1078,7 @@ impl BinOpKind { } } - /// Returns `true` if the binary operator takes its arguments by value + /// Returns `true` if the binary operator takes its arguments by value. pub fn is_by_value(self) -> bool { !self.is_comparison() } @@ -1112,11 +1113,11 @@ pub type BinOp = Spanned; #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy, Hash)] pub enum UnOp { - /// The `*` operator for dereferencing + /// The `*` operator (dereferencing). UnDeref, - /// The `!` operator for logical inversion + /// The `!` operator (logical negation). UnNot, - /// The `-` operator for negation + /// The `-` operator (negation). UnNeg, } @@ -1129,7 +1130,7 @@ impl UnOp { } } - /// Returns `true` if the unary operator takes its argument by value + /// Returns `true` if the unary operator takes its argument by value. pub fn is_by_value(self) -> bool { match self { UnNeg | UnNot => true, @@ -1138,7 +1139,7 @@ pub enum... pub fn is_by_value(self) -> bool { match self { UnNeg | UnNot => true, _ => false, } } } -/// A statement +/// A statement. #[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Stmt { pub id: NodeId, @@ -1156,15 +1157,15 @@ impl fmt::Debug for Stmt { #[derive(Clone, RustcEncodable, RustcDecodable)] pub enum StmtKind { - /// A local (let) binding: + /// A local (`let`) binding. Local(P), - /// An item binding: + /// An item binding. Item(P), - /// Expr without trailing semi-colon (must have unit type): + /// An expression without a trailing semi-colon (must have unit type). Expr(P), - /// Expr with trailing semi-colon (may have any type): + /// An expression with a trailing semi-colon (may have any type). Semi(P), } @@ -1179,12 +1180,12 @@ impl StmtKind { } } -/// Local represents a `let` statement, e.g., `let : = ;` +/// Represents a `let` statement (i.e., `let : = ;`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Local { pub pat: P, pub ty: Option>, - /// Initializer expression to set the value, if any + /// Initializer expression to set the value, if any. pub init: Option>, pub id: NodeId, pub hir_id: HirId, @@ -1193,7 +1194,7 @@ pub struct Local { pub source: LocalSource, } -/// represents one arm of a 'match' +/// Represents a single arm of a `match` expression. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Arm { pub attrs: HirVec, @@ -1419,16 +1420,16 @@ impl fmt::Debug for Expr { pub enum ExprKind { /// A `box x` expression. Box(P), - /// An array (`[a, b, c, d]`) + /// An array (e.g., `[a, b, c, d]`). Array(HirVec), - /// A function call + /// A function call. /// /// The first field resolves to the function itself (usually an `ExprKind::Path`), /// and the second field is the list of arguments. /// This also represents calling the constructor of /// tuple-like ADTs such as tuple structs and enum variants. Call(P, HirVec), - /// A method call (`x.foo::<'static, Bar, Baz>(a, b, c, d)`) + /// A method call (e.g., `x.foo::<'static, Bar, Baz>(a, b, c, d)`). /// /// The `PathSegment`/`Span` represent the method name and its generic arguments /// (within the angle brackets). @@ -1438,63 +1439,64 @@ pub enum ExprKind { /// Thus, `x.foo::(a, b, c, d)` is represented as /// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`. MethodCall(PathSegment, Span, HirVec), - /// A tuple (`(a, b, c ,d)`) + /// A tuple (e.g., `(a, b, c ,d)`). Tup(HirVec), - /// A binary operation (For example: `a + b`, `a * b`) + /// A binary operation (e.g., `a + b`, `a * b`). Binary(BinOp, P, P), - /// A unary operation (For example: `!x`, `*x`) + /// A unary operation (e.g., `!x`, `*x`). Unary(UnOp, P), - /// A literal (For example: `1`, `"foo"`) + /// A literal (e.g., `1`, `"foo"`). Lit(Lit), - /// A cast (`foo as f64`) + /// A cast (e.g., `foo as f64`). 
Cast(P, P), + /// A type reference (e.g., `Foo`). Type(P, P), - /// An `if` block, with an optional else block + /// An `if` block, with an optional else block. /// - /// `if expr { expr } else { expr }` + /// I.e., `if { } else { }`. If(P, P, Option>), /// A while loop, with an optional label /// - /// `'label: while expr { block }` + /// I.e., `'label: while expr { }`. While(P, P, Option::new` and /// so forth. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] @@ -1053,7 +1053,7 @@ pub struct GlobalCtxt<'tcx> { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - /// Get the global TyCtxt. + /// Gets the global `TyCtxt`. #[inline] pub fn global_tcx(self) -> TyCtxt<'gcx, 'gcx, 'gcx> { TyCtxt { @@ -1153,12 +1153,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { value.lift_to_tcx(self.global_tcx()) } - /// Returns true if self is the same as self.global_tcx(). + /// Returns `true` if `self` is the same as `self.global_tcx()`. fn is_global(self) -> bool { ptr::eq(self.interners, &self.global_interners) } - /// Create a type context and call the closure with a `TyCtxt` reference + /// Creates a type context and calls the closure with a `TyCtxt` reference /// to the context. The closure enforces that the type context and any interned /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. @@ -1353,7 +1353,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Convert a `DefId` into its fully expanded `DefPath` (every + /// Converts a `DefId` into its fully expanded `DefPath` (every /// `DefId` is really just an interned def-path). /// /// Note that if `id` is not local to this crate, the result will diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs index 306c69666e5..7aa1694db80 100644 --- a/src/librustc/ty/fold.rs +++ b/src/librustc/ty/fold.rs @@ -4,7 +4,7 @@ //! instance of a "folder" (a type which implements `TypeFolder`).
Then //! the setup is intended to be: //! -//! T.fold_with(F) --calls--> F.fold_T(T) --calls--> T.super_fold_with(F) +//! T.fold_with(F) --calls--> F.fold_T(T) --calls--> T.super_fold_with(F) //! //! This way, when you define a new folder F, you can override //! `fold_T()` to customize the behavior, and invoke `T.super_fold_with()` @@ -25,9 +25,11 @@ //! proper thing. //! //! A `TypeFoldable` T can also be visited by a `TypeVisitor` V using similar setup: -//! T.visit_with(V) --calls--> V.visit_T(T) --calls--> T.super_visit_with(V). -//! These methods return true to indicate that the visitor has found what it is looking for -//! and does not need to visit anything else. +//! +//! T.visit_with(V) --calls--> V.visit_T(T) --calls--> T.super_visit_with(V). +//! +//! These methods return true to indicate that the visitor has found what it is +//! looking for, and does not need to visit anything else. use crate::hir::def_id::DefId; use crate::ty::{self, Binder, Ty, TyCtxt, TypeFlags}; @@ -52,7 +54,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { self.super_visit_with(visitor) } - /// True if `self` has any late-bound regions that are either + /// Returns `true` if `self` has any late-bound regions that are either /// bound by `binder` or bound by some binder outside of `binder`. /// If `binder` is `ty::INNERMOST`, this indicates whether /// there are any late-bound regions that appear free. @@ -60,7 +62,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder }) } - /// True if this `self` has any regions that escape `binder` (and + /// Returns `true` if this `self` has any regions that escape `binder` (and /// hence are not bound by it). fn has_vars_bound_above(&self, binder: ty::DebruijnIndex) -> bool { self.has_vars_bound_at_or_above(binder.shifted_in(1)) @@ -141,7 +143,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { } } -/// The TypeFolder trait defines the actual *folding*. 
There is a +/// The `TypeFolder` trait defines the actual *folding*. There is a /// method defined for every foldable type. Each of these has a /// default implementation that does an "identity" fold. Within each /// identity fold, it should invoke `foo.fold_with(self)` to fold each @@ -262,7 +264,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }); } - /// True if `callback` returns true for every region appearing free in `value`. + /// Returns `true` if `callback` returns true for every region appearing free in `value`. pub fn all_free_regions_meet( self, value: &impl TypeFoldable<'tcx>, @@ -271,7 +273,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { !self.any_free_region_meets(value, |r| !callback(r)) } - /// True if `callback` returns true for some region appearing free in `value`. + /// Returns `true` if `callback` returns true for some region appearing free in `value`. pub fn any_free_region_meets( self, value: &impl TypeFoldable<'tcx>, @@ -292,8 +294,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// ^ ^ ^ ^ /// | | | | here, would be shifted in 1 /// | | | here, would be shifted in 2 - /// | | here, would be INNERMOST shifted in by 1 - /// | here, initially, binder would be INNERMOST + /// | | here, would be `INNERMOST` shifted in by 1 + /// | here, initially, binder would be `INNERMOST` /// ``` /// /// You see that, initially, *any* bound value is free, @@ -496,12 +498,12 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for BoundVarReplacer<'a, 'gcx, 'tcx> } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - /// Replace all regions bound by the given `Binder` with the + /// Replaces all regions bound by the given `Binder` with the /// results returned by the closure; the closure is expected to /// return a free region (relative to this binder), and hence the /// binder is removed in the return type. The closure is invoked /// once for each unique `BoundRegion`; multiple references to the - /// same `BoundRegion` will reuse the previous result. 
A map is + /// same `BoundRegion` will reuse the previous result. A map is /// returned at the end with each bound region and the free region /// that replaced it. /// @@ -520,7 +522,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t) } - /// Replace all escaping bound vars. The `fld_r` closure replaces escaping + /// Replaces all escaping bound vars. The `fld_r` closure replaces escaping /// bound regions while the `fld_t` closure replaces escaping bound types. pub fn replace_escaping_bound_vars( self, @@ -554,7 +556,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Replace all types or regions bound by the given `Binder`. The `fld_r` + /// Replaces all types or regions bound by the given `Binder`. The `fld_r` /// closure replaces bound regions while the `fld_t` closure replaces bound /// types. pub fn replace_bound_vars( @@ -570,7 +572,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t) } - /// Replace any late-bound regions bound in `value` with + /// Replaces any late-bound regions bound in `value` with /// free variants attached to `all_outlive_scope`. pub fn liberate_late_bound_regions( &self, @@ -640,7 +642,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { collector.regions } - /// Replace any late-bound regions bound in `value` with `'erased`. Useful in codegen but also + /// Replaces any late-bound regions bound in `value` with `'erased`. Useful in codegen but also /// method lookup and a few other places where precise region relationships are not required. pub fn erase_late_bound_regions(self, value: &Binder) -> T where T : TypeFoldable<'tcx> @@ -648,13 +650,13 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.replace_late_bound_regions(value, |_| self.types.re_erased).0 } - /// Rewrite any late-bound regions so that they are anonymous. 
Region numbers are + /// Rewrite any late-bound regions so that they are anonymous. Region numbers are /// assigned starting at 1 and increasing monotonically in the order traversed /// by the fold operation. /// /// The chief purpose of this function is to canonicalize regions so that two /// `FnSig`s or `TraitRef`s which are equivalent up to region naming will become - /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and + /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and /// `for<'a, 'b> fn(&'b isize, &'a isize)` will become identical after anonymization. pub fn anonymize_late_bound_regions(self, sig: &Binder) -> Binder where T : TypeFoldable<'tcx>, @@ -818,7 +820,7 @@ pub fn shift_out_vars<'a, 'gcx, 'tcx, T>( /// scope to which it is attached, etc. An escaping var represents /// a bound var for which this processing has not yet been done. struct HasEscapingVarsVisitor { - /// Anything bound by `outer_index` or "above" is escaping + /// Anything bound by `outer_index` or "above" is escaping. outer_index: ty::DebruijnIndex, } @@ -881,10 +883,10 @@ struct LateBoundRegionsCollector { current_index: ty::DebruijnIndex, regions: FxHashSet, - /// If true, we only want regions that are known to be + /// `true` if we only want regions that are known to be /// "constrained" when you equate this type with another type. In /// particular, if you have e.g., `&'a u32` and `&'b u32`, equating - /// them constraints `'a == 'b`. But if you have `<&'a u32 as + /// them constraints `'a == 'b`. But if you have `<&'a u32 as /// Trait>::Foo` and `<&'b u32 as Trait>::Foo`, normalizing those /// types may mean that `'a` and `'b` don't appear in the results, /// so they are not considered *constrained*. 
diff --git a/src/librustc/ty/inhabitedness/def_id_forest.rs b/src/librustc/ty/inhabitedness/def_id_forest.rs index 73b7d74d9da..3b393c3ca15 100644 --- a/src/librustc/ty/inhabitedness/def_id_forest.rs +++ b/src/librustc/ty/inhabitedness/def_id_forest.rs @@ -22,14 +22,14 @@ pub struct DefIdForest { } impl<'a, 'gcx, 'tcx> DefIdForest { - /// Create an empty forest. + /// Creates an empty forest. pub fn empty() -> DefIdForest { DefIdForest { root_ids: SmallVec::new(), } } - /// Create a forest consisting of a single tree representing the entire + /// Creates a forest consisting of a single tree representing the entire /// crate. #[inline] pub fn full(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest { @@ -37,7 +37,7 @@ impl<'a, 'gcx, 'tcx> DefIdForest { DefIdForest::from_id(crate_id) } - /// Create a forest containing a DefId and all its descendants. + /// Creates a forest containing a DefId and all its descendants. pub fn from_id(id: DefId) -> DefIdForest { let mut root_ids = SmallVec::new(); root_ids.push(id); @@ -46,12 +46,12 @@ impl<'a, 'gcx, 'tcx> DefIdForest { } } - /// Test whether the forest is empty. + /// Tests whether the forest is empty. pub fn is_empty(&self) -> bool { self.root_ids.is_empty() } - /// Test whether the forest contains a given DefId. + /// Tests whether the forest contains a given DefId. pub fn contains(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, id: DefId) -> bool diff --git a/src/librustc/ty/instance.rs b/src/librustc/ty/instance.rs index e4fe93d5dea..5fc22e3c02b 100644 --- a/src/librustc/ty/instance.rs +++ b/src/librustc/ty/instance.rs @@ -22,17 +22,17 @@ pub enum InstanceDef<'tcx> { /// `::method` where `method` receives unsizeable `self: Self`. 
VtableShim(DefId), - /// \::call_* - /// def-id is FnTrait::call_* + /// `::call_*` + /// `DefId` is `FnTrait::call_*` FnPtrShim(DefId, Ty<'tcx>), - /// ::fn + /// `::fn` Virtual(DefId, usize), - /// <[mut closure] as FnOnce>::call_once + /// `<[mut closure] as FnOnce>::call_once` ClosureOnceShim { call_once: DefId }, - /// drop_in_place::; None for empty drop glue. + /// `drop_in_place::; None` for empty drop glue. DropGlue(DefId, Option>), ///`::clone` shim. @@ -220,7 +220,7 @@ impl<'a, 'b, 'tcx> Instance<'tcx> { self.def.def_id() } - /// Resolve a (def_id, substs) pair to an (optional) instance -- most commonly, + /// Resolves a `(def_id, substs)` pair to an (optional) instance -- most commonly, /// this is used to find the precise code that will run for a trait method invocation, /// if known. /// diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index 3f49c1b27ce..5dc31caf295 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -43,7 +43,7 @@ pub fn with_forced_impl_filename_line R, R>(f: F) -> R { }) } -/// Add the `crate::` prefix to paths where appropriate. +/// Adds the `crate::` prefix to paths where appropriate. pub fn with_crate_prefix R, R>(f: F) -> R { SHOULD_PREFIX_WITH_CRATE.with(|flag| { let old = flag.get(); @@ -55,7 +55,7 @@ pub fn with_crate_prefix R, R>(f: F) -> R { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - /// Returns a string identifying this def-id. This string is + /// Returns a string identifying this `DefId`. This string is /// suitable for user output. It is relative to the current crate /// root, unless with_forced_absolute_paths was used. pub fn item_path_str(self, def_id: DefId) -> String { @@ -468,7 +468,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { buffer.push(&format!("", span_str)); } - /// Returns the def-id of `def_id`'s parent in the def tree. If + /// Returns the `DefId` of `def_id`'s parent in the def tree. 
If /// this returns `None`, then `def_id` represents a crate root or /// inlined root. pub fn parent_def_id(self, def_id: DefId) -> Option { @@ -478,9 +478,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } /// As a heuristic, when we see an impl, if we see that the -/// 'self-type' is a type defined in the same module as the impl, +/// 'self type' is a type defined in the same module as the impl, /// we can omit including the path to the impl itself. This -/// function tries to find a "characteristic def-id" for a +/// function tries to find a "characteristic `DefId`" for a /// type. It's just a heuristic so it makes some questionable /// decisions and we may want to adjust it later. pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option { @@ -535,7 +535,7 @@ pub trait ItemPathBuffer { #[derive(Debug)] pub enum RootMode { - /// Try to make a path relative to the local crate. In + /// Try to make a path relative to the local crate. In /// particular, local paths have no prefix, and if the path comes /// from an extern crate, start with the path to the `extern /// crate` declaration. diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 8401d0861ca..6c507c0015d 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -46,7 +46,7 @@ impl IntegerExt for Integer { } } - /// Get the Integer type from an attr::IntType. + /// Gets the Integer type from an attr::IntType. fn from_attr(cx: &C, ity: attr::IntType) -> Integer { let dl = cx.data_layout(); @@ -62,7 +62,7 @@ impl IntegerExt for Integer { } } - /// Find the appropriate Integer type and signedness for the given + /// Finds the appropriate Integer type and signedness for the given /// signed discriminant range and #[repr] attribute. /// N.B.: u128 values above i128::MAX will be treated as signed, but /// that shouldn't affect anything, other than maybe debuginfo. 
@@ -1686,7 +1686,7 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx> tcx.types.re_static, tcx.mk_array(tcx.types.usize, 3), ) - /* FIXME use actual fn pointers + /* FIXME: use actual fn pointers Warning: naively computing the number of entries in the vtable by counting the methods on the trait + methods on all parent traits does not work, because some methods can diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 1f08d930fbd..70f72acad1f 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -135,8 +135,8 @@ pub enum AssociatedItemContainer { } impl AssociatedItemContainer { - /// Asserts that this is the def-id of an associated item declared - /// in a trait, and returns the trait def-id. + /// Asserts that this is the `DefId` of an associated item declared + /// in a trait, and returns the trait `DefId`. pub fn assert_trait(&self) -> DefId { match *self { TraitContainer(id) => id, @@ -154,7 +154,7 @@ impl AssociatedItemContainer { /// The "header" of an impl is everything outside the body: a Self type, a trait /// ref (in the case of a trait impl), and a set of predicates (from the -/// bounds/where clauses). +/// bounds / where-clauses). #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct ImplHeader<'tcx> { pub impl_def_id: DefId, @@ -328,7 +328,7 @@ pub enum Variance { /// item. pub struct CrateVariancesMap { /// For each item with generics, maps to a vector of the variance - /// of its generics. If an item has no generics, it will have no + /// of its generics. If an item has no generics, it will have no /// entry. pub variances: FxHashMap>>, @@ -338,7 +338,7 @@ pub struct CrateVariancesMap { impl Variance { /// `a.xform(b)` combines the variance of a context with the - /// variance of a type with the following meaning. If we are in a + /// variance of a type with the following meaning. 
If we are in a /// context with variance `a`, and we encounter a type argument in /// a position with variance `b`, then `a.xform(b)` is the new /// variance with which the argument appears. @@ -362,10 +362,10 @@ impl Variance { /// The ambient variance is covariant. A `fn` type is /// contravariant with respect to its parameters, so the variance /// within which both pointer types appear is - /// `Covariant.xform(Contravariant)`, or `Contravariant`. `*const + /// `Covariant.xform(Contravariant)`, or `Contravariant`. `*const /// T` is covariant with respect to `T`, so the variance within /// which the first `Vec` appears is - /// `Contravariant.xform(Covariant)` or `Contravariant`. The same + /// `Contravariant.xform(Covariant)` or `Contravariant`. The same /// is true for its `i32` argument. In the `*mut T` case, the /// variance of `Vec` is `Contravariant.xform(Invariant)`, /// and hence the outermost type is `Invariant` with respect to @@ -489,12 +489,12 @@ pub struct TyS<'tcx> { /// So, for a type without any late-bound things, like `u32`, this /// will be *innermost*, because that is the innermost binder that /// captures nothing. But for a type `&'D u32`, where `'D` is a - /// late-bound region with debruijn index `D`, this would be `D + 1` + /// late-bound region with De Bruijn index `D`, this would be `D + 1` /// -- the binder itself does not capture `D`, but `D` is captured /// by an inner binder. /// /// We call this concept an "exclusive" binder `D` because all - /// debruijn indices within the type are contained within `0..D` + /// De Bruijn indices within the type are contained within `0..D` /// (exclusive). outer_exclusive_binder: ty::DebruijnIndex, } @@ -720,9 +720,9 @@ pub struct UpvarPath { pub hir_id: hir::HirId, } -/// Upvars do not get their own node-id. Instead, we use the pair of -/// the original var id (that is, the root variable that is referenced -/// by the upvar) and the id of the closure expression. 
+/// Upvars do not get their own `NodeId`. Instead, we use the pair of +/// the original var ID (that is, the root variable that is referenced +/// by the upvar) and the ID of the closure expression. #[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct UpvarId { pub var_path: UpvarPath, @@ -734,7 +734,7 @@ pub enum BorrowKind { /// Data must be immutable and is aliasable. ImmBorrow, - /// Data must be immutable but not aliasable. This kind of borrow + /// Data must be immutable but not aliasable. This kind of borrow /// cannot currently be expressed by the user and is used only in /// implicit closure bindings. It is needed when the closure /// is borrowing or mutating a mutable referent, e.g.: @@ -1096,7 +1096,7 @@ impl<'a, 'gcx, 'tcx> Predicate<'tcx> { /// Performs a substitution suitable for going from a /// poly-trait-ref to supertraits that must hold if that /// poly-trait-ref holds. This is slightly different from a normal - /// substitution in terms of what happens with bound regions. See + /// substitution in terms of what happens with bound regions. See /// lengthy comment below for details. pub fn subst_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, trait_ref: &ty::PolyTraitRef<'tcx>) @@ -1235,7 +1235,7 @@ pub type PolySubtypePredicate<'tcx> = ty::Binder>; /// This kind of predicate has no *direct* correspondent in the /// syntax, but it roughly corresponds to the syntactic forms: /// -/// 1. `T: TraitRef<..., Item=Type>` +/// 1. `T: TraitRef<..., Item = Type>` /// 2. `>::Item == Type` (NYI) /// /// In particular, form #1 is "desugared" to the combination of a @@ -1456,8 +1456,8 @@ impl<'tcx> Predicate<'tcx> { } /// Represents the bounds declared on a particular set of type -/// parameters. Should eventually be generalized into a flag list of -/// where clauses. You can obtain a `InstantiatedPredicates` list from a +/// parameters. Should eventually be generalized into a flag list of +/// where-clauses. 
You can obtain a `InstantiatedPredicates` list from a /// `GenericPredicates` by using the `instantiate` method. Note that this method /// reflects an important semantic invariant of `InstantiatedPredicates`: while /// the `GenericPredicates` are expressed in terms of the bound type @@ -1471,7 +1471,7 @@ impl<'tcx> Predicate<'tcx> { /// struct Foo> { ... } /// /// Here, the `GenericPredicates` for `Foo` would contain a list of bounds like -/// `[[], [U:Bar]]`. Now if there were some particular reference +/// `[[], [U:Bar]]`. Now if there were some particular reference /// like `Foo`, then the `InstantiatedPredicates` would be `[[], /// [usize:Bar]]`. #[derive(Clone)] @@ -1537,7 +1537,7 @@ impl UniverseIndex { /// Returns the "next" universe index in order -- this new index /// is considered to extend all previous universes. This - /// corresponds to entering a `forall` quantifier. So, for + /// corresponds to entering a `forall` quantifier. So, for /// example, suppose we have this type in universe `U`: /// /// ``` @@ -1619,7 +1619,7 @@ pub struct ParamEnv<'tcx> { impl<'tcx> ParamEnv<'tcx> { /// Construct a trait environment suitable for contexts where - /// there are no where clauses in scope. Hidden types (like `impl + /// there are no where-clauses in scope. Hidden types (like `impl /// Trait`) are left hidden, so this is suitable for ordinary /// type-checking. #[inline] @@ -1627,12 +1627,12 @@ impl<'tcx> ParamEnv<'tcx> { Self::new(List::empty(), Reveal::UserFacing, None) } - /// Construct a trait environment with no where clauses in scope + /// Construct a trait environment with no where-clauses in scope /// where the values of all `impl Trait` and other hidden types /// are revealed. This is suitable for monomorphized, post-typeck /// environments like codegen or doing optimizations. /// - /// N.B. 
If you want to have predicates in scope, use `ParamEnv::new`, + /// N.B., if you want to have predicates in scope, use `ParamEnv::new`, /// or invoke `param_env.with_reveal_all()`. #[inline] pub fn reveal_all() -> Self { @@ -1651,7 +1651,7 @@ impl<'tcx> ParamEnv<'tcx> { /// Returns a new parameter environment with the same clauses, but /// which "reveals" the true results of projections in all cases - /// (even for associated types that are specializable). This is + /// (even for associated types that are specializable). This is /// the desired behavior during codegen and certain other special /// contexts; normally though we want to use `Reveal::UserFacing`, /// which is the default. @@ -1736,7 +1736,7 @@ impl<'a, 'gcx, T> HashStable> for ParamEnvAnd<'gcx, T> #[derive(Copy, Clone, Debug)] pub struct Destructor { - /// The def-id of the destructor method + /// The `DefId` of the destructor method pub did: DefId, } @@ -1781,20 +1781,21 @@ pub struct VariantDef { } impl<'a, 'gcx, 'tcx> VariantDef { - /// Create a new `VariantDef`. + /// Creates a new `VariantDef`. /// - /// - `did` is the DefId used for the variant - for tuple-structs, it is the constructor DefId, - /// and for everything else, it is the variant DefId. + /// - `did` is the `DefId` used for the variant. + /// This is the constructor `DefId` for tuple structs, and the variant `DefId` for everything + /// else. /// - `attribute_def_id` is the DefId that has the variant's attributes. - /// this is the struct DefId for structs, and the variant DefId for variants. + /// This is the struct `DefId` for structs, and the variant `DefId` for variants.
/// - /// Note that we *could* use the constructor DefId, because the constructor attributes + /// Note that we *could* use the constructor `DefId`, because the constructor attributes /// redirect to the base attributes, but compiling a small crate requires - /// loading the AdtDefs for all the structs in the universe (e.g., coherence for any + /// loading the `AdtDef`s for all the structs in the universe (e.g., coherence for any /// built-in trait), and we do not want to load attributes twice. /// /// If someone speeds up attribute loading to not be a performance concern, they can - /// remove this hack and use the constructor DefId everywhere. + /// remove this hack and use the constructor `DefId` everywhere. pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, did: DefId, ident: Ident, @@ -2049,13 +2050,13 @@ impl ReprOptions { } /// Returns `true` if this `#[repr()]` should inhibit struct field reordering - /// optimizations, such as with repr(C), repr(packed(1)), or repr(). + /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr()`. pub fn inhibit_struct_field_reordering_opt(&self) -> bool { self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.pack == 1 || self.int.is_some() } - /// Returns true if this `#[repr()]` should inhibit union abi optimisations + /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations. pub fn inhibit_union_abi_opt(&self) -> bool { self.c() } @@ -2170,14 +2171,14 @@ impl<'a, 'gcx, 'tcx> AdtDef { self.flags.contains(AdtFlags::HAS_CTOR) } - /// Returns whether this type is `#[fundamental]` for the purposes + /// Returns `true` if this type is `#[fundamental]` for the purposes /// of coherence checking. #[inline] pub fn is_fundamental(&self) -> bool { self.flags.contains(AdtFlags::IS_FUNDAMENTAL) } - /// Returns `true` if this is PhantomData. + /// Returns `true` if this is `PhantomData`. 
#[inline] pub fn is_phantom_data(&self) -> bool { self.flags.contains(AdtFlags::IS_PHANTOM_DATA) @@ -2199,7 +2200,7 @@ impl<'a, 'gcx, 'tcx> AdtDef { self.flags.contains(AdtFlags::IS_BOX) } - /// Returns whether this type has a destructor. + /// Returns `true` if this type has a destructor. pub fn has_dtor(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool { self.destructor(tcx).is_some() } @@ -2320,7 +2321,7 @@ impl<'a, 'gcx, 'tcx> AdtDef { }) } - /// Compute the discriminant value used by a specific variant. + /// Computes the discriminant value used by a specific variant. /// Unlike `discriminants`, this is (amortized) constant-time, /// only doing at most one query for evaluating an explicit /// discriminant (the last one before the requested variant), @@ -2336,9 +2337,9 @@ impl<'a, 'gcx, 'tcx> AdtDef { explicit_value.checked_add(tcx, offset as u128).0 } - /// Yields a DefId for the discriminant and an offset to add to it + /// Yields a `DefId` for the discriminant and an offset to add to it /// Alternatively, if there is no explicit discriminant, returns the - /// inferred discriminant directly + /// inferred discriminant directly. pub fn discriminant_def_for_variant( &self, variant_index: VariantIdx, @@ -2368,15 +2369,15 @@ impl<'a, 'gcx, 'tcx> AdtDef { } /// Returns a list of types such that `Self: Sized` if and only - /// if that type is Sized, or `TyErr` if this type is recursive. + /// if that type is `Sized`, or `TyErr` if this type is recursive. /// - /// Oddly enough, checking that the sized-constraint is Sized is + /// Oddly enough, checking that the sized-constraint is `Sized` is /// actually more expressive than checking all members: - /// the Sized trait is inductive, so an associated type that references - /// Self would prevent its containing ADT from being Sized. + /// the `Sized` trait is inductive, so an associated type that references + /// `Self` would prevent its containing ADT from being `Sized`. 
/// /// Due to normalization being eager, this applies even if - /// the associated type is behind a pointer, e.g., issue #31299. + /// the associated type is behind a pointer (e.g., issue #31299). pub fn sized_constraint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx [Ty<'tcx>] { match tcx.try_adt_sized_constraint(DUMMY_SP, self.did) { Ok(tys) => tys, @@ -2480,7 +2481,7 @@ impl<'a, 'gcx, 'tcx> FieldDef { } } -/// Represents the various closure traits in the Rust language. This +/// Represents the various closure traits in the language. This /// will determine the type of the environment (`self`, in the /// desugaring) argument that the closure expects. /// @@ -2552,7 +2553,7 @@ impl<'tcx> TyS<'tcx> { TypeWalker::new(self) } - /// Iterator that walks the immediate children of `self`. Hence + /// Iterator that walks the immediate children of `self`. Hence /// `Foo, u32>` yields the sequence `[Bar, u32]` /// (but not `i32`, like `walk`). pub fn walk_shallow(&'tcx self) -> smallvec::IntoIter> { @@ -2560,7 +2561,7 @@ impl<'tcx> TyS<'tcx> { } /// Walks `ty` and any types appearing within `ty`, invoking the - /// callback `f` on each type. If the callback returns false, then the + /// callback `f` on each type. If the callback returns `false`, then the /// children of the current type are ignored. /// /// Note: prefer `ty.walk()` where possible. @@ -2670,7 +2671,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.typeck_tables_of(self.hir().body_owner_def_id(body)) } - /// Returns an iterator of the def-ids for all body-owners in this + /// Returns an iterator of the `DefId`s for all body-owners in this /// crate. If you would prefer to iterate over the bodies /// themselves, you can do `self.hir().krate().body_ids.iter()`. pub fn body_owners( @@ -2917,7 +2918,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Return the possibly-auto-generated MIR of a (DefId, Subst) pair. + /// Returns the possibly-auto-generated MIR of a `(DefId, Subst)` pair. 
pub fn instance_mir(self, instance: ty::InstanceDef<'gcx>) -> &'gcx Mir<'gcx> { @@ -2937,7 +2938,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Get the attributes of a definition. + /// Gets the attributes of a definition. pub fn get_attrs(self, did: DefId) -> Attributes<'gcx> { if let Some(id) = self.hir().as_local_hir_id(did) { Attributes::Borrowed(self.hir().attrs_by_hir_id(id)) @@ -2946,7 +2947,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Determine whether an item is annotated with an attribute. + /// Determines whether an item is annotated with an attribute. pub fn has_attr(self, did: DefId, attr: &str) -> bool { attr::contains_name(&self.get_attrs(did), attr) } @@ -2960,14 +2961,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.optimized_mir(def_id).generator_layout.as_ref().unwrap() } - /// Given the def-id of an impl, return the def_id of the trait it implements. - /// If it implements no trait, return `None`. + /// Given the `DefId` of an impl, returns the `DefId` of the trait it implements. + /// If it implements no trait, returns `None`. pub fn trait_id_of_impl(self, def_id: DefId) -> Option { self.impl_trait_ref(def_id).map(|tr| tr.def_id) } - /// If the given defid describes a method belonging to an impl, return the - /// def-id of the impl that the method belongs to. Otherwise, return `None`. + /// If the given defid describes a method belonging to an impl, returns the + /// `DefId` of the impl that the method belongs to; otherwise, returns `None`. pub fn impl_of_method(self, def_id: DefId) -> Option { let item = if def_id.krate != LOCAL_CRATE { if let Some(Def::Method(_)) = self.describe_def(def_id) { @@ -2998,9 +2999,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - // Hygienically compare a use-site name (`use_name`) for a field or an associated item with its - // supposed definition name (`def_name`). The method also needs `DefId` of the supposed - // definition's parent/scope to perform comparison. 
+ /// Hygienically compares a use-site name (`use_name`) for a field or an associated item with + /// its supposed definition name (`def_name`). The method also needs `DefId` of the supposed + /// definition's parent/scope to perform comparison. pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool { self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID).0 == def_name.modern() } @@ -3082,7 +3083,7 @@ fn associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Asso parent_item.node) } -/// Calculates the Sized-constraint. +/// Calculates the `Sized` constraint. /// /// In fact, there are only a few options for the types in the constraint: /// - an obviously-unsized type @@ -3135,9 +3136,9 @@ fn def_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Span { tcx.hir().span_if_local(def_id).unwrap() } -/// If the given def ID describes an item belonging to a trait, -/// return the ID of the trait that the trait item belongs to. -/// Otherwise, return `None`. +/// If the given `DefId` describes an item belonging to a trait, +/// returns the `DefId` of the trait that the trait item belongs to; +/// otherwise, returns `None`. fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option { tcx.opt_associated_item(def_id) .and_then(|associated_item| { @@ -3232,10 +3233,9 @@ fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } } -/// If `def_id` is an issue 33140 hack impl, return its self type. Otherwise -/// return None. +/// If `def_id` is an issue 33140 hack impl, returns its self type; otherwise, returns `None`. /// -/// See ImplOverlapKind::Issue33140 for more details. +/// See [`ImplOverlapKind::Issue33140`] for more details. 
fn issue33140_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option> diff --git a/src/librustc/ty/query/job.rs b/src/librustc/ty/query/job.rs index 0793366e6d4..16b4af53594 100644 --- a/src/librustc/ty/query/job.rs +++ b/src/librustc/ty/query/job.rs @@ -31,37 +31,38 @@ use { rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher, HashStable}, }; -/// Indicates the state of a query for a given key in a query map +/// Indicates the state of a query for a given key in a query map. pub(super) enum QueryResult<'tcx> { - /// An already executing query. The query job can be used to await for its completion + /// An already executing query. The query job can be used to await for its completion. Started(Lrc>), - /// The query panicked. Queries trying to wait on this will raise a fatal error / silently panic + /// The query panicked. Queries trying to wait on this will raise a fatal error or + /// silently panic. Poisoned, } -/// A span and a query key +/// Represents a span and a query key. #[derive(Clone, Debug)] pub struct QueryInfo<'tcx> { - /// The span for a reason this query was required + /// The span corresponding to the reason for which this query was required. pub span: Span, pub query: Query<'tcx>, } -/// A object representing an active query job. +/// Represents an active query job. pub struct QueryJob<'tcx> { pub info: QueryInfo<'tcx>, /// The parent query job which created this job and is implicitly waiting on it. pub parent: Option>>, - /// The latch which is used to wait on this job + /// The latch that is used to wait on this job. #[cfg(parallel_compiler)] latch: QueryLatch<'tcx>, } impl<'tcx> QueryJob<'tcx> { - /// Creates a new query job + /// Creates a new query job. pub fn new(info: QueryInfo<'tcx>, parent: Option>>) -> Self { QueryJob { info, @@ -230,7 +231,7 @@ impl<'tcx> QueryLatch<'tcx> { } } - /// Remove a single waiter from the list of waiters.
+ /// Removes a single waiter from the list of waiters. /// This is used to break query cycles. fn extract_waiter( &self, diff --git a/src/librustc/ty/query/mod.rs b/src/librustc/ty/query/mod.rs index d002b99f385..67a5c7d6c9a 100644 --- a/src/librustc/ty/query/mod.rs +++ b/src/librustc/ty/query/mod.rs @@ -102,12 +102,12 @@ define_queries! { <'tcx> /// Records the type of every item. [] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>, - /// Maps from the def-id of an item (trait/struct/enum/fn) to its + /// Maps from the `DefId` of an item (trait/struct/enum/fn) to its /// associated generics. [] fn generics_of: GenericsOfItem(DefId) -> &'tcx ty::Generics, - /// Maps from the def-id of an item (trait/struct/enum/fn) to the - /// predicates (where clauses) that must be proven true in order + /// Maps from the `DefId` of an item (trait/struct/enum/fn) to the + /// predicates (where-clauses) that must be proven true in order /// to reference it. This is almost always the "predicates query" /// that you want. /// @@ -123,8 +123,8 @@ define_queries! { <'tcx> /// user.) [] fn predicates_of: PredicatesOfItem(DefId) -> Lrc>, - /// Maps from the def-id of an item (trait/struct/enum/fn) to the - /// predicates (where clauses) directly defined on it. This is + /// Maps from the `DefId` of an item (trait/struct/enum/fn) to the + /// predicates (where-clauses) directly defined on it. This is /// equal to the `explicit_predicates_of` predicates plus the /// `inferred_outlives_of` predicates. [] fn predicates_defined_on: PredicatesDefinedOnItem(DefId) @@ -138,7 +138,7 @@ define_queries! { <'tcx> /// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`). [] fn inferred_outlives_of: InferredOutlivesOf(DefId) -> Lrc>>, - /// Maps from the def-id of a trait to the list of + /// Maps from the `DefId` of a trait to the list of /// super-predicates. This is a subset of the full list of /// predicates. 
We store these in a separate map because we must /// evaluate them even during type conversion, often before the @@ -216,7 +216,7 @@ define_queries! { <'tcx> }, Codegen { - /// Set of all the def-ids in this crate that have MIR associated with + /// Set of all the `DefId`s in this crate that have MIR associated with /// them. This includes all the body owners, but also things like struct /// constructors. [] fn mir_keys: mir_keys(CrateNum) -> Lrc, @@ -226,11 +226,11 @@ define_queries! { <'tcx> /// the value isn't known except to the pass itself. [] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Lrc>), - /// Fetch the MIR for a given def-id right after it's built - this includes + /// Fetch the MIR for a given `DefId` right after it's built - this includes /// unreachable code. [] fn mir_built: MirBuilt(DefId) -> &'tcx Steal>, - /// Fetch the MIR for a given def-id up till the point where it is + /// Fetch the MIR for a given `DefId` up till the point where it is /// ready for const evaluation. /// /// See the README for the `mir` module for details. @@ -244,7 +244,7 @@ define_queries! { <'tcx> }, TypeChecking { - /// The result of unsafety-checking this def-id. + /// The result of unsafety-checking this `DefId`. [] fn unsafety_check_result: UnsafetyCheckResult(DefId) -> mir::UnsafetyCheckResult, /// HACK: when evaluated, this reports a "unsafe derive on repr(packed)" error @@ -307,13 +307,13 @@ define_queries! { <'tcx> TypeChecking { /// Gets a complete map from all types to their inherent impls. /// Not meant to be used directly outside of coherence. - /// (Defined only for LOCAL_CRATE) + /// (Defined only for `LOCAL_CRATE`.) [] fn crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> Lrc, - /// Checks all types in the krate for overlap in their inherent impls. Reports errors. + /// Checks all types in the crate for overlap in their inherent impls. Reports errors. /// Not meant to be used directly outside of coherence. 
- /// (Defined only for LOCAL_CRATE) + /// (Defined only for `LOCAL_CRATE`.) [] fn crate_inherent_impls_overlap_check: inherent_impls_overlap_check_dep_node(CrateNum) -> (), }, @@ -321,9 +321,9 @@ define_queries! { <'tcx> Other { /// Evaluate a constant without running sanity checks /// - /// DO NOT USE THIS outside const eval. Const eval uses this to break query cycles during - /// validation. Please add a comment to every use site explaining why using `const_eval` - /// isn't sufficient + /// **Do not use this** outside const eval. Const eval uses this to break query cycles + /// during validation. Please add a comment to every use site explaining why using + /// `const_eval` isn't sufficient [] fn const_eval_raw: const_eval_raw_dep_node(ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>) -> ConstEvalRawResult<'tcx>, @@ -344,7 +344,7 @@ define_queries! { <'tcx> Other { [] fn reachable_set: reachability_dep_node(CrateNum) -> ReachableSet, - /// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body; + /// Per-body `region::ScopeTree`. The `DefId` should be the owner `DefId` for the body; /// in the case of closures, this will be redirected to the enclosing function. [] fn region_scope_tree: RegionScopeTree(DefId) -> Lrc, @@ -398,7 +398,7 @@ define_queries! { <'tcx> -> Lrc, [] fn is_object_safe: ObjectSafety(DefId) -> bool, - /// Get the ParameterEnvironment for a given item; this environment + /// Gets the ParameterEnvironment for a given item; this environment /// will be in "user-facing" mode, meaning that it is suitabe for /// type-checking etc, and it does not normalize specializable /// associated types. This is almost always what you want, @@ -485,7 +485,7 @@ define_queries! { <'tcx> [] fn foreign_modules: ForeignModules(CrateNum) -> Lrc>, - /// Identifies the entry-point (e.g. 
the `main` function) for a given + /// Identifies the entry-point (e.g., the `main` function) for a given /// crate, returning `None` if there is no entry point (such as for library crates). [] fn entry_fn: EntryFn(CrateNum) -> Option<(DefId, EntryFnType)>, [] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option, diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 9c9bc0f6aa1..f948abc7f6f 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -103,7 +103,7 @@ impl AbsoluteBytePos { } impl<'sess> OnDiskCache<'sess> { - /// Create a new OnDiskCache instance from the serialized data in `data`. + /// Creates a new OnDiskCache instance from the serialized data in `data`. pub fn new(sess: &'sess Session, data: Vec, start_pos: usize) -> OnDiskCache<'sess> { debug_assert!(sess.opts.incremental.is_some()); @@ -325,7 +325,7 @@ impl<'sess> OnDiskCache<'sess> { }) } - /// Load a diagnostic emitted during the previous compilation session. + /// Loads a diagnostic emitted during the previous compilation session. pub fn load_diagnostics<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, dep_node_index: SerializedDepNodeIndex) @@ -339,7 +339,7 @@ impl<'sess> OnDiskCache<'sess> { diagnostics.unwrap_or_default() } - /// Store a diagnostic emitted during the current compilation session. + /// Stores a diagnostic emitted during the current compilation session. /// Anything stored like this will be available via `load_diagnostics` in /// the next compilation session. #[inline(never)] @@ -353,7 +353,7 @@ impl<'sess> OnDiskCache<'sess> { } /// Returns the cached query result if there is something in the cache for - /// the given SerializedDepNodeIndex. Otherwise returns None. + /// the given `SerializedDepNodeIndex`; otherwise returns `None`. 
pub fn try_load_query_result<'tcx, T>(&self, tcx: TyCtxt<'_, 'tcx, 'tcx>, dep_node_index: SerializedDepNodeIndex) @@ -366,7 +366,7 @@ impl<'sess> OnDiskCache<'sess> { "query result") } - /// Store a diagnostic emitted during computation of an anonymous query. + /// Stores a diagnostic emitted during computation of an anonymous query. /// Since many anonymous queries can share the same `DepNode`, we aggregate /// them -- as opposed to regular queries where we assume that there is a /// 1:1 relationship between query-key and `DepNode`. diff --git a/src/librustc/ty/query/plumbing.rs b/src/librustc/ty/query/plumbing.rs index f63fbd79825..9b2a70a6a6d 100644 --- a/src/librustc/ty/query/plumbing.rs +++ b/src/librustc/ty/query/plumbing.rs @@ -1,6 +1,6 @@ -//! The implementation of the query system itself. Defines the macros -//! that generate the actual methods on tcx which find and execute the -//! provider, manage the caches, and so forth. +//! The implementation of the query system itself. This defines the macros that +//! generate the actual methods on tcx which find and execute the provider, +//! manage the caches, and so forth. use crate::dep_graph::{DepNodeIndex, DepNode, DepKind, SerializedDepNodeIndex}; use crate::errors::DiagnosticBuilder; @@ -1017,8 +1017,8 @@ macro_rules! define_queries_inner { } impl<'a, $tcx, 'lcx> TyCtxt<'a, $tcx, 'lcx> { - /// Return a transparent wrapper for `TyCtxt` which ensures queries - /// are executed instead of returing their result + /// Returns a transparent wrapper for `TyCtxt`, which ensures queries + /// are executed instead of just returning their results. #[inline(always)] pub fn ensure(self) -> TyCtxtEnsure<'a, $tcx, 'lcx> { TyCtxtEnsure { @@ -1026,7 +1026,7 @@ macro_rules! define_queries_inner { } } - /// Return a transparent wrapper for `TyCtxt` which uses + /// Returns a transparent wrapper for `TyCtxt` which uses /// `span` as the location of queries performed through it.
#[inline(always)] pub fn at(self, span: Span) -> TyCtxtAt<'a, $tcx, 'lcx> { @@ -1067,7 +1067,7 @@ macro_rules! define_queries_struct { (tcx: $tcx:tt, input: ($(([$($modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => { pub struct Queries<$tcx> { - /// This provides access to the incr. comp. on-disk cache for query results. + /// This provides access to the incremental compilation on-disk cache for query results. /// Do not access this directly. It is only meant to be used by /// `DepGraph::try_mark_green()` and the query infrastructure. pub(crate) on_disk_cache: OnDiskCache<'tcx>, @@ -1123,22 +1123,22 @@ macro_rules! define_provider_struct { /// /// Now, if force_from_dep_node() would always fail, it would be pretty useless. /// Fortunately, we can use some contextual information that will allow us to -/// reconstruct query-keys for certain kinds of DepNodes. In particular, we -/// enforce by construction that the GUID/fingerprint of certain DepNodes is a -/// valid DefPathHash. Since we also always build a huge table that maps every -/// DefPathHash in the current codebase to the corresponding DefId, we have +/// reconstruct query-keys for certain kinds of `DepNode`s. In particular, we +/// enforce by construction that the GUID/fingerprint of certain `DepNode`s is a +/// valid `DefPathHash`. Since we also always build a huge table that maps every +/// `DefPathHash` in the current codebase to the corresponding `DefId`, we have /// everything we need to re-run the query. /// /// Take the `mir_validated` query as an example. Like many other queries, it -/// just has a single parameter: the DefId of the item it will compute the -/// validated MIR for. Now, when we call `force_from_dep_node()` on a dep-node -/// with kind `MirValidated`, we know that the GUID/fingerprint of the dep-node -/// is actually a DefPathHash, and can therefore just look up the corresponding -/// DefId in `tcx.def_path_hash_to_def_id`. 
+/// just has a single parameter: the `DefId` of the item it will compute the +/// validated MIR for. Now, when we call `force_from_dep_node()` on a `DepNode` +/// with kind `MirValidated`, we know that the GUID/fingerprint of the `DepNode` +/// is actually a `DefPathHash`, and can therefore just look up the corresponding +/// `DefId` in `tcx.def_path_hash_to_def_id`. /// /// When you implement a new query, it will likely have a corresponding new -/// DepKind, and you'll have to support it here in `force_from_dep_node()`. As -/// a rule of thumb, if your query takes a DefId or DefIndex as sole parameter, +/// `DepKind`, and you'll have to support it here in `force_from_dep_node()`. As +/// a rule of thumb, if your query takes a `DefId` or `DefIndex` as sole parameter, /// then `force_from_dep_node()` should not fail for it. Otherwise, you can just /// add it to the "We don't have enough information to reconstruct..." group in /// the match below. diff --git a/src/librustc/ty/relate.rs b/src/librustc/ty/relate.rs index 3dbd0dc1d97..db248072d9b 100644 --- a/src/librustc/ty/relate.rs +++ b/src/librustc/ty/relate.rs @@ -30,7 +30,7 @@ pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized { /// Returns a static string we can use for printouts. fn tag(&self) -> &'static str; - /// Returns true if the value `a` is the "expected" type in the + /// Returns `true` if the value `a` is the "expected" type in the /// relation. Just affects error messages. fn a_is_expected(&self) -> bool; diff --git a/src/librustc/ty/steal.rs b/src/librustc/ty/steal.rs index 336a4c3bf22..a8f9301ba51 100644 --- a/src/librustc/ty/steal.rs +++ b/src/librustc/ty/steal.rs @@ -12,14 +12,14 @@ use rustc_data_structures::sync::{RwLock, ReadGuard, MappedReadGuard}; /// Steal>` (to be very specific). Now we can read from this /// as much as we want (using `borrow()`), but you can also /// `steal()`. Once you steal, any further attempt to read will panic. 
-/// Therefore we know that -- assuming no ICE -- nobody is observing +/// Therefore, we know that -- assuming no ICE -- nobody is observing /// the fact that the MIR was updated. /// /// Obviously, whenever you have a query that yields a `Steal` value, /// you must treat it with caution, and make sure that you know that /// -- once the value is stolen -- it will never be read from again. -/// -/// FIXME(#41710) -- what is the best way to model linear queries? +// +// FIXME(#41710): what is the best way to model linear queries? pub struct Steal { value: RwLock> } diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index d4c18c64c99..66efd2aea15 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -47,7 +47,7 @@ pub enum BoundRegion { /// Named region parameters for functions (a in &'a T) /// - /// The def-id is needed to distinguish free regions in + /// The `DefId` is needed to distinguish free regions in /// the event of shadowing. BrNamed(DefId, InternedString), @@ -87,7 +87,7 @@ pub enum TyKind<'tcx> { Bool, /// The primitive character type; holds a Unicode scalar value - /// (a non-surrogate code point). Written as `char`. + /// (a non-surrogate code point). Written as `char`. Char, /// A primitive signed integer type. For example, `i32`. @@ -116,7 +116,7 @@ pub enum TyKind<'tcx> { /// An array with the given length. Written as `[T; n]`. Array(Ty<'tcx>, &'tcx ty::LazyConst<'tcx>), - /// The pointee of an array slice. Written as `[T]`. + /// The pointee of an array slice. Written as `[T]`. Slice(Ty<'tcx>), /// A raw pointer. Written as `*mut T` or `*const T` @@ -138,7 +138,7 @@ pub enum TyKind<'tcx> { /// ``` FnDef(DefId, &'tcx Substs<'tcx>), - /// A pointer to a function. Written as `fn() -> i32`. + /// A pointer to a function. Written as `fn() -> i32`. /// /// For example the type of `bar` here: /// @@ -166,10 +166,10 @@ pub enum TyKind<'tcx> { /// The never type `!` Never, - /// A tuple type. For example, `(i32, bool)`. 
+ /// A tuple type. For example, `(i32, bool)`. Tuple(&'tcx List>), - /// The projection of an associated type. For example, + /// The projection of an associated type. For example, /// `>::N`. Projection(ProjectionTy<'tcx>), @@ -278,7 +278,7 @@ static_assert!(MEM_SIZE_OF_TY_KIND: ::std::mem::size_of::>() == 24); /// /// All right, you say, but why include the type parameters from the /// original function then? The answer is that codegen may need them -/// when monomorphizing, and they may not appear in the upvars. A +/// when monomorphizing, and they may not appear in the upvars. A /// closure could capture no variables but still make use of some /// in-scope type parameter with a bound (e.g., if our example above /// had an extra `U: Default`, and the closure called `U::default()`). @@ -295,9 +295,9 @@ static_assert!(MEM_SIZE_OF_TY_KIND: ::std::mem::size_of::>() == 24); /// ## Generators /// /// Perhaps surprisingly, `ClosureSubsts` are also used for -/// generators. In that case, what is written above is only half-true +/// generators. In that case, what is written above is only half-true /// -- the set of type parameters is similar, but the role of CK and -/// CS are different. CK represents the "yield type" and CS +/// CS are different. CK represents the "yield type" and CS /// represents the "return type" of the generator. /// /// It'd be nice to split this struct into ClosureSubsts and @@ -442,17 +442,17 @@ impl<'tcx> GeneratorSubsts<'tcx> { self.split(def_id, tcx).return_ty } - /// Return the "generator signature", which consists of its yield + /// Returns the "generator signature", which consists of its yield /// and return types. /// - /// NB. Some bits of the code prefers to see this wrapped in a + /// N.B., some bits of the code prefers to see this wrapped in a /// binder, but it never contains bound regions. Probably this /// function should be removed. 
pub fn poly_sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> PolyGenSig<'tcx> { ty::Binder::dummy(self.sig(def_id, tcx)) } - /// Return the "generator signature", which consists of its yield + /// Returns the "generator signature", which consists of its yield /// and return types. pub fn sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> GenSig<'tcx> { ty::GenSig { @@ -520,11 +520,11 @@ impl<'tcx> UpvarSubsts<'tcx> { #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum ExistentialPredicate<'tcx> { - /// e.g., Iterator + /// E.g., `Iterator`. Trait(ExistentialTraitRef<'tcx>), - /// e.g., Iterator::Item = T + /// E.g., `Iterator::Item = T`. Projection(ExistentialProjection<'tcx>), - /// e.g., Send + /// E.g., `Send`. AutoTrait(DefId), } @@ -655,12 +655,12 @@ impl<'tcx> Binder<&'tcx List>> { } /// A complete reference to a trait. These take numerous guises in syntax, -/// but perhaps the most recognizable form is in a where clause: +/// but perhaps the most recognizable form is in a where-clause: /// /// T: Foo /// -/// This would be represented by a trait-reference where the def-id is the -/// def-id for the trait `Foo` and the substs define `T` as parameter 0, +/// This would be represented by a trait-reference where the `DefId` is the +/// `DefId` for the trait `Foo` and the substs define `T` as parameter 0, /// and `U` as parameter 1. /// /// Trait references also appear in object types like `Foo`, but in @@ -766,9 +766,9 @@ impl<'a, 'gcx, 'tcx> ExistentialTraitRef<'tcx> { } } - /// Object types don't have a self-type specified. Therefore, when + /// Object types don't have a self type specified. Therefore, when /// we convert the principal trait-ref into a normal trait-ref, - /// you must give *some* self-type. A common choice is `mk_err()` + /// you must give *some* self type. A common choice is `mk_err()` /// or some placeholder type. 
pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>) -> ty::TraitRef<'tcx> { @@ -789,9 +789,9 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> { self.skip_binder().def_id } - /// Object types don't have a self-type specified. Therefore, when + /// Object types don't have a self type specified. Therefore, when /// we convert the principal trait-ref into a normal trait-ref, - /// you must give *some* self-type. A common choice is `mk_err()` + /// you must give *some* self type. A common choice is `mk_err()` /// or some placeholder type. pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>, self_ty: Ty<'tcx>) @@ -829,7 +829,7 @@ impl Binder { /// Skips the binder and returns the "bound" value. This is a /// risky thing to do because it's easy to get confused about - /// debruijn indices and the like. It is usually better to + /// De Bruijn indices and the like. It is usually better to /// discharge the binder using `no_bound_vars` or /// `replace_late_bound_regions` or something like /// that. `skip_binder` is only valid when you are either @@ -840,7 +840,7 @@ impl Binder { /// /// Some examples where `skip_binder` is reasonable: /// - /// - extracting the def-id from a PolyTraitRef; + /// - extracting the `DefId` from a PolyTraitRef; /// - comparing the self type of a PolyTraitRef to see if it is equal to /// a type parameter `X`, since the type `X` does not reference any regions pub fn skip_binder(&self) -> &T { @@ -884,8 +884,8 @@ impl Binder { } /// Given two things that have the same binder level, - /// and an operation that wraps on their contents, execute the operation - /// and then wrap its result. + /// and an operation that wraps on their contents, executes the operation + /// and then wraps its result. 
/// /// `f` should consider bound regions at depth 1 to be free, and /// anything it produces with bound regions at depth 1 will be @@ -896,7 +896,7 @@ impl Binder { Binder(f(self.0, u.0)) } - /// Split the contents into two things that share the same binder + /// Splits the contents into two things that share the same binder /// level as the original, returning two distinct binders. /// /// `f` should consider bound regions at depth 1 to be free, and @@ -1118,14 +1118,14 @@ pub type Region<'tcx> = &'tcx RegionKind; /// ## Bound Regions /// /// These are regions that are stored behind a binder and must be substituted -/// with some concrete region before being used. There are 2 kind of -/// bound regions: early-bound, which are bound in an item's Generics, -/// and are substituted by a Substs, and late-bound, which are part of -/// higher-ranked types (e.g., `for<'a> fn(&'a ())`) and are substituted by +/// with some concrete region before being used. There are two kind of +/// bound regions: early-bound, which are bound in an item's `Generics`, +/// and are substituted by a `Substs`, and late-bound, which are part of +/// higher-ranked types (e.g., `for<'a> fn(&'a ())`), and are substituted by /// the likes of `liberate_late_bound_regions`. The distinction exists /// because higher-ranked lifetimes aren't supported in all places. See [1][2]. /// -/// Unlike Param-s, bound regions are not supposed to exist "in the wild" +/// Unlike `Param`s, bound regions are not supposed to exist "in the wild" /// outside their binder, e.g., in types passed to type inference, and /// should first be substituted (by placeholder regions, free regions, /// or region variables). @@ -1141,7 +1141,7 @@ pub type Region<'tcx> = &'tcx RegionKind; /// To do this, we replace the bound regions with placeholder markers, /// which don't satisfy any relation not explicitly provided. 
/// -/// There are 2 kinds of placeholder regions in rustc: `ReFree` and +/// There are two kinds of placeholder regions in rustc: `ReFree` and /// `RePlaceholder`. When checking an item's body, `ReFree` is supposed /// to be used. These also support explicit bounds: both the internally-stored /// *scope*, which the region is assumed to outlive, as well as other @@ -1189,7 +1189,7 @@ pub enum RegionKind { /// Static data that has an "infinite" lifetime. Top in the region lattice. ReStatic, - /// A region variable. Should not exist after typeck. + /// A region variable. Should not exist after typeck. ReVar(RegionVid), /// A placeholder region - basically the higher-ranked version of ReFree. @@ -1346,11 +1346,11 @@ impl<'a, 'tcx, 'gcx> PolyExistentialProjection<'tcx> { impl DebruijnIndex { /// Returns the resulting index when this value is moved into - /// `amount` number of new binders. So e.g., if you had + /// `amount` number of new binders. So, e.g., if you had /// /// for<'a> fn(&'a x) /// - /// and you wanted to change to + /// and you wanted to change it to /// /// for<'a> fn(for<'b> fn(&'a x)) /// @@ -1378,7 +1378,7 @@ impl DebruijnIndex { *self = self.shifted_out(amount); } - /// Adjusts any Debruijn Indices so as to make `to_binder` the + /// Adjusts any De Bruijn indices so as to make `to_binder` the /// innermost binder. That is, if we have something bound at `to_binder`, /// it will now be bound at INNERMOST. This is an appropriate thing to do /// when moving a region out from inside binders: @@ -1388,12 +1388,12 @@ impl DebruijnIndex { /// // Binder: D3 D2 D1 ^^ /// ``` /// - /// Here, the region `'a` would have the debruijn index D3, + /// Here, the region `'a` would have the De Bruijn index D3, /// because it is the bound 3 binders out. However, if we wanted /// to refer to that region `'a` in the second argument (the `_`), /// those two binders would not be in scope. In that case, we /// might invoke `shift_out_to_binder(D3)`. 
This would adjust the - /// debruijn index of `'a` to D1 (the innermost binder). + /// De Bruijn index of `'a` to D1 (the innermost binder). /// /// If we invoke `shift_out_to_binder` and the region is in fact /// bound by one of the binders we are shifting out of, that is an @@ -1444,7 +1444,7 @@ impl RegionKind { } } - /// Adjusts any Debruijn Indices so as to make `to_binder` the + /// Adjusts any De Bruijn indices so as to make `to_binder` the /// innermost binder. That is, if we have something bound at `to_binder`, /// it will now be bound at INNERMOST. This is an appropriate thing to do /// when moving a region out from inside binders: @@ -1454,12 +1454,12 @@ impl RegionKind { /// // Binder: D3 D2 D1 ^^ /// ``` /// - /// Here, the region `'a` would have the debruijn index D3, + /// Here, the region `'a` would have the De Bruijn index D3, /// because it is the bound 3 binders out. However, if we wanted /// to refer to that region `'a` in the second argument (the `_`), /// those two binders would not be in scope. In that case, we /// might invoke `shift_out_to_binder(D3)`. This would adjust the - /// debruijn index of `'a` to D1 (the innermost binder). + /// De Bruijn index of `'a` to D1 (the innermost binder). /// /// If we invoke `shift_out_to_binder` and the region is in fact /// bound by one of the binders we are shifting out of, that is an @@ -1528,7 +1528,7 @@ impl RegionKind { flags } - /// Given an early-bound or free region, returns the def-id where it was bound. + /// Given an early-bound or free region, returns the `DefId` where it was bound. /// For example, consider the regions in this snippet of code: /// /// ``` @@ -1543,10 +1543,10 @@ impl RegionKind { /// } /// ``` /// - /// Here, `free_region_binding_scope('a)` would return the def-id + /// Here, `free_region_binding_scope('a)` would return the `DefId` /// of the impl, and for all the other highlighted regions, it - /// would return the def-id of the function. 
In other cases (not shown), this - /// function might return the def-id of a closure. + /// would return the `DefId` of the function. In other cases (not shown), this + /// function might return the `DefId` of a closure. pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_, '_, '_>) -> DefId { match self { ty::ReEarlyBound(br) => { @@ -1772,7 +1772,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> { } } - /// Returns true if this type is a floating point type and false otherwise. + /// Returns `true` if this type is a floating point type. pub fn is_floating_point(&self) -> bool { match self.sty { Float(_) | diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs index d7c322d0f84..7559ea90b17 100644 --- a/src/librustc/ty/subst.rs +++ b/src/librustc/ty/subst.rs @@ -171,7 +171,7 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { /// Creates a `Substs` that maps each generic parameter to a higher-ranked /// var bound at index `0`. For types, we use a `BoundVar` index equal to /// the type parameter index. For regions, we use the `BoundRegion::BrNamed` - /// variant (which has a def-id). + /// variant (which has a `DefId`). pub fn bound_vars_for_item( tcx: TyCtxt<'a, 'gcx, 'tcx>, def_id: DefId @@ -492,7 +492,7 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { self.shift_vars_through_binders(ty) } - /// It is sometimes necessary to adjust the debruijn indices during substitution. This occurs + /// It is sometimes necessary to adjust the De Bruijn indices during substitution. This occurs /// when we are substituting a type with escaping bound vars into a context where we have /// passed through binders. That's quite a mouthful. Let's see an example: /// @@ -511,9 +511,9 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { /// /// Here the `'a` lifetime is bound in the outer function, but appears as an argument of the /// inner one. 
Therefore, that appearance will have a DebruijnIndex of 2, because we must skip - /// over the inner binder (remember that we count Debruijn indices from 1). However, in the + /// over the inner binder (remember that we count De Bruijn indices from 1). However, in the /// definition of `MetaFunc`, the binder is not visible, so the type `&'a int` will have a - /// debruijn index of 1. It's only during the substitution that we can see we must increase the + /// De Bruijn index of 1. It's only during the substitution that we can see we must increase the /// depth by 1 to account for the binder that we passed through. /// /// As a second example, consider this twist: @@ -532,7 +532,7 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { /// DebruijnIndex of 2 /// /// As indicated in the diagram, here the same type `&'a int` is substituted once, but in the - /// first case we do not increase the Debruijn index and in the second case we do. The reason + /// first case we do not increase the De Bruijn index and in the second case we do. The reason /// is that only in the second case have we passed through a fn binder. fn shift_vars_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> { debug!("shift_vars(ty={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})", @@ -565,7 +565,7 @@ pub struct UserSubsts<'tcx> { /// The substitutions for the item as given by the user. pub substs: &'tcx Substs<'tcx>, - /// The self-type, in the case of a `::Item` path (when applied + /// The self type, in the case of a `::Item` path (when applied /// to an inherent impl). See `UserSelfTy` below. pub user_self_ty: Option>, } @@ -585,8 +585,8 @@ BraceStructLiftImpl! { } } -/// Specifies the user-given self-type. In the case of a path that -/// refers to a member in an inherent impl, this self-type is +/// Specifies the user-given self type. 
In the case of a path that +/// refers to a member in an inherent impl, this self type is /// sometimes needed to constrain the type parameters on the impl. For /// example, in this code: /// @@ -596,11 +596,11 @@ BraceStructLiftImpl! { /// ``` /// /// when you then have a path like `>::method`, -/// this struct would carry the def-id of the impl along with the -/// self-type `Foo`. Then we can instantiate the parameters of +/// this struct would carry the `DefId` of the impl along with the +/// self type `Foo`. Then we can instantiate the parameters of /// the impl (with the substs from `UserSubsts`) and apply those to -/// the self-type, giving `Foo`. Finally, we unify that with -/// the self-type here, which contains `?A` to be `&'static u32` +/// the self type, giving `Foo`. Finally, we unify that with +/// the self type here, which contains `?A` to be `&'static u32` #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct UserSelfTy<'tcx> { pub impl_def_id: DefId, diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs index 5429a2504b9..9ce8bf2e60a 100644 --- a/src/librustc/ty/trait_def.rs +++ b/src/librustc/ty/trait_def.rs @@ -39,7 +39,7 @@ pub struct TraitDef { #[derive(Default)] pub struct TraitImpls { blanket_impls: Vec, - /// Impls indexed by their simplified self-type, for fast lookup. + /// Impls indexed by their simplified self type, for fast lookup. non_blanket_impls: FxHashMap>, } @@ -84,7 +84,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } /// Iterate over every impl that could possibly match the - /// self-type `self_ty`. + /// self type `self_ty`. 
pub fn for_each_relevant_impl(self, def_id: DefId, self_ty: Ty<'tcx>, @@ -134,7 +134,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// Return a vector containing all impls + /// Returns a vector containing all impls pub fn all_impls(self, def_id: DefId) -> Vec { let impls = self.trait_impls_of(def_id); diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index 61544932b43..0578162f84d 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -1,4 +1,4 @@ -//! misc. type-system utilities too small to deserve their own file +//! Miscellaneous type-system utilities that are too small to deserve their own modules. use crate::hir::def::Def; use crate::hir::def_id::DefId; @@ -23,7 +23,7 @@ use syntax_pos::{Span, DUMMY_SP}; #[derive(Copy, Clone, Debug)] pub struct Discr<'tcx> { - /// bit representation of the discriminant, so `-128i8` is `0xFF_u128` + /// Bit representation of the discriminant (e.g., `-128i8` is `0xFF_u128`). pub val: u128, pub ty: Ty<'tcx> } @@ -46,7 +46,7 @@ impl<'tcx> fmt::Display for Discr<'tcx> { } impl<'tcx> Discr<'tcx> { - /// Adds 1 to the value and wraps around if the maximum for the type is reached + /// Adds `1` to the value and wraps around if the maximum for the type is reached. pub fn wrap_incr<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self { self.checked_add(tcx, 1).0 } @@ -342,9 +342,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// /// Requires that trait definitions have been processed so that we can /// elaborate predicates and walk supertraits. - /// - /// FIXME callers may only have a &[Predicate], not a Vec, so that's - /// what this code should accept. + // + // FIXME: callers may only have a `&[Predicate]`, not a `Vec`, so that's + // what this code should accept. pub fn required_region_bounds(self, erased_self_ty: Ty<'tcx>, predicates: Vec>) @@ -417,7 +417,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { Some(ty::Destructor { did: dtor_did? 
}) } - /// Return the set of types that are required to be alive in + /// Returns the set of types that are required to be alive in /// order to run the destructor of `def` (see RFCs 769 and /// 1238). /// @@ -507,17 +507,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { result } - /// True if `def_id` refers to a closure (e.g., `|x| x * 2`). Note - /// that closures have a def-id, but the closure *expression* also + /// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). Note + /// that closures have a `DefId`, but the closure *expression* also /// has a `HirId` that is located within the context where the /// closure appears (and, sadly, a corresponding `NodeId`, since /// those are not yet phased out). The parent of the closure's - /// def-id will also be the context where it appears. + /// `DefId` will also be the context where it appears. pub fn is_closure(self, def_id: DefId) -> bool { self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr } - /// True if `def_id` refers to a trait (i.e., `trait Foo { ... }`). + /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`). pub fn is_trait(self, def_id: DefId) -> bool { if let DefPathData::Trait(_) = self.def_key(def_id).disambiguated_data.data { true @@ -526,7 +526,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// True if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`). + /// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`), + /// and `false` otherwise. pub fn is_trait_alias(self, def_id: DefId) -> bool { if let DefPathData::TraitAlias(_) = self.def_key(def_id).disambiguated_data.data { true @@ -535,17 +536,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - /// True if this def-id refers to the implicit constructor for - /// a tuple struct like `struct Foo(u32)`. 
+ /// Returns `true` if this `DefId` refers to the implicit constructor for + /// a tuple struct like `struct Foo(u32)`, and `false` otherwise. pub fn is_struct_constructor(self, def_id: DefId) -> bool { self.def_key(def_id).disambiguated_data.data == DefPathData::StructCtor } /// Given the `DefId` of a fn or closure, returns the `DefId` of /// the innermost fn item that the closure is contained within. - /// This is a significant def-id because, when we do + /// This is a significant `DefId` because, when we do /// type-checking, we type-check this fn item and all of its - /// (transitive) closures together. Therefore, when we fetch the + /// (transitive) closures together. Therefore, when we fetch the /// `typeck_tables_of` the closure, for example, we really wind up /// fetching the `typeck_tables_of` the enclosing fn item. pub fn closure_base_def_id(self, def_id: DefId) -> DefId { @@ -558,10 +559,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { def_id } - /// Given the def-id and substs a closure, creates the type of + /// Given the `DefId` and substs a closure, creates the type of /// `self` argument that the closure expects. For example, for a /// `Fn` closure, this would return a reference type `&T` where - /// `T=closure_ty`. + /// `T = closure_ty`. /// /// Returns `None` if this closure's kind has not yet been inferred. /// This should only be possible during type checking. @@ -585,7 +586,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { Some(ty::Binder::bind(env_ty)) } - /// Given the def-id of some item that has no type parameters, make + /// Given the `DefId` of some item that has no type parameters, make /// a suitable "empty substs" for it. 
pub fn empty_substs_for_def_id(self, item_def_id: DefId) -> &'tcx Substs<'tcx> { Substs::for_item(self, item_def_id, |param, _| { @@ -598,7 +599,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }) } - /// Return whether the node pointed to by def_id is a static item, and its mutability + /// Returns `true` if the node pointed to by `def_id` is a static item, and its mutability. pub fn is_static(&self, def_id: DefId) -> Option { if let Some(node) = self.hir().get_if_local(def_id) { match node { @@ -730,7 +731,7 @@ impl<'a, 'tcx> ty::TyS<'tcx> { /// Checks whether values of this type `T` implement the `Freeze` /// trait -- frozen types are those that do not contain a - /// `UnsafeCell` anywhere. This is a language concept used to + /// `UnsafeCell` anywhere. This is a language concept used to /// distinguish "true immutability", which is relevant to /// optimization as well as the rules around static values. Note /// that the `Freeze` trait is not exposed to end users and is diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index 2aae953c1c4..ffb5471e34f 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -227,7 +227,7 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { } } - /// Push new obligations into `out`. Returns true if it was able + /// Pushes new obligations into `out`. Returns `true` if it was able /// to generate all the predicates needed to validate that `ty0` /// is WF. Returns false if `ty0` is an unresolved type variable, /// in which case we are not able to simplify at all. @@ -502,7 +502,7 @@ impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { } } -/// Given an object type like `SomeTrait+Send`, computes the lifetime +/// Given an object type like `SomeTrait + Send`, computes the lifetime /// bounds that must hold on the elided self type. 
These are derived /// from the declarations of `SomeTrait`, `Send`, and friends -- if /// they declare `trait SomeTrait : 'static`, for example, then diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs index f6743ed75d9..dd635e5c946 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -63,11 +63,11 @@ pub fn install_panic_hook() { /// Parameters to the `Dump` variant of type `ProfileQueriesMsg`. #[derive(Clone,Debug)] pub struct ProfQDumpParams { - /// A base path for the files we will dump + /// A base path for the files we will dump. pub path:String, - /// To ensure that the compiler waits for us to finish our dumps + /// To ensure that the compiler waits for us to finish our dumps. pub ack:Sender<()>, - /// toggle dumping a log file with every `ProfileQueriesMsg` + /// Toggle dumping a log file with every `ProfileQueriesMsg`. pub dump_profq_msg_log:bool, } @@ -131,7 +131,7 @@ pub fn time_depth() -> usize { TIME_DEPTH.with(|slot| slot.get()) } -/// Set the current depth of `time()` calls. The idea is to call +/// Sets the current depth of `time()` calls. The idea is to call /// `set_time_depth()` with the result from `time_depth()` in the /// parent thread. pub fn set_time_depth(depth: usize) { diff --git a/src/librustc/util/nodemap.rs b/src/librustc/util/nodemap.rs index 6969b2f872a..63c7b76d1b6 100644 --- a/src/librustc/util/nodemap.rs +++ b/src/librustc/util/nodemap.rs @@ -1,4 +1,4 @@ -//! An efficient hash map for node IDs +//! An efficient hash map for `NodeId`s. use crate::hir::def_id::DefId; use crate::hir::{HirId, ItemLocalId}; diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 4945bf83648..1cb9f47bb31 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -23,18 +23,18 @@ use crate::hir; /// The "region highlights" are used to control region printing during /// specific error messages. 
When a "region highlight" is enabled, it /// gives an alternate way to print specific regions. For now, we -/// always print those regions using a number, so something like `'0`. +/// always print those regions using a number, so something like "`'0`". /// /// Regions not selected by the region highlight mode are presently /// unaffected. #[derive(Copy, Clone, Default)] pub struct RegionHighlightMode { - /// If enabled, when we see the selected region, use `"'N"` + /// If enabled, when we see the selected region, use "`'N`" /// instead of the ordinary behavior. highlight_regions: [Option<(ty::RegionKind, usize)>; 3], /// If enabled, when printing a "free region" that originated from - /// the given `ty::BoundRegion`, print it as `'1`. Free regions that would ordinarily + /// the given `ty::BoundRegion`, print it as "`'1`". Free regions that would ordinarily /// have names print as normal. /// /// This is used when you have a signature like `fn foo(x: &u32, @@ -51,12 +51,12 @@ thread_local! { } impl RegionHighlightMode { - /// Read and return current region highlight settings (accesses thread-local state).a + /// Reads and returns the current region highlight settings (accesses thread-local state). pub fn get() -> Self { REGION_HIGHLIGHT_MODE.with(|c| c.get()) } - /// Internal helper to update current settings during the execution of `op`. + // Internal helper to update current settings during the execution of `op`. fn set( old_mode: Self, new_mode: Self, @@ -70,8 +70,8 @@ impl RegionHighlightMode { }) } - /// If `region` and `number` are both `Some`, invoke - /// `highlighting_region`. Otherwise, just invoke `op` directly. + /// If `region` and `number` are both `Some`, invokes + /// `highlighting_region`; otherwise, just invokes `op` directly. pub fn maybe_highlighting_region( region: Option>, number: Option, @@ -86,8 +86,8 @@ impl RegionHighlightMode { op() } - /// During the execution of `op`, highlight the region inference - /// vairable `vid` as `'N`. 
We can only highlight one region vid + /// During the execution of `op`, highlights the region inference + /// variable `vid` as `'N`. We can only highlight one region `vid` /// at a time. pub fn highlighting_region( region: ty::Region<'_>, @@ -109,7 +109,7 @@ impl RegionHighlightMode { Self::set(old_mode, new_mode, op) } - /// Convenience wrapper for `highlighting_region` + /// Convenience wrapper for `highlighting_region`. pub fn highlighting_region_vid( vid: ty::RegionVid, number: usize, @@ -118,7 +118,7 @@ impl RegionHighlightMode { Self::highlighting_region(&ty::ReVar(vid), number, op) } - /// Returns true if any placeholders are highlighted. + /// Returns `true` if any placeholders are highlighted, and `false` otherwise. fn any_region_vids_highlighted(&self) -> bool { Self::get() .highlight_regions @@ -129,8 +129,7 @@ impl RegionHighlightMode { }) } - /// Returns `Some(n)` with the number to use for the given region, - /// if any. + /// Returns `Some(n)` with the number to use for the given region, if any. fn region_highlighted(&self, region: ty::Region<'_>) -> Option { Self::get() .highlight_regions @@ -143,7 +142,7 @@ impl RegionHighlightMode { } /// During the execution of `op`, highlight the given bound - /// region. We can only highlight one bound region at a time. See + /// region. We can only highlight one bound region at a time. See /// the field `highlight_bound_region` for more detailed notes. pub fn highlighting_bound_region( br: ty::BoundRegion, @@ -162,7 +161,7 @@ impl RegionHighlightMode { ) } - /// Returns true if any placeholders are highlighted. + /// Returns `true` if any placeholders are highlighted, and `false` otherwise. pub fn any_placeholders_highlighted(&self) -> bool { Self::get() .highlight_regions @@ -173,7 +172,7 @@ impl RegionHighlightMode { }) } - /// Returns `Some(N)` if the placeholder `p` is highlighted to print as `'N`. + /// Returns `Some(N)` if the placeholder `p` is highlighted to print as "`'N`". 
pub fn placeholder_highlight(&self, p: ty::PlaceholderRegion) -> Option { self.region_highlighted(&ty::RePlaceholder(p)) } diff --git a/src/librustc_apfloat/ieee.rs b/src/librustc_apfloat/ieee.rs index 58066a9cada..9f68d770b9e 100644 --- a/src/librustc_apfloat/ieee.rs +++ b/src/librustc_apfloat/ieee.rs @@ -186,7 +186,7 @@ impl Semantics for X87DoubleExtendedS { /// exponent = all 1's, integer bit 0, significand 0 ("pseudoinfinity") /// exponent = all 1's, integer bit 0, significand nonzero ("pseudoNaN") /// exponent = 0, integer bit 1 ("pseudodenormal") - /// exponent!=0 nor all 1's, integer bit 0 ("unnormal") + /// exponent != 0 nor all 1's, integer bit 0 ("unnormal") /// At the moment, the first two are treated as NaNs, the second two as Normal. fn from_bits(bits: u128) -> IeeeFloat { let sign = bits & (1 << (Self::BITS - 1)); @@ -1549,11 +1549,11 @@ impl IeeeFloat { } } - /// Returns TRUE if, when truncating the current number, with BIT the + /// Returns `true` if, when truncating the current number, with `bit` the /// new LSB, with the given lost fraction and rounding mode, the result /// would need to be rounded away from zero (i.e., by increasing the - /// signficand). This routine must work for Category::Zero of both signs, and - /// Category::Normal numbers. + /// signficand). This routine must work for `Category::Zero` of both signs, and + /// `Category::Normal` numbers. fn round_away_from_zero(&self, round: Round, loss: Loss, bit: usize) -> bool { // NaNs and infinities should not have lost fractions. assert!(self.is_finite_non_zero() || self.is_zero()); @@ -2257,7 +2257,7 @@ impl Loss { more_significant } - /// Return the fraction lost were a bignum truncated losing the least + /// Returns the fraction lost were a bignum truncated losing the least /// significant `bits` bits. fn through_truncation(limbs: &[Limb], bits: usize) -> Loss { if bits == 0 { @@ -2320,12 +2320,12 @@ mod sig { Ordering::Equal } - /// Extract the given bit. 
+ /// Extracts the given bit. pub(super) fn get_bit(limbs: &[Limb], bit: usize) -> bool { limbs[bit / LIMB_BITS] & (1 << (bit % LIMB_BITS)) != 0 } - /// Set the given bit. + /// Sets the given bit. pub(super) fn set_bit(limbs: &mut [Limb], bit: usize) { limbs[bit / LIMB_BITS] |= 1 << (bit % LIMB_BITS); } @@ -2335,7 +2335,7 @@ mod sig { limbs[bit / LIMB_BITS] &= !(1 << (bit % LIMB_BITS)); } - /// Shift `dst` left `bits` bits, subtract `bits` from its exponent. + /// Shifts `dst` left `bits` bits, subtract `bits` from its exponent. pub(super) fn shift_left(dst: &mut [Limb], exp: &mut ExpInt, bits: usize) { if bits > 0 { // Our exponent should not underflow. @@ -2367,7 +2367,7 @@ mod sig { } } - /// Shift `dst` right `bits` bits noting lost fraction. + /// Shifts `dst` right `bits` bits noting lost fraction. pub(super) fn shift_right(dst: &mut [Limb], exp: &mut ExpInt, bits: usize) -> Loss { let loss = Loss::through_truncation(dst, bits); @@ -2403,7 +2403,7 @@ mod sig { loss } - /// Copy the bit vector of width `src_bits` from `src`, starting at bit SRC_LSB, + /// Copies the bit vector of width `src_bits` from `src`, starting at bit SRC_LSB, /// to `dst`, such that the bit SRC_LSB becomes the least significant bit of `dst`. /// All high bits above `src_bits` in `dst` are zero-filled. pub(super) fn extract(dst: &mut [Limb], src: &[Limb], src_bits: usize, src_lsb: usize) { diff --git a/src/librustc_apfloat/lib.rs b/src/librustc_apfloat/lib.rs index f79d448edce..18fd06960ea 100644 --- a/src/librustc_apfloat/lib.rs +++ b/src/librustc_apfloat/lib.rs @@ -374,7 +374,7 @@ pub trait Float fn from_str_r(s: &str, round: Round) -> Result, ParseError>; fn to_bits(self) -> u128; - /// Convert a floating point number to an integer according to the + /// Converts a floating point number to an integer according to the /// rounding mode. 
In case of an invalid operation exception, /// deterministic values are returned, namely zero for NaNs and the /// minimal or maximal value respectively for underflow or overflow. @@ -387,7 +387,7 @@ pub trait Float /// /// The *is_exact output tells whether the result is exact, in the sense /// that converting it back to the original floating point type produces - /// the original value. This is almost equivalent to result==Status::OK, + /// the original value. This is almost equivalent to `result == Status::OK`, /// except for negative zeroes. fn to_i128_r(self, width: usize, round: Round, is_exact: &mut bool) -> StatusAnd { let status; @@ -457,13 +457,13 @@ pub trait Float } } - /// IEEE-754R isSignMinus: Returns true if and only if the current value is + /// IEEE-754R isSignMinus: Returns whether the current value is /// negative. /// /// This applies to zeros and NaNs as well. fn is_negative(self) -> bool; - /// IEEE-754R isNormal: Returns true if and only if the current value is normal. + /// IEEE-754R isNormal: Returns whether the current value is normal. /// /// This implies that the current value of the float is not zero, subnormal, /// infinite, or NaN following the definition of normality from IEEE-754R. @@ -471,7 +471,7 @@ pub trait Float !self.is_denormal() && self.is_finite_non_zero() } - /// Returns true if and only if the current value is zero, subnormal, or + /// Returns `true` if the current value is zero, subnormal, or /// normal. /// /// This means that the value is not infinite or NaN. @@ -479,26 +479,26 @@ pub trait Float !self.is_nan() && !self.is_infinite() } - /// Returns true if and only if the float is plus or minus zero. + /// Returns `true` if the float is plus or minus zero. fn is_zero(self) -> bool { self.category() == Category::Zero } - /// IEEE-754R isSubnormal(): Returns true if and only if the float is a + /// IEEE-754R isSubnormal(): Returns whether the float is a /// denormal. 
fn is_denormal(self) -> bool; - /// IEEE-754R isInfinite(): Returns true if and only if the float is infinity. + /// IEEE-754R isInfinite(): Returns whether the float is infinity. fn is_infinite(self) -> bool { self.category() == Category::Infinity } - /// Returns true if and only if the float is a quiet or signaling NaN. + /// Returns `true` if the float is a quiet or signaling NaN. fn is_nan(self) -> bool { self.category() == Category::NaN } - /// Returns true if and only if the float is a signaling NaN. + /// Returns `true` if the float is a signaling NaN. fn is_signaling(self) -> bool; // Simple Queries @@ -517,19 +517,19 @@ pub trait Float self.is_zero() && self.is_negative() } - /// Returns true if and only if the number has the smallest possible non-zero + /// Returns `true` if the number has the smallest possible non-zero /// magnitude in the current semantics. fn is_smallest(self) -> bool { Self::SMALLEST.copy_sign(self).bitwise_eq(self) } - /// Returns true if and only if the number has the largest possible finite + /// Returns `true` if the number has the largest possible finite /// magnitude in the current semantics. fn is_largest(self) -> bool { Self::largest().copy_sign(self).bitwise_eq(self) } - /// Returns true if and only if the number is an exact integer. + /// Returns `true` if the number is an exact integer. fn is_integer(self) -> bool { // This could be made more efficient; I'm going for obviously correct. if !self.is_finite() { @@ -571,11 +571,11 @@ pub trait Float } pub trait FloatConvert: Float { - /// Convert a value of one floating point type to another. + /// Converts a value of one floating point type to another. /// The return value corresponds to the IEEE754 exceptions. 
*loses_info /// records whether the transformation lost information, i.e., whether /// converting the result back to the original type will produce the - /// original value (this is almost the same as return value==Status::OK, + /// original value (this is almost the same as return `value == Status::OK`, /// but there are edge cases where this is not so). fn convert_r(self, round: Round, loses_info: &mut bool) -> StatusAnd; fn convert(self, loses_info: &mut bool) -> StatusAnd { diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index f675c8d38a6..b528967dd65 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -239,7 +239,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { { //! Iterates over each loan that has been issued //! on entrance to `node`, regardless of whether it is - //! actually *in scope* at that point. Sometimes loans + //! actually *in scope* at that point. Sometimes loans //! are issued for future scopes and thus they may have been //! *issued* but not yet be in effect. diff --git a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs index 11597455bca..ae1d49afd49 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/lifetime.rs @@ -53,7 +53,7 @@ struct GuaranteeLifetimeContext<'a, 'tcx: 'a> { impl<'a, 'tcx> GuaranteeLifetimeContext<'a, 'tcx> { fn check(&self, cmt: &mc::cmt_<'tcx>, discr_scope: Option) -> R { //! Main routine. Walks down `cmt` until we find the - //! "guarantor". Reports an error if `self.loan_region` is + //! "guarantor". Reports an error if `self.loan_region` is //! larger than scope of `cmt`. 
debug!("guarantee_lifetime.check(cmt={:?}, loan_region={:?})", cmt, diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index c21a43bc683..1971c666312 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -285,7 +285,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { } /// Guarantees that `addr_of(cmt)` will be valid for the duration of `static_scope_r`, or - /// reports an error. This may entail taking out loans, which will be added to the + /// reports an error. This may entail taking out loans, which will be added to the /// `req_loan_map`. fn guarantee_valid(&mut self, borrow_id: hir::ItemLocalId, diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 4ced72cd279..85c4ca7bd37 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -296,11 +296,11 @@ pub struct Loan<'tcx> { /// gen_scope indicates where loan is introduced. Typically the /// loan is introduced at the point of the borrow, but in some /// cases, notably method arguments, the loan may be introduced - /// only later, once it comes into scope. See also + /// only later, once it comes into scope. See also /// `GatherLoanCtxt::compute_gen_scope`. gen_scope: region::Scope, - /// kill_scope indicates when the loan goes out of scope. This is + /// kill_scope indicates when the loan goes out of scope. This is /// either when the lifetime expires or when the local variable /// which roots the loan-path goes out of scope, whichever happens /// faster. See also `GatherLoanCtxt::compute_kill_scope`. diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index a206c37e97b..325d3559f0a 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -114,7 +114,7 @@ pub struct Move { /// Path being moved. 
pub path: MovePathIndex, - /// id of node that is doing the move. + /// ID of node that is doing the move. pub id: hir::ItemLocalId, /// Kind of move, for error messages. @@ -129,7 +129,7 @@ pub struct Assignment { /// Path being assigned. pub path: MovePathIndex, - /// id where assignment occurs + /// ID where assignment occurs pub id: hir::ItemLocalId, /// span of node where assignment occurs @@ -168,8 +168,8 @@ fn loan_path_is_precise(loan_path: &LoanPath<'_>) -> bool { } impl<'a, 'tcx> MoveData<'tcx> { - /// return true if there are no trackable assignments or moves - /// in this move data - that means that there is nothing that + /// Returns `true` if there are no trackable assignments or moves + /// in this move data -- that means that there is nothing that /// could cause a borrow error. pub fn is_empty(&self) -> bool { self.moves.borrow().is_empty() && diff --git a/src/librustc_borrowck/dataflow.rs b/src/librustc_borrowck/dataflow.rs index 90f33ede62c..de2a3c4cb22 100644 --- a/src/librustc_borrowck/dataflow.rs +++ b/src/librustc_borrowck/dataflow.rs @@ -178,7 +178,7 @@ fn build_local_id_to_index(body: Option<&hir::Body>, return index; - /// Add mappings from the ast nodes for the formal bindings to + /// Adds mappings from the ast nodes for the formal bindings to /// the entry-node in the graph. fn add_entries_from_fn_body(index: &mut FxHashMap>, body: &hir::Body, diff --git a/src/librustc_codegen_llvm/abi.rs b/src/librustc_codegen_llvm/abi.rs index 258d839d32e..f7d2699a27e 100644 --- a/src/librustc_codegen_llvm/abi.rs +++ b/src/librustc_codegen_llvm/abi.rs @@ -174,13 +174,13 @@ pub trait ArgTypeExt<'ll, 'tcx> { } impl ArgTypeExt<'ll, 'tcx> for ArgType<'tcx, Ty<'tcx>> { - /// Get the LLVM type for a place of the original Rust type of + /// Gets the LLVM type for a place of the original Rust type of /// this argument/return, i.e., the result of `type_of::type_of`. 
fn memory_ty(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type { self.layout.llvm_type(cx) } - /// Store a direct/indirect value described by this ArgType into a + /// Stores a direct/indirect value described by this ArgType into a /// place for the original Rust type of this argument/return. /// Can be used for both storing formal arguments into Rust variables /// or results of call/invoke instructions into their destinations. diff --git a/src/librustc_codegen_llvm/back/archive.rs b/src/librustc_codegen_llvm/back/archive.rs index 1cf150dad60..e02f7df2efc 100644 --- a/src/librustc_codegen_llvm/back/archive.rs +++ b/src/librustc_codegen_llvm/back/archive.rs @@ -51,7 +51,7 @@ fn is_relevant_child(c: &Child) -> bool { } impl<'a> ArchiveBuilder<'a> { - /// Create a new static archive, ready for modifying the archive specified + /// Creates a new static archive, ready for modifying the archive specified /// by `config`. pub fn new(config: ArchiveConfig<'a>) -> ArchiveBuilder<'a> { ArchiveBuilder { diff --git a/src/librustc_codegen_llvm/back/link.rs b/src/librustc_codegen_llvm/back/link.rs index fc744201a33..725009e1377 100644 --- a/src/librustc_codegen_llvm/back/link.rs +++ b/src/librustc_codegen_llvm/back/link.rs @@ -42,7 +42,7 @@ pub use rustc_codegen_utils::link::{find_crate_name, filename_for_input, default out_filename, check_file_is_writeable}; -/// Perform the linkage portion of the compilation phase. This will generate all +/// Performs the linkage portion of the compilation phase. This will generate all /// of the requested outputs for this compilation session. 
pub(crate) fn link_binary(sess: &Session, codegen_results: &CodegenResults, diff --git a/src/librustc_codegen_llvm/back/lto.rs b/src/librustc_codegen_llvm/back/lto.rs index 3e51078dc64..be7733bf554 100644 --- a/src/librustc_codegen_llvm/back/lto.rs +++ b/src/librustc_codegen_llvm/back/lto.rs @@ -791,7 +791,7 @@ impl ThinLTOImports { self.imports.get(llvm_module_name).map(|v| &v[..]).unwrap_or(&[]) } - /// Load the ThinLTO import map from ThinLTOData. + /// Loads the ThinLTO import map from ThinLTOData. unsafe fn from_thin_lto_data(data: *const llvm::ThinLTOData) -> ThinLTOImports { unsafe extern "C" fn imported_module_callback(payload: *mut libc::c_void, importing_module_name: *const libc::c_char, diff --git a/src/librustc_codegen_llvm/back/wasm.rs b/src/librustc_codegen_llvm/back/wasm.rs index 3501123a37f..b403660fa51 100644 --- a/src/librustc_codegen_llvm/back/wasm.rs +++ b/src/librustc_codegen_llvm/back/wasm.rs @@ -112,7 +112,7 @@ pub fn rewrite_imports(path: &Path, import_map: &FxHashMap) { } } -/// Add or augment the existing `producers` section to encode information about +/// Adds or augment the existing `producers` section to encode information about /// the Rust compiler used to produce the wasm file. pub fn add_producer_section( path: &Path, diff --git a/src/librustc_codegen_llvm/base.rs b/src/librustc_codegen_llvm/base.rs index d9f44ca6e45..70986a4e179 100644 --- a/src/librustc_codegen_llvm/base.rs +++ b/src/librustc_codegen_llvm/base.rs @@ -7,11 +7,11 @@ //! //! Hopefully useful general knowledge about codegen: //! -//! * There's no way to find out the Ty type of a Value. Doing so -//! would be "trying to get the eggs out of an omelette" (credit: -//! pcwalton). You can, instead, find out its llvm::Type by calling val_ty, -//! but one llvm::Type corresponds to many `Ty`s; for instance, tup(int, int, -//! int) and rec(x=int, y=int, z=int) will have the same llvm::Type. +//! * There's no way to find out the `Ty` type of a Value. Doing so +//! 
would be "trying to get the eggs out of an omelette" (credit: +//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`, +//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int, +//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`. use super::ModuleLlvm; use rustc_codegen_ssa::{ModuleCodegen, ModuleKind}; diff --git a/src/librustc_codegen_llvm/callee.rs b/src/librustc_codegen_llvm/callee.rs index 0d9d6aa5aa2..9426328da32 100644 --- a/src/librustc_codegen_llvm/callee.rs +++ b/src/librustc_codegen_llvm/callee.rs @@ -1,6 +1,6 @@ //! Handles codegen of callees as well as other call-related -//! things. Callees are a superset of normal rust values and sometimes -//! have different representations. In particular, top-level fn items +//! things. Callees are a superset of normal rust values and sometimes +//! have different representations. In particular, top-level fn items //! and methods are represented as just a fn ptr and not a full //! closure. diff --git a/src/librustc_codegen_llvm/context.rs b/src/librustc_codegen_llvm/context.rs index f6795588441..a4b976dfbd9 100644 --- a/src/librustc_codegen_llvm/context.rs +++ b/src/librustc_codegen_llvm/context.rs @@ -75,7 +75,7 @@ pub struct CodegenCx<'ll, 'tcx: 'll> { pub statics_to_rauw: RefCell>, /// Statics that will be placed in the llvm.used variable - /// See http://llvm.org/docs/LangRef.html#the-llvm-used-global-variable for details + /// See for details pub used_statics: RefCell>, pub lltypes: RefCell, Option), &'ll Type>>, @@ -807,7 +807,7 @@ impl CodegenCx<'b, 'tcx> { } impl<'b, 'tcx> CodegenCx<'b, 'tcx> { - /// Generate a new symbol name with the given prefix. This symbol name must + /// Generates a new symbol name with the given prefix. This symbol name must /// only be used for definitions with `internal` or `private` linkage. 
pub fn generate_local_symbol_name(&self, prefix: &str) -> String { let idx = self.local_gen_sym_counter.get(); diff --git a/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs b/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs index dbd821865f9..791526c98c8 100644 --- a/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs +++ b/src/librustc_codegen_llvm/debuginfo/create_scope_map.rs @@ -16,7 +16,7 @@ use rustc_data_structures::indexed_vec::{Idx, IndexVec}; use syntax_pos::BytePos; -/// Produce DIScope DIEs for each MIR Scope which has variables defined in it. +/// Produces DIScope DIEs for each MIR Scope which has variables defined in it. /// If debuginfo is disabled, the returned vector is empty. pub fn create_mir_scopes( cx: &CodegenCx<'ll, '_>, diff --git a/src/librustc_codegen_llvm/debuginfo/doc.rs b/src/librustc_codegen_llvm/debuginfo/doc.rs index a4acc58eca9..cf18b995b61 100644 --- a/src/librustc_codegen_llvm/debuginfo/doc.rs +++ b/src/librustc_codegen_llvm/debuginfo/doc.rs @@ -160,7 +160,7 @@ //! //! This algorithm also provides a stable ID for types that are defined in one //! crate but instantiated from metadata within another crate. We just have to -//! take care to always map crate and node IDs back to the original crate +//! take care to always map crate and `NodeId`s back to the original crate //! context. //! //! As a side-effect these unique type IDs also help to solve a problem arising @@ -170,7 +170,7 @@ //! with different concrete substitutions for `'a`, and thus there will be N //! `Ty` instances for the type `Struct<'a>` even though it is not generic //! otherwise. Unfortunately this means that we cannot use `ty::type_id()` as -//! cheap identifier for type metadata---we have done this in the past, but it +//! cheap identifier for type metadata -- we have done this in the past, but it //! led to unnecessary metadata duplication in the best case and LLVM //! assertions in the worst. However, the unique type ID as described above //! 
*can* be used as identifier. Since it is comparatively expensive to diff --git a/src/librustc_codegen_llvm/debuginfo/mod.rs b/src/librustc_codegen_llvm/debuginfo/mod.rs index 113b9958c7f..625f6cd45fb 100644 --- a/src/librustc_codegen_llvm/debuginfo/mod.rs +++ b/src/librustc_codegen_llvm/debuginfo/mod.rs @@ -102,7 +102,7 @@ impl<'a, 'tcx> CrateDebugContext<'a, 'tcx> { } } -/// Create any deferred debug metadata nodes +/// Creates any deferred debug metadata nodes pub fn finalize(cx: &CodegenCx) { if cx.dbg_cx.is_none() { return; diff --git a/src/librustc_codegen_llvm/debuginfo/utils.rs b/src/librustc_codegen_llvm/debuginfo/utils.rs index 8b85df79d04..f2d92eefad3 100644 --- a/src/librustc_codegen_llvm/debuginfo/utils.rs +++ b/src/librustc_codegen_llvm/debuginfo/utils.rs @@ -36,7 +36,7 @@ pub fn create_DIArray( }; } -/// Return syntax_pos::Loc corresponding to the beginning of the span +/// Returns syntax_pos::Loc corresponding to the beginning of the span pub fn span_start(cx: &CodegenCx, span: Span) -> syntax_pos::Loc { cx.sess().source_map().lookup_char_pos(span.lo()) } diff --git a/src/librustc_codegen_llvm/llvm/ffi.rs b/src/librustc_codegen_llvm/llvm/ffi.rs index 58bdfc47fca..3232f4e8f51 100644 --- a/src/librustc_codegen_llvm/llvm/ffi.rs +++ b/src/librustc_codegen_llvm/llvm/ffi.rs @@ -1337,7 +1337,7 @@ extern "C" { pub fn LLVMGetSections(ObjFile: &'a ObjectFile) -> &'a mut SectionIterator<'a>; /// Destroys a section iterator. pub fn LLVMDisposeSectionIterator(SI: &'a mut SectionIterator<'a>); - /// Returns true if the section iterator is at the end of the section + /// Returns `true` if the section iterator is at the end of the section /// list: pub fn LLVMIsSectionIteratorAtEnd(ObjFile: &'a ObjectFile, SI: &SectionIterator<'a>) -> Bool; /// Moves the section iterator to point to the next section. 
diff --git a/src/librustc_codegen_llvm/type_of.rs b/src/librustc_codegen_llvm/type_of.rs index afaeb352cd9..39f48b266c2 100644 --- a/src/librustc_codegen_llvm/type_of.rs +++ b/src/librustc_codegen_llvm/type_of.rs @@ -226,7 +226,7 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyLayout<'tcx> { } } - /// Get the LLVM type corresponding to a Rust type, i.e., `rustc::ty::Ty`. + /// Gets the LLVM type corresponding to a Rust type, i.e., `rustc::ty::Ty`. /// The pointee type of the pointer in `PlaceRef` is always this type. /// For sized types, it is also the right LLVM type for an `alloca` /// containing a value of that type, and most immediates (except `bool`). diff --git a/src/librustc_codegen_ssa/back/linker.rs b/src/librustc_codegen_ssa/back/linker.rs index 249715a7b6e..3cbe3793f10 100644 --- a/src/librustc_codegen_ssa/back/linker.rs +++ b/src/librustc_codegen_ssa/back/linker.rs @@ -91,7 +91,7 @@ impl LinkerInfo { } } -/// Linker abstraction used by back::link to build up the command to invoke a +/// Linker abstraction used by `back::link` to build up the command to invoke a /// linker. /// /// This trait is the total list of requirements needed by `back::link` and @@ -145,7 +145,7 @@ pub struct GccLinker<'a> { impl<'a> GccLinker<'a> { /// Argument that must be passed *directly* to the linker /// - /// These arguments need to be prepended with '-Wl,' when a gcc-style linker is used + /// These arguments need to be prepended with `-Wl`, when a GCC-style linker is used. fn linker_arg(&mut self, arg: S) -> &mut Self where S: AsRef { diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs index 67d4d408bab..8f8095a96ee 100644 --- a/src/librustc_codegen_ssa/back/write.rs +++ b/src/librustc_codegen_ssa/back/write.rs @@ -663,7 +663,7 @@ pub enum WorkItem { /// Copy the post-LTO artifacts from the incremental cache to the output /// directory. CopyPostLtoArtifacts(CachedModuleCodegen), - /// Perform (Thin)LTO on the given module. 
+ /// Performs (Thin)LTO on the given module. LTO(lto::LtoModuleCodegen), } @@ -1798,7 +1798,7 @@ impl OngoingCodegen { drop(self.coordinator_send.send(Box::new(Message::CodegenComplete::))); } - /// Consume this context indicating that codegen was entirely aborted, and + /// Consumes this context indicating that codegen was entirely aborted, and /// we need to exit as quickly as possible. /// /// This method blocks the current thread until all worker threads have diff --git a/src/librustc_codegen_ssa/base.rs b/src/librustc_codegen_ssa/base.rs index 84e55ce0f22..ecac82db947 100644 --- a/src/librustc_codegen_ssa/base.rs +++ b/src/librustc_codegen_ssa/base.rs @@ -7,11 +7,11 @@ //! //! Hopefully useful general knowledge about codegen: //! -//! * There's no way to find out the Ty type of a Value. Doing so -//! would be "trying to get the eggs out of an omelette" (credit: -//! pcwalton). You can, instead, find out its llvm::Type by calling val_ty, -//! but one llvm::Type corresponds to many `Ty`s; for instance, tup(int, int, -//! int) and rec(x=int, y=int, z=int) will have the same llvm::Type. +//! * There's no way to find out the `Ty` type of a Value. Doing so +//! would be "trying to get the eggs out of an omelette" (credit: +//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`, +//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int, +//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`. use {ModuleCodegen, ModuleKind, CachedModuleCodegen}; @@ -156,7 +156,7 @@ pub fn compare_simd_types<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( bx.sext(cmp, ret_ty) } -/// Retrieve the information we are losing (making dynamic) in an unsizing +/// Retrieves the information we are losing (making dynamic) in an unsizing /// adjustment. /// /// The `old_info` argument is a bit funny. 
It is intended for use @@ -347,7 +347,7 @@ fn cast_shift_rhs<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( } } -/// Returns whether this session's target will use SEH-based unwinding. +/// Returns `true` if this session's target will use SEH-based unwinding. /// /// This is only true for MSVC targets, and even then the 64-bit MSVC target /// currently uses SEH-ish unwinding with DWARF info tables to the side (same as @@ -436,7 +436,7 @@ pub fn codegen_instance<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( mir::codegen_mir::(cx, lldecl, &mir, instance, sig); } -/// Create the `main` function which will initialize the rust runtime and call +/// Creates the `main` function which will initialize the rust runtime and call /// users main function. pub fn maybe_create_entry_wrapper<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( cx: &'a Bx::CodegenCx diff --git a/src/librustc_codegen_ssa/lib.rs b/src/librustc_codegen_ssa/lib.rs index 58b3f0434a6..1a911d94b1b 100644 --- a/src/librustc_codegen_ssa/lib.rs +++ b/src/librustc_codegen_ssa/lib.rs @@ -68,7 +68,7 @@ pub mod back; pub struct ModuleCodegen { /// The name of the module. When the crate may be saved between /// compilations, incremental compilation requires that name be - /// unique amongst **all** crates. Therefore, it should contain + /// unique amongst **all** crates. Therefore, it should contain /// something unique to this crate (e.g., a module path) as well /// as the crate name and disambiguator. /// We currently generate these names via CodegenUnit::build_cgu_name(). @@ -141,7 +141,7 @@ bitflags! { } } -/// Misc info we load from metadata to persist beyond the tcx +/// Misc info we load from metadata to persist beyond the tcx. 
pub struct CrateInfo { pub panic_runtime: Option, pub compiler_builtins: Option, diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs index aa82c853257..be2db47a533 100644 --- a/src/librustc_codegen_ssa/mir/block.rs +++ b/src/librustc_codegen_ssa/mir/block.rs @@ -884,7 +884,7 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } } - /// Return the landingpad wrapper around the given basic block + /// Returns the landing-pad wrapper around the given basic block. /// /// No-op in MSVC SEH scheme. fn landing_pad_to( diff --git a/src/librustc_codegen_ssa/mir/mod.rs b/src/librustc_codegen_ssa/mir/mod.rs index c7e2131eed5..32c3408f1cb 100644 --- a/src/librustc_codegen_ssa/mir/mod.rs +++ b/src/librustc_codegen_ssa/mir/mod.rs @@ -422,7 +422,7 @@ fn create_funclets<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( }).unzip() } -/// Produce, for each argument, a `Value` pointing at the +/// Produces, for each argument, a `Value` pointing at the /// argument's value. As arguments are places, these are always /// indirect. fn arg_local_refs<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>( diff --git a/src/librustc_codegen_ssa/mir/place.rs b/src/librustc_codegen_ssa/mir/place.rs index 596f97a0388..efededb06dd 100644 --- a/src/librustc_codegen_ssa/mir/place.rs +++ b/src/librustc_codegen_ssa/mir/place.rs @@ -13,16 +13,16 @@ use super::operand::OperandValue; #[derive(Copy, Clone, Debug)] pub struct PlaceRef<'tcx, V> { - /// Pointer to the contents of the place + /// Pointer to the contents of the place. pub llval: V, - /// This place's extra data if it is unsized, or null + /// This place's extra data if it is unsized, or null. pub llextra: Option, - /// Monomorphized type of this place, including variant information + /// Monomorphized type of this place, including variant information. pub layout: TyLayout<'tcx>, - /// What alignment we know for this place + /// What alignment we know for this place. 
pub align: Align, } @@ -277,7 +277,7 @@ impl<'a, 'tcx: 'a, V: CodegenObject> PlaceRef<'tcx, V> { } } - /// Set the discriminant for a new value of the given case of the given + /// Sets the discriminant for a new value of the given case of the given /// representation. pub fn codegen_set_discr>( &self, diff --git a/src/librustc_codegen_ssa/traits/declare.rs b/src/librustc_codegen_ssa/traits/declare.rs index 3cd3c4e48b9..6a400a7d7a4 100644 --- a/src/librustc_codegen_ssa/traits/declare.rs +++ b/src/librustc_codegen_ssa/traits/declare.rs @@ -29,7 +29,7 @@ pub trait DeclareMethods<'tcx>: BackendTypes { /// Declare a global with an intention to define it. /// /// Use this function when you intend to define a global. This function will - /// return None if the name already has a definition associated with it. In that + /// return `None` if the name already has a definition associated with it. In that /// case an error should be reported to the user, because it usually happens due /// to user’s fault (e.g., misuse of #[no_mangle] or #[export_name] attributes). fn define_global(&self, name: &str, ty: Self::Type) -> Option; @@ -53,10 +53,10 @@ pub trait DeclareMethods<'tcx>: BackendTypes { /// can happen with #[no_mangle] or #[export_name], for example. fn define_internal_fn(&self, name: &str, fn_sig: ty::PolyFnSig<'tcx>) -> Self::Value; - /// Get declared value by name. + /// Gets declared value by name. fn get_declared_value(&self, name: &str) -> Option; - /// Get defined or externally defined (AvailableExternally linkage) value by + /// Gets defined or externally defined (AvailableExternally linkage) value by /// name. 
fn get_defined_value(&self, name: &str) -> Option; } diff --git a/src/librustc_codegen_ssa/traits/type_.rs b/src/librustc_codegen_ssa/traits/type_.rs index 2ec0c8e5a75..2c990ed89c9 100644 --- a/src/librustc_codegen_ssa/traits/type_.rs +++ b/src/librustc_codegen_ssa/traits/type_.rs @@ -39,13 +39,13 @@ pub trait BaseTypeMethods<'tcx>: Backend<'tcx> { fn type_ptr_to(&self, ty: Self::Type) -> Self::Type; fn element_type(&self, ty: Self::Type) -> Self::Type; - /// Return the number of elements in `self` if it is a LLVM vector type. + /// Returns the number of elements in `self` if it is a LLVM vector type. fn vector_length(&self, ty: Self::Type) -> usize; fn func_params_types(&self, ty: Self::Type) -> Vec; fn float_width(&self, ty: Self::Type) -> usize; - /// Retrieve the bit width of the integer type `self`. + /// Retrieves the bit width of the integer type `self`. fn int_width(&self, ty: Self::Type) -> u64; fn val_ty(&self, v: Self::Value) -> Self::Type; diff --git a/src/librustc_data_structures/base_n.rs b/src/librustc_data_structures/base_n.rs index c9c1933f25b..f1bd3f03aef 100644 --- a/src/librustc_data_structures/base_n.rs +++ b/src/librustc_data_structures/base_n.rs @@ -1,4 +1,4 @@ -/// Convert unsigned integers into a string representation with some base. +/// Converts unsigned integers into a string representation with some base. /// Bases up to and including 36 can be used for case-insensitive things. use std::str; diff --git a/src/librustc_data_structures/bit_set.rs b/src/librustc_data_structures/bit_set.rs index 05d2185ae69..ff7964646d6 100644 --- a/src/librustc_data_structures/bit_set.rs +++ b/src/librustc_data_structures/bit_set.rs @@ -27,7 +27,7 @@ pub struct BitSet { } impl BitSet { - /// Create a new, empty bitset with a given `domain_size`. + /// Creates a new, empty bitset with a given `domain_size`. 
#[inline] pub fn new_empty(domain_size: usize) -> BitSet { let num_words = num_words(domain_size); @@ -38,7 +38,7 @@ impl BitSet { } } - /// Create a new, filled bitset with a given `domain_size`. + /// Creates a new, filled bitset with a given `domain_size`. #[inline] pub fn new_filled(domain_size: usize) -> BitSet { let num_words = num_words(domain_size); @@ -51,7 +51,7 @@ impl BitSet { result } - /// Get the domain size. + /// Gets the domain size. pub fn domain_size(&self) -> usize { self.domain_size } @@ -85,7 +85,7 @@ impl BitSet { self.words.iter().map(|e| e.count_ones() as usize).sum() } - /// True if `self` contains `elem`. + /// Returns `true` if `self` contains `elem`. #[inline] pub fn contains(&self, elem: T) -> bool { assert!(elem.index() < self.domain_size); @@ -106,7 +106,7 @@ impl BitSet { self.words.iter().all(|a| *a == 0) } - /// Insert `elem`. Returns true if the set has changed. + /// Insert `elem`. Returns whether the set has changed. #[inline] pub fn insert(&mut self, elem: T) -> bool { assert!(elem.index() < self.domain_size); @@ -126,7 +126,7 @@ impl BitSet { self.clear_excess_bits(); } - /// Returns true if the set has changed. + /// Returns `true` if the set has changed. #[inline] pub fn remove(&mut self, elem: T) -> bool { assert!(elem.index() < self.domain_size); @@ -138,26 +138,26 @@ impl BitSet { new_word != word } - /// Set `self = self | other` and return true if `self` changed + /// Sets `self = self | other` and returns `true` if `self` changed /// (i.e., if new bits were added). pub fn union(&mut self, other: &impl UnionIntoBitSet) -> bool { other.union_into(self) } - /// Set `self = self - other` and return true if `self` changed. + /// Sets `self = self - other` and returns `true` if `self` changed. /// (i.e., if any bits were removed). pub fn subtract(&mut self, other: &impl SubtractFromBitSet) -> bool { other.subtract_from(self) } - /// Set `self = self & other` and return true if `self` changed. 
+ /// Sets `self = self & other` and returns `true` if `self` changed. /// (i.e., if any bits were removed). pub fn intersect(&mut self, other: &BitSet) -> bool { assert_eq!(self.domain_size, other.domain_size); bitwise(&mut self.words, &other.words, |a, b| { a & b }) } - /// Get a slice of the underlying words. + /// Gets a slice of the underlying words. pub fn words(&self) -> &[Word] { &self.words } @@ -611,7 +611,7 @@ impl GrowableBitSet { GrowableBitSet { bit_set: BitSet::new_empty(bits) } } - /// Returns true if the set has changed. + /// Returns `true` if the set has changed. #[inline] pub fn insert(&mut self, elem: T) -> bool { self.ensure(elem.index() + 1); @@ -645,7 +645,7 @@ pub struct BitMatrix { } impl BitMatrix { - /// Create a new `rows x columns` matrix, initially empty. + /// Creates a new `rows x columns` matrix, initially empty. pub fn new(num_rows: usize, num_columns: usize) -> BitMatrix { // For every element, we need one bit for every other // element. Round up to an even number of words. @@ -668,7 +668,7 @@ impl BitMatrix { /// Sets the cell at `(row, column)` to true. Put another way, insert /// `column` to the bitset for `row`. /// - /// Returns true if this changed the matrix, and false otherwise. + /// Returns `true` if this changed the matrix. pub fn insert(&mut self, row: R, column: C) -> bool { assert!(row.index() < self.num_rows && column.index() < self.num_columns); let (start, _) = self.range(row); @@ -691,7 +691,7 @@ impl BitMatrix { (self.words[start + word_index] & mask) != 0 } - /// Returns those indices that are true in rows `a` and `b`. This + /// Returns those indices that are true in rows `a` and `b`. This /// is an O(n) operation where `n` is the number of elements /// (somewhat independent from the actual size of the /// intersection, in particular). @@ -715,8 +715,8 @@ impl BitMatrix { result } - /// Add the bits from row `read` to the bits from row `write`, - /// return true if anything changed.
+ /// Adds the bits from row `read` to the bits from row `write`, and + /// returns `true` if anything changed. /// /// This is used when computing transitive reachability because if /// you have an edge `write -> read`, because in that case @@ -772,7 +772,7 @@ where } impl SparseBitMatrix { - /// Create a new empty sparse bit matrix with no rows or columns. + /// Creates a new empty sparse bit matrix with no rows or columns. pub fn new(num_columns: usize) -> Self { Self { num_columns, @@ -793,7 +793,7 @@ impl SparseBitMatrix { /// Sets the cell at `(row, column)` to true. Put another way, insert /// `column` to the bitset for `row`. /// - /// Returns true if this changed the matrix, and false otherwise. + /// Returns `true` if this changed the matrix. pub fn insert(&mut self, row: R, column: C) -> bool { self.ensure_row(row).insert(column) } @@ -806,8 +806,8 @@ impl SparseBitMatrix { self.row(row).map_or(false, |r| r.contains(column)) } - /// Add the bits from row `read` to the bits from row `write`, - /// return true if anything changed. + /// Adds the bits from row `read` to the bits from row `write`, and + /// returns `true` if anything changed. /// /// This is used when computing transitive reachability because if /// you have an edge `write -> read`, because in that case diff --git a/src/librustc_data_structures/graph/implementation/mod.rs b/src/librustc_data_structures/graph/implementation/mod.rs index a8b73409406..de4b1bcd0c2 100644 --- a/src/librustc_data_structures/graph/implementation/mod.rs +++ b/src/librustc_data_structures/graph/implementation/mod.rs @@ -14,7 +14,7 @@ //! stored. The edges are stored in a central array, but they are also //! threaded onto two linked lists for each node, one for incoming edges //! and one for outgoing edges. Note that every edge is a member of some -//! incoming list and some outgoing list. Basically you can load the +//! incoming list and some outgoing list. Basically you can load the //! 
first index of the linked list from the node data structures (the //! field `first_edge`) and then, for each edge, load the next index from //! the field `next_edge`). Each of those fields is an array that should @@ -79,7 +79,7 @@ pub const OUTGOING: Direction = Direction { repr: 0 }; pub const INCOMING: Direction = Direction { repr: 1 }; impl NodeIndex { - /// Returns unique id (unique with respect to the graph holding associated node). + /// Returns unique ID (unique with respect to the graph holding associated node). pub fn node_id(self) -> usize { self.0 } diff --git a/src/librustc_data_structures/graph/scc/mod.rs b/src/librustc_data_structures/graph/scc/mod.rs index e3264fda262..24c5448639e 100644 --- a/src/librustc_data_structures/graph/scc/mod.rs +++ b/src/librustc_data_structures/graph/scc/mod.rs @@ -200,7 +200,7 @@ where } } - /// Visit a node during the DFS. We first examine its current + /// Visits a node during the DFS. We first examine its current /// state -- if it is not yet visited (`NotVisited`), we can push /// it onto the stack and start walking its successors. /// diff --git a/src/librustc_data_structures/indexed_vec.rs b/src/librustc_data_structures/indexed_vec.rs index 516ea7fb7d9..09aec50e4bb 100644 --- a/src/librustc_data_structures/indexed_vec.rs +++ b/src/librustc_data_structures/indexed_vec.rs @@ -12,7 +12,7 @@ use rustc_serialize as serialize; /// Represents some newtyped `usize` wrapper. /// -/// (purpose: avoid mixing indexes for different bitvector domains.) +/// Purpose: avoid mixing indexes for different bitvector domains. pub trait Idx: Copy + 'static + Ord + Debug + Hash { fn new(idx: usize) -> Self; @@ -144,19 +144,19 @@ macro_rules! newtype_index { unsafe { $type { private: value } } } - /// Extract value of this index as an integer. + /// Extracts the value of this index as an integer. #[inline] $v fn index(self) -> usize { self.as_usize() } - /// Extract value of this index as a usize. 
+ /// Extracts the value of this index as a `u32`. #[inline] $v fn as_u32(self) -> u32 { self.private } - /// Extract value of this index as a u32. + /// Extracts the value of this index as a `usize`. #[inline] $v fn as_usize(self) -> usize { self.as_u32() as usize @@ -641,7 +641,7 @@ impl IndexVec { self.raw.get_mut(index.index()) } - /// Return mutable references to two distinct elements, a and b. Panics if a == b. + /// Returns mutable references to two distinct elements, a and b. Panics if a == b. #[inline] pub fn pick2_mut(&mut self, a: I, b: I) -> (&mut T, &mut T) { let (ai, bi) = (a.index(), b.index()); diff --git a/src/librustc_data_structures/obligation_forest/graphviz.rs b/src/librustc_data_structures/obligation_forest/graphviz.rs index 72551b42324..a0363e165e0 100644 --- a/src/librustc_data_structures/obligation_forest/graphviz.rs +++ b/src/librustc_data_structures/obligation_forest/graphviz.rs @@ -7,8 +7,8 @@ use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; impl ObligationForest { - /// Create a graphviz representation of the obligation forest. Given a directory this will - /// create files with name of the format `_.gv`. The counter is + /// Creates a graphviz representation of the obligation forest. Given a directory this will + /// create files with name of the format `_.gv`. The counter is /// global and is maintained internally. /// /// Calling this will do nothing unless the environment variable diff --git a/src/librustc_data_structures/obligation_forest/mod.rs b/src/librustc_data_structures/obligation_forest/mod.rs index 546bb64168e..4490e5f86d2 100644 --- a/src/librustc_data_structures/obligation_forest/mod.rs +++ b/src/librustc_data_structures/obligation_forest/mod.rs @@ -64,7 +64,7 @@ //! #### Snapshots //! //! The `ObligationForest` supports a limited form of snapshots; see -//! `start_snapshot`; `commit_snapshot`; and `rollback_snapshot`. In +//! `start_snapshot`, `commit_snapshot`, and `rollback_snapshot`. In //! 
particular, you can use a snapshot to roll back new root //! obligations. However, it is an error to attempt to //! `process_obligations` during a snapshot. @@ -72,7 +72,7 @@ //! ### Implementation details //! //! For the most part, comments specific to the implementation are in the -//! code. This file only contains a very high-level overview. Basically, +//! code. This file only contains a very high-level overview. Basically, //! the forest is stored in a vector. Each element of the vector is a node //! in some tree. Each node in the vector has the index of an (optional) //! parent and (for convenience) its root (which may be itself). It also @@ -163,7 +163,7 @@ pub struct ObligationForest { obligation_tree_id_generator: ObligationTreeIdGenerator, - /// Per tree error cache. This is used to deduplicate errors, + /// Per tree error cache. This is used to deduplicate errors, /// which is necessary to avoid trait resolution overflow in /// some cases. /// @@ -268,13 +268,13 @@ impl ObligationForest { } } - /// Return the total number of nodes in the forest that have not + /// Returns the total number of nodes in the forest that have not /// yet been fully resolved. pub fn len(&self) -> usize { self.nodes.len() } - /// Registers an obligation + /// Registers an obligation. /// /// This CAN be done in a snapshot pub fn register_obligation(&mut self, obligation: O) { @@ -341,7 +341,7 @@ impl ObligationForest { } } - /// Convert all remaining obligations to the given error. + /// Converts all remaining obligations to the given error. /// /// This cannot be done during a snapshot. pub fn to_errors(&mut self, error: E) -> Vec> { @@ -380,10 +380,10 @@ impl ObligationForest { .insert(node.obligation.as_predicate().clone()); } - /// Perform a pass through the obligation list. This must + /// Performs a pass through the obligation list. This must /// be called in a loop until `outcome.stalled` is false. /// - /// This CANNOT be unrolled (presently, at least). 
+ /// This _cannot_ be unrolled (presently, at least). pub fn process_obligations

(&mut self, processor: &mut P, do_completed: DoCompleted) -> Outcome where P: ObligationProcessor @@ -461,7 +461,7 @@ impl ObligationForest { } } - /// Mark all NodeState::Success nodes as NodeState::Done and + /// Mark all `NodeState::Success` nodes as `NodeState::Done` and /// report all cycles between them. This should be called /// after `mark_as_waiting` marks all nodes with pending /// subobligations as NodeState::Waiting. @@ -566,7 +566,7 @@ impl ObligationForest { } } - /// Marks all nodes that depend on a pending node as NodeState::Waiting. + /// Marks all nodes that depend on a pending node as `NodeState::Waiting`. fn mark_as_waiting(&self) { for node in &self.nodes { if node.state.get() == NodeState::Waiting { diff --git a/src/librustc_data_structures/owning_ref/mod.rs b/src/librustc_data_structures/owning_ref/mod.rs index 30e510cc5b0..236559dcd7c 100644 --- a/src/librustc_data_structures/owning_ref/mod.rs +++ b/src/librustc_data_structures/owning_ref/mod.rs @@ -286,7 +286,7 @@ impl Erased for T {} pub unsafe trait IntoErased<'a> { /// Owner with the dereference type substituted to `Erased`. type Erased; - /// Perform the type erasure. + /// Performs the type erasure. fn into_erased(self) -> Self::Erased; } @@ -296,7 +296,7 @@ pub unsafe trait IntoErased<'a> { pub unsafe trait IntoErasedSend<'a> { /// Owner with the dereference type substituted to `Erased + Send`. type Erased: Send; - /// Perform the type erasure. + /// Performs the type erasure. fn into_erased_send(self) -> Self::Erased; } @@ -306,7 +306,7 @@ pub unsafe trait IntoErasedSend<'a> { pub unsafe trait IntoErasedSendSync<'a> { /// Owner with the dereference type substituted to `Erased + Send + Sync`. type Erased: Send + Sync; - /// Perform the type erasure. + /// Performs the type erasure. 
fn into_erased_send_sync(self) -> Self::Erased; } @@ -844,7 +844,7 @@ pub trait ToHandleMut { impl OwningHandle where O: StableAddress, O::Target: ToHandle, H: Deref, { - /// Create a new `OwningHandle` for a type that implements `ToHandle`. For types + /// Creates a new `OwningHandle` for a type that implements `ToHandle`. For types /// that don't implement `ToHandle`, callers may invoke `new_with_fn`, which accepts /// a callback to perform the conversion. pub fn new(o: O) -> Self { @@ -855,7 +855,7 @@ impl OwningHandle impl OwningHandle where O: StableAddress, O::Target: ToHandleMut, H: DerefMut, { - /// Create a new mutable `OwningHandle` for a type that implements `ToHandleMut`. + /// Creates a new mutable `OwningHandle` for a type that implements `ToHandleMut`. pub fn new_mut(o: O) -> Self { OwningHandle::new_with_fn(o, |x| unsafe { O::Target::to_handle_mut(x) }) } @@ -864,7 +864,7 @@ impl OwningHandle impl OwningHandle where O: StableAddress, H: Deref, { - /// Create a new OwningHandle. The provided callback will be invoked with + /// Creates a new OwningHandle. The provided callback will be invoked with /// a pointer to the object owned by `o`, and the returned value is stored /// as the object to which this `OwningHandle` will forward `Deref` and /// `DerefMut`. @@ -882,7 +882,7 @@ impl OwningHandle } } - /// Create a new OwningHandle. The provided callback will be invoked with + /// Creates a new OwningHandle. The provided callback will be invoked with /// a pointer to the object owned by `o`, and the returned value is stored /// as the object to which this `OwningHandle` will forward `Deref` and /// `DerefMut`. diff --git a/src/librustc_data_structures/sip128.rs b/src/librustc_data_structures/sip128.rs index 9ec9a398400..06f157f9729 100644 --- a/src/librustc_data_structures/sip128.rs +++ b/src/librustc_data_structures/sip128.rs @@ -44,7 +44,7 @@ macro_rules! compress { }); } -/// Load an integer of the desired type from a byte stream, in LE order. 
Uses +/// Loads an integer of the desired type from a byte stream, in LE order. Uses /// `copy_nonoverlapping` to let the compiler generate the most efficient way /// to load it from a possibly unaligned address. /// @@ -61,7 +61,7 @@ macro_rules! load_int_le { }); } -/// Load an u64 using up to 7 bytes of a byte slice. +/// Loads a u64 using up to 7 bytes of a byte slice. /// /// Unsafe because: unchecked indexing at start..start+len #[inline] diff --git a/src/librustc_data_structures/svh.rs b/src/librustc_data_structures/svh.rs index 3757f921098..df4f6176837 100644 --- a/src/librustc_data_structures/svh.rs +++ b/src/librustc_data_structures/svh.rs @@ -17,7 +17,7 @@ pub struct Svh { } impl Svh { - /// Create a new `Svh` given the hash. If you actually want to + /// Creates a new `Svh` given the hash. If you actually want to /// compute the SVH from some HIR, you want the `calculate_svh` /// function found in `librustc_incremental`. pub fn new(hash: u64) -> Svh { diff --git a/src/librustc_data_structures/transitive_relation.rs b/src/librustc_data_structures/transitive_relation.rs index 39aed983360..0974607fabe 100644 --- a/src/librustc_data_structures/transitive_relation.rs +++ b/src/librustc_data_structures/transitive_relation.rs @@ -82,7 +82,7 @@ impl TransitiveRelation { } /// Applies the (partial) function to each edge and returns a new - /// relation. If `f` returns `None` for any end-point, returns + /// relation. If `f` returns `None` for any end-point, returns /// `None`.
pub fn maybe_map(&self, mut f: F) -> Option> where F: FnMut(&T) -> Option, @@ -111,7 +111,7 @@ impl TransitiveRelation { } } - /// Check whether `a < target` (transitively) + /// Checks whether `a < target` (transitively) pub fn contains(&self, a: &T, b: &T) -> bool { match (self.index(a), self.index(b)) { (Some(a), Some(b)) => self.with_closure(|closure| closure.contains(a.0, b.0)), @@ -122,7 +122,7 @@ impl TransitiveRelation { /// Thinking of `x R y` as an edge `x -> y` in a graph, this /// returns all things reachable from `a`. /// - /// Really this probably ought to be `impl Iterator`, but + /// Really this probably ought to be `impl Iterator`, but /// I'm too lazy to make that work, and -- given the caching /// strategy -- it'd be a touch tricky anyhow. pub fn reachable_from(&self, a: &T) -> Vec<&T> { @@ -152,20 +152,20 @@ impl TransitiveRelation { /// the query is `postdom_upper_bound(a, b)`: /// /// ```text - /// // returns Some(x), which is also LUB + /// // Returns Some(x), which is also LUB. /// a -> a1 -> x /// ^ /// | /// b -> b1 ---+ /// - /// // returns Some(x), which is not LUB (there is none) - /// // diagonal edges run left-to-right + /// // Returns `Some(x)`, which is not LUB (there is none) + /// // diagonal edges run left-to-right. /// a -> a1 -> x /// \/ ^ /// /\ | /// b -> b1 ---+ /// - /// // returns None + /// // Returns `None`. /// a -> a1 /// b -> b1 /// ``` diff --git a/src/librustc_data_structures/work_queue.rs b/src/librustc_data_structures/work_queue.rs index 06418b1051a..193025aafad 100644 --- a/src/librustc_data_structures/work_queue.rs +++ b/src/librustc_data_structures/work_queue.rs @@ -14,7 +14,7 @@ pub struct WorkQueue { } impl WorkQueue { - /// Create a new work queue with all the elements from (0..len). + /// Creates a new work queue with all the elements from (0..len). 
#[inline] pub fn with_all(len: usize) -> Self { WorkQueue { @@ -23,7 +23,7 @@ } } - /// Create a new work queue that starts empty, where elements range from (0..len). + /// Creates a new work queue that starts empty, where elements range from (0..len). #[inline] pub fn with_none(len: usize) -> Self { WorkQueue { @@ -54,7 +54,7 @@ } } - /// True if nothing is enqueued. + /// Returns `true` if nothing is enqueued. #[inline] pub fn is_empty(&self) -> bool { self.deque.is_empty() diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 6a23cadf877..09804a706ec 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -711,7 +711,7 @@ pub struct InnerExpansionResult<'a> { pub hir_forest: hir_map::Forest, } -/// Run the "early phases" of the compiler: initial `cfg` processing, +/// Runs the "early phases" of the compiler: initial `cfg` processing, /// loading compiler plugins (including those from `addl_plugins`), /// syntax expansion, secondary `cfg` expansion, synthesis of a test /// harness if one is to be provided, injection of a dependency on the @@ -1167,7 +1167,7 @@ pub fn default_provide_extern(providers: &mut ty::query::Providers) { cstore::provide_extern(providers); } -/// Run the resolution, typechecking, region checking and other +/// Runs the resolution, type-checking, region checking and other /// miscellaneous analysis passes on the crate. Return various /// structures carrying the results of the analysis. pub fn phase_3_run_analysis_passes<'tcx, F, R>( @@ -1334,7 +1334,7 @@ where ) } -/// Run the codegen backend, after which the AST and analysis can +/// Runs the codegen backend, after which the AST and analysis can /// be discarded.
pub fn phase_4_codegen<'a, 'tcx>( codegen_backend: &dyn CodegenBackend, diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index b356ae38e24..2d894bd65b2 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -114,7 +114,7 @@ pub mod target_features { use rustc::session::Session; use rustc_codegen_utils::codegen_backend::CodegenBackend; - /// Add `target_feature = "..."` cfgs for a variety of platform + /// Adds `target_feature = "..."` cfgs for a variety of platform /// specific features (SSE, NEON etc.). /// /// This is performed by checking whether a whitelisted set of @@ -1316,7 +1316,7 @@ fn print_flag_list(cmdline_opt: &str, /// Process command line options. Emits messages as appropriate. If compilation /// should continue, returns a getopts::Matches object parsed from args, -/// otherwise returns None. +/// otherwise returns `None`. /// /// The compiler's handling of options is a little complicated as it ties into /// our stability story, and it's even *more* complicated by historical @@ -1480,7 +1480,7 @@ pub fn in_rustc_thread(f: F) -> Result> in_named_rustc_thread("rustc".to_string(), f) } -/// Get a list of extra command-line flags provided by the user, as strings. +/// Gets a list of extra command-line flags provided by the user, as strings. /// /// This function is used during ICEs to show more information useful for /// debugging, since some ICEs only happens with non-default compiler flags @@ -1545,7 +1545,7 @@ impl Display for CompilationFailure { } } -/// Run a procedure which will detect panics in the compiler and print nicer +/// Runs a procedure which will detect panics in the compiler and print nicer /// error messages rather than just failing the test. 
/// /// The diagnostic emitter yielded to the procedure should be used for reporting diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index afcf08632a4..2ec755bd626 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -1,4 +1,4 @@ -//! # Standalone Tests for the Inference Module +//! Standalone tests for the inference module. use driver; use errors; @@ -508,8 +508,8 @@ fn subst_ty_renumber_bound() { }) } -/// Test substituting a bound region into a function, which introduces another level of binding. -/// This requires adjusting the Debruijn index. +/// Tests substituting a bound region into a function, which introduces another level of binding. +/// This requires adjusting the De Bruijn index. #[test] fn subst_ty_renumber_some_bounds() { test_env(EMPTY_SOURCE_STR, errors(&[]), |env| { @@ -544,7 +544,7 @@ fn subst_ty_renumber_some_bounds() { }) } -/// Test that we correctly compute whether a type has escaping regions or not. +/// Tests that we correctly compute whether a type has escaping regions or not. #[test] fn escaping() { test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| { @@ -571,7 +571,7 @@ fn escaping() { }) } -/// Test applying a substitution where the value being substituted for an early-bound region is a +/// Tests applying a substitution where the value being substituted for an early-bound region is a /// late-bound region. #[test] fn subst_region_renumber_region() { diff --git a/src/librustc_errors/diagnostic.rs b/src/librustc_errors/diagnostic.rs index aefe296ad0f..2c410f69bfc 100644 --- a/src/librustc_errors/diagnostic.rs +++ b/src/librustc_errors/diagnostic.rs @@ -118,7 +118,7 @@ impl Diagnostic { self.level == Level::Cancelled } - /// Add a span/label to be included in the resulting snippet. + /// Adds a span/label to be included in the resulting snippet. /// This is pushed onto the `MultiSpan` that was created when the /// diagnostic was first built. 
If you don't call this function at /// all, and you just supplied a `Span` to create the diagnostic, diff --git a/src/librustc_errors/diagnostic_builder.rs b/src/librustc_errors/diagnostic_builder.rs index fd4ea7f2d82..9d5e8d10b17 100644 --- a/src/librustc_errors/diagnostic_builder.rs +++ b/src/librustc_errors/diagnostic_builder.rs @@ -26,7 +26,7 @@ pub struct DiagnosticBuilder<'a> { /// In general, the `DiagnosticBuilder` uses deref to allow access to /// the fields and methods of the embedded `diagnostic` in a -/// transparent way. *However,* many of the methods are intended to +/// transparent way. *However,* many of the methods are intended to /// be used in a chained way, and hence ought to return `self`. In /// that case, we can't just naively forward to the method on the /// `diagnostic`, because the return type would be a `&Diagnostic` @@ -150,7 +150,7 @@ impl<'a> DiagnosticBuilder<'a> { self.cancel(); } - /// Add a span/label to be included in the resulting snippet. + /// Adds a span/label to be included in the resulting snippet. /// This is pushed onto the `MultiSpan` that was created when the /// diagnostic was first built. If you don't call this function at /// all, and you just supplied a `Span` to create the diagnostic, diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 2821201173e..1c0c9d137e4 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -22,7 +22,7 @@ pub trait Emitter { /// Emit a structured diagnostic. 
fn emit(&mut self, db: &DiagnosticBuilder<'_>); - /// Check if should show explanations about "rustc --explain" + /// Checks if should show explanations about "rustc --explain" fn should_show_explain(&self) -> bool { true } @@ -868,7 +868,7 @@ impl EmitterWriter { } } - /// Add a left margin to every line but the first, given a padding length and the label being + /// Adds a left margin to every line but the first, given a padding length and the label being /// displayed, keeping the provided highlighting. fn msg_to_buffer(&self, buffer: &mut StyledBuffer, @@ -895,7 +895,7 @@ impl EmitterWriter { // `max_line_num_len` let padding = " ".repeat(padding + label.len() + 5); - /// Return whether `style`, or the override if present and the style is `NoStyle`. + /// Returns `true` if `style`, or the override if present and the style is `NoStyle`. fn style_or_override(style: Style, override_style: Option