From a4af9d1ac25113362898ca598556db5eaa3d8f31 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 07:34:08 +0200 Subject: parse_pat_with_range_pat: remove unnecessary assignments. --- src/libsyntax/parse/parser/pat.rs | 54 ++++++++++++++++++--------------------- 1 file changed, 25 insertions(+), 29 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 5cc428a4df1..21b38751831 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -108,8 +108,7 @@ impl<'a> Parser<'a> { maybe_whole!(self, NtPat, |x| x); let lo = self.token.span; - let pat; - match self.token.kind { + let pat = match self.token.kind { token::BinOp(token::And) | token::AndAnd => { // Parse &pat / &mut pat self.expect_and()?; @@ -120,7 +119,7 @@ impl<'a> Parser<'a> { return Err(err); } let subpat = self.parse_pat_with_range_pat(false, expected)?; - pat = PatKind::Ref(subpat, mutbl); + PatKind::Ref(subpat, mutbl) } token::OpenDelim(token::Paren) => { // Parse a tuple or parenthesis pattern. @@ -128,41 +127,40 @@ impl<'a> Parser<'a> { // Here, `(pat,)` is a tuple pattern. // For backward compatibility, `(..)` is a tuple pattern as well. - pat = if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { + if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { PatKind::Paren(fields.into_iter().nth(0).unwrap()) } else { PatKind::Tuple(fields) - }; + } } token::OpenDelim(token::Bracket) => { // Parse `[pat, pat,...]` as a slice pattern. - let (slice, _) = self.parse_delim_comma_seq(token::Bracket, |p| p.parse_pat(None))?; - pat = PatKind::Slice(slice); + PatKind::Slice(self.parse_delim_comma_seq(token::Bracket, |p| p.parse_pat(None))?.0) } token::DotDot => { self.bump(); - pat = if self.is_pat_range_end_start() { + if self.is_pat_range_end_start() { // Parse `..42` for recovery. self.parse_pat_range_to(RangeEnd::Excluded, "..")? } else { // A rest pattern `..`. PatKind::Rest - }; + } } token::DotDotEq => { // Parse `..=42` for recovery. self.bump(); - pat = self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotEq), "..=")?; + self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotEq), "..=")? } token::DotDotDot => { // Parse `...42` for recovery. self.bump(); - pat = self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotDot), "...")?; + self.parse_pat_range_to(RangeEnd::Included(RangeSyntax::DotDotDot), "...")? } // At this point, token != &, &&, (, [ _ => if self.eat_keyword(kw::Underscore) { // Parse _ - pat = PatKind::Wild; + PatKind::Wild } else if self.eat_keyword(kw::Mut) { // Parse mut ident @ pat / mut ref ident @ pat let mutref_span = self.prev_span.to(self.token.span); @@ -179,22 +177,20 @@ impl<'a> Parser<'a> { } else { BindingMode::ByValue(Mutability::Mutable) }; - pat = self.parse_pat_ident(binding_mode)?; + self.parse_pat_ident(binding_mode)? } else if self.eat_keyword(kw::Ref) { // Parse ref ident @ pat / ref mut ident @ pat let mutbl = self.parse_mutability(); - pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?; + self.parse_pat_ident(BindingMode::ByRef(mutbl))? } else if self.eat_keyword(kw::Box) { - // Parse box pat - let subpat = self.parse_pat_with_range_pat(false, None)?; - pat = PatKind::Box(subpat); + // Parse `box pat` + PatKind::Box(self.parse_pat_with_range_pat(false, None)?) 
} else if self.token.is_ident() && !self.token.is_reserved_ident() && self.parse_as_ident() { - // Parse ident @ pat + // Parse `ident @ pat` // This can give false positives and parse nullary enums, - // they are dealt with later in resolve - let binding_mode = BindingMode::ByValue(Mutability::Immutable); - pat = self.parse_pat_ident(binding_mode)?; + // they are dealt with later in resolve. + self.parse_pat_ident(BindingMode::ByValue(Mutability::Immutable))? } else if self.token.is_path_start() { // Parse pattern starting with a path let (qself, path) = if self.eat_lt() { @@ -216,7 +212,7 @@ impl<'a> Parser<'a> { delim, prior_type_ascription: self.last_type_ascription, }); - pat = PatKind::Mac(mac); + PatKind::Mac(mac) } token::DotDotDot | token::DotDotEq | token::DotDot => { let (end_kind, form) = match self.token.kind { @@ -232,7 +228,7 @@ impl<'a> Parser<'a> { let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new()); self.bump(); let end = self.parse_pat_range_end_opt(&begin, form)?; - pat = PatKind::Range(begin, end, respan(op_span, end_kind)); + PatKind::Range(begin, end, respan(op_span, end_kind)) } token::OpenDelim(token::Brace) => { if qself.is_some() { @@ -249,7 +245,7 @@ impl<'a> Parser<'a> { (vec![], true) }); self.bump(); - pat = PatKind::Struct(path, fields, etc); + PatKind::Struct(path, fields, etc) } token::OpenDelim(token::Paren) => { if qself.is_some() { @@ -260,9 +256,9 @@ impl<'a> Parser<'a> { } // Parse tuple struct or enum pattern let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?; - pat = PatKind::TupleStruct(path, fields) + PatKind::TupleStruct(path, fields) } - _ => pat = PatKind::Path(qself, path), + _ => PatKind::Path(qself, path), } } else { // Try to parse everything else as literal with optional minus @@ -282,9 +278,9 @@ impl<'a> Parser<'a> { on a range-operator token") }; let end = self.parse_pat_range_end_opt(&begin, form)?; - pat = PatKind::Range(begin, end, respan(op_span, end_kind)) + PatKind::Range(begin, end, respan(op_span, end_kind)) } else { - pat = PatKind::Lit(begin); + PatKind::Lit(begin) } } Err(mut err) => { @@ -305,7 +301,7 @@ impl<'a> Parser<'a> { } } } - } + }; let pat = self.mk_pat(lo.to(self.prev_span), pat); let pat = self.maybe_recover_from_bad_qpath(pat, true)?; -- cgit 1.4.1-3-g733a5 From 90793c0f126a9d5a0ffab297e9fef8bbbed6ae70 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 07:37:08 +0200 Subject: extract parse_pat_deref --- src/libsyntax/parse/parser/pat.rs | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 21b38751831..95678f9f7a1 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -109,18 +109,7 @@ impl<'a> Parser<'a> { let lo = self.token.span; let pat = match self.token.kind { - token::BinOp(token::And) | token::AndAnd => { - // Parse &pat / &mut pat - self.expect_and()?; - let mutbl = self.parse_mutability(); - if let token::Lifetime(name) = self.token.kind { - let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name)); - err.span_label(self.token.span, "unexpected lifetime"); - return Err(err); - } - let subpat = self.parse_pat_with_range_pat(false, expected)?; - PatKind::Ref(subpat, mutbl) - } + token::BinOp(token::And) | token::AndAnd => self.parse_pat_deref(expected)?, token::OpenDelim(token::Paren) => { // Parse a tuple or parenthesis pattern. 
let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?; @@ -332,6 +321,21 @@ impl<'a> Parser<'a> { Ok(pat) } + /// Parse `&pat` / `&mut pat`. + fn parse_pat_deref(&mut self, expected: Option<&'static str>) -> PResult<'a, PatKind> { + self.expect_and()?; + let mutbl = self.parse_mutability(); + + if let token::Lifetime(name) = self.token.kind { + let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", name)); + err.span_label(self.token.span, "unexpected lifetime"); + return Err(err); + } + + let subpat = self.parse_pat_with_range_pat(false, expected)?; + Ok(PatKind::Ref(subpat, mutbl)) + } + // Helper function to decide whether to parse as ident binding // or to try to do something more complex like range patterns. fn parse_as_ident(&mut self) -> bool { -- cgit 1.4.1-3-g733a5 From c69b3ede8a98b45633736f7a84757fe7f3b5a392 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 07:41:50 +0200 Subject: extract parse_pat_tuple_or_parens --- src/libsyntax/parse/parser/pat.rs | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 95678f9f7a1..b7e40969d3e 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -110,18 +110,7 @@ impl<'a> Parser<'a> { let lo = self.token.span; let pat = match self.token.kind { token::BinOp(token::And) | token::AndAnd => self.parse_pat_deref(expected)?, - token::OpenDelim(token::Paren) => { - // Parse a tuple or parenthesis pattern. - let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?; - - // Here, `(pat,)` is a tuple pattern. - // For backward compatibility, `(..)` is a tuple pattern as well. - if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { - PatKind::Paren(fields.into_iter().nth(0).unwrap()) - } else { - PatKind::Tuple(fields) - } - } + token::OpenDelim(token::Paren) => self.parse_pat_tuple_or_parens()?, token::OpenDelim(token::Bracket) => { // Parse `[pat, pat,...]` as a slice pattern. PatKind::Slice(self.parse_delim_comma_seq(token::Bracket, |p| p.parse_pat(None))?.0) @@ -336,6 +325,19 @@ impl<'a> Parser<'a> { Ok(PatKind::Ref(subpat, mutbl)) } + /// Parse a tuple or parenthesis pattern. + fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> { + let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?; + + // Here, `(pat,)` is a tuple pattern. + // For backward compatibility, `(..)` is a tuple pattern as well. + Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { + PatKind::Paren(fields.into_iter().nth(0).unwrap()) + } else { + PatKind::Tuple(fields) + }) + } + // Helper function to decide whether to parse as ident binding // or to try to do something more complex like range patterns. 
fn parse_as_ident(&mut self) -> bool { -- cgit 1.4.1-3-g733a5 From 91af5c2daf950bd6f99e17dd2e0d23e7cd45e131 Mon Sep 17 00:00:00 2001 From: Ilija Tovilo Date: Sun, 11 Aug 2019 23:37:05 +0200 Subject: Bring back suggestion for splitting `<-` into `< -` Closes #62632 --- src/libsyntax/parse/parser/expr.rs | 17 +++++++++++++++++ src/libsyntax/util/parser.rs | 2 ++ src/test/ui/obsolete-in-place/bad.rs | 2 +- src/test/ui/obsolete-in-place/bad.stderr | 8 ++++++-- src/test/ui/placement-syntax.rs | 2 +- src/test/ui/placement-syntax.stderr | 10 ++++++---- 6 files changed, 33 insertions(+), 8 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index 4432c1329cb..4fdb000ed90 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -224,6 +224,10 @@ impl<'a> Parser<'a> { self.err_dotdotdot_syntax(self.token.span); } + if self.token == token::LArrow { + self.err_larrow_operator(self.token.span); + } + self.bump(); if op.is_comparison() { self.check_no_chained_comparison(&lhs, &op); @@ -1702,6 +1706,19 @@ impl<'a> Parser<'a> { .emit(); } + fn err_larrow_operator(&self, span: Span) { + self.struct_span_err( + span, + "unexpected token: `<-`" + ).span_suggestion( + span, + "if you meant to write a comparison against a negative value, add a \ space in between `<` and `-`", + "< -".to_string(), + Applicability::MaybeIncorrect + ).emit(); + } + fn mk_assign_op(&self, binop: BinOp, lhs: P<Expr>, rhs: P<Expr>) -> ExprKind { ExprKind::AssignOp(binop, lhs, rhs) } diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs index d71358f45c4..a501541c959 100644 --- a/src/libsyntax/util/parser.rs +++ b/src/libsyntax/util/parser.rs @@ -97,6 +97,8 @@ impl AssocOp { // DotDotDot is no longer supported, but we need some way to display the error token::DotDotDot => Some(DotDotEq), token::Colon => Some(Colon), + // `<-` should probably be `< -` + token::LArrow => Some(Less), _ if t.is_keyword(kw::As) => Some(As), _ => None } diff --git a/src/test/ui/obsolete-in-place/bad.rs b/src/test/ui/obsolete-in-place/bad.rs index 3530862f767..a491bb21a57 100644 --- a/src/test/ui/obsolete-in-place/bad.rs +++ b/src/test/ui/obsolete-in-place/bad.rs @@ -2,7 +2,7 @@ fn foo() { let (x, y) = (0, 0); - x <- y; //~ ERROR expected one of + x <- y; //~ ERROR unexpected token: `<-` } fn main() { diff --git a/src/test/ui/obsolete-in-place/bad.stderr b/src/test/ui/obsolete-in-place/bad.stderr index 373b7ea4218..8a731b6240b 100644 --- a/src/test/ui/obsolete-in-place/bad.stderr +++ b/src/test/ui/obsolete-in-place/bad.stderr @@ -1,8 +1,12 @@ -error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `<-` +error: unexpected token: `<-` --> $DIR/bad.rs:5:7 | LL | x <- y; - | ^^ expected one of 8 possible tokens here + | ^^ +help: if you meant to write a comparison against a negative value, add a space in between `<` and `-` + | +LL | x < - y; + | ^^^ error: expected expression, found keyword `in` --> $DIR/bad.rs:10:5 diff --git a/src/test/ui/placement-syntax.rs b/src/test/ui/placement-syntax.rs index 2edd78ec8ab..4df96dedbd4 100644 --- a/src/test/ui/placement-syntax.rs +++ b/src/test/ui/placement-syntax.rs @@ -1,6 +1,6 @@ fn main() { let x = -5; - if x<-1 { //~ ERROR expected `{`, found `<-` + if x<-1 { //~ ERROR unexpected token: `<-` println!("ok"); } } diff --git a/src/test/ui/placement-syntax.stderr b/src/test/ui/placement-syntax.stderr index e90acce168e..e26931e60d8 100644 --- a/src/test/ui/placement-syntax.stderr
+++ b/src/test/ui/placement-syntax.stderr @@ -1,10 +1,12 @@ -error: expected `{`, found `<-` +error: unexpected token: `<-` --> $DIR/placement-syntax.rs:3:9 | LL | if x<-1 { - | -- ^^ expected `{` - | | - | this `if` statement has a condition, but no block + | ^^ +help: if you meant to write a comparison against a negative value, add a space in between `<` and `-` + | +LL | if x< -1 { + | ^^^ error: aborting due to previous error -- cgit 1.4.1-3-g733a5 From 3b651330e0ff4090f18fc834486a8f0a9aa62748 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 07:45:55 +0200 Subject: extract recover_pat_ident_mut_first --- src/libsyntax/parse/parser/pat.rs | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index b7e40969d3e..1b6baf09d81 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -140,22 +140,7 @@ impl<'a> Parser<'a> { // Parse _ PatKind::Wild } else if self.eat_keyword(kw::Mut) { - // Parse mut ident @ pat / mut ref ident @ pat - let mutref_span = self.prev_span.to(self.token.span); - let binding_mode = if self.eat_keyword(kw::Ref) { - self.diagnostic() - .struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect") - .span_suggestion( - mutref_span, - "try switching the order", - "ref mut".into(), - Applicability::MachineApplicable - ).emit(); - BindingMode::ByRef(Mutability::Mutable) - } else { - BindingMode::ByValue(Mutability::Mutable) - }; - self.parse_pat_ident(binding_mode)? + self.recover_pat_ident_mut_first()? } else if self.eat_keyword(kw::Ref) { // Parse ref ident @ pat / ref mut ident @ pat let mutbl = self.parse_mutability(); @@ -338,6 +323,25 @@ impl<'a> Parser<'a> { }) } + // Recover on `mut ref? ident @ pat` and suggest that the order of `mut` and `ref` is incorrect. + fn recover_pat_ident_mut_first(&mut self) -> PResult<'a, PatKind> { + let mutref_span = self.prev_span.to(self.token.span); + let binding_mode = if self.eat_keyword(kw::Ref) { + self.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect") + .span_suggestion( + mutref_span, + "try switching the order", + "ref mut".into(), + Applicability::MachineApplicable + ) + .emit(); + BindingMode::ByRef(Mutability::Mutable) + } else { + BindingMode::ByValue(Mutability::Mutable) + }; + self.parse_pat_ident(binding_mode) + } + // Helper function to decide whether to parse as ident binding // or to try to do something more complex like range patterns. fn parse_as_ident(&mut self) -> bool { -- cgit 1.4.1-3-g733a5 From 231da7e044255286ba92675e89ca168a4932452c Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 08:16:04 +0200 Subject: extract ban_pat_range_if_ambiguous --- src/libsyntax/parse/parser/pat.rs | 45 ++++++++++++++++++++++----------------- 1 file changed, 25 insertions(+), 20 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 1b6baf09d81..7c7dad1fd94 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -270,31 +270,36 @@ impl<'a> Parser<'a> { let pat = self.maybe_recover_from_bad_qpath(pat, true)?; if !allow_range_pat { - match pat.node { - PatKind::Range( - _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. } - ) => {}, - PatKind::Range(..) 
=> { - let mut err = self.struct_span_err( - pat.span, - "the range pattern here has ambiguous interpretation", - ); - err.span_suggestion( - pat.span, - "add parentheses to clarify the precedence", - format!("({})", pprust::pat_to_string(&pat)), - // "ambiguous interpretation" implies that we have to be guessing - Applicability::MaybeIncorrect - ); - return Err(err); - } - _ => {} - } + self.ban_pat_range_if_ambiguous(&pat)? } Ok(pat) } + /// Ban a range pattern if it has an ambiguous interpretation. + fn ban_pat_range_if_ambiguous(&self, pat: &Pat) -> PResult<'a, ()> { + match pat.node { + PatKind::Range( + .., Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. } + ) => return Ok(()), + PatKind::Range(..) => {} + _ => return Ok(()), + } + + let mut err = self.struct_span_err( + pat.span, + "the range pattern here has ambiguous interpretation", + ); + err.span_suggestion( + pat.span, + "add parentheses to clarify the precedence", + format!("({})", pprust::pat_to_string(&pat)), + // "ambiguous interpretation" implies that we have to be guessing + Applicability::MaybeIncorrect + ); + Err(err) + } + /// Parse `&pat` / `&mut pat`. fn parse_pat_deref(&mut self, expected: Option<&'static str>) -> PResult<'a, PatKind> { self.expect_and()?; -- cgit 1.4.1-3-g733a5 From e32bd69d0f7443bf76af4a8129fc43b381e5afaa Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 08:27:01 +0200 Subject: extract parse_pat_mac_invoc --- src/libsyntax/parse/parser/pat.rs | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 7c7dad1fd94..40dfa86834f 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -3,7 +3,7 @@ use super::{Parser, PResult, PathStyle}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use crate::ptr::P; use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac_}; -use crate::ast::{BindingMode, Ident, Mutability, Expr, ExprKind}; +use crate::ast::{BindingMode, Ident, Mutability, Path, Expr, ExprKind}; use crate::parse::token::{self}; use crate::print::pprust; use crate::source_map::{respan, Span, Spanned}; @@ -165,18 +165,7 @@ impl<'a> Parser<'a> { (None, self.parse_path(PathStyle::Expr)?) }; match self.token.kind { - token::Not if qself.is_none() => { - // Parse macro invocation - self.bump(); - let (delim, tts) = self.expect_delimited_token_tree()?; - let mac = respan(lo.to(self.prev_span), Mac_ { - path, - tts, - delim, - prior_type_ascription: self.last_type_ascription, - }); - PatKind::Mac(mac) - } + token::Not if qself.is_none() => self.parse_pat_mac_invoc(lo, path)?, token::DotDotDot | token::DotDotEq | token::DotDot => { let (end_kind, form) = match self.token.kind { token::DotDot => (RangeEnd::Excluded, ".."), @@ -328,7 +317,8 @@ impl<'a> Parser<'a> { }) } - // Recover on `mut ref? ident @ pat` and suggest that the order of `mut` and `ref` is incorrect. + /// Recover on `mut ref? ident @ pat` and suggest + /// that the order of `mut` and `ref` is incorrect. 
fn recover_pat_ident_mut_first(&mut self) -> PResult<'a, PatKind> { let mutref_span = self.prev_span.to(self.token.span); let binding_mode = if self.eat_keyword(kw::Ref) { @@ -347,6 +337,19 @@ impl<'a> Parser<'a> { self.parse_pat_ident(binding_mode) } + /// Parse macro invocation + fn parse_pat_mac_invoc(&mut self, lo: Span, path: Path) -> PResult<'a, PatKind> { + self.bump(); + let (delim, tts) = self.expect_delimited_token_tree()?; + let mac = respan(lo.to(self.prev_span), Mac_ { + path, + tts, + delim, + prior_type_ascription: self.last_type_ascription, + }); + Ok(PatKind::Mac(mac)) + } + // Helper function to decide whether to parse as ident binding // or to try to do something more complex like range patterns. fn parse_as_ident(&mut self) -> bool { -- cgit 1.4.1-3-g733a5 From e6f980f9b804acb42e72ba4b071320ca9e7f22e0 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 08:35:24 +0200 Subject: extract parse_pat_range_starting_with_path --- src/libsyntax/parse/parser/pat.rs | 40 ++++++++++++++++++++++++--------------- 1 file changed, 25 insertions(+), 15 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 40dfa86834f..5c53a497ff4 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -3,7 +3,7 @@ use super::{Parser, PResult, PathStyle}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use crate::ptr::P; use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac_}; -use crate::ast::{BindingMode, Ident, Mutability, Path, Expr, ExprKind}; +use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind}; use crate::parse::token::{self}; use crate::print::pprust; use crate::source_map::{respan, Span, Spanned}; @@ -167,20 +167,7 @@ impl<'a> Parser<'a> { match self.token.kind { token::Not if qself.is_none() => self.parse_pat_mac_invoc(lo, path)?, token::DotDotDot | token::DotDotEq | token::DotDot => { - let (end_kind, form) = match self.token.kind { - token::DotDot => (RangeEnd::Excluded, ".."), - token::DotDotDot => (RangeEnd::Included(RangeSyntax::DotDotDot), "..."), - token::DotDotEq => (RangeEnd::Included(RangeSyntax::DotDotEq), "..="), - _ => panic!("can only parse `..`/`...`/`..=` for ranges \ - (checked above)"), - }; - let op_span = self.token.span; - // Parse range - let span = lo.to(self.prev_span); - let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new()); - self.bump(); - let end = self.parse_pat_range_end_opt(&begin, form)?; - PatKind::Range(begin, end, respan(op_span, end_kind)) + self.parse_pat_range_starting_with_path(lo, qself, path)? } token::OpenDelim(token::Brace) => { if qself.is_some() { @@ -350,6 +337,29 @@ impl<'a> Parser<'a> { Ok(PatKind::Mac(mac)) } + /// Parse a range pattern `$path $form $end?` where `$form = ".." | "..." | "..=" ;`. + /// The `$path` has already been parsed and the next token is the `$form`. 
+ fn parse_pat_range_starting_with_path( + &mut self, + lo: Span, + qself: Option<QSelf>, + path: Path + ) -> PResult<'a, PatKind> { + let (end_kind, form) = match self.token.kind { + token::DotDot => (RangeEnd::Excluded, ".."), + token::DotDotDot => (RangeEnd::Included(RangeSyntax::DotDotDot), "..."), + token::DotDotEq => (RangeEnd::Included(RangeSyntax::DotDotEq), "..="), + _ => panic!("can only parse `..`/`...`/`..=` for ranges (checked above)"), + }; + let op_span = self.token.span; + // Parse range + let span = lo.to(self.prev_span); + let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new()); + self.bump(); + let end = self.parse_pat_range_end_opt(&begin, form)?; + Ok(PatKind::Range(begin, end, respan(op_span, end_kind))) + } + // Helper function to decide whether to parse as ident binding // or to try to do something more complex like range patterns. fn parse_as_ident(&mut self) -> bool { -- cgit 1.4.1-3-g733a5 From 49740b792ddf1bc6d98445b8955b2ebfb742772b Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 09:01:08 +0200 Subject: extract parse_pat_range_starting_with_lit --- src/libsyntax/parse/parser/pat.rs | 41 +++++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 20 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 5c53a497ff4..b821d9da354 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -202,26 +202,10 @@ impl<'a> Parser<'a> { } else { // Try to parse everything else as literal with optional minus match self.parse_literal_maybe_minus() { - Ok(begin) => { - let op_span = self.token.span; - if self.check(&token::DotDot) || self.check(&token::DotDotEq) || - self.check(&token::DotDotDot) { - let (end_kind, form) = if self.eat(&token::DotDotDot) { - (RangeEnd::Included(RangeSyntax::DotDotDot), "...") - } else if self.eat(&token::DotDotEq) { - (RangeEnd::Included(RangeSyntax::DotDotEq), "..=") - } else if self.eat(&token::DotDot) { - (RangeEnd::Excluded, "..") - } else { - panic!("impossible case: we already matched \ on a range-operator token") - }; - let end = self.parse_pat_range_end_opt(&begin, form)?; - PatKind::Range(begin, end, respan(op_span, end_kind)) - } else { - PatKind::Lit(begin) - } - } + Ok(begin) if self.check(&token::DotDot) || self.check(&token::DotDotEq) + || self.check(&token::DotDotDot) + => self.parse_pat_range_starting_with_lit(begin)?, + Ok(begin) => PatKind::Lit(begin), Err(mut err) => { self.cancel(&mut err); @@ -360,6 +344,23 @@ impl<'a> Parser<'a> { Ok(PatKind::Range(begin, end, respan(op_span, end_kind))) } + /// Parse a range pattern `$literal $form $end?` where `$form = ".." | "..." | "..=" ;`. + /// The `$path` has already been parsed and the next token is the `$form`.
+ fn parse_pat_range_starting_with_lit(&mut self, begin: P<Expr>) -> PResult<'a, PatKind> { + let op_span = self.token.span; + let (end_kind, form) = if self.eat(&token::DotDotDot) { + (RangeEnd::Included(RangeSyntax::DotDotDot), "...") + } else if self.eat(&token::DotDotEq) { + (RangeEnd::Included(RangeSyntax::DotDotEq), "..=") + } else if self.eat(&token::DotDot) { + (RangeEnd::Excluded, "..") + } else { + panic!("impossible case: we already matched on a range-operator token") + }; + let end = self.parse_pat_range_end_opt(&begin, form)?; + Ok(PatKind::Range(begin, end, respan(op_span, end_kind))) + } + // Helper function to decide whether to parse as ident binding // or to try to do something more complex like range patterns. fn parse_as_ident(&mut self) -> bool { -- cgit 1.4.1-3-g733a5 From 37f37a5df1b4873ab2a4562fca04dc6454817429 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 09:28:30 +0200 Subject: parser/pat: minor misc cleanup --- src/libsyntax/parse/parser/pat.rs | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index b821d9da354..b7a60a2a4fe 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -202,9 +202,13 @@ impl<'a> Parser<'a> { } else { // Try to parse everything else as literal with optional minus match self.parse_literal_maybe_minus() { - Ok(begin) if self.check(&token::DotDot) || self.check(&token::DotDotEq) - || self.check(&token::DotDotDot) - => self.parse_pat_range_starting_with_lit(begin)?, + Ok(begin) + if self.check(&token::DotDot) + || self.check(&token::DotDotEq) + || self.check(&token::DotDotDot) => + { + self.parse_pat_range_starting_with_lit(begin)? + } Ok(begin) => PatKind::Lit(begin), Err(mut err) => { self.cancel(&mut err); @@ -446,11 +450,9 @@ impl<'a> Parser<'a> { } /// Parses `ident` or `ident @ pat`. - /// used by the copy foo and ref foo patterns to give a good + /// Used by the copy foo and ref foo patterns to give a good /// error message when parsing mistakes like `ref foo(a, b)`. - fn parse_pat_ident(&mut self, - binding_mode: ast::BindingMode) - -> PResult<'a, PatKind> { + fn parse_pat_ident(&mut self, binding_mode: BindingMode) -> PResult<'a, PatKind> { let ident = self.parse_ident()?; let sub = if self.eat(&token::At) { Some(self.parse_pat(Some("binding pattern"))?) @@ -458,16 +460,16 @@ impl<'a> Parser<'a> { None }; - // just to be friendly, if they write something like - // ref Some(i) - // we end up here with ( as the current token. This shortly - // leads to a parse error. Note that if there is no explicit + // Just to be friendly, if they write something like `ref Some(i)`, + // we end up here with `(` as the current token. + // This shortly leads to a parse error. Note that if there is no explicit // binding mode then we do not end up here, because the lookahead - // will direct us over to parse_enum_variant() + // will direct us over to `parse_enum_variant()`.
if self.token == token::OpenDelim(token::Paren) { return Err(self.span_fatal( self.prev_span, - "expected identifier, found enum pattern")) + "expected identifier, found enum pattern", + )) } Ok(PatKind::Ident(binding_mode, ident, sub)) -- cgit 1.4.1-3-g733a5 From ddf734deb2c48247e06603262145aec3eedbb315 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 11:39:44 +0200 Subject: extract fatal_unexpected_non_pat --- src/libsyntax/parse/parser/pat.rs | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index b7a60a2a4fe..49090a57f62 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -210,22 +210,7 @@ impl<'a> Parser<'a> { self.parse_pat_range_starting_with_lit(begin)? } Ok(begin) => PatKind::Lit(begin), - Err(mut err) => { - self.cancel(&mut err); - let expected = expected.unwrap_or("pattern"); - let msg = format!( - "expected {}, found {}", - expected, - self.this_token_descr(), - ); - let mut err = self.fatal(&msg); - err.span_label(self.token.span, format!("expected {}", expected)); - let sp = self.sess.source_map().start_point(self.token.span); - if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) { - self.sess.expr_parentheses_needed(&mut err, *sp, None); - } - return Err(err); - } + Err(err) => return self.fatal_unexpected_non_pat(err, expected), } } }; @@ -365,6 +350,27 @@ impl<'a> Parser<'a> { Ok(PatKind::Range(begin, end, respan(op_span, end_kind))) } + fn fatal_unexpected_non_pat( + &mut self, + mut err: DiagnosticBuilder<'a>, + expected: Option<&'static str>, + ) -> PResult<'a, P<Pat>> { + self.cancel(&mut err); + + let expected = expected.unwrap_or("pattern"); + let msg = format!("expected {}, found {}", expected, self.this_token_descr()); + + let mut err = self.fatal(&msg); + err.span_label(self.token.span, format!("expected {}", expected)); + + let sp = self.sess.source_map().start_point(self.token.span); + if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) { + self.sess.expr_parentheses_needed(&mut err, *sp, None); + } + + Err(err) + } + // Helper function to decide whether to parse as ident binding // or to try to do something more complex like range patterns. fn parse_as_ident(&mut self) -> bool { -- cgit 1.4.1-3-g733a5 From c8fc4c106cfb7594dedf3372e33959e9b859c228 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Mon, 12 Aug 2019 12:27:43 +0200 Subject: extract parse_pat_{tuple_}struct + recover_one_fewer_dotdot --- src/libsyntax/parse/parser/pat.rs | 90 +++++++++++++++++++++++++++++++++++++++++++---------------------------------------- 1 file changed, 51 insertions(+), 39 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 49090a57f62..53f4d0998c3 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -169,34 +169,8 @@ impl<'a> Parser<'a> { token::DotDotDot | token::DotDotEq | token::DotDot => { self.parse_pat_range_starting_with_path(lo, qself, path)?
} - token::OpenDelim(token::Brace) => { - if qself.is_some() { - let msg = "unexpected `{` after qualified path"; - let mut err = self.fatal(msg); - err.span_label(self.token.span, msg); - return Err(err); - } - // Parse struct pattern - self.bump(); - let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| { - e.emit(); - self.recover_stmt(); - (vec![], true) - }); - self.bump(); - PatKind::Struct(path, fields, etc) - } - token::OpenDelim(token::Paren) => { - if qself.is_some() { - let msg = "unexpected `(` after qualified path"; - let mut err = self.fatal(msg); - err.span_label(self.token.span, msg); - return Err(err); - } - // Parse tuple struct or enum pattern - let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?; - PatKind::TupleStruct(path, fields) - } + token::OpenDelim(token::Brace) => self.parse_pat_struct(qself, path)?, + token::OpenDelim(token::Paren) => self.parse_pat_tuple_struct(qself, path)?, _ => PatKind::Path(qself, path), } } else { // Try to parse everything else as literal with optional minus @@ -481,6 +455,37 @@ impl<'a> Parser<'a> { Ok(PatKind::Ident(binding_mode, ident, sub)) } + /// Parse a struct ("record") pattern (e.g. `Foo { ... }` or `Foo::Bar { ... }`). + fn parse_pat_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> { + if qself.is_some() { + let msg = "unexpected `{` after qualified path"; + let mut err = self.fatal(msg); + err.span_label(self.token.span, msg); + return Err(err); + } + + self.bump(); + let (fields, etc) = self.parse_pat_fields().unwrap_or_else(|mut e| { + e.emit(); + self.recover_stmt(); + (vec![], true) + }); + self.bump(); + Ok(PatKind::Struct(path, fields, etc)) + } + + /// Parse tuple struct or tuple variant pattern (e.g. `Foo(...)` or `Foo::Bar(...)`). + fn parse_pat_tuple_struct(&mut self, qself: Option<QSelf>, path: Path) -> PResult<'a, PatKind> { + if qself.is_some() { + let msg = "unexpected `(` after qualified path"; + let mut err = self.fatal(msg); + err.span_label(self.token.span, msg); + return Err(err); + } + let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?; + Ok(PatKind::TupleStruct(path, fields)) + } + /// Parses the fields of a struct-like pattern. fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<Spanned<FieldPat>>, bool)> { let mut fields = Vec::new(); @@ -515,17 +520,7 @@ impl<'a> Parser<'a> { etc = true; let mut etc_sp = self.token.span; - if self.token == token::DotDotDot { // Issue #46718 - // Accept `...` as if it were `..` to avoid further errors - self.struct_span_err(self.token.span, "expected field pattern, found `...`") - .span_suggestion( - self.token.span, - "to omit remaining fields, use one fewer `.`", - "..".to_owned(), - Applicability::MachineApplicable - ) - .emit(); - } + self.recover_one_fewer_dotdot(); self.bump(); // `..` || `...` if self.token == token::CloseDelim(token::Brace) { @@ -607,6 +602,23 @@ impl<'a> Parser<'a> { return Ok((fields, etc)); } + /// Recover on `...` as if it were `..` to avoid further errors. + /// See issue #46718.
+ fn recover_one_fewer_dotdot(&self) { + if self.token != token::DotDotDot { + return; + } + + self.struct_span_err(self.token.span, "expected field pattern, found `...`") + .span_suggestion( + self.token.span, + "to omit remaining fields, use one fewer `.`", + "..".to_owned(), + Applicability::MachineApplicable + ) + .emit(); + } + fn parse_pat_field( &mut self, lo: Span, -- cgit 1.4.1-3-g733a5 From 34dcca20e5909513f08d1c21df1168357c3b6b6a Mon Sep 17 00:00:00 2001 From: Eduard-Mihai Burtescu Date: Sun, 11 Aug 2019 08:25:30 +0300 Subject: syntax: account for CVarArgs being in the argument list. --- src/libsyntax/parse/parser.rs | 2 +- src/test/ui/c-variadic/variadic-ffi-no-fixed-args.rs | 6 ++++++ src/test/ui/c-variadic/variadic-ffi-no-fixed-args.stderr | 8 ++++++++ 3 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 src/test/ui/c-variadic/variadic-ffi-no-fixed-args.rs create mode 100644 src/test/ui/c-variadic/variadic-ffi-no-fixed-args.stderr (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 1c1428c5713..2286e74e633 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1236,7 +1236,7 @@ impl<'a> Parser<'a> { let args: Vec<_> = args.into_iter().filter_map(|x| x).collect(); - if c_variadic && args.is_empty() { + if c_variadic && args.len() <= 1 { self.span_err(sp, "C-variadic function must be declared with at least one named argument"); } diff --git a/src/test/ui/c-variadic/variadic-ffi-no-fixed-args.rs b/src/test/ui/c-variadic/variadic-ffi-no-fixed-args.rs new file mode 100644 index 00000000000..e3b642a9d41 --- /dev/null +++ b/src/test/ui/c-variadic/variadic-ffi-no-fixed-args.rs @@ -0,0 +1,6 @@ +extern { + fn foo(...); + //~^ ERROR C-variadic function must be declared with at least one named argument +} + +fn main() {} diff --git a/src/test/ui/c-variadic/variadic-ffi-no-fixed-args.stderr b/src/test/ui/c-variadic/variadic-ffi-no-fixed-args.stderr new file mode 100644 index 00000000000..cb6060525fc --- /dev/null +++ b/src/test/ui/c-variadic/variadic-ffi-no-fixed-args.stderr @@ -0,0 +1,8 @@ +error: C-variadic function must be declared with at least one named argument + --> $DIR/variadic-ffi-no-fixed-args.rs:2:11 + | +LL | fn foo(...); + | ^ + +error: aborting due to previous error + -- cgit 1.4.1-3-g733a5 From 71415ef9bd697a49db34742172aacb792ce8d116 Mon Sep 17 00:00:00 2001 From: nathanwhit Date: Thu, 25 Jul 2019 11:51:05 -0400 Subject: Parse excess semicolons as empty stmts for linting --- src/libsyntax/parse/parser/stmt.rs | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs index f182edcbff4..750d8fbbddc 100644 --- a/src/libsyntax/parse/parser/stmt.rs +++ b/src/libsyntax/parse/parser/stmt.rs @@ -167,7 +167,22 @@ impl<'a> Parser<'a> { if self.token == token::Semi { unused_attrs(&attrs, self); self.bump(); - return Ok(None); + let mut last_semi = lo; + while self.token == token::Semi { + last_semi = self.token.span; + self.bump(); + } + // We are encoding a string of semicolons as an + // an empty tuple that spans the excess semicolons + // to preserve this info until the lint stage + return Ok(Some(Stmt { + id: ast::DUMMY_NODE_ID, + span: lo.to(last_semi), + node: StmtKind::Semi(self.mk_expr(lo.to(last_semi), + ExprKind::Tup(Vec::new()), + ThinVec::new() + )), + })); } if self.token == token::CloseDelim(token::Brace) { -- cgit 1.4.1-3-g733a5 From 
d824edfc2c063cff6e6536a1fcb56be6d89fa0cd Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Mon, 12 Aug 2019 23:31:13 -0700 Subject: Do not ICE when synthesizing spans falling inside unicode chars --- src/libsyntax/source_map.rs | 6 ++++++ src/test/ui/suggestions/issue-61226.rs | 5 +++++ src/test/ui/suggestions/issue-61226.stderr | 17 +++++++++++++++++ 3 files changed, 28 insertions(+) create mode 100644 src/test/ui/suggestions/issue-61226.rs create mode 100644 src/test/ui/suggestions/issue-61226.stderr (limited to 'src/libsyntax') diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs index 4e29c77c89e..3c58cfbbb2b 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -554,8 +554,14 @@ impl SourceMap { } if let Some(ref src) = local_begin.sf.src { + if !src.is_char_boundary(start_index) || !src.is_char_boundary(end_index) { + return Err(SpanSnippetError::IllFormedSpan(sp)); + } return Ok(extract_source(src, start_index, end_index)); } else if let Some(src) = local_begin.sf.external_src.borrow().get_source() { + if !src.is_char_boundary(start_index) || !src.is_char_boundary(end_index) { + return Err(SpanSnippetError::IllFormedSpan(sp)); + } return Ok(extract_source(src, start_index, end_index)); } else { return Err(SpanSnippetError::SourceNotAvailable { diff --git a/src/test/ui/suggestions/issue-61226.rs b/src/test/ui/suggestions/issue-61226.rs new file mode 100644 index 00000000000..1eed55e5f9f --- /dev/null +++ b/src/test/ui/suggestions/issue-61226.rs @@ -0,0 +1,5 @@ +struct X {} +fn f() { + vec![X]; //… + //~^ ERROR expected value, found struct `X` +} diff --git a/src/test/ui/suggestions/issue-61226.stderr b/src/test/ui/suggestions/issue-61226.stderr new file mode 100644 index 00000000000..ac27fb1f758 --- /dev/null +++ b/src/test/ui/suggestions/issue-61226.stderr @@ -0,0 +1,17 @@ +error[E0423]: expected value, found struct `X` + --> $DIR/issue-61226.rs:3:10 + | +LL | vec![X]; //… + | ^ + | | + | did you mean `X { /* fields */ }`? + | help: a function with a similar name exists: `f` + +error[E0601]: `main` function not found in crate `issue_61226` + | + = note: consider adding a `main` function to `$DIR/issue-61226.rs` + +error: aborting due to 2 previous errors + +Some errors have detailed explanations: E0423, E0601. +For more information about an error, try `rustc --explain E0423`. -- cgit 1.4.1-3-g733a5 From 376636e51719588edba82fc284328e14ce1f2d74 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 13 Aug 2019 20:51:54 +0300 Subject: syntax: Remove `DummyResult::expn_only` --- src/libsyntax/ext/base.rs | 39 ++++++--------------------------------- src/libsyntax_ext/asm.rs | 4 ++-- src/libsyntax_ext/assert.rs | 2 +- src/libsyntax_ext/cfg.rs | 2 +- src/libsyntax_ext/concat.rs | 8 ++++---- src/libsyntax_ext/env.rs | 14 +++++++------- src/libsyntax_ext/format.rs | 2 +- src/libsyntax_ext/source_util.rs | 10 +++++----- 8 files changed, 27 insertions(+), 54 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 7f4feff6be6..11544d43ac3 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -405,7 +405,6 @@ impl MacResult for MacEager { /// after hitting errors. #[derive(Copy, Clone)] pub struct DummyResult { - expr_only: bool, is_error: bool, span: Span, } @@ -416,21 +415,12 @@ impl DummyResult { /// Use this as a return value after hitting any errors and /// calling `span_err`. 
pub fn any(span: Span) -> Box<dyn MacResult+'static> { - Box::new(DummyResult { expr_only: false, is_error: true, span }) + Box::new(DummyResult { is_error: true, span }) } /// Same as `any`, but must be a valid fragment, not error. pub fn any_valid(span: Span) -> Box<dyn MacResult+'static> { - Box::new(DummyResult { expr_only: false, is_error: false, span }) - } - - /// Creates a default MacResult that can only be an expression. - /// - /// Use this for macros that must expand to an expression, so even - /// if an error is encountered internally, the user will receive - /// an error that they also used it in the wrong place. - pub fn expr(span: Span) -> Box<dyn MacResult+'static> { - Box::new(DummyResult { expr_only: true, is_error: true, span }) + Box::new(DummyResult { is_error: false, span }) } /// A plain dummy expression. @@ -472,36 +462,19 @@ impl MacResult for DummyResult { } fn make_items(self: Box<DummyResult>) -> Option<SmallVec<[P<ast::Item>; 1]>> { - // this code needs a comment... why not always just return the Some() ? - if self.expr_only { - None - } else { - Some(SmallVec::new()) - } + Some(SmallVec::new()) } fn make_impl_items(self: Box<DummyResult>) -> Option<SmallVec<[ast::ImplItem; 1]>> { - if self.expr_only { - None - } else { - Some(SmallVec::new()) - } + Some(SmallVec::new()) } fn make_trait_items(self: Box<DummyResult>) -> Option<SmallVec<[ast::TraitItem; 1]>> { - if self.expr_only { - None - } else { - Some(SmallVec::new()) - } + Some(SmallVec::new()) } fn make_foreign_items(self: Box<DummyResult>) -> Option<SmallVec<[ast::ForeignItem; 1]>> { - if self.expr_only { - None - } else { - Some(SmallVec::new()) - } + Some(SmallVec::new()) } fn make_stmts(self: Box<DummyResult>) -> Option<SmallVec<[ast::Stmt; 1]>> { diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index c1c2732605c..950166f9260 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -47,10 +47,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>, -> Box<dyn MacResult + 'cx> { let mut inline_asm = match parse_inline_asm(cx, sp, tts) { Ok(Some(inline_asm)) => inline_asm, - Ok(None) => return DummyResult::expr(sp), + Ok(None) => return DummyResult::any(sp), Err(mut err) => { err.emit(); - return DummyResult::expr(sp); + return DummyResult::any(sp); } }; diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs index d7571f43edd..e3ef39075e2 100644 --- a/src/libsyntax_ext/assert.rs +++ b/src/libsyntax_ext/assert.rs @@ -20,7 +20,7 @@ pub fn expand_assert<'cx>( Ok(assert) => assert, Err(mut err) => { err.emit(); - return DummyResult::expr(sp); + return DummyResult::any(sp); } }; diff --git a/src/libsyntax_ext/cfg.rs b/src/libsyntax_ext/cfg.rs index 84830e6ddda..0e52c1af908 100644 --- a/src/libsyntax_ext/cfg.rs +++ b/src/libsyntax_ext/cfg.rs @@ -25,7 +25,7 @@ pub fn expand_cfg( } Err(mut err) => { err.emit(); - DummyResult::expr(sp) + DummyResult::any(sp) } } } diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs index f1d079eb053..4cd17531a45 100644 --- a/src/libsyntax_ext/concat.rs +++ b/src/libsyntax_ext/concat.rs @@ -1,5 +1,5 @@ use syntax::ast; -use syntax::ext::base; +use syntax::ext::base::{self, DummyResult}; use syntax::symbol::Symbol; use syntax::tokenstream; @@ -12,7 +12,7 @@ pub fn expand_syntax_ext( ) -> Box<dyn base::MacResult + 'static> { let es = match base::get_exprs_from_tts(cx, sp, tts) { Some(e) => e, - None => return base::DummyResult::expr(sp), + None => return DummyResult::any(sp), }; let mut accumulator = String::new(); let mut missing_literal = vec![]; @@ -55,9 +55,9 @@ pub fn expand_syntax_ext( let mut err = cx.struct_span_err(missing_literal, "expected a literal"); err.note("only literals (like `\"foo\"`, `42` and `3.14`) can be passed to `concat!()`"); err.emit(); - return base::DummyResult::expr(sp); + return DummyResult::any(sp); } else
if has_errors { - return base::DummyResult::expr(sp); + return DummyResult::any(sp); } let sp = sp.apply_mark(cx.current_expansion.id); base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator))) diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs index 39fc90decc9..442f27c7821 100644 --- a/src/libsyntax_ext/env.rs +++ b/src/libsyntax_ext/env.rs @@ -16,7 +16,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>, tts: &[tokenstream::TokenTree]) -> Box<dyn MacResult + 'cx> { let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") { - None => return DummyResult::expr(sp), + None => return DummyResult::any(sp), Some(v) => v, }; @@ -50,21 +50,21 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt<'_>, let mut exprs = match get_exprs_from_tts(cx, sp, tts) { Some(ref exprs) if exprs.is_empty() => { cx.span_err(sp, "env! takes 1 or 2 arguments"); - return DummyResult::expr(sp); + return DummyResult::any(sp); } - None => return DummyResult::expr(sp), + None => return DummyResult::any(sp), Some(exprs) => exprs.into_iter(), }; let var = match expr_to_string(cx, exprs.next().unwrap(), "expected string literal") { - None => return DummyResult::expr(sp), + None => return DummyResult::any(sp), Some((v, _style)) => v, }; let msg = match exprs.next() { None => Symbol::intern(&format!("environment variable `{}` not defined", var)), Some(second) => { match expr_to_string(cx, second, "expected string literal") { - None => return DummyResult::expr(sp), + None => return DummyResult::any(sp), Some((s, _style)) => s, } } @@ -72,13 +72,13 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt<'_>, if exprs.next().is_some() { cx.span_err(sp, "env! takes 1 or 2 arguments"); - return DummyResult::expr(sp); + return DummyResult::any(sp); } let e = match env::var(&*var.as_str()) { Err(_) => { cx.span_err(sp, &msg.as_str()); - return DummyResult::expr(sp); + return DummyResult::any(sp); } Ok(s) => cx.expr_str(sp, Symbol::intern(&s)), }; diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 2ae13b66e28..d699b3b1a90 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -805,7 +805,7 @@ fn expand_format_args_impl<'cx>( } Err(mut err) => { err.emit(); - DummyResult::expr(sp) + DummyResult::any(sp) } } } diff --git a/src/libsyntax_ext/source_util.rs b/src/libsyntax_ext/source_util.rs index 2c8d53a2315..cbc01b48afd 100644 --- a/src/libsyntax_ext/source_util.rs +++ b/src/libsyntax_ext/source_util.rs @@ -111,7 +111,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::To -> Box<dyn base::MacResult+'static> { let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") { Some(f) => f, - None => return DummyResult::expr(sp) + None => return DummyResult::any(sp) }; let file = cx.resolve_path(file, sp); match fs::read_to_string(&file) { @@ -126,11 +126,11 @@ pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::To }, Err(ref e) if e.kind() == ErrorKind::InvalidData => { cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); - DummyResult::expr(sp) + DummyResult::any(sp) } Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); - DummyResult::expr(sp) + DummyResult::any(sp) } } } @@ -139,7 +139,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream:: -> Box<dyn base::MacResult+'static> { let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") { Some(f) => f, - None => return DummyResult::expr(sp) + None => return DummyResult::any(sp) }; let file = cx.resolve_path(file, sp); match
fs::read(&file) { @@ -158,7 +158,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream:: }, Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); - DummyResult::expr(sp) + DummyResult::any(sp) } } } -- cgit 1.4.1-3-g733a5 From ea1a9a0e2ba1da31e2de524bf9b0a7af6b02daff Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Tue, 13 Aug 2019 11:21:09 -0700 Subject: Fix typo in error message. --- src/libsyntax/parse/parser/expr.rs | 2 +- src/test/ui/issues/issue-13483.stderr | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index 4432c1329cb..5f04012d710 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -1199,7 +1199,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(kw::Else) || !cond.returns() { let sp = self.sess.source_map().next_point(lo); let mut err = self.diagnostic() - .struct_span_err(sp, "missing condition for `if` statemement"); + .struct_span_err(sp, "missing condition for `if` statement"); err.span_label(sp, "expected if condition here"); return Err(err) } diff --git a/src/test/ui/issues/issue-13483.stderr b/src/test/ui/issues/issue-13483.stderr index 739f0612366..faaf8690291 100644 --- a/src/test/ui/issues/issue-13483.stderr +++ b/src/test/ui/issues/issue-13483.stderr @@ -1,10 +1,10 @@ -error: missing condition for `if` statemement +error: missing condition for `if` statement --> $DIR/issue-13483.rs:3:14 | LL | } else if { | ^ expected if condition here -error: missing condition for `if` statemement +error: missing condition for `if` statement --> $DIR/issue-13483.rs:10:14 | LL | } else if { -- cgit 1.4.1-3-g733a5 From 84e202e6b36250dfc319aa5a869ad1df29b4b55a Mon Sep 17 00:00:00 2001 From: Esteban Küber Date: Tue, 13 Aug 2019 11:35:49 -0700 Subject: review comments --- src/libsyntax/source_map.rs | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs index 3c58cfbbb2b..74cab00d3c1 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -519,7 +519,7 @@ impl SourceMap { /// extract function takes three arguments: a string slice containing the source, an index in /// the slice for the beginning of the span and an index in the slice for the end of the span. 
fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError> - where F: Fn(&str, usize, usize) -> String + where F: Fn(&str, usize, usize) -> Result<String, SpanSnippetError> { if sp.lo() > sp.hi() { return Err(SpanSnippetError::IllFormedSpan(sp)); } @@ -554,15 +554,9 @@ impl SourceMap { } if let Some(ref src) = local_begin.sf.src { - if !src.is_char_boundary(start_index) || !src.is_char_boundary(end_index) { - return Err(SpanSnippetError::IllFormedSpan(sp)); - } - return Ok(extract_source(src, start_index, end_index)); + return extract_source(src, start_index, end_index); } else if let Some(src) = local_begin.sf.external_src.borrow().get_source() { - if !src.is_char_boundary(start_index) || !src.is_char_boundary(end_index) { - return Err(SpanSnippetError::IllFormedSpan(sp)); - } - return Ok(extract_source(src, start_index, end_index)); + return extract_source(src, start_index, end_index); } else { return Err(SpanSnippetError::SourceNotAvailable { filename: local_begin.sf.name.clone() @@ -573,8 +567,9 @@ impl SourceMap { /// Returns the source snippet as `String` corresponding to the given `Span` pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> { - self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index] - .to_string()) + self.span_to_source(sp, |src, start_index, end_index| src.get(start_index..end_index) + .map(|s| s.to_string()) + .ok_or_else(|| SpanSnippetError::IllFormedSpan(sp))) } pub fn span_to_margin(&self, sp: Span) -> Option<usize> { @@ -588,7 +583,9 @@ impl SourceMap { /// Returns the source snippet as `String` before the given `Span` pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> { - self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string()) + self.span_to_source(sp, |src, start_index, _| src.get(..start_index) + .map(|s| s.to_string()) + .ok_or_else(|| SpanSnippetError::IllFormedSpan(sp))) } /// Extend the given `Span` to just after the previous occurrence of `c`.
Return the same span -- cgit 1.4.1-3-g733a5 From 643ddfaaa8e11b2da052681b027b24b2718d4222 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Tue, 13 Aug 2019 15:09:11 -0700 Subject: Apply Centril's suggestion Co-Authored-By: Mazdak Farrokhzad --- src/libsyntax/parse/parser/expr.rs | 2 +- src/test/ui/issues/issue-13483.stderr | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index 5f04012d710..7f6406a89fb 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -1199,7 +1199,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(kw::Else) || !cond.returns() { let sp = self.sess.source_map().next_point(lo); let mut err = self.diagnostic() - .struct_span_err(sp, "missing condition for `if` statement"); + .struct_span_err(sp, "missing condition for `if` expression"); err.span_label(sp, "expected if condition here"); return Err(err) } diff --git a/src/test/ui/issues/issue-13483.stderr b/src/test/ui/issues/issue-13483.stderr index faaf8690291..df9f1dd0115 100644 --- a/src/test/ui/issues/issue-13483.stderr +++ b/src/test/ui/issues/issue-13483.stderr @@ -1,10 +1,10 @@ -error: missing condition for `if` statement +error: missing condition for `if` expression --> $DIR/issue-13483.rs:3:14 | LL | } else if { | ^ expected if condition here -error: missing condition for `if` statement +error: missing condition for `if` expression --> $DIR/issue-13483.rs:10:14 | LL | } else if { -- cgit 1.4.1-3-g733a5 From 0d29142aad9554a23f0881be95110ad96365bfcf Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 14 Aug 2019 01:59:14 +0300 Subject: expand: `expand_fragment` -> `fully_expand_fragment` --- src/libsyntax/ext/expand.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 9a3195b1165..21cf232ecc3 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -118,13 +118,13 @@ macro_rules! ast_fragments { impl<'a, 'b> MutVisitor for MacroExpander<'a, 'b> { fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> { - self.expand_fragment(AstFragment::OptExpr(Some(expr))).make_opt_expr() + self.fully_expand_fragment(AstFragment::OptExpr(Some(expr))).make_opt_expr() } $($(fn $mut_visit_ast(&mut self, ast: &mut $AstTy) { - visit_clobber(ast, |ast| self.expand_fragment(AstFragment::$Kind(ast)).$make_ast()); + visit_clobber(ast, |ast| self.fully_expand_fragment(AstFragment::$Kind(ast)).$make_ast()); })?)* $($(fn $flat_map_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy { - self.expand_fragment(AstFragment::$Kind(smallvec![ast_elt])).$make_ast() + self.fully_expand_fragment(AstFragment::$Kind(smallvec![ast_elt])).$make_ast() })?)* } @@ -265,7 +265,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { tokens: None, })]); - match self.expand_fragment(krate_item).make_items().pop().map(P::into_inner) { + match self.fully_expand_fragment(krate_item).make_items().pop().map(P::into_inner) { Some(ast::Item { attrs, node: ast::ItemKind::Mod(module), .. }) => { krate.attrs = attrs; krate.module = module; @@ -285,8 +285,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> { krate } - // Fully expand all macro invocations in this AST fragment. - fn expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment { + // Recursively expand all macro invocations in this AST fragment.
+ fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment { let orig_expansion_data = self.cx.current_expansion.clone(); self.cx.current_expansion.depth = 0; -- cgit 1.4.1-3-g733a5 From d416ebeb6ee265c980778df9bc4d84dc4a7b8580 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 14 Aug 2019 02:30:09 +0300 Subject: expand: Unimplement `MutVisitor` on `MacroExpander` Each call to `fully_expand_fragment` is something unique, interesting, and requiring attention. It represents a "root" of expansion and its use means that something unusual is happening, like eager expansion or expansion performed outside of the primary expansion pass. So, it shouldn't be hidden under a generic visitor call. Also, of all the implemented visitor methods, only two were actually used. --- src/libsyntax/ext/base.rs | 14 ++++++++++---- src/libsyntax/ext/expand.rs | 14 +------------- src/libsyntax_ext/proc_macro_harness.rs | 9 +++++---- src/libsyntax_ext/test_harness.rs | 27 +++++++++++++++------------ 4 files changed, 31 insertions(+), 33 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 7f4feff6be6..532de05eea2 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -947,8 +947,10 @@ pub fn expr_to_spanned_string<'a>( // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation. expr.span = expr.span.apply_mark(cx.current_expansion.id); - // we want to be able to handle e.g., `concat!("foo", "bar")` - cx.expander().visit_expr(&mut expr); + // Perform eager expansion on the expression. + // We want to be able to handle e.g., `concat!("foo", "bar")`. + let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr(); + Err(match expr.node { ast::ExprKind::Lit(ref l) => match l.node { ast::LitKind::Str(s, style) => return Ok(respan(expr.span, (s, style))), @@ -1013,8 +1015,12 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>, let mut p = cx.new_parser_from_tts(tts); let mut es = Vec::new(); while p.token != token::Eof { - let mut expr = panictry!(p.parse_expr()); - cx.expander().visit_expr(&mut expr); + let expr = panictry!(p.parse_expr()); + + // Perform eager expansion on the expression. + // We want to be able to handle e.g., `concat!("foo", "bar")`. + let expr = cx.expander().fully_expand_fragment(AstFragment::Expr(expr)).make_expr(); + es.push(expr); if p.eat(&token::Comma) { continue; diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 21cf232ecc3..402b42dfbc8 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -116,18 +116,6 @@ macro_rules! ast_fragments { } } - impl<'a, 'b> MutVisitor for MacroExpander<'a, 'b> { - fn filter_map_expr(&mut self, expr: P) -> Option> { - self.fully_expand_fragment(AstFragment::OptExpr(Some(expr))).make_opt_expr() - } - $($(fn $mut_visit_ast(&mut self, ast: &mut $AstTy) { - visit_clobber(ast, |ast| self.fully_expand_fragment(AstFragment::$Kind(ast)).$make_ast()); - })?)* - $($(fn $flat_map_ast_elt(&mut self, ast_elt: <$AstTy as IntoIterator>::Item) -> $AstTy { - self.fully_expand_fragment(AstFragment::$Kind(smallvec![ast_elt])).$make_ast() - })?)* - } - impl<'a> MacResult for crate::ext::tt::macro_rules::ParserAnyMacro<'a> { $(fn $make_ast(self: Box>) -> Option<$AstTy> { @@ -286,7 +274,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } // Recursively expand all macro invocations in this AST fragment.
- fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment { + pub fn fully_expand_fragment(&mut self, input_fragment: AstFragment) -> AstFragment { let orig_expansion_data = self.cx.current_expansion.clone(); self.cx.current_expansion.depth = 0; diff --git a/src/libsyntax_ext/proc_macro_harness.rs b/src/libsyntax_ext/proc_macro_harness.rs index 7913a7442ed..70325539f30 100644 --- a/src/libsyntax_ext/proc_macro_harness.rs +++ b/src/libsyntax_ext/proc_macro_harness.rs @@ -1,18 +1,17 @@ use std::mem; +use smallvec::smallvec; use syntax::ast::{self, Ident}; use syntax::attr; use syntax::source_map::{ExpnInfo, ExpnKind, respan}; use syntax::ext::base::{ExtCtxt, MacroKind}; -use syntax::ext::expand::ExpansionConfig; +use syntax::ext::expand::{AstFragment, ExpansionConfig}; use syntax::ext::hygiene::ExpnId; use syntax::ext::proc_macro::is_proc_macro_attr; -use syntax::mut_visit::MutVisitor; use syntax::parse::ParseSess; use syntax::ptr::P; use syntax::symbol::{kw, sym}; use syntax::visit::{self, Visitor}; - use syntax_pos::{Span, DUMMY_SP}; struct ProcMacroDerive { @@ -409,5 +408,7 @@ fn mk_decls( i }); - cx.monotonic_expander().flat_map_item(module).pop().unwrap() + // Integrate the new module into existing module structures. + let module = AstFragment::Items(smallvec![module]); + cx.monotonic_expander().fully_expand_fragment(module).make_items().pop().unwrap() } diff --git a/src/libsyntax_ext/test_harness.rs b/src/libsyntax_ext/test_harness.rs index eec8a3f8023..0267637e540 100644 --- a/src/libsyntax_ext/test_harness.rs +++ b/src/libsyntax_ext/test_harness.rs @@ -6,7 +6,7 @@ use syntax::ast::{self, Ident}; use syntax::attr; use syntax::entry::{self, EntryPointType}; use syntax::ext::base::{ExtCtxt, Resolver}; -use syntax::ext::expand::ExpansionConfig; +use syntax::ext::expand::{AstFragment, ExpansionConfig}; use syntax::ext::hygiene::{ExpnId, MacroKind}; use syntax::feature_gate::Features; use syntax::mut_visit::{*, ExpectOne}; @@ -74,12 +74,7 @@ impl<'a> MutVisitor for TestHarnessGenerator<'a> { noop_visit_crate(c, self); // Create a main function to run our tests - let test_main = { - let unresolved = mk_main(&mut self.cx); - self.cx.ext_cx.monotonic_expander().flat_map_item(unresolved).pop().unwrap() - }; - - c.module.items.push(test_main); + c.module.items.push(mk_main(&mut self.cx)); } fn flat_map_item(&mut self, i: P) -> SmallVec<[P; 1]> { @@ -216,7 +211,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>, let name = Ident::from_str("__test_reexports").gensym(); let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; cx.ext_cx.current_expansion.id = cx.ext_cx.resolver.get_module_scope(parent); - let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item { + let module = P(ast::Item { ident: name, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, @@ -224,9 +219,14 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>, vis: dummy_spanned(ast::VisibilityKind::Public), span: DUMMY_SP, tokens: None, - })).pop().unwrap(); + }); - (it, name) + // Integrate the new module into existing module structures. 
+ let module = AstFragment::Items(smallvec![module]); + let module = + cx.ext_cx.monotonic_expander().fully_expand_fragment(module).make_items().pop().unwrap(); + + (module, name) } /// Crawl over the crate, inserting test reexports and the test main function @@ -321,7 +321,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P { None => Ident::from_str_and_span("main", sp).gensym(), }; - P(ast::Item { + let main = P(ast::Item { ident: main_id, attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, @@ -329,8 +329,11 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P { vis: dummy_spanned(ast::VisibilityKind::Public), span: sp, tokens: None, - }) + }); + // Integrate the new item into existing module structures. + let main = AstFragment::Items(smallvec![main]); + cx.ext_cx.monotonic_expander().fully_expand_fragment(main).make_items().pop().unwrap() } fn path_name_i(idents: &[Ident]) -> String { -- cgit 1.4.1-3-g733a5 From 9348af8396c961f8bb79cc360c091d74ea4ba34a Mon Sep 17 00:00:00 2001 From: Caio Date: Tue, 13 Aug 2019 22:22:51 -0300 Subject: Add NodeId for Arm, Field and FieldPat --- src/libsyntax/ast.rs | 3 +++ src/libsyntax/ext/build.rs | 2 ++ src/libsyntax/mut_visit.rs | 12 +++++++++--- src/libsyntax/parse/parser/expr.rs | 3 +++ src/libsyntax/parse/parser/pat.rs | 1 + src/libsyntax_ext/deriving/generic/mod.rs | 1 + 6 files changed, 19 insertions(+), 3 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 052eb55b408..aadf7ec5588 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -608,6 +608,7 @@ pub struct FieldPat { pub pat: P, pub is_shorthand: bool, pub attrs: ThinVec, + pub id: NodeId, } #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] @@ -925,6 +926,7 @@ pub struct Arm { pub guard: Option>, pub body: P, pub span: Span, + pub id: NodeId, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] @@ -934,6 +936,7 @@ pub struct Field { pub span: Span, pub is_shorthand: bool, pub attrs: ThinVec, + pub id: NodeId, } pub type SpannedIdent = Spanned; diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 22962499a2b..aab782d612e 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -403,6 +403,7 @@ impl<'a> ExtCtxt<'a> { span, is_shorthand: false, attrs: ThinVec::new(), + id: ast::DUMMY_NODE_ID, } } pub fn expr_struct( @@ -612,6 +613,7 @@ impl<'a> ExtCtxt<'a> { guard: None, body: expr, span, + id: ast::DUMMY_NODE_ID, } } diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index be04c6a76b0..f910aaaf8fa 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -383,10 +383,11 @@ pub fn noop_visit_use_tree(use_tree: &mut UseTree, vis: &mut T) { } pub fn noop_visit_arm( - Arm { attrs, pats, guard, body, span }: &mut Arm, + Arm { attrs, pats, guard, body, span, id }: &mut Arm, vis: &mut T, ) { visit_attrs(attrs, vis); + vis.visit_id(id); visit_vec(pats, |pat| vis.visit_pat(pat)); visit_opt(guard, |guard| vis.visit_expr(guard)); vis.visit_expr(body); @@ -808,9 +809,10 @@ pub fn noop_visit_struct_field(f: &mut StructField, visitor: &mut } pub fn noop_visit_field(f: &mut Field, vis: &mut T) { - let Field { ident, expr, span, is_shorthand: _, attrs } = f; + let Field { ident, expr, span, is_shorthand: _, attrs, id } = f; vis.visit_ident(ident); vis.visit_expr(expr); + vis.visit_id(id); vis.visit_span(span); visit_thin_attrs(attrs, vis); } @@ -1040,8 +1042,12 @@ pub fn noop_visit_pat(pat: &mut P, vis: &mut T) { } PatKind::Struct(path, fields, _etc) => { 
vis.visit_path(path); - for Spanned { node: FieldPat { ident, pat, is_shorthand: _, attrs }, span } in fields { + for Spanned { + node: FieldPat { ident, pat, is_shorthand: _, attrs, id }, + span + } in fields { vis.visit_ident(ident); + vis.visit_id(id); vis.visit_pat(pat); visit_thin_attrs(attrs, vis); vis.visit_span(span); diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index 4432c1329cb..823dca2c9e7 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -1444,6 +1444,7 @@ impl<'a> Parser<'a> { guard, body: expr, span: lo.to(hi), + id: ast::DUMMY_NODE_ID, }) } @@ -1599,6 +1600,7 @@ impl<'a> Parser<'a> { expr: self.mk_expr(self.token.span, ExprKind::Err, ThinVec::new()), is_shorthand: false, attrs: ThinVec::new(), + id: ast::DUMMY_NODE_ID, }); } } @@ -1684,6 +1686,7 @@ impl<'a> Parser<'a> { expr, is_shorthand, attrs: attrs.into(), + id: ast::DUMMY_NODE_ID, }) } diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 5cc428a4df1..5a1b4164509 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -620,6 +620,7 @@ impl<'a> Parser<'a> { pat: subpat, is_shorthand, attrs: attrs.into(), + id: ast::DUMMY_NODE_ID, } }) } diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 7e6d9126c87..b21a6e7bc78 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -1613,6 +1613,7 @@ impl<'a> TraitDef<'a> { source_map::Spanned { span: pat.span.with_ctxt(self.span.ctxt()), node: ast::FieldPat { + id: ast::DUMMY_NODE_ID, ident: ident.unwrap(), pat, is_shorthand: false, -- cgit 1.4.1-3-g733a5 From 911398b96cc4825798c0887ec6ebce775ff5d2d1 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Wed, 14 Aug 2019 16:38:40 +0300 Subject: remove special handling of \r\n from the lexer --- src/librustc_lexer/src/lib.rs | 2 - src/librustc_lexer/src/unescape.rs | 36 ++++---------- src/librustc_lexer/src/unescape/tests.rs | 11 ++--- src/libsyntax/parse/lexer/mod.rs | 81 ++++++-------------------------- 4 files changed, 26 insertions(+), 104 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc_lexer/src/lib.rs b/src/librustc_lexer/src/lib.rs index c02abe6b89f..afef307a0ed 100644 --- a/src/librustc_lexer/src/lib.rs +++ b/src/librustc_lexer/src/lib.rs @@ -352,7 +352,6 @@ impl Cursor<'_> { loop { match self.nth_char(0) { '\n' => break, - '\r' if self.nth_char(1) == '\n' => break, EOF_CHAR if self.is_eof() => break, _ => { self.bump(); @@ -525,7 +524,6 @@ impl Cursor<'_> { match self.nth_char(0) { '/' if !first => break, '\n' if self.nth_char(1) != '\'' => break, - '\r' if self.nth_char(1) == '\n' => break, EOF_CHAR if self.is_eof() => break, '\'' => { self.bump(); diff --git a/src/librustc_lexer/src/unescape.rs b/src/librustc_lexer/src/unescape.rs index d8e00d4c7c5..c709b752608 100644 --- a/src/librustc_lexer/src/unescape.rs +++ b/src/librustc_lexer/src/unescape.rs @@ -128,11 +128,7 @@ fn scan_escape(first_char: char, chars: &mut Chars<'_>, mode: Mode) -> Result Err(EscapeError::EscapeOnlyChar), - '\r' => Err(if chars.clone().next() == Some('\n') { - EscapeError::EscapeOnlyChar - } else { - EscapeError::BareCarriageReturn - }), + '\r' => Err(EscapeError::BareCarriageReturn), '\'' if mode.in_single_quotes() => Err(EscapeError::EscapeOnlyChar), '"' if mode.in_double_quotes() => Err(EscapeError::EscapeOnlyChar), _ => { @@ -244,27 +240,15 @@ where let unescaped_char = match 
first_char { '\\' => { - let (second_char, third_char) = { - let mut chars = chars.clone(); - (chars.next(), chars.next()) - }; - match (second_char, third_char) { - (Some('\n'), _) | (Some('\r'), Some('\n')) => { + let second_char = chars.clone().next(); + match second_char { + Some('\n') => { skip_ascii_whitespace(&mut chars); continue; } _ => scan_escape(first_char, &mut chars, mode), } } - '\r' => { - let second_char = chars.clone().next(); - if second_char == Some('\n') { - chars.next(); - Ok('\n') - } else { - scan_escape(first_char, &mut chars, mode) - } - } '\n' => Ok('\n'), '\t' => Ok('\t'), _ => scan_escape(first_char, &mut chars, mode), @@ -298,15 +282,11 @@ where while let Some(curr) = chars.next() { let start = initial_len - chars.as_str().len() - curr.len_utf8(); - let result = match (curr, chars.clone().next()) { - ('\r', Some('\n')) => { - chars.next(); - Ok('\n') - }, - ('\r', _) => Err(EscapeError::BareCarriageReturnInRawString), - (c, _) if mode.is_bytes() && !c.is_ascii() => + let result = match curr { + '\r' => Err(EscapeError::BareCarriageReturnInRawString), + c if mode.is_bytes() && !c.is_ascii() => Err(EscapeError::NonAsciiCharInByteString), - (c, _) => Ok(c), + c => Ok(c), }; let end = initial_len - chars.as_str().len(); diff --git a/src/librustc_lexer/src/unescape/tests.rs b/src/librustc_lexer/src/unescape/tests.rs index 496527eb265..e7b1ff6479d 100644 --- a/src/librustc_lexer/src/unescape/tests.rs +++ b/src/librustc_lexer/src/unescape/tests.rs @@ -11,7 +11,6 @@ fn test_unescape_char_bad() { check(r"\", EscapeError::LoneSlash); check("\n", EscapeError::EscapeOnlyChar); - check("\r\n", EscapeError::EscapeOnlyChar); check("\t", EscapeError::EscapeOnlyChar); check("'", EscapeError::EscapeOnlyChar); check("\r", EscapeError::BareCarriageReturn); @@ -31,6 +30,7 @@ fn test_unescape_char_bad() { check(r"\v", EscapeError::InvalidEscape); check(r"\💩", EscapeError::InvalidEscape); check(r"\●", EscapeError::InvalidEscape); + check("\\\r", EscapeError::InvalidEscape); check(r"\x", EscapeError::TooShortHexEscape); check(r"\x0", EscapeError::TooShortHexEscape); @@ -116,10 +116,9 @@ fn test_unescape_str_good() { check("foo", "foo"); check("", ""); - check(" \t\n\r\n", " \t\n\n"); + check(" \t\n", " \t\n"); check("hello \\\n world", "hello world"); - check("hello \\\r\n world", "hello world"); check("thread's", "thread's") } @@ -134,7 +133,6 @@ fn test_unescape_byte_bad() { check(r"\", EscapeError::LoneSlash); check("\n", EscapeError::EscapeOnlyChar); - check("\r\n", EscapeError::EscapeOnlyChar); check("\t", EscapeError::EscapeOnlyChar); check("'", EscapeError::EscapeOnlyChar); check("\r", EscapeError::BareCarriageReturn); @@ -238,10 +236,9 @@ fn test_unescape_byte_str_good() { check("foo", b"foo"); check("", b""); - check(" \t\n\r\n", b" \t\n\n"); + check(" \t\n", b" \t\n"); check("hello \\\n world", b"hello world"); - check("hello \\\r\n world", b"hello world"); check("thread's", b"thread's") } @@ -253,7 +250,6 @@ fn test_unescape_raw_str() { assert_eq!(unescaped, expected); } - check("\r\n", &[(0..2, Ok('\n'))]); check("\r", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))]); check("\rx", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString)), (1..2, Ok('x'))]); } @@ -266,7 +262,6 @@ fn test_unescape_raw_byte_str() { assert_eq!(unescaped, expected); } - check("\r\n", &[(0..2, Ok(byte_from_char('\n')))]); check("\r", &[(0..1, Err(EscapeError::BareCarriageReturnInRawString))]); check("🦀", &[(0..4, Err(EscapeError::NonAsciiCharInByteString))]); check( diff --git 
a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index e86d4c7fde6..e811bf7a581 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -8,9 +8,7 @@ use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION}; use rustc_lexer::Base; use rustc_lexer::unescape; -use std::borrow::Cow; use std::char; -use std::iter; use std::convert::TryInto; use rustc_data_structures::sync::Lrc; use log::debug; @@ -181,18 +179,7 @@ impl<'a> StringReader<'a> { let string = self.str_from(start); // comments with only more "/"s are not doc comments let tok = if is_doc_comment(string) { - let mut idx = 0; - loop { - idx = match string[idx..].find('\r') { - None => break, - Some(it) => idx + it + 1 - }; - if string[idx..].chars().next() != Some('\n') { - self.err_span_(start + BytePos(idx as u32 - 1), - start + BytePos(idx as u32), - "bare CR not allowed in doc-comment"); - } - } + self.forbid_bare_cr(start, string, "bare CR not allowed in doc-comment"); token::DocComment(Symbol::intern(string)) } else { token::Comment @@ -217,15 +204,10 @@ impl<'a> StringReader<'a> { } let tok = if is_doc_comment { - let has_cr = string.contains('\r'); - let string = if has_cr { - self.translate_crlf(start, - string, - "bare CR not allowed in block doc-comment") - } else { - string.into() - }; - token::DocComment(Symbol::intern(&string[..])) + self.forbid_bare_cr(start, + string, + "bare CR not allowed in block doc-comment"); + token::DocComment(Symbol::intern(string)) } else { token::Comment }; @@ -516,49 +498,16 @@ impl<'a> StringReader<'a> { &self.src[self.src_index(start)..self.src_index(end)] } - /// Converts CRLF to LF in the given string, raising an error on bare CR. - fn translate_crlf<'b>(&self, start: BytePos, s: &'b str, errmsg: &'b str) -> Cow<'b, str> { - let mut chars = s.char_indices().peekable(); - while let Some((i, ch)) = chars.next() { - if ch == '\r' { - if let Some((lf_idx, '\n')) = chars.peek() { - return translate_crlf_(self, start, s, *lf_idx, chars, errmsg).into(); - } - let pos = start + BytePos(i as u32); - let end_pos = start + BytePos((i + ch.len_utf8()) as u32); - self.err_span_(pos, end_pos, errmsg); - } - } - return s.into(); - - fn translate_crlf_(rdr: &StringReader<'_>, - start: BytePos, - s: &str, - mut j: usize, - mut chars: iter::Peekable>, - errmsg: &str) - -> String { - let mut buf = String::with_capacity(s.len()); - // Skip first CR - buf.push_str(&s[.. 
j - 1]); - while let Some((i, ch)) = chars.next() { - if ch == '\r' { - if j < i { - buf.push_str(&s[j..i]); - } - let next = i + ch.len_utf8(); - j = next; - if chars.peek().map(|(_, ch)| *ch) != Some('\n') { - let pos = start + BytePos(i as u32); - let end_pos = start + BytePos(next as u32); - rdr.err_span_(pos, end_pos, errmsg); - } - } - } - if j < s.len() { - buf.push_str(&s[j..]); - } - buf + fn forbid_bare_cr(&self, start: BytePos, s: &str, errmsg: &str) { + let mut idx = 0; + loop { + idx = match s[idx..].find('\r') { + None => break, + Some(it) => idx + it + 1 + }; + self.err_span_(start + BytePos(idx as u32 - 1), + start + BytePos(idx as u32), + errmsg); } } -- cgit 1.4.1-3-g733a5 From 6a42b0b28db4c18fd9e0c0e6e69ecbe0c5d9b107 Mon Sep 17 00:00:00 2001 From: Caio Date: Tue, 13 Aug 2019 21:40:21 -0300 Subject: Merge Variant and Variant_ --- src/librustc/hir/check_attr.rs | 2 +- src/librustc/hir/intravisit.rs | 12 ++++---- src/librustc/hir/lowering/item.rs | 14 ++++----- src/librustc/hir/map/collector.rs | 8 ++--- src/librustc/hir/map/def_collector.rs | 6 ++-- src/librustc/hir/map/mod.rs | 10 +++--- src/librustc/hir/mod.rs | 6 ++-- src/librustc/hir/print.rs | 6 ++-- src/librustc/ich/impls_hir.rs | 2 +- src/librustc/lint/context.rs | 4 +-- src/librustc/lint/mod.rs | 2 +- src/librustc/middle/dead.rs | 12 ++++---- src/librustc/middle/stability.rs | 8 ++--- src/librustc_lint/builtin.rs | 4 +-- src/librustc_lint/nonstandard_style.rs | 2 +- src/librustc_lint/types.rs | 2 +- src/librustc_metadata/encoder.rs | 2 +- src/librustc_passes/ast_validation.rs | 2 +- src/librustc_privacy/lib.rs | 14 ++++----- src/librustc_resolve/build_reduced_graph.rs | 10 +++--- src/librustc_save_analysis/dump_visitor.rs | 22 +++++++------- src/librustc_save_analysis/lib.rs | 4 +-- src/librustc_save_analysis/sig.rs | 4 +-- src/librustc_typeck/check/mod.rs | 10 +++--- src/librustc_typeck/check/wfcheck.rs | 2 +- src/librustc_typeck/collect.rs | 47 ++++++++++++++--------------- src/librustc_typeck/variance/constraints.rs | 4 +-- src/librustc_typeck/variance/terms.rs | 4 +-- src/librustdoc/test.rs | 2 +- src/librustdoc/visit_ast.rs | 8 ++--- src/libsyntax/ast.rs | 6 ++-- src/libsyntax/attr/mod.rs | 2 +- src/libsyntax/config.rs | 2 +- src/libsyntax/ext/build.rs | 16 +++++----- src/libsyntax/feature_gate.rs | 2 +- src/libsyntax/mut_visit.rs | 2 +- src/libsyntax/parse/diagnostics.rs | 11 +++---- src/libsyntax/parse/parser/item.rs | 5 +-- src/libsyntax/print/pprust.rs | 6 ++-- src/libsyntax/print/pprust/tests.rs | 5 +-- src/libsyntax/visit.rs | 8 ++--- src/libsyntax_ext/deriving/clone.rs | 6 ++-- src/libsyntax_ext/deriving/cmp/eq.rs | 2 +- src/libsyntax_ext/deriving/debug.rs | 2 +- src/libsyntax_ext/deriving/encodable.rs | 2 +- src/libsyntax_ext/deriving/generic/mod.rs | 16 +++++----- 46 files changed, 162 insertions(+), 166 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index 22124d4ee41..eae956c978a 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -336,7 +336,7 @@ impl Visitor<'tcx> for CheckAttrVisitor<'tcx> { fn is_c_like_enum(item: &hir::Item) -> bool { if let hir::ItemKind::Enum(ref def, _) = item.node { for variant in &def.variants { - match variant.node.data { + match variant.data { hir::VariantData::Unit(..) 
=> { /* continue */ } _ => { return false; } } diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index b5c760bc9a0..71d7464540a 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -577,15 +577,15 @@ pub fn walk_variant<'v, V: Visitor<'v>>(visitor: &mut V, variant: &'v Variant, generics: &'v Generics, parent_item_id: HirId) { - visitor.visit_ident(variant.node.ident); - visitor.visit_id(variant.node.id); - visitor.visit_variant_data(&variant.node.data, - variant.node.ident.name, + visitor.visit_ident(variant.ident); + visitor.visit_id(variant.id); + visitor.visit_variant_data(&variant.data, + variant.ident.name, generics, parent_item_id, variant.span); - walk_list!(visitor, visit_anon_const, &variant.node.disr_expr); - walk_list!(visitor, visit_attribute, &variant.node.attrs); + walk_list!(visitor, visit_anon_const, &variant.disr_expr); + walk_list!(visitor, visit_attribute, &variant.attrs); } pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { diff --git a/src/librustc/hir/lowering/item.rs b/src/librustc/hir/lowering/item.rs index 6b717e75199..abe8b841cbd 100644 --- a/src/librustc/hir/lowering/item.rs +++ b/src/librustc/hir/lowering/item.rs @@ -733,14 +733,12 @@ impl LoweringContext<'_> { } fn lower_variant(&mut self, v: &Variant) -> hir::Variant { - Spanned { - node: hir::VariantKind { - ident: v.node.ident, - id: self.lower_node_id(v.node.id), - attrs: self.lower_attrs(&v.node.attrs), - data: self.lower_variant_data(&v.node.data), - disr_expr: v.node.disr_expr.as_ref().map(|e| self.lower_anon_const(e)), - }, + hir::Variant { + attrs: self.lower_attrs(&v.attrs), + data: self.lower_variant_data(&v.data), + disr_expr: v.disr_expr.as_ref().map(|e| self.lower_anon_const(e)), + id: self.lower_node_id(v.id), + ident: v.ident, span: v.span, } } diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index b6807f7d3bb..effe2c0cc6a 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -544,11 +544,11 @@ impl<'a, 'hir> Visitor<'hir> for NodeCollector<'a, 'hir> { } fn visit_variant(&mut self, v: &'hir Variant, g: &'hir Generics, item_id: HirId) { - self.insert(v.span, v.node.id, Node::Variant(v)); - self.with_parent(v.node.id, |this| { + self.insert(v.span, v.id, Node::Variant(v)); + self.with_parent(v.id, |this| { // Register the constructor of this variant. 
- if let Some(ctor_hir_id) = v.node.data.ctor_hir_id() { - this.insert(v.span, ctor_hir_id, Node::Ctor(&v.node.data)); + if let Some(ctor_hir_id) = v.data.ctor_hir_id() { + this.insert(v.span, ctor_hir_id, Node::Ctor(&v.data)); } intravisit::walk_variant(this, v, g, item_id); }); diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 2964b130ddd..df9bd3a35a6 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -155,11 +155,11 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> { } fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) { - let def = self.create_def(v.node.id, - DefPathData::TypeNs(v.node.ident.as_interned_str()), + let def = self.create_def(v.id, + DefPathData::TypeNs(v.ident.as_interned_str()), v.span); self.with_parent(def, |this| { - if let Some(ctor_hir_id) = v.node.data.ctor_id() { + if let Some(ctor_hir_id) = v.data.ctor_id() { this.create_def(ctor_hir_id, DefPathData::Ctor, v.span); } visit::walk_variant(this, v, g, item_id) diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index b85738dd29a..59e298ea138 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -885,7 +885,7 @@ impl<'hir> Map<'hir> { _ => bug!("struct ID bound to non-struct {}", self.node_to_string(id)) } } - Some(Node::Variant(variant)) => &variant.node.data, + Some(Node::Variant(variant)) => &variant.data, Some(Node::Ctor(data)) => data, _ => bug!("expected struct or variant, found {}", self.node_to_string(id)) } @@ -918,7 +918,7 @@ impl<'hir> Map<'hir> { Node::ForeignItem(fi) => fi.ident.name, Node::ImplItem(ii) => ii.ident.name, Node::TraitItem(ti) => ti.ident.name, - Node::Variant(v) => v.node.ident.name, + Node::Variant(v) => v.ident.name, Node::Field(f) => f.ident.name, Node::Lifetime(lt) => lt.name.ident().name, Node::GenericParam(param) => param.name.ident().name, @@ -939,7 +939,7 @@ impl<'hir> Map<'hir> { Some(Node::ForeignItem(fi)) => Some(&fi.attrs[..]), Some(Node::TraitItem(ref ti)) => Some(&ti.attrs[..]), Some(Node::ImplItem(ref ii)) => Some(&ii.attrs[..]), - Some(Node::Variant(ref v)) => Some(&v.node.attrs[..]), + Some(Node::Variant(ref v)) => Some(&v.attrs[..]), Some(Node::Field(ref f)) => Some(&f.attrs[..]), Some(Node::Expr(ref e)) => Some(&*e.attrs), Some(Node::Stmt(ref s)) => Some(s.node.attrs()), @@ -1133,7 +1133,7 @@ impl Named for Spanned { fn name(&self) -> Name { self.node.name() } impl Named for Item { fn name(&self) -> Name { self.ident.name } } impl Named for ForeignItem { fn name(&self) -> Name { self.ident.name } } -impl Named for VariantKind { fn name(&self) -> Name { self.ident.name } } +impl Named for Variant { fn name(&self) -> Name { self.ident.name } } impl Named for StructField { fn name(&self) -> Name { self.ident.name } } impl Named for TraitItem { fn name(&self) -> Name { self.ident.name } } impl Named for ImplItem { fn name(&self) -> Name { self.ident.name } } @@ -1310,7 +1310,7 @@ fn hir_id_to_string(map: &Map<'_>, id: HirId, include_id: bool) -> String { } Some(Node::Variant(ref variant)) => { format!("variant {} in {}{}", - variant.node.ident, + variant.ident, path_str(), id_str) } Some(Node::Field(ref field)) => { diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 3d049fe4ccd..56ab37e138f 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -2193,7 +2193,7 @@ pub struct EnumDef { } #[derive(RustcEncodable, RustcDecodable, Debug, HashStable)] -pub struct VariantKind { +pub 
struct Variant { /// Name of the variant. #[stable_hasher(project(name))] pub ident: Ident, @@ -2205,10 +2205,10 @@ pub struct VariantKind { pub data: VariantData, /// Explicit discriminant (e.g., `Foo = 1`). pub disr_expr: Option, + /// Span + pub span: Span } -pub type Variant = Spanned; - #[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, HashStable)] pub enum UseKind { /// One import, e.g., `use foo::bar` or `use foo::bar as baz`. diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 11ba5120530..0cbfe4d75f1 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -737,7 +737,7 @@ impl<'a> State<'a> { for v in variants { self.space_if_not_bol(); self.maybe_print_comment(v.span.lo()); - self.print_outer_attributes(&v.node.attrs); + self.print_outer_attributes(&v.attrs); self.ibox(INDENT_UNIT); self.print_variant(v); self.s.word(","); @@ -829,8 +829,8 @@ impl<'a> State<'a> { pub fn print_variant(&mut self, v: &hir::Variant) { self.head(""); let generics = hir::Generics::empty(); - self.print_struct(&v.node.data, &generics, v.node.ident.name, v.span, false); - if let Some(ref d) = v.node.disr_expr { + self.print_struct(&v.data, &generics, v.ident.name, v.span, false); + if let Some(ref d) = v.disr_expr { self.s.space(); self.word_space("="); self.print_anon_const(d); diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs index 30d76f240d1..1fd4e00de49 100644 --- a/src/librustc/ich/impls_hir.rs +++ b/src/librustc/ich/impls_hir.rs @@ -304,7 +304,7 @@ impl<'a> HashStable> for hir::Mod { } } -impl_stable_hash_for_spanned!(hir::VariantKind); +impl_stable_hash_for_spanned!(hir::Variant); impl<'a> HashStable> for hir::Item { diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index de812410e8b..a05023718f8 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -1060,7 +1060,7 @@ for LateContextAndPass<'a, 'tcx, T> { v: &'tcx hir::Variant, g: &'tcx hir::Generics, item_id: hir::HirId) { - self.with_lint_attrs(v.node.id, &v.node.attrs, |cx| { + self.with_lint_attrs(v.id, &v.attrs, |cx| { lint_callback!(cx, check_variant, v, g); hir_visit::walk_variant(cx, v, g, item_id); lint_callback!(cx, check_variant_post, v, g); @@ -1236,7 +1236,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> } fn visit_variant(&mut self, v: &'a ast::Variant, g: &'a ast::Generics, item_id: ast::NodeId) { - self.with_lint_attrs(item_id, &v.node.attrs, |cx| { + self.with_lint_attrs(item_id, &v.attrs, |cx| { run_early_pass!(cx, check_variant, v, g); ast_visit::walk_variant(cx, v, g, item_id); run_early_pass!(cx, check_variant_post, v, g); diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 8ddf4603490..8cb5b1e26d9 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -846,7 +846,7 @@ impl intravisit::Visitor<'tcx> for LintLevelMapBuilder<'tcx> { v: &'tcx hir::Variant, g: &'tcx hir::Generics, item_id: hir::HirId) { - self.with_lint_attrs(v.node.id, &v.node.attrs, |builder| { + self.with_lint_attrs(v.id, &v.attrs, |builder| { intravisit::walk_variant(builder, v, g, item_id); }) } diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 55fa261f1ed..1f3adf00923 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -366,12 +366,12 @@ impl<'v, 'k, 'tcx> ItemLikeVisitor<'v> for LifeSeeder<'k, 'tcx> { match item.node { hir::ItemKind::Enum(ref enum_def, _) => { if allow_dead_code { - 
self.worklist.extend(enum_def.variants.iter().map(|variant| variant.node.id)); + self.worklist.extend(enum_def.variants.iter().map(|variant| variant.id)); } for variant in &enum_def.variants { - if let Some(ctor_hir_id) = variant.node.data.ctor_hir_id() { - self.struct_constructors.insert(ctor_hir_id, variant.node.id); + if let Some(ctor_hir_id) = variant.data.ctor_hir_id() { + self.struct_constructors.insert(ctor_hir_id, variant.id); } } } @@ -497,7 +497,7 @@ impl DeadVisitor<'tcx> { && !has_allow_dead_code_or_lang_attr(self.tcx, field.hir_id, &field.attrs) } - fn should_warn_about_variant(&mut self, variant: &hir::VariantKind) -> bool { + fn should_warn_about_variant(&mut self, variant: &hir::Variant) -> bool { !self.symbol_is_live(variant.id) && !has_allow_dead_code_or_lang_attr(self.tcx, variant.id, @@ -596,8 +596,8 @@ impl Visitor<'tcx> for DeadVisitor<'tcx> { variant: &'tcx hir::Variant, g: &'tcx hir::Generics, id: hir::HirId) { - if self.should_warn_about_variant(&variant.node) { - self.warn_dead_code(variant.node.id, variant.span, variant.node.ident.name, + if self.should_warn_about_variant(&variant) { + self.warn_dead_code(variant.id, variant.span, variant.ident.name, "variant", "constructed"); } else { intravisit::walk_variant(self, variant, g, id); diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 5ab762ab225..d02259bf301 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -290,10 +290,10 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> { } fn visit_variant(&mut self, var: &'tcx Variant, g: &'tcx Generics, item_id: HirId) { - self.annotate(var.node.id, &var.node.attrs, var.span, AnnotationKind::Required, + self.annotate(var.id, &var.attrs, var.span, AnnotationKind::Required, |v| { - if let Some(ctor_hir_id) = var.node.data.ctor_hir_id() { - v.annotate(ctor_hir_id, &var.node.attrs, var.span, AnnotationKind::Required, + if let Some(ctor_hir_id) = var.data.ctor_hir_id() { + v.annotate(ctor_hir_id, &var.attrs, var.span, AnnotationKind::Required, |_| {}); } @@ -372,7 +372,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'a, 'tcx> { } fn visit_variant(&mut self, var: &'tcx Variant, g: &'tcx Generics, item_id: HirId) { - self.check_missing_stability(var.node.id, var.span, "variant"); + self.check_missing_stability(var.id, var.span, "variant"); intravisit::walk_variant(self, var, g, item_id); } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index bb2a5cab7d9..a83d4905115 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -484,8 +484,8 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc { fn check_variant(&mut self, cx: &LateContext<'_, '_>, v: &hir::Variant, _: &hir::Generics) { self.check_missing_docs_attrs(cx, - Some(v.node.id), - &v.node.attrs, + Some(v.id), + &v.attrs, v.span, "a variant"); } diff --git a/src/librustc_lint/nonstandard_style.rs b/src/librustc_lint/nonstandard_style.rs index 8f7fe6680cb..acd17f76632 100644 --- a/src/librustc_lint/nonstandard_style.rs +++ b/src/librustc_lint/nonstandard_style.rs @@ -147,7 +147,7 @@ impl EarlyLintPass for NonCamelCaseTypes { } fn check_variant(&mut self, cx: &EarlyContext<'_>, v: &ast::Variant, _: &ast::Generics) { - self.check_case(cx, "variant", &v.node.ident); + self.check_case(cx, "variant", &v.ident); } fn check_generic_param(&mut self, cx: &EarlyContext<'_>, param: &ast::GenericParam) { diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index 
e86230437f2..217e10ab24f 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -976,7 +976,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for VariantSizeDifferences { let bytes = variant_layout.size.bytes().saturating_sub(discr_size); debug!("- variant `{}` is {} bytes large", - variant.node.ident, + variant.ident, bytes); bytes }) diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index d73a4966bca..fb675d7d841 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -1676,7 +1676,7 @@ impl Visitor<'tcx> for EncodeContext<'tcx> { id: hir::HirId) { intravisit::walk_variant(self, v, g, id); - if let Some(ref discr) = v.node.disr_expr { + if let Some(ref discr) = v.disr_expr { let def_id = self.tcx.hir().local_def_id(discr.hir_id); self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id); } diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 3c31bcef32b..489806f75a5 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -602,7 +602,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } ItemKind::Enum(ref def, _) => { for variant in &def.variants { - for field in variant.node.data.fields() { + for field in variant.data.fields() { self.invalid_visibility(&field.vis, None); } } diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 673762ee4c6..852b4898f4a 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -687,11 +687,11 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { match item.node { hir::ItemKind::Enum(ref def, _) => { for variant in &def.variants { - let variant_level = self.update(variant.node.id, item_level); - if let Some(ctor_hir_id) = variant.node.data.ctor_hir_id() { + let variant_level = self.update(variant.id, item_level); + if let Some(ctor_hir_id) = variant.data.ctor_hir_id() { self.update(ctor_hir_id, item_level); } - for field in variant.node.data.fields() { + for field in variant.data.fields() { self.update(field.hir_id, variant_level); } } @@ -810,9 +810,9 @@ impl Visitor<'tcx> for EmbargoVisitor<'tcx> { self.reach(item.hir_id, item_level).generics().predicates(); } for variant in &def.variants { - let variant_level = self.get(variant.node.id); + let variant_level = self.get(variant.id); if variant_level.is_some() { - for field in variant.node.data.fields() { + for field in variant.data.fields() { self.reach(field.hir_id, variant_level).ty(); } // Corner case: if the variant is reachable, but its @@ -1647,7 +1647,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { v: &'tcx hir::Variant, g: &'tcx hir::Generics, item_id: hir::HirId) { - if self.access_levels.is_reachable(v.node.id) { + if self.access_levels.is_reachable(v.id) { self.in_variant = true; intravisit::walk_variant(self, v, g, item_id); self.in_variant = false; @@ -1898,7 +1898,7 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> self.check(item.hir_id, item_visibility).generics().predicates(); for variant in &def.variants { - for field in variant.node.data.fields() { + for field in variant.data.fields() { self.check(field.hir_id, item_visibility).ty(); } } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 5dd7bc30548..0a32b0c6e95 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -799,17 +799,17 @@ impl<'a, 'b> 
BuildReducedGraphVisitor<'a, 'b> { parent: Module<'a>, vis: ty::Visibility, expn_id: ExpnId) { - let ident = variant.node.ident; + let ident = variant.ident; // Define a name in the type namespace. - let def_id = self.r.definitions.local_def_id(variant.node.id); + let def_id = self.r.definitions.local_def_id(variant.id); let res = Res::Def(DefKind::Variant, def_id); self.r.define(parent, ident, TypeNS, (res, vis, variant.span, expn_id)); // If the variant is marked as non_exhaustive then lower the visibility to within the // crate. let mut ctor_vis = vis; - let has_non_exhaustive = attr::contains_name(&variant.node.attrs, sym::non_exhaustive); + let has_non_exhaustive = attr::contains_name(&variant.attrs, sym::non_exhaustive); if has_non_exhaustive && vis == ty::Visibility::Public { ctor_vis = ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)); } @@ -819,9 +819,9 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { // value namespace, they are reserved for possible future use. // It's ok to use the variant's id as a ctor id since an // error will be reported on any use of such resolution anyway. - let ctor_node_id = variant.node.data.ctor_id().unwrap_or(variant.node.id); + let ctor_node_id = variant.data.ctor_id().unwrap_or(variant.id); let ctor_def_id = self.r.definitions.local_def_id(ctor_node_id); - let ctor_kind = CtorKind::from_ast(&variant.node.data); + let ctor_kind = CtorKind::from_ast(&variant.data); let ctor_res = Res::Def(DefKind::Ctor(CtorOf::Variant, ctor_kind), ctor_def_id); self.r.define(parent, ident, ValueNS, (ctor_res, ctor_vis, variant.span, expn_id)); } diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index c32d4885c4a..d93c12513c5 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -557,11 +557,11 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> { let access = access_from!(self.save_ctxt, item, hir_id); for variant in &enum_definition.variants { - let name = variant.node.ident.name.to_string(); + let name = variant.ident.name.to_string(); let qualname = format!("{}::{}", enum_data.qualname, name); - let name_span = variant.node.ident.span; + let name_span = variant.ident.span; - match variant.node.data { + match variant.data { ast::VariantData::Struct(ref fields, ..) 
=> { let fields_str = fields .iter() @@ -574,7 +574,7 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> { let value = format!("{}::{} {{ {} }}", enum_data.name, name, fields_str); if !self.span.filter_generated(name_span) { let span = self.span_from_span(name_span); - let id = id_from_node_id(variant.node.id, &self.save_ctxt); + let id = id_from_node_id(variant.id, &self.save_ctxt); let parent = Some(id_from_node_id(item.id, &self.save_ctxt)); self.dumper.dump_def( @@ -589,10 +589,10 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> { parent, children: vec![], decl_id: None, - docs: self.save_ctxt.docs_for_attrs(&variant.node.attrs), + docs: self.save_ctxt.docs_for_attrs(&variant.attrs), sig: sig::variant_signature(variant, &self.save_ctxt), attributes: lower_attributes( - variant.node.attrs.clone(), + variant.attrs.clone(), &self.save_ctxt, ), }, @@ -612,7 +612,7 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> { } if !self.span.filter_generated(name_span) { let span = self.span_from_span(name_span); - let id = id_from_node_id(variant.node.id, &self.save_ctxt); + let id = id_from_node_id(variant.id, &self.save_ctxt); let parent = Some(id_from_node_id(item.id, &self.save_ctxt)); self.dumper.dump_def( @@ -627,10 +627,10 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> { parent, children: vec![], decl_id: None, - docs: self.save_ctxt.docs_for_attrs(&variant.node.attrs), + docs: self.save_ctxt.docs_for_attrs(&variant.attrs), sig: sig::variant_signature(variant, &self.save_ctxt), attributes: lower_attributes( - variant.node.attrs.clone(), + variant.attrs.clone(), &self.save_ctxt, ), }, @@ -640,8 +640,8 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> { } - for field in variant.node.data.fields() { - self.process_struct_field_def(field, variant.node.id); + for field in variant.data.fields() { + self.process_struct_field_def(field, variant.id); self.visit_ty(&field.ty); } } diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index c699a8834e0..9da6cd80057 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -277,7 +277,7 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> { filter!(self.span_utils, item.ident.span); let variants_str = def.variants .iter() - .map(|v| v.node.ident.to_string()) + .map(|v| v.ident.to_string()) .collect::>() .join(", "); let value = format!("{}::{{{}}}", name, variants_str); @@ -291,7 +291,7 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> { parent: None, children: def.variants .iter() - .map(|v| id_from_node_id(v.node.id, self)) + .map(|v| id_from_node_id(v.id, self)) .collect(), decl_id: None, docs: self.docs_for_attrs(&item.attrs), diff --git a/src/librustc_save_analysis/sig.rs b/src/librustc_save_analysis/sig.rs index c212cda2d66..b34506a4f1d 100644 --- a/src/librustc_save_analysis/sig.rs +++ b/src/librustc_save_analysis/sig.rs @@ -65,7 +65,7 @@ pub fn variant_signature(variant: &ast::Variant, scx: &SaveContext<'_, '_>) -> O if !scx.config.signatures { return None; } - variant.node.make(0, None, scx).ok() + variant.make(0, None, scx).ok() } pub fn method_signature( @@ -699,7 +699,7 @@ impl Sig for ast::StructField { } -impl Sig for ast::Variant_ { +impl Sig for ast::Variant { fn make(&self, offset: usize, parent_id: Option, scx: &SaveContext<'_, '_>) -> Result { let mut text = self.ident.to_string(); match self.data { diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 4fb28db6e94..9559f47383a 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -1968,19 +1968,19 @@ pub fn check_enum<'tcx>(tcx: 
TyCtxt<'tcx>, sp: Span, vs: &'tcx [hir::Variant], i } for v in vs { - if let Some(ref e) = v.node.disr_expr { + if let Some(ref e) = v.disr_expr { tcx.typeck_tables_of(tcx.hir().local_def_id(e.hir_id)); } } if tcx.adt_def(def_id).repr.int.is_none() && tcx.features().arbitrary_enum_discriminant { let is_unit = - |var: &hir::Variant| match var.node.data { + |var: &hir::Variant| match var.data { hir::VariantData::Unit(..) => true, _ => false }; - let has_disr = |var: &hir::Variant| var.node.disr_expr.is_some(); + let has_disr = |var: &hir::Variant| var.disr_expr.is_some(); let has_non_units = vs.iter().any(|var| !is_unit(var)); let disr_units = vs.iter().any(|var| is_unit(&var) && has_disr(&var)); let disr_non_unit = vs.iter().any(|var| !is_unit(&var) && has_disr(&var)); @@ -1999,11 +1999,11 @@ pub fn check_enum<'tcx>(tcx: TyCtxt<'tcx>, sp: Span, vs: &'tcx [hir::Variant], i let variant_did = def.variants[VariantIdx::new(i)].def_id; let variant_i_hir_id = tcx.hir().as_local_hir_id(variant_did).unwrap(); let variant_i = tcx.hir().expect_variant(variant_i_hir_id); - let i_span = match variant_i.node.disr_expr { + let i_span = match variant_i.disr_expr { Some(ref expr) => tcx.hir().span(expr.hir_id), None => tcx.hir().span(variant_i_hir_id) }; - let span = match v.node.disr_expr { + let span = match v.disr_expr { Some(ref expr) => tcx.hir().span(expr.hir_id), None => v.span }; diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs index c1d8fde3be1..b2c0e34d6fa 100644 --- a/src/librustc_typeck/check/wfcheck.rs +++ b/src/librustc_typeck/check/wfcheck.rs @@ -1119,7 +1119,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn enum_variants(&self, enum_def: &hir::EnumDef) -> Vec> { enum_def.variants.iter() - .map(|variant| self.non_enum_variant(&variant.node.data)) + .map(|variant| self.non_enum_variant(&variant.data)) .collect() } diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 0f0568907c6..5ff64224c5a 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -35,7 +35,6 @@ use rustc_target::spec::abi; use syntax::ast; use syntax::ast::{Ident, MetaItemKind}; use syntax::attr::{InlineAttr, OptimizeAttr, list_contains_name, mark_used}; -use syntax::source_map::Spanned; use syntax::feature_gate; use syntax::symbol::{InternedString, kw, Symbol, sym}; use syntax_pos::{Span, DUMMY_SP}; @@ -520,7 +519,11 @@ fn convert_variant_ctor(tcx: TyCtxt<'_>, ctor_id: hir::HirId) { tcx.predicates_of(def_id); } -fn convert_enum_variant_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, variants: &[hir::Variant]) { +fn convert_enum_variant_types<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, + variants: &[hir::Variant] +) { let def = tcx.adt_def(def_id); let repr_type = def.repr.discr_type(); let initial = repr_type.initial_discriminant(tcx); @@ -530,7 +533,7 @@ fn convert_enum_variant_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, variants: for variant in variants { let wrapped_discr = prev_discr.map_or(initial, |d| d.wrap_incr(tcx)); prev_discr = Some( - if let Some(ref e) = variant.node.disr_expr { + if let Some(ref e) = variant.disr_expr { let expr_did = tcx.hir().local_def_id(e.hir_id); def.eval_explicit_discr(tcx, expr_did) } else if let Some(discr) = repr_type.disr_incr(tcx, prev_discr) { @@ -546,14 +549,14 @@ fn convert_enum_variant_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, variants: format!("overflowed on value after {}", prev_discr.unwrap()), ).note(&format!( "explicitly set `{} = {}` if that is desired outcome", - 
variant.node.ident, wrapped_discr + variant.ident, wrapped_discr )) .emit(); None }.unwrap_or(wrapped_discr), ); - for f in variant.node.data.fields() { + for f in variant.data.fields() { let def_id = tcx.hir().local_def_id(f.hir_id); tcx.generics_of(def_id); tcx.type_of(def_id); @@ -562,7 +565,7 @@ fn convert_enum_variant_types<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, variants: // Convert the ctor, if any. This also registers the variant as // an item. - if let Some(ctor_hir_id) = variant.node.data.ctor_hir_id() { + if let Some(ctor_hir_id) = variant.data.ctor_hir_id() { convert_variant_ctor(tcx, ctor_hir_id); } } @@ -641,11 +644,11 @@ fn adt_def(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::AdtDef { let variants = def.variants .iter() .map(|v| { - let variant_did = Some(tcx.hir().local_def_id(v.node.id)); - let ctor_did = v.node.data.ctor_hir_id() + let variant_did = Some(tcx.hir().local_def_id(v.id)); + let ctor_did = v.data.ctor_hir_id() .map(|hir_id| tcx.hir().local_def_id(hir_id)); - let discr = if let Some(ref e) = v.node.disr_expr { + let discr = if let Some(ref e) = v.disr_expr { distance_from_explicit = 0; ty::VariantDiscr::Explicit(tcx.hir().local_def_id(e.hir_id)) } else { @@ -653,8 +656,8 @@ fn adt_def(tcx: TyCtxt<'_>, def_id: DefId) -> &ty::AdtDef { }; distance_from_explicit += 1; - convert_variant(tcx, variant_did, ctor_did, v.node.ident, discr, - &v.node.data, AdtKind::Enum, def_id) + convert_variant(tcx, variant_did, ctor_did, v.ident, discr, + &v.data, AdtKind::Enum, def_id) }) .collect(); @@ -1314,10 +1317,9 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option tcx.mk_foreign(def_id), }, - Node::Ctor(&ref def) | Node::Variant(&Spanned { - node: hir::VariantKind { data: ref def, .. }, - .. - }) => match *def { + Node::Ctor(&ref def) | Node::Variant( + hir::Variant { data: ref def, .. } + ) => match *def { VariantData::Unit(..) | VariantData::Struct(..) => { tcx.type_of(tcx.hir().get_parent_did(hir_id)) } @@ -1363,12 +1365,8 @@ pub fn checked_type_of(tcx: TyCtxt<'_>, def_id: DefId, fail: bool) -> Option { @@ -1809,10 +1807,9 @@ fn fn_sig(tcx: TyCtxt<'_>, def_id: DefId) -> ty::PolyFnSig<'_> { compute_sig_of_foreign_fn_decl(tcx, def_id, fn_decl, abi) } - Ctor(data) | Variant(Spanned { - node: hir::VariantKind { data, .. }, - .. - }) if data.ctor_hir_id().is_some() => { + Ctor(data) | Variant( + hir::Variant { data, .. } + ) if data.ctor_hir_id().is_some() => { let ty = tcx.type_of(tcx.hir().get_parent_did(hir_id)); let inputs = data.fields() .iter() diff --git a/src/librustc_typeck/variance/constraints.rs b/src/librustc_typeck/variance/constraints.rs index b75a0912657..7ed9d6606f6 100644 --- a/src/librustc_typeck/variance/constraints.rs +++ b/src/librustc_typeck/variance/constraints.rs @@ -82,8 +82,8 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> { self.visit_node_helper(item.hir_id); for variant in &enum_def.variants { - if let hir::VariantData::Tuple(..) = variant.node.data { - self.visit_node_helper(variant.node.data.ctor_hir_id().unwrap()); + if let hir::VariantData::Tuple(..) 
= variant.data { + self.visit_node_helper(variant.data.ctor_hir_id().unwrap()); } } } diff --git a/src/librustc_typeck/variance/terms.rs b/src/librustc_typeck/variance/terms.rs index 7af7c79bb3c..e10837e52ad 100644 --- a/src/librustc_typeck/variance/terms.rs +++ b/src/librustc_typeck/variance/terms.rs @@ -145,8 +145,8 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for TermsContext<'a, 'tcx> { self.add_inferreds_for_item(item.hir_id); for variant in &enum_def.variants { - if let hir::VariantData::Tuple(..) = variant.node.data { - self.add_inferreds_for_item(variant.node.data.ctor_hir_id().unwrap()); + if let hir::VariantData::Tuple(..) = variant.data { + self.add_inferreds_for_item(variant.data.ctor_hir_id().unwrap()); } } } diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 462e21b8f6b..83a8d3fc109 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -951,7 +951,7 @@ impl<'a, 'hir> intravisit::Visitor<'hir> for HirCollector<'a, 'hir> { v: &'hir hir::Variant, g: &'hir hir::Generics, item_id: hir::HirId) { - self.visit_testable(v.node.ident.to_string(), &v.node.attrs, |this| { + self.visit_testable(v.ident.to_string(), &v.attrs, |this| { intravisit::walk_variant(this, v, g, item_id); }); } diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 35b6d9972da..903ed3aae14 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -130,10 +130,10 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { Enum { name, variants: def.variants.iter().map(|v| Variant { - name: v.node.ident.name, - id: v.node.id, - attrs: &v.node.attrs, - def: &v.node.data, + name: v.ident.name, + id: v.id, + attrs: &v.attrs, + def: &v.data, whence: v.span, }).collect(), vis: &it.vis, diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 052eb55b408..a65f0b2ff51 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -2038,7 +2038,7 @@ pub struct EnumDef { } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] -pub struct Variant_ { +pub struct Variant { /// Name of the variant. pub ident: Ident, /// Attributes of the variant. @@ -2049,10 +2049,10 @@ pub struct Variant_ { pub data: VariantData, /// Explicit discriminant, e.g., `Foo = 1`. pub disr_expr: Option, + /// Span + pub span: Span, } -pub type Variant = Spanned; - /// Part of `use` item to the right of its prefix. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum UseTreeKind { diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index a9d3227b3a8..70b1d3fc73b 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -712,7 +712,7 @@ macro_rules! derive_has_attrs { derive_has_attrs! 
{ Item, Expr, Local, ast::ForeignItem, ast::StructField, ast::ImplItem, ast::TraitItem, ast::Arm, - ast::Field, ast::FieldPat, ast::Variant_, ast::Arg + ast::Field, ast::FieldPat, ast::Variant, ast::Arg } pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate { diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 1ab367f73c1..7eeea4e7bdf 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -260,7 +260,7 @@ impl<'a> StripUnconfigured<'a> { ast::ItemKind::Enum(ast::EnumDef { variants }, _generics) => { variants.flat_map_in_place(|variant| self.configure(variant)); for variant in variants { - self.configure_variant_data(&mut variant.node.data); + self.configure_variant_data(&mut variant.data); } } _ => {} diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 22962499a2b..3dfcd825704 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -781,14 +781,14 @@ impl<'a> ExtCtxt<'a> { ast::VariantData::Tuple(fields, ast::DUMMY_NODE_ID) }; - respan(span, - ast::Variant_ { - ident, - id: ast::DUMMY_NODE_ID, - attrs: Vec::new(), - data: vdata, - disr_expr: None, - }) + ast::Variant { + attrs: Vec::new(), + data: vdata, + disr_expr: None, + id: ast::DUMMY_NODE_ID, + ident, + span, + } } pub fn item_enum_poly(&self, span: Span, name: Ident, diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 08a113b53d0..8a56ae13b6f 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -1956,7 +1956,7 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { ast::ItemKind::Enum(ast::EnumDef{ref variants, ..}, ..) => { for variant in variants { - match (&variant.node.data, &variant.node.disr_expr) { + match (&variant.data, &variant.disr_expr) { (ast::VariantData::Unit(..), _) => {}, (_, Some(disr_expr)) => gate_feature_post!( diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index be04c6a76b0..36327fe10dc 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -455,7 +455,7 @@ pub fn noop_visit_foreign_mod(foreign_mod: &mut ForeignMod, vis: } pub fn noop_visit_variant(variant: &mut Variant, vis: &mut T) { - let Spanned { node: Variant_ { ident, attrs, id, data, disr_expr }, span } = variant; + let Variant { ident, attrs, id, data, disr_expr, span } = variant; vis.visit_ident(ident); visit_attrs(attrs, vis); vis.visit_id(id); diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index 730efb5ef01..1fbf28fb830 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -8,7 +8,6 @@ use crate::parse::parser::{BlockMode, PathStyle, SemiColonMode, TokenType, Token use crate::parse::token::{self, TokenKind}; use crate::print::pprust; use crate::ptr::P; -use crate::source_map::Spanned; use crate::symbol::{kw, sym}; use crate::ThinVec; use crate::util::parser::AssocOp; @@ -592,18 +591,18 @@ impl<'a> Parser<'a> { crate fn maybe_report_invalid_custom_discriminants( sess: &ParseSess, - variants: &[Spanned], + variants: &[ast::Variant], ) { - let has_fields = variants.iter().any(|variant| match variant.node.data { + let has_fields = variants.iter().any(|variant| match variant.data { VariantData::Tuple(..) | VariantData::Struct(..) => true, VariantData::Unit(..) => false, }); - let discriminant_spans = variants.iter().filter(|variant| match variant.node.data { + let discriminant_spans = variants.iter().filter(|variant| match variant.data { VariantData::Tuple(..) 
| VariantData::Struct(..) => false, VariantData::Unit(..) => true, }) - .filter_map(|variant| variant.node.disr_expr.as_ref().map(|c| c.value.span)) + .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span)) .collect::>(); if !discriminant_spans.is_empty() && has_fields { @@ -618,7 +617,7 @@ impl<'a> Parser<'a> { err.span_label(sp, "disallowed custom discriminant"); } for variant in variants.iter() { - match &variant.node.data { + match &variant.data { VariantData::Struct(..) => { err.span_label( variant.span, diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs index e85ef9cc974..60873ecb134 100644 --- a/src/libsyntax/parse/parser/item.rs +++ b/src/libsyntax/parse/parser/item.rs @@ -1564,14 +1564,15 @@ impl<'a> Parser<'a> { None }; - let vr = ast::Variant_ { + let vr = ast::Variant { ident, id: ast::DUMMY_NODE_ID, attrs: variant_attrs, data: struct_def, disr_expr, + span: vlo.to(self.prev_span), }; - variants.push(respan(vlo.to(self.prev_span), vr)); + variants.push(vr); if !self.eat(&token::Comma) { if self.token.is_ident() && !self.token.is_reserved_ident() { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index bda761244d5..8b97ec3da0b 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1402,7 +1402,7 @@ impl<'a> State<'a> { for v in variants { self.space_if_not_bol(); self.maybe_print_comment(v.span.lo()); - self.print_outer_attributes(&v.node.attrs); + self.print_outer_attributes(&v.attrs); self.ibox(INDENT_UNIT); self.print_variant(v); self.s.word(","); @@ -1492,8 +1492,8 @@ impl<'a> State<'a> { crate fn print_variant(&mut self, v: &ast::Variant) { self.head(""); let generics = ast::Generics::default(); - self.print_struct(&v.node.data, &generics, v.node.ident, v.span, false); - match v.node.disr_expr { + self.print_struct(&v.data, &generics, v.ident, v.span, false); + match v.disr_expr { Some(ref d) => { self.s.space(); self.word_space("="); diff --git a/src/libsyntax/print/pprust/tests.rs b/src/libsyntax/print/pprust/tests.rs index 082a430e0ed..25214673e69 100644 --- a/src/libsyntax/print/pprust/tests.rs +++ b/src/libsyntax/print/pprust/tests.rs @@ -54,14 +54,15 @@ fn test_variant_to_string() { with_default_globals(|| { let ident = ast::Ident::from_str("principal_skinner"); - let var = source_map::respan(syntax_pos::DUMMY_SP, ast::Variant_ { + let var = ast::Variant { ident, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, // making this up as I go.... ? 
data: ast::VariantData::Unit(ast::DUMMY_NODE_ID), disr_expr: None, - }); + span: syntax_pos::DUMMY_SP, + }; let varstr = variant_to_string(&var); assert_eq!(varstr, "principal_skinner"); diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 596c5b46b98..41b8ef16665 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -311,11 +311,11 @@ pub fn walk_variant<'a, V>(visitor: &mut V, item_id: NodeId) where V: Visitor<'a>, { - visitor.visit_ident(variant.node.ident); - visitor.visit_variant_data(&variant.node.data, variant.node.ident, + visitor.visit_ident(variant.ident); + visitor.visit_variant_data(&variant.data, variant.ident, generics, item_id, variant.span); - walk_list!(visitor, visit_anon_const, &variant.node.disr_expr); - walk_list!(visitor, visit_attribute, &variant.node.attrs); + walk_list!(visitor, visit_anon_const, &variant.disr_expr); + walk_list!(visitor, visit_attribute, &variant.attrs); } pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) { diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index 3b1edf90d6b..d80da566185 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -138,7 +138,7 @@ fn cs_clone_shallow(name: &str, } StaticEnum(enum_def, ..) => { for variant in &enum_def.variants { - process_variant(cx, &mut stmts, &variant.node.data); + process_variant(cx, &mut stmts, &variant.data); } } _ => cx.span_bug(trait_span, &format!("unexpected substructure in \ @@ -170,9 +170,9 @@ fn cs_clone(name: &str, vdata = vdata_; } EnumMatching(.., variant, ref af) => { - ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.node.ident]); + ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.ident]); all_fields = af; - vdata = &variant.node.data; + vdata = &variant.data; } EnumNonMatchingCollapsed(..) => { cx.span_bug(trait_span, diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 1909729f4a9..5d7c4a84389 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -75,7 +75,7 @@ fn cs_total_eq_assert(cx: &mut ExtCtxt<'_>, } StaticEnum(enum_def, ..) => { for variant in &enum_def.variants { - process_variant(cx, &mut stmts, &variant.node.data); + process_variant(cx, &mut stmts, &variant.data); } } _ => cx.span_bug(trait_span, "unexpected substructure in `derive(Eq)`") diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index 0f709630bf4..15e93f2843a 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -53,7 +53,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> // based on the "shape". let (ident, vdata, fields) = match substr.fields { Struct(vdata, fields) => (substr.type_ident, *vdata, fields), - EnumMatching(_, _, v, fields) => (v.node.ident, &v.node.data, fields), + EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields), EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) 
=> cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`"), diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index 6d0d3b96a56..52e74a7c57e 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -238,7 +238,7 @@ fn encodable_substructure(cx: &mut ExtCtxt<'_>, } let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - let name = cx.expr_str(trait_span, variant.node.ident.name); + let name = cx.expr_str(trait_span, variant.ident.name); let call = cx.expr_method_call(trait_span, blkencoder, cx.ident_of("emit_enum_variant"), diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 7e6d9126c87..91e73810d66 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -758,7 +758,7 @@ impl<'a> TraitDef<'a> { let mut field_tys = Vec::new(); for variant in &enum_def.variants { - field_tys.extend(variant.node + field_tys.extend(variant .data .fields() .iter() @@ -1220,7 +1220,7 @@ impl<'a> MethodDef<'a> { let catch_all_substructure = EnumNonMatchingCollapsed(self_arg_idents, &variants[..], &vi_idents[..]); - let first_fieldless = variants.iter().find(|v| v.node.data.fields().is_empty()); + let first_fieldless = variants.iter().find(|v| v.data.fields().is_empty()); // These arms are of the form: // (Variant1, Variant1, ...) => Body1 @@ -1229,7 +1229,7 @@ impl<'a> MethodDef<'a> { // where each tuple has length = self_args.len() let mut match_arms: Vec = variants.iter() .enumerate() - .filter(|&(_, v)| !(self.unify_fieldless_variants && v.node.data.fields().is_empty())) + .filter(|&(_, v)| !(self.unify_fieldless_variants && v.data.fields().is_empty())) .map(|(index, variant)| { let mk_self_pat = |cx: &mut ExtCtxt<'_>, self_arg_name: &str| { let (p, idents) = trait_.create_enum_variant_pattern(cx, @@ -1513,8 +1513,8 @@ impl<'a> MethodDef<'a> { .iter() .map(|v| { let sp = v.span.with_ctxt(trait_.span.ctxt()); - let summary = trait_.summarise_struct(cx, &v.node.data); - (v.node.ident, sp, summary) + let summary = trait_.summarise_struct(cx, &v.data); + (v.ident, sp, summary) }) .collect(); self.call_substructure_method(cx, @@ -1643,9 +1643,9 @@ impl<'a> TraitDef<'a> { mutbl: ast::Mutability) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { let sp = variant.span.with_ctxt(self.span.ctxt()); - let variant_path = cx.path(sp, vec![enum_ident, variant.node.ident]); + let variant_path = cx.path(sp, vec![enum_ident, variant.ident]); let use_temporaries = false; // enums can't be repr(packed) - self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl, + self.create_struct_pattern(cx, variant_path, &variant.data, prefix, mutbl, use_temporaries) } } @@ -1776,7 +1776,7 @@ pub fn is_type_without_fields(item: &Annotatable) -> bool { if let Annotatable::Item(ref item) = *item { match item.node { ast::ItemKind::Enum(ref enum_def, _) => { - enum_def.variants.iter().all(|v| v.node.data.fields().is_empty()) + enum_def.variants.iter().all(|v| v.data.fields().is_empty()) } ast::ItemKind::Struct(ref variant_data, _) => variant_data.fields().is_empty(), _ => false, -- cgit 1.4.1-3-g733a5 From 66dc08ad604cdb75cbc2a89d3551c51fbc6cc20e Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Tue, 13 Aug 2019 19:51:32 +0300 Subject: Make sure that all file loading happens via SourceMap That way, callers don't need to repeat "let's add this to sm manually for tracking dependencies" trick. 
It should make it easier to switch to using `FileLoader` for binary files in the future as well --- src/libsyntax/ext/expand.rs | 13 +++++-------- src/libsyntax/source_map.rs | 20 ++++++++++++++++++++ src/libsyntax_ext/source_util.rs | 37 +++++++++++-------------------------- 3 files changed, 36 insertions(+), 34 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 402b42dfbc8..e9789764383 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -25,7 +25,6 @@ use syntax_pos::{Span, DUMMY_SP, FileName}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lrc; -use std::fs; use std::io::ErrorKind; use std::{iter, mem}; use std::ops::DerefMut; @@ -1239,13 +1238,11 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { } let filename = self.cx.resolve_path(&*file.as_str(), it.span()); - match fs::read_to_string(&filename) { - Ok(src) => { - let src_interned = Symbol::intern(&src); - - // Add this input file to the code map to make it available as - // dependency information - self.cx.source_map().new_source_file(filename.into(), src); + match self.cx.source_map().load_file(&filename) { + Ok(source_file) => { + let src = source_file.src.as_ref() + .expect("freshly loaded file should have a source"); + let src_interned = Symbol::intern(src.as_str()); let include_info = vec![ ast::NestedMetaItem::MetaItem( diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs index 74cab00d3c1..ceaa5ee3aa5 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -170,6 +170,26 @@ impl SourceMap { Ok(self.new_source_file(filename, src)) } + /// Loads source file as a binary blob. + /// + /// Unlike `load_file`, guarantees that no normalization like BOM-removal + /// takes place. + pub fn load_binary_file(&self, path: &Path) -> io::Result> { + // Ideally, this should use `self.file_loader`, but it can't + // deal with binary files yet. + let bytes = fs::read(path)?; + + // We need to add file to the `SourceMap`, so that it is present + // in dep-info. There's also an edge case that file might be both + // loaded as a binary via `include_bytes!` and as proper `SourceFile` + // via `mod`, so we try to use real file contents and not just an + // empty string. 
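+        //
+        // For example, the `include_str!` / `include_bytes!` expanders (see
+        // the `source_util.rs` hunks below) can now simply call
+        //     cx.source_map().load_binary_file(&file)
+        // instead of `fs::read` plus a manual `new_source_file` call, and the
+        // file is still recorded for dep-info.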
+ let text = std::str::from_utf8(&bytes).unwrap_or("") + .to_string(); + self.new_source_file(path.to_owned().into(), text); + Ok(bytes) + } + pub fn files(&self) -> MappedLockGuard<'_, Vec>> { LockGuard::map(self.files.borrow(), |files| &mut files.source_files) } diff --git a/src/libsyntax_ext/source_util.rs b/src/libsyntax_ext/source_util.rs index cbc01b48afd..e008ed710e4 100644 --- a/src/libsyntax_ext/source_util.rs +++ b/src/libsyntax_ext/source_util.rs @@ -9,8 +9,6 @@ use syntax::tokenstream; use smallvec::SmallVec; use syntax_pos::{self, Pos, Span}; -use std::fs; -use std::io::ErrorKind; use rustc_data_structures::sync::Lrc; // These macros all relate to the file system; they either return @@ -114,20 +112,17 @@ pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::To None => return DummyResult::any(sp) }; let file = cx.resolve_path(file, sp); - match fs::read_to_string(&file) { - Ok(src) => { - let interned_src = Symbol::intern(&src); - - // Add this input file to the code map to make it available as - // dependency information - cx.source_map().new_source_file(file.into(), src); - - base::MacEager::expr(cx.expr_str(sp, interned_src)) + match cx.source_map().load_binary_file(&file) { + Ok(bytes) => match std::str::from_utf8(&bytes) { + Ok(src) => { + let interned_src = Symbol::intern(&src); + base::MacEager::expr(cx.expr_str(sp, interned_src)) + } + Err(_) => { + cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); + DummyResult::any(sp) + } }, - Err(ref e) if e.kind() == ErrorKind::InvalidData => { - cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display())); - DummyResult::any(sp) - } Err(e) => { cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e)); DummyResult::any(sp) @@ -142,18 +137,8 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream:: None => return DummyResult::any(sp) }; let file = cx.resolve_path(file, sp); - match fs::read(&file) { + match cx.source_map().load_binary_file(&file) { Ok(bytes) => { - // Add the contents to the source map if it contains UTF-8. 
- let (contents, bytes) = match String::from_utf8(bytes) { - Ok(s) => { - let bytes = s.as_bytes().to_owned(); - (s, bytes) - }, - Err(e) => (String::new(), e.into_bytes()), - }; - cx.source_map().new_source_file(file.into(), contents); - base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes)))) }, Err(e) => { -- cgit 1.4.1-3-g733a5 From 73d2da08949eb5cd78e8081ea1ec9a068cdbaf6b Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Thu, 15 Aug 2019 01:56:44 +0300 Subject: Remove `Spanned` from `mk_name_value_item_str` and `expr_to_spanned_string` --- src/librustdoc/clean/cfg/tests.rs | 4 ++-- src/librustdoc/clean/mod.rs | 6 +++--- src/libsyntax/ast.rs | 2 -- src/libsyntax/attr/mod.rs | 12 +++++++----- src/libsyntax/ext/base.rs | 8 ++++---- src/libsyntax/ext/expand.rs | 8 +++++--- src/libsyntax/parse/tests.rs | 4 ++-- src/libsyntax_ext/format.rs | 16 ++++++++-------- 8 files changed, 31 insertions(+), 29 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustdoc/clean/cfg/tests.rs b/src/librustdoc/clean/cfg/tests.rs index 405144b444f..ec5d86b2c61 100644 --- a/src/librustdoc/clean/cfg/tests.rs +++ b/src/librustdoc/clean/cfg/tests.rs @@ -3,7 +3,6 @@ use super::*; use syntax_pos::DUMMY_SP; use syntax::ast::*; use syntax::attr; -use syntax::source_map::dummy_spanned; use syntax::symbol::Symbol; use syntax::with_default_globals; @@ -181,7 +180,8 @@ fn test_parse_ok() { let mi = attr::mk_name_value_item_str( Ident::from_str("all"), - dummy_spanned(Symbol::intern("done")) + Symbol::intern("done"), + DUMMY_SP, ); assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done"))); diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index b281505956d..983adc0eae8 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -29,7 +29,7 @@ use rustc::util::nodemap::{FxHashMap, FxHashSet}; use syntax::ast::{self, AttrStyle, Ident}; use syntax::attr; use syntax::ext::base::MacroKind; -use syntax::source_map::{dummy_spanned, Spanned}; +use syntax::source_map::{DUMMY_SP, Spanned}; use syntax::symbol::{Symbol, kw, sym}; use syntax::symbol::InternedString; use syntax_pos::{self, Pos, FileName}; @@ -930,8 +930,8 @@ impl Attributes { if attr.check_name(sym::enable) { if let Some(feat) = attr.value_str() { let meta = attr::mk_name_value_item_str( - Ident::with_empty_ctxt(sym::target_feature), - dummy_spanned(feat)); + Ident::with_empty_ctxt(sym::target_feature), feat, DUMMY_SP + ); if let Ok(feat_cfg) = Cfg::parse(&meta) { cfg &= feat_cfg; } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index f0f090c8e89..c6d006213ca 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -939,8 +939,6 @@ pub struct Field { pub id: NodeId, } -pub type SpannedIdent = Spanned; - #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] pub enum BlockCheckMode { Default, diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 70b1d3fc73b..85c661d320a 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -13,7 +13,7 @@ use crate::ast::{AttrId, AttrStyle, Name, Ident, Path, PathSegment}; use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem}; use crate::ast::{Lit, LitKind, Expr, Item, Local, Stmt, StmtKind, GenericParam}; use crate::mut_visit::visit_clobber; -use crate::source_map::{BytePos, Spanned, dummy_spanned}; +use crate::source_map::{BytePos, Spanned, DUMMY_SP}; use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use crate::parse::parser::Parser; use 
crate::parse::{self, ParseSess, PResult}; @@ -328,7 +328,9 @@ impl Attribute { let comment = self.value_str().unwrap(); let meta = mk_name_value_item_str( Ident::with_empty_ctxt(sym::doc), - dummy_spanned(Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())))); + Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())), + DUMMY_SP, + ); f(&Attribute { id: self.id, style: self.style, @@ -345,9 +347,9 @@ impl Attribute { /* Constructors */ -pub fn mk_name_value_item_str(ident: Ident, value: Spanned) -> MetaItem { - let lit_kind = LitKind::Str(value.node, ast::StrStyle::Cooked); - mk_name_value_item(ident, lit_kind, value.span) +pub fn mk_name_value_item_str(ident: Ident, str: Symbol, str_span: Span) -> MetaItem { + let lit_kind = LitKind::Str(str, ast::StrStyle::Cooked); + mk_name_value_item(ident, lit_kind, str_span) } pub fn mk_name_value_item(ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem { diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 6886b4bf421..edeca046c7b 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -1,6 +1,6 @@ use crate::ast::{self, Attribute, Name, PatKind}; use crate::attr::{HasAttrs, Stability, Deprecation}; -use crate::source_map::{SourceMap, Spanned, respan}; +use crate::source_map::SourceMap; use crate::edition::Edition; use crate::ext::expand::{self, AstFragment, Invocation}; use crate::ext::hygiene::{ExpnId, SyntaxContext, Transparency}; @@ -916,7 +916,7 @@ pub fn expr_to_spanned_string<'a>( cx: &'a mut ExtCtxt<'_>, mut expr: P, err_msg: &str, -) -> Result, Option>> { +) -> Result<(Symbol, ast::StrStyle, Span), Option>> { // Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation. expr.span = expr.span.apply_mark(cx.current_expansion.id); @@ -926,7 +926,7 @@ pub fn expr_to_spanned_string<'a>( Err(match expr.node { ast::ExprKind::Lit(ref l) => match l.node { - ast::LitKind::Str(s, style) => return Ok(respan(expr.span, (s, style))), + ast::LitKind::Str(s, style) => return Ok((s, style, expr.span)), ast::LitKind::Err(_) => None, _ => Some(cx.struct_span_err(l.span, err_msg)) }, @@ -940,7 +940,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt<'_>, expr: P, err_msg: &str) expr_to_spanned_string(cx, expr, err_msg) .map_err(|err| err.map(|mut err| err.emit())) .ok() - .map(|s| s.node) + .map(|(symbol, style, _)| (symbol, style)) } /// Non-fatally assert that `tts` is empty. 
Note that this function diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 402b42dfbc8..6cd9b2efdd2 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1,7 +1,7 @@ use crate::ast::{self, Block, Ident, LitKind, NodeId, PatKind, Path}; use crate::ast::{MacStmtStyle, StmtKind, ItemKind}; use crate::attr::{self, HasAttrs}; -use crate::source_map::{dummy_spanned, respan}; +use crate::source_map::respan; use crate::config::StripUnconfigured; use crate::ext::base::*; use crate::ext::proc_macro::collect_derives; @@ -1251,13 +1251,15 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { ast::NestedMetaItem::MetaItem( attr::mk_name_value_item_str( Ident::with_empty_ctxt(sym::file), - dummy_spanned(file), + file, + DUMMY_SP, ), ), ast::NestedMetaItem::MetaItem( attr::mk_name_value_item_str( Ident::with_empty_ctxt(sym::contents), - dummy_spanned(src_interned), + src_interned, + DUMMY_SP, ), ), ]; diff --git a/src/libsyntax/parse/tests.rs b/src/libsyntax/parse/tests.rs index e619fd17fb5..443a6434b78 100644 --- a/src/libsyntax/parse/tests.rs +++ b/src/libsyntax/parse/tests.rs @@ -172,8 +172,8 @@ fn get_spans_of_pat_idents(src: &str) -> Vec { impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor { fn visit_pat(&mut self, p: &'a ast::Pat) { match p.node { - PatKind::Ident(_ , ref spannedident, _) => { - self.spans.push(spannedident.span.clone()); + PatKind::Ident(_ , ref ident, _) => { + self.spans.push(ident.span.clone()); } _ => { crate::visit::walk_pat(self, p); diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index d699b3b1a90..83764205a19 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -846,9 +846,9 @@ pub fn expand_preparsed_format_args( let msg = "format argument must be a string literal"; let fmt_sp = efmt.span; - let fmt = match expr_to_spanned_string(ecx, efmt, msg) { + let (fmt_str, fmt_style, fmt_span) = match expr_to_spanned_string(ecx, efmt, msg) { Ok(mut fmt) if append_newline => { - fmt.node.0 = Symbol::intern(&format!("{}\n", fmt.node.0)); + fmt.0 = Symbol::intern(&format!("{}\n", fmt.0)); fmt } Ok(fmt) => fmt, @@ -875,7 +875,7 @@ pub fn expand_preparsed_format_args( _ => (false, None), }; - let str_style = match fmt.node.1 { + let str_style = match fmt_style { ast::StrStyle::Cooked => None, ast::StrStyle::Raw(raw) => { Some(raw as usize) @@ -981,7 +981,7 @@ pub fn expand_preparsed_format_args( vec![] }; - let fmt_str = &*fmt.node.0.as_str(); // for the suggestions below + let fmt_str = &*fmt_str.as_str(); // for the suggestions below let mut parser = parse::Parser::new(fmt_str, str_style, skips, append_newline); let mut unverified_pieces = Vec::new(); @@ -995,7 +995,7 @@ pub fn expand_preparsed_format_args( if !parser.errors.is_empty() { let err = parser.errors.remove(0); - let sp = fmt.span.from_inner(err.span); + let sp = fmt_span.from_inner(err.span); let mut e = ecx.struct_span_err(sp, &format!("invalid format string: {}", err.description)); e.span_label(sp, err.label + " in format string"); @@ -1003,7 +1003,7 @@ pub fn expand_preparsed_format_args( e.note(¬e); } if let Some((label, span)) = err.secondary_label { - let sp = fmt.span.from_inner(span); + let sp = fmt_span.from_inner(span); e.span_label(sp, label); } e.emit(); @@ -1011,7 +1011,7 @@ pub fn expand_preparsed_format_args( } let arg_spans = parser.arg_places.iter() - .map(|span| fmt.span.from_inner(*span)) + .map(|span| fmt_span.from_inner(*span)) .collect(); let named_pos: FxHashSet = 
names.values().cloned().collect(); @@ -1034,7 +1034,7 @@ pub fn expand_preparsed_format_args( str_pieces: Vec::with_capacity(unverified_pieces.len()), all_pieces_simple: true, macsp, - fmtsp: fmt.span, + fmtsp: fmt_span, invalid_refs: Vec::new(), arg_spans, arg_with_formatting: Vec::new(), -- cgit 1.4.1-3-g733a5 From 433b1e36e19824742175de681b8579c861217207 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Thu, 15 Aug 2019 02:13:53 +0300 Subject: Remove `Spanned` from `ast::Mac` --- src/librustc/lint/context.rs | 2 +- src/librustc_lint/builtin.rs | 2 +- src/librustc_passes/ast_validation.rs | 2 +- src/librustc_resolve/macros.rs | 2 +- src/libsyntax/ast.rs | 7 +++---- src/libsyntax/ext/expand.rs | 11 +++++------ src/libsyntax/ext/placeholders.rs | 5 +++-- src/libsyntax/mut_visit.rs | 4 ++-- src/libsyntax/parse/parser/expr.rs | 9 +++++---- src/libsyntax/parse/parser/item.rs | 12 +++++++----- src/libsyntax/parse/parser/pat.rs | 7 ++++--- src/libsyntax/parse/parser/stmt.rs | 7 ++++--- src/libsyntax/parse/parser/ty.rs | 9 +++++---- src/libsyntax/parse/tests.rs | 2 +- src/libsyntax/print/pprust.rs | 14 +++++++------- src/libsyntax/visit.rs | 2 +- src/libsyntax_ext/assert.rs | 9 +++------ 17 files changed, 54 insertions(+), 52 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 1b73361dcd4..6801fa8d8db 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -1345,7 +1345,7 @@ impl<'a, T: EarlyLintPass> ast_visit::Visitor<'a> for EarlyContextAndPass<'a, T> // part of `walk_mac`, and (b) we should be calling // `visit_path`, *but* that would require a `NodeId`, and I // want to get #53686 fixed quickly. -nmatsakis - ast_visit::walk_path(self, &mac.node.path); + ast_visit::walk_path(self, &mac.path); run_early_pass!(self, check_mac, mac); } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index a83d4905115..c34828a6b45 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -1493,7 +1493,7 @@ impl EarlyLintPass for KeywordIdents { self.check_tokens(cx, mac_def.stream()); } fn check_mac(&mut self, cx: &EarlyContext<'_>, mac: &ast::Mac) { - self.check_tokens(cx, mac.node.tts.clone().into()); + self.check_tokens(cx, mac.tts.clone().into()); } fn check_ident(&mut self, cx: &EarlyContext<'_>, ident: ast::Ident) { self.check_ident_token(cx, UnderMacro(false), ident); diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 489806f75a5..bd46ca4779a 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -824,7 +824,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { |this| visit::walk_enum_def(this, enum_definition, generics, item_id)) } - fn visit_mac(&mut self, mac: &Spanned) { + fn visit_mac(&mut self, mac: &Mac) { // when a new macro kind is added but the author forgets to set it up for expansion // because that's the only part that won't cause a compiler error self.session.diagnostic() diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 8e9e1380002..71e26dac57c 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -186,7 +186,7 @@ impl<'a> base::Resolver for Resolver<'a> { InvocationKind::Attr { ref attr, ref derives, after_derive, .. } => (&attr.path, MacroKind::Attr, derives.clone(), after_derive), InvocationKind::Bang { ref mac, .. 
} => - (&mac.node.path, MacroKind::Bang, Vec::new(), false), + (&mac.path, MacroKind::Bang, Vec::new(), false), InvocationKind::Derive { ref path, .. } => (path, MacroKind::Derive, Vec::new(), false), InvocationKind::DeriveContainer { ref derives, .. } => { diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index c6d006213ca..b8bfa671bcf 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -1285,8 +1285,6 @@ pub enum Movability { Movable, } -pub type Mac = Spanned; - /// Represents a macro invocation. The `Path` indicates which macro /// is being invoked, and the vector of token-trees contains the source /// of the macro invocation. @@ -1294,10 +1292,11 @@ pub type Mac = Spanned; /// N.B., the additional ident for a `macro_rules`-style macro is actually /// stored in the enclosing item. #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] -pub struct Mac_ { +pub struct Mac { pub path: Path, pub delim: MacDelimiter, pub tts: TokenStream, + pub span: Span, pub prior_type_ascription: Option<(Span, bool)>, } @@ -1308,7 +1307,7 @@ pub enum MacDelimiter { Brace, } -impl Mac_ { +impl Mac { pub fn stream(&self) -> TokenStream { self.tts.clone() } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 6cd9b2efdd2..97983944931 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -492,22 +492,21 @@ impl<'a, 'b> MacroExpander<'a, 'b> { InvocationKind::Bang { mac, .. } => match ext { SyntaxExtensionKind::Bang(expander) => { self.gate_proc_macro_expansion_kind(span, fragment_kind); - let tok_result = expander.expand(self.cx, span, mac.node.stream()); + let tok_result = expander.expand(self.cx, span, mac.stream()); let result = - self.parse_ast_fragment(tok_result, fragment_kind, &mac.node.path, span); + self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span); self.gate_proc_macro_expansion(span, &result); result } SyntaxExtensionKind::LegacyBang(expander) => { let prev = self.cx.current_expansion.prior_type_ascription; - self.cx.current_expansion.prior_type_ascription = - mac.node.prior_type_ascription; - let tok_result = expander.expand(self.cx, span, mac.node.stream()); + self.cx.current_expansion.prior_type_ascription = mac.prior_type_ascription; + let tok_result = expander.expand(self.cx, span, mac.stream()); let result = if let Some(result) = fragment_kind.make_from(tok_result) { result } else { let msg = format!("non-{kind} macro in {kind} position: {path}", - kind = fragment_kind.name(), path = mac.node.path); + kind = fragment_kind.name(), path = mac.path); self.cx.span_err(span, &msg); self.cx.trace_macros_diag(); fragment_kind.dummy(span) diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index b2b17b0fb28..2d05f8f0b00 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -14,12 +14,13 @@ use rustc_data_structures::fx::FxHashMap; pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment { fn mac_placeholder() -> ast::Mac { - dummy_spanned(ast::Mac_ { + ast::Mac { path: ast::Path { span: DUMMY_SP, segments: Vec::new() }, tts: TokenStream::empty().into(), delim: ast::MacDelimiter::Brace, + span: DUMMY_SP, prior_type_ascription: None, - }) + } } let ident = ast::Ident::invalid(); diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index 82446989997..d22c05d9b2e 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -533,8 +533,8 @@ pub fn noop_visit_attribute(attr: &mut Attribute, vis: &mut T) { 
vis.visit_span(span); } -pub fn noop_visit_mac(Spanned { node, span }: &mut Mac, vis: &mut T) { - let Mac_ { path, delim: _, tts, .. } = node; +pub fn noop_visit_mac(mac: &mut Mac, vis: &mut T) { + let Mac { path, delim: _, tts, span, prior_type_ascription: _ } = mac; vis.visit_path(path); vis.visit_tts(tts); vis.visit_span(span); diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index 7b98d7a1801..f4b6a926734 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -8,13 +8,13 @@ use crate::ast::{self, Attribute, AttrStyle, Ident, CaptureBy, BlockCheckMode}; use crate::ast::{Expr, ExprKind, RangeLimits, Label, Movability, IsAsync, Arm}; use crate::ast::{Ty, TyKind, FunctionRetTy, Arg, FnDecl}; use crate::ast::{BinOpKind, BinOp, UnOp}; -use crate::ast::{Mac_, AnonConst, Field}; +use crate::ast::{Mac, AnonConst, Field}; use crate::parse::classify; use crate::parse::token::{self, Token}; use crate::parse::diagnostics::{Error}; use crate::print::pprust; -use crate::source_map::{self, respan, Span}; +use crate::source_map::{self, Span}; use crate::symbol::{kw, sym}; use crate::util::parser::{AssocOp, Fixity, prec_let_scrutinee_needs_par}; @@ -1011,12 +1011,13 @@ impl<'a> Parser<'a> { // MACRO INVOCATION expression let (delim, tts) = self.expect_delimited_token_tree()?; hi = self.prev_span; - ex = ExprKind::Mac(respan(lo.to(hi), Mac_ { + ex = ExprKind::Mac(Mac { path, tts, delim, + span: lo.to(hi), prior_type_ascription: self.last_type_ascription, - })); + }); } else if self.check(&token::OpenDelim(token::Brace)) { if let Some(expr) = self.maybe_parse_struct_expr(lo, &path, &attrs) { return expr; diff --git a/src/libsyntax/parse/parser/item.rs b/src/libsyntax/parse/parser/item.rs index 60873ecb134..72819c99660 100644 --- a/src/libsyntax/parse/parser/item.rs +++ b/src/libsyntax/parse/parser/item.rs @@ -10,7 +10,7 @@ use crate::ast::{Visibility, VisibilityKind, Mutability, FnDecl, FnHeader}; use crate::ast::{ForeignItem, ForeignItemKind}; use crate::ast::{Ty, TyKind, GenericBounds, TraitRef}; use crate::ast::{EnumDef, VariantData, StructField, AnonConst}; -use crate::ast::{Mac, Mac_, MacDelimiter}; +use crate::ast::{Mac, MacDelimiter}; use crate::ext::base::DummyResult; use crate::parse::token; use crate::parse::parser::maybe_append; @@ -530,12 +530,13 @@ impl<'a> Parser<'a> { } let hi = self.prev_span; - let mac = respan(mac_lo.to(hi), Mac_ { + let mac = Mac { path, tts, delim, + span: mac_lo.to(hi), prior_type_ascription: self.last_type_ascription, - }); + }; let item = self.mk_item(lo.to(hi), Ident::invalid(), ItemKind::Mac(mac), visibility, attrs); return Ok(Some(item)); @@ -604,12 +605,13 @@ impl<'a> Parser<'a> { self.expect(&token::Semi)?; } - Ok(Some(respan(lo.to(self.prev_span), Mac_ { + Ok(Some(Mac { path, tts, delim, + span: lo.to(self.prev_span), prior_type_ascription: self.last_type_ascription, - }))) + })) } else { Ok(None) } diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index 40aa8d7b46f..da44ebd8415 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -2,7 +2,7 @@ use super::{Parser, PResult, PathStyle}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use crate::ptr::P; -use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac_}; +use crate::ast::{self, Attribute, Pat, PatKind, FieldPat, RangeEnd, RangeSyntax, Mac}; use crate::ast::{BindingMode, Ident, Mutability, Path, QSelf, Expr, ExprKind}; 
use crate::parse::token::{self}; use crate::print::pprust; @@ -275,12 +275,13 @@ impl<'a> Parser<'a> { fn parse_pat_mac_invoc(&mut self, lo: Span, path: Path) -> PResult<'a, PatKind> { self.bump(); let (delim, tts) = self.expect_delimited_token_tree()?; - let mac = respan(lo.to(self.prev_span), Mac_ { + let mac = Mac { path, tts, delim, + span: lo.to(self.prev_span), prior_type_ascription: self.last_type_ascription, - }); + }; Ok(PatKind::Mac(mac)) } diff --git a/src/libsyntax/parse/parser/stmt.rs b/src/libsyntax/parse/parser/stmt.rs index 750d8fbbddc..c911caba4cd 100644 --- a/src/libsyntax/parse/parser/stmt.rs +++ b/src/libsyntax/parse/parser/stmt.rs @@ -5,7 +5,7 @@ use super::path::PathStyle; use crate::ptr::P; use crate::{maybe_whole, ThinVec}; use crate::ast::{self, Stmt, StmtKind, Local, Block, BlockCheckMode, Expr, ExprKind}; -use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac_, MacDelimiter}; +use crate::ast::{Attribute, AttrStyle, VisibilityKind, MacStmtStyle, Mac, MacDelimiter}; use crate::ext::base::DummyResult; use crate::parse::{classify, DirectoryOwnership}; use crate::parse::diagnostics::Error; @@ -99,12 +99,13 @@ impl<'a> Parser<'a> { MacStmtStyle::NoBraces }; - let mac = respan(lo.to(hi), Mac_ { + let mac = Mac { path, tts, delim, + span: lo.to(hi), prior_type_ascription: self.last_type_ascription, - }); + }; let node = if delim == MacDelimiter::Brace || self.token == token::Semi || self.token == token::Eof { StmtKind::Mac(P((mac, style, attrs.into()))) diff --git a/src/libsyntax/parse/parser/ty.rs b/src/libsyntax/parse/parser/ty.rs index 1eb3d441e69..337702b8d30 100644 --- a/src/libsyntax/parse/parser/ty.rs +++ b/src/libsyntax/parse/parser/ty.rs @@ -4,9 +4,9 @@ use crate::{maybe_whole, maybe_recover_from_interpolated_ty_qpath}; use crate::ptr::P; use crate::ast::{self, Ty, TyKind, MutTy, BareFnTy, FunctionRetTy, GenericParam, Lifetime, Ident}; use crate::ast::{TraitBoundModifier, TraitObjectSyntax, GenericBound, GenericBounds, PolyTraitRef}; -use crate::ast::{Mutability, AnonConst, FnDecl, Mac_}; +use crate::ast::{Mutability, AnonConst, FnDecl, Mac}; use crate::parse::token::{self, Token}; -use crate::source_map::{respan, Span}; +use crate::source_map::Span; use crate::symbol::{kw}; use rustc_target::spec::abi::Abi; @@ -175,13 +175,14 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { // Macro invocation in type position let (delim, tts) = self.expect_delimited_token_tree()?; - let node = Mac_ { + let mac = Mac { path, tts, delim, + span: lo.to(self.prev_span), prior_type_ascription: self.last_type_ascription, }; - TyKind::Mac(respan(lo.to(self.prev_span), node)) + TyKind::Mac(mac) } else { // Just a type path or bound list (trait object type) starting with a trait. 
// `Type` diff --git a/src/libsyntax/parse/tests.rs b/src/libsyntax/parse/tests.rs index 443a6434b78..9edc83a3594 100644 --- a/src/libsyntax/parse/tests.rs +++ b/src/libsyntax/parse/tests.rs @@ -273,7 +273,7 @@ fn ttdelim_span() { "foo!( fn main() { body } )".to_string(), &sess).unwrap(); let tts: Vec<_> = match expr.node { - ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(), + ast::ExprKind::Mac(ref mac) => mac.stream().trees().collect(), _ => panic!("not a macro"), }; diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 8b97ec3da0b..fabbe1ede1b 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1067,7 +1067,7 @@ impl<'a> State<'a> { } ast::ForeignItemKind::Macro(ref m) => { self.print_mac(m); - match m.node.delim { + match m.delim { MacDelimiter::Brace => {}, _ => self.s.word(";") } @@ -1341,7 +1341,7 @@ impl<'a> State<'a> { } ast::ItemKind::Mac(ref mac) => { self.print_mac(mac); - match mac.node.delim { + match mac.delim { MacDelimiter::Brace => {} _ => self.s.word(";"), } @@ -1554,7 +1554,7 @@ impl<'a> State<'a> { } ast::TraitItemKind::Macro(ref mac) => { self.print_mac(mac); - match mac.node.delim { + match mac.delim { MacDelimiter::Brace => {} _ => self.s.word(";"), } @@ -1591,7 +1591,7 @@ impl<'a> State<'a> { } ast::ImplItemKind::Macro(ref mac) => { self.print_mac(mac); - match mac.node.delim { + match mac.delim { MacDelimiter::Brace => {} _ => self.s.word(";"), } @@ -1749,11 +1749,11 @@ impl<'a> State<'a> { crate fn print_mac(&mut self, m: &ast::Mac) { self.print_mac_common( - Some(MacHeader::Path(&m.node.path)), + Some(MacHeader::Path(&m.path)), true, None, - m.node.delim.to_token(), - m.node.stream(), + m.delim.to_token(), + m.stream(), true, m.span, ); diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 41b8ef16665..8c06bf25eb5 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -663,7 +663,7 @@ pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) { } pub fn walk_mac<'a, V: Visitor<'a>>(visitor: &mut V, mac: &'a Mac) { - visitor.visit_path(&mac.node.path, DUMMY_NODE_ID); + visitor.visit_path(&mac.path, DUMMY_NODE_ID); } pub fn walk_anon_const<'a, V: Visitor<'a>>(visitor: &mut V, constant: &'a AnonConst) { diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs index e3ef39075e2..6301283460a 100644 --- a/src/libsyntax_ext/assert.rs +++ b/src/libsyntax_ext/assert.rs @@ -1,7 +1,6 @@ use errors::{Applicability, DiagnosticBuilder}; use syntax::ast::{self, *}; -use syntax::source_map::Spanned; use syntax::ext::base::*; use syntax::parse::token::{self, TokenKind}; use syntax::parse::parser::Parser; @@ -25,7 +24,7 @@ pub fn expand_assert<'cx>( }; let sp = sp.apply_mark(cx.current_expansion.id); - let panic_call = Mac_ { + let panic_call = Mac { path: Path::from_ident(Ident::new(sym::panic, sp)), tts: custom_message.unwrap_or_else(|| { TokenStream::from(TokenTree::token( @@ -37,6 +36,7 @@ pub fn expand_assert<'cx>( )) }).into(), delim: MacDelimiter::Parenthesis, + span: sp, prior_type_ascription: None, }; let if_expr = cx.expr_if( @@ -44,10 +44,7 @@ pub fn expand_assert<'cx>( cx.expr(sp, ExprKind::Unary(UnOp::Not, cond_expr)), cx.expr( sp, - ExprKind::Mac(Spanned { - span: sp, - node: panic_call, - }), + ExprKind::Mac(panic_call), ), None, ); -- cgit 1.4.1-3-g733a5 From a6182711efe32d4dd68da2663129e3e2e462d8cb Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Thu, 15 Aug 2019 02:35:36 +0300 Subject: Remove `Spanned` from 
`{ast,hir}::FieldPat` --- src/librustc/cfg/construct.rs | 2 +- src/librustc/hir/intravisit.rs | 6 +++--- src/librustc/hir/lowering.rs | 16 ++++++---------- src/librustc/hir/mod.rs | 5 +++-- src/librustc/hir/print.rs | 8 ++++---- src/librustc/ich/impls_hir.rs | 4 ---- src/librustc/middle/dead.rs | 10 ++++------ src/librustc/middle/liveness.rs | 4 ++-- src/librustc/middle/mem_categorization.rs | 8 ++++---- src/librustc/middle/region.rs | 2 +- src/librustc_lint/builtin.rs | 6 +++--- src/librustc_mir/hair/pattern/mod.rs | 4 ++-- src/librustc_privacy/lib.rs | 4 ++-- src/librustc_save_analysis/dump_visitor.rs | 4 ++-- src/librustc_typeck/check/_match.rs | 10 +++++----- src/librustc_typeck/check/writeback.rs | 2 +- src/librustdoc/clean/mod.rs | 5 ++--- src/libsyntax/ast.rs | 5 +++-- src/libsyntax/ext/build.rs | 2 +- src/libsyntax/mut_visit.rs | 5 +---- src/libsyntax/parse/parser/pat.rs | 22 ++++++++-------------- src/libsyntax/print/pprust.rs | 8 ++++---- src/libsyntax/visit.rs | 6 +++--- src/libsyntax_ext/deriving/generic/mod.rs | 16 +++++++--------- 24 files changed, 72 insertions(+), 92 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs index ca852fe7622..7ada56cfa76 100644 --- a/src/librustc/cfg/construct.rs +++ b/src/librustc/cfg/construct.rs @@ -136,7 +136,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } PatKind::Struct(_, ref subpats, _) => { - let pats_exit = self.pats_all(subpats.iter().map(|f| &f.node.pat), pred); + let pats_exit = self.pats_all(subpats.iter().map(|f| &f.pat), pred); self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) } diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index 71d7464540a..99fe9f1682f 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -704,9 +704,9 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { PatKind::Struct(ref qpath, ref fields, _) => { visitor.visit_qpath(qpath, pattern.hir_id, pattern.span); for field in fields { - visitor.visit_id(field.node.hir_id); - visitor.visit_ident(field.node.ident); - visitor.visit_pat(&field.node.pat) + visitor.visit_id(field.hir_id); + visitor.visit_ident(field.ident); + visitor.visit_pat(&field.pat) } } PatKind::Tuple(ref tuple_elements, _) => { diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index a1f38d7dd13..9e5d6378c40 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -2691,16 +2691,12 @@ impl<'a> LoweringContext<'a> { let fs = fields .iter() - .map(|f| { - Spanned { - span: f.span, - node: hir::FieldPat { - hir_id: self.next_id(), - ident: f.node.ident, - pat: self.lower_pat(&f.node.pat), - is_shorthand: f.node.is_shorthand, - }, - } + .map(|f| hir::FieldPat { + hir_id: self.next_id(), + ident: f.ident, + pat: self.lower_pat(&f.pat), + is_shorthand: f.is_shorthand, + span: f.span, }) .collect(); hir::PatKind::Struct(qpath, fs, etc) diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index dccc2774f52..7c2f9907217 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -877,7 +877,7 @@ impl Pat { match self.node { PatKind::Binding(.., Some(ref p)) => p.walk_(it), PatKind::Struct(_, ref fields, _) => { - fields.iter().all(|field| field.node.pat.walk_(it)) + fields.iter().all(|field| field.pat.walk_(it)) } PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => { s.iter().all(|p| p.walk_(it)) @@ -923,6 +923,7 @@ pub struct FieldPat { /// The pattern the field is destructured to. 
pub pat: P, pub is_shorthand: bool, + pub span: Span, } /// Explicit binding annotations given in the HIR for a binding. Note @@ -968,7 +969,7 @@ pub enum PatKind { /// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`). /// The `bool` is `true` in the presence of a `..`. - Struct(QPath, HirVec>, bool), + Struct(QPath, HirVec, bool), /// A tuple struct/variant pattern `Variant(x, y, .., z)`. /// If the `..` pattern fragment is present, then `Option` denotes its position. diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 0cbfe4d75f1..caf8220bbf4 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -1670,14 +1670,14 @@ impl<'a> State<'a> { &fields[..], |s, f| { s.cbox(INDENT_UNIT); - if !f.node.is_shorthand { - s.print_ident(f.node.ident); + if !f.is_shorthand { + s.print_ident(f.ident); s.word_nbsp(":"); } - s.print_pat(&f.node.pat); + s.print_pat(&f.pat); s.end() }, - |f| f.node.pat.span); + |f| f.pat.span); if etc { if !fields.is_empty() { self.word_space(","); diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs index 1fd4e00de49..60b338010b0 100644 --- a/src/librustc/ich/impls_hir.rs +++ b/src/librustc/ich/impls_hir.rs @@ -153,8 +153,6 @@ impl<'a> HashStable> for hir::Ty { } } -impl_stable_hash_for_spanned!(hir::FieldPat); - impl_stable_hash_for_spanned!(hir::BinOpKind); impl_stable_hash_for!(struct hir::Stmt { @@ -187,8 +185,6 @@ impl<'a> HashStable> for hir::Expr { impl_stable_hash_for_spanned!(usize); -impl_stable_hash_for_spanned!(ast::Ident); - impl_stable_hash_for!(struct ast::Ident { name, span, diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 1f3adf00923..8ce8bb52566 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -17,8 +17,7 @@ use crate::util::nodemap::FxHashSet; use rustc_data_structures::fx::FxHashMap; -use syntax::{ast, source_map}; -use syntax::attr; +use syntax::{ast, attr}; use syntax::symbol::sym; use syntax_pos; @@ -119,17 +118,16 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { } } - fn handle_field_pattern_match(&mut self, lhs: &hir::Pat, res: Res, - pats: &[source_map::Spanned]) { + fn handle_field_pattern_match(&mut self, lhs: &hir::Pat, res: Res, pats: &[hir::FieldPat]) { let variant = match self.tables.node_type(lhs.hir_id).sty { ty::Adt(adt, _) => adt.variant_of_res(res), _ => span_bug!(lhs.span, "non-ADT in struct pattern") }; for pat in pats { - if let PatKind::Wild = pat.node.pat.node { + if let PatKind::Wild = pat.pat.node { continue; } - let index = self.tcx.field_index(pat.node.hir_id, self.tables); + let index = self.tcx.field_index(pat.hir_id, self.tables); self.insert_def_id(variant.fields[index].did); } } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index daf0d8103a2..9c9e8c0bca3 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -418,8 +418,8 @@ fn add_from_pat<'tcx>(ir: &mut IrMaps<'tcx>, pat: &P) { } Struct(_, ref fields, _) => { for field in fields { - if field.node.is_shorthand { - shorthand_field_ids.insert(field.node.pat.hir_id); + if field.is_shorthand { + shorthand_field_ids.insert(field.pat.hir_id); } } } diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 77d6f393244..a55803e255b 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -1282,11 +1282,11 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { }; for fp in field_pats { - let 
field_ty = self.pat_ty_adjusted(&fp.node.pat)?; // see (*2) - let f_index = self.tcx.field_index(fp.node.hir_id, self.tables); + let field_ty = self.pat_ty_adjusted(&fp.pat)?; // see (*2) + let f_index = self.tcx.field_index(fp.hir_id, self.tables); let cmt_field = Rc::new(self.cat_field(pat, cmt.clone(), f_index, - fp.node.ident, field_ty)); - self.cat_pattern_(cmt_field, &fp.node.pat, op)?; + fp.ident, field_ty)); + self.cat_pattern_(cmt_field, &fp.pat, op)?; } } diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 88c19715811..3d100d2fbf8 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -1207,7 +1207,7 @@ fn resolve_local<'tcx>( PatKind::Binding(hir::BindingAnnotation::RefMut, ..) => true, PatKind::Struct(_, ref field_pats, _) => { - field_pats.iter().any(|fp| is_binding_pat(&fp.node.pat)) + field_pats.iter().any(|fp| is_binding_pat(&fp.pat)) } PatKind::Slice(ref pats1, ref pats2, ref pats3) => { diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index c34828a6b45..47b4e7c9487 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -164,7 +164,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns { .expect("struct pattern type is not an ADT") .variant_of_res(cx.tables.qpath_res(qpath, pat.hir_id)); for fieldpat in field_pats { - if fieldpat.node.is_shorthand { + if fieldpat.is_shorthand { continue; } if fieldpat.span.ctxt().outer_expn_info().is_some() { @@ -173,9 +173,9 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns { // (Issue #49588) continue; } - if let PatKind::Binding(_, _, ident, None) = fieldpat.node.pat.node { + if let PatKind::Binding(_, _, ident, None) = fieldpat.pat.node { if cx.tcx.find_field_index(ident, &variant) == - Some(cx.tcx.field_index(fieldpat.node.hir_id, cx.tables)) { + Some(cx.tcx.field_index(fieldpat.hir_id, cx.tables)) { let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, &format!("the `{}:` in this pattern is redundant", ident)); diff --git a/src/librustc_mir/hair/pattern/mod.rs b/src/librustc_mir/hair/pattern/mod.rs index 5ecfb84b632..10223151f5c 100644 --- a/src/librustc_mir/hair/pattern/mod.rs +++ b/src/librustc_mir/hair/pattern/mod.rs @@ -645,9 +645,9 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { fields.iter() .map(|field| { FieldPattern { - field: Field::new(self.tcx.field_index(field.node.hir_id, + field: Field::new(self.tcx.field_index(field.hir_id, self.tables)), - pattern: self.lower_pattern(&field.node.pat), + pattern: self.lower_pattern(&field.pat), } }) .collect(); diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 852b4898f4a..bca77621e55 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -1075,8 +1075,8 @@ impl<'a, 'tcx> Visitor<'tcx> for NamePrivacyVisitor<'a, 'tcx> { let adt = self.tables.pat_ty(pat).ty_adt_def().unwrap(); let variant = adt.variant_of_res(res); for field in fields { - let use_ctxt = field.node.ident.span; - let index = self.tcx.field_index(field.node.hir_id, self.tables); + let use_ctxt = field.ident.span; + let index = self.tcx.field_index(field.hir_id, self.tables); self.check_field(use_ctxt, field.span, adt, &variant.fields[index]); } } diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index d93c12513c5..9068605b075 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -32,7 +32,7 @@ use 
syntax::print::pprust::{ ty_to_string }; use syntax::ptr::P; -use syntax::source_map::{Spanned, DUMMY_SP, respan}; +use syntax::source_map::{DUMMY_SP, respan}; use syntax::walk_list; use syntax_pos::*; @@ -879,7 +879,7 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> { }; let variant = adt.variant_of_res(self.save_ctxt.get_path_res(p.id)); - for &Spanned { node: ref field, .. } in fields { + for field in fields { if let Some(index) = self.tcx.find_field_index(field.ident, variant) { if !self.span.filter_generated(field.ident.span) { let span = self.span_from_span(field.ident.span); diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 3a43e764dd0..99ae777bb63 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -12,7 +12,6 @@ use rustc::traits::{ObligationCause, ObligationCauseCode}; use rustc::ty::{self, Ty, TypeFoldable}; use rustc::ty::subst::Kind; use syntax::ast; -use syntax::source_map::Spanned; use syntax::util::lev_distance::find_best_match_for_name; use syntax_pos::Span; use syntax_pos::hygiene::DesugaringKind; @@ -1036,7 +1035,7 @@ https://doc.rust-lang.org/reference/types.html#trait-objects"); &self, pat: &'tcx hir::Pat, qpath: &hir::QPath, - fields: &'tcx [Spanned], + fields: &'tcx [hir::FieldPat], etc: bool, expected: Ty<'tcx>, def_bm: ty::BindingMode, @@ -1048,7 +1047,7 @@ https://doc.rust-lang.org/reference/types.html#trait-objects"); variant_ty } else { for field in fields { - self.check_pat_walk(&field.node.pat, self.tcx.types.err, def_bm, discrim_span); + self.check_pat_walk(&field.pat, self.tcx.types.err, def_bm, discrim_span); } return self.tcx.types.err; }; @@ -1206,7 +1205,7 @@ https://doc.rust-lang.org/reference/types.html#trait-objects"); pat_id: hir::HirId, span: Span, variant: &'tcx ty::VariantDef, - fields: &'tcx [Spanned], + fields: &'tcx [hir::FieldPat], etc: bool, def_bm: ty::BindingMode, ) -> bool { @@ -1231,7 +1230,8 @@ https://doc.rust-lang.org/reference/types.html#trait-objects"); let mut inexistent_fields = vec![]; // Typecheck each field. - for &Spanned { node: ref field, span } in fields { + for field in fields { + let span = field.span; let ident = tcx.adjust_ident(field.ident, variant.def_id); let field_ty = match used_fields.entry(ident) { Occupied(occupied) => { diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 67a8ecaf1da..a88e32eb34d 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -283,7 +283,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> { } hir::PatKind::Struct(_, ref fields, _) => { for field in fields { - self.visit_field_id(field.node.hir_id); + self.visit_field_id(field.hir_id); } } _ => {} diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 983adc0eae8..d5becd2e1a9 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -29,7 +29,7 @@ use rustc::util::nodemap::{FxHashMap, FxHashSet}; use syntax::ast::{self, AttrStyle, Ident}; use syntax::attr; use syntax::ext::base::MacroKind; -use syntax::source_map::{DUMMY_SP, Spanned}; +use syntax::source_map::DUMMY_SP; use syntax::symbol::{Symbol, kw, sym}; use syntax::symbol::InternedString; use syntax_pos::{self, Pos, FileName}; @@ -4102,8 +4102,7 @@ fn name_from_pat(p: &hir::Pat) -> String { PatKind::TupleStruct(ref p, ..) 
| PatKind::Path(ref p) => qpath_to_string(p), PatKind::Struct(ref name, ref fields, etc) => { format!("{} {{ {}{} }}", qpath_to_string(name), - fields.iter().map(|&Spanned { node: ref fp, .. }| - format!("{}: {}", fp.ident, name_from_pat(&*fp.pat))) + fields.iter().map(|fp| format!("{}: {}", fp.ident, name_from_pat(&fp.pat))) .collect::>().join(", "), if etc { ", .." } else { "" } ) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index b8bfa671bcf..3ae37f734b7 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -571,7 +571,7 @@ impl Pat { match &self.node { PatKind::Ident(_, _, Some(p)) => p.walk(it), - PatKind::Struct(_, fields, _) => fields.iter().all(|field| field.node.pat.walk(it)), + PatKind::Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk(it)), PatKind::TupleStruct(_, s) | PatKind::Tuple(s) | PatKind::Slice(s) => { s.iter().all(|p| p.walk(it)) } @@ -609,6 +609,7 @@ pub struct FieldPat { pub is_shorthand: bool, pub attrs: ThinVec, pub id: NodeId, + pub span: Span, } #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)] @@ -642,7 +643,7 @@ pub enum PatKind { /// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`). /// The `bool` is `true` in the presence of a `..`. - Struct(Path, Vec>, /* recovered */ bool), + Struct(Path, Vec, /* recovered */ bool), /// A tuple struct/variant pattern (`Variant(x, y, .., z)`). TupleStruct(Path, Vec>), diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index f18cf86243e..38f46ee207c 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -575,7 +575,7 @@ impl<'a> ExtCtxt<'a> { self.pat(span, PatKind::TupleStruct(path, subpats)) } pub fn pat_struct(&self, span: Span, path: ast::Path, - field_pats: Vec>) -> P { + field_pats: Vec) -> P { self.pat(span, PatKind::Struct(path, field_pats, false)) } pub fn pat_tuple(&self, span: Span, pats: Vec>) -> P { diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index d22c05d9b2e..acafe327640 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -1042,10 +1042,7 @@ pub fn noop_visit_pat(pat: &mut P, vis: &mut T) { } PatKind::Struct(path, fields, _etc) => { vis.visit_path(path); - for Spanned { - node: FieldPat { ident, pat, is_shorthand: _, attrs, id }, - span - } in fields { + for FieldPat { ident, pat, is_shorthand: _, attrs, id, span } in fields { vis.visit_ident(ident); vis.visit_id(id); vis.visit_pat(pat); diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index da44ebd8415..c3079d2da0c 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -488,7 +488,7 @@ impl<'a> Parser<'a> { } /// Parses the fields of a struct-like pattern. - fn parse_pat_fields(&mut self) -> PResult<'a, (Vec>, bool)> { + fn parse_pat_fields(&mut self) -> PResult<'a, (Vec, bool)> { let mut fields = Vec::new(); let mut etc = false; let mut ate_comma = true; @@ -620,11 +620,7 @@ impl<'a> Parser<'a> { .emit(); } - fn parse_pat_field( - &mut self, - lo: Span, - attrs: Vec - ) -> PResult<'a, Spanned> { + fn parse_pat_field(&mut self, lo: Span, attrs: Vec) -> PResult<'a, FieldPat> { // Check if a colon exists one ahead. This means we're parsing a fieldname. 
let hi; let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) { @@ -659,15 +655,13 @@ impl<'a> Parser<'a> { (subpat, fieldname, true) }; - Ok(Spanned { + Ok(FieldPat { + ident: fieldname, + pat: subpat, + is_shorthand, + attrs: attrs.into(), + id: ast::DUMMY_NODE_ID, span: lo.to(hi), - node: FieldPat { - ident: fieldname, - pat: subpat, - is_shorthand, - attrs: attrs.into(), - id: ast::DUMMY_NODE_ID, - } }) } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index fabbe1ede1b..5955b913842 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -2367,14 +2367,14 @@ impl<'a> State<'a> { Consistent, &fields[..], |s, f| { s.cbox(INDENT_UNIT); - if !f.node.is_shorthand { - s.print_ident(f.node.ident); + if !f.is_shorthand { + s.print_ident(f.ident); s.word_nbsp(":"); } - s.print_pat(&f.node.pat); + s.print_pat(&f.pat); s.end(); }, - |f| f.node.pat.span); + |f| f.pat.span); if etc { if !fields.is_empty() { self.word_space(","); } self.s.word(".."); diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 8c06bf25eb5..6648347d4ae 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -442,9 +442,9 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) { PatKind::Struct(ref path, ref fields, _) => { visitor.visit_path(path, pattern.id); for field in fields { - walk_list!(visitor, visit_attribute, field.node.attrs.iter()); - visitor.visit_ident(field.node.ident); - visitor.visit_pat(&field.node.pat) + walk_list!(visitor, visit_attribute, field.attrs.iter()); + visitor.visit_ident(field.ident); + visitor.visit_pat(&field.pat) } } PatKind::Tuple(ref elems) => { diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index a454593bc65..d080dc37a92 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -187,7 +187,7 @@ use syntax::ast::{self, BinOpKind, EnumDef, Expr, Generics, Ident, PatKind}; use syntax::ast::{VariantData, GenericParamKind, GenericArg}; use syntax::attr; use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives}; -use syntax::source_map::{self, respan}; +use syntax::source_map::respan; use syntax::util::map_in_place::MapInPlace; use syntax::ptr::P; use syntax::symbol::{Symbol, kw, sym}; @@ -1610,15 +1610,13 @@ impl<'a> TraitDef<'a> { if ident.is_none() { cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); } - source_map::Spanned { + ast::FieldPat { + ident: ident.unwrap(), + is_shorthand: false, + attrs: ThinVec::new(), + id: ast::DUMMY_NODE_ID, span: pat.span.with_ctxt(self.span.ctxt()), - node: ast::FieldPat { - id: ast::DUMMY_NODE_ID, - ident: ident.unwrap(), - pat, - is_shorthand: false, - attrs: ThinVec::new(), - }, + pat, } }) .collect(); -- cgit 1.4.1-3-g733a5 From dfcbe75900f2cb813754ef104526ebce568fd75b Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sun, 11 Aug 2019 01:08:30 +0300 Subject: syntax_pos: Introduce a helper for checking whether a span comes from expansion --- src/librustc/lint/internal.rs | 2 +- src/librustc_codegen_ssa/mir/mod.rs | 4 ++-- src/librustc_lint/builtin.rs | 8 ++++---- src/librustc_lint/unused.rs | 2 +- src/librustc_resolve/lib.rs | 4 ++-- src/librustc_save_analysis/lib.rs | 2 +- src/librustc_typeck/check/demand.rs | 6 +++--- src/librustc_typeck/check/method/suggest.rs | 4 ++-- src/libsyntax/parse/parser.rs | 2 +- src/libsyntax_pos/hygiene.rs | 2 +- src/libsyntax_pos/lib.rs | 6 ++++++ 
src/libsyntax_pos/symbol.rs | 3 +-- 12 files changed, 25 insertions(+), 20 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/lint/internal.rs b/src/librustc/lint/internal.rs index dea1cc6601b..d9ad34a5297 100644 --- a/src/librustc/lint/internal.rs +++ b/src/librustc/lint/internal.rs @@ -108,7 +108,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TyTyKind { .help("try using `Ty` instead") .emit(); } else { - if ty.span.ctxt().outer_expn_info().is_some() { + if ty.span.from_expansion() { return; } if let Some(t) = is_ty_or_ty_ctxt(cx, ty) { diff --git a/src/librustc_codegen_ssa/mir/mod.rs b/src/librustc_codegen_ssa/mir/mod.rs index e7517d69991..32bcdebc1c4 100644 --- a/src/librustc_codegen_ssa/mir/mod.rs +++ b/src/librustc_codegen_ssa/mir/mod.rs @@ -8,7 +8,7 @@ use crate::base; use crate::debuginfo::{self, VariableAccess, VariableKind, FunctionDebugContext}; use crate::traits::*; -use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span}; +use syntax_pos::{DUMMY_SP, BytePos, Span}; use syntax::symbol::kw; use std::iter; @@ -120,7 +120,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // In order to have a good line stepping behavior in debugger, we overwrite debug // locations of macro expansions with that of the outermost expansion site // (unless the crate is being compiled with `-Z debug-macros`). - if source_info.span.ctxt() == NO_EXPANSION || + if !source_info.span.from_expansion() || self.cx.sess().opts.debugging_opts.debug_macros { let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo()); (scope, source_info.span) diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 47b4e7c9487..82160080a44 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -42,7 +42,7 @@ use syntax::source_map::Spanned; use syntax::edition::Edition; use syntax::feature_gate::{self, AttributeGate, AttributeType}; use syntax::feature_gate::{Stability, deprecated_attributes}; -use syntax_pos::{BytePos, Span, SyntaxContext}; +use syntax_pos::{BytePos, Span}; use syntax::symbol::{Symbol, kw, sym}; use syntax::errors::{Applicability, DiagnosticBuilder}; use syntax::print::pprust::expr_to_string; @@ -78,7 +78,7 @@ impl EarlyLintPass for WhileTrue { if let ast::ExprKind::While(cond, ..) = &e.node { if let ast::ExprKind::Lit(ref lit) = pierce_parens(cond).node { if let ast::LitKind::Bool(true) = lit.node { - if lit.span.ctxt() == SyntaxContext::empty() { + if !lit.span.from_expansion() { let msg = "denote infinite loops with `loop { ... 
}`"; let condition_span = cx.sess.source_map().def_span(e.span); cx.struct_span_lint(WHILE_TRUE, condition_span, msg) @@ -167,7 +167,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonShorthandFieldPatterns { if fieldpat.is_shorthand { continue; } - if fieldpat.span.ctxt().outer_expn_info().is_some() { + if fieldpat.span.from_expansion() { // Don't lint if this is a macro expansion: macro authors // shouldn't have to worry about this kind of style issue // (Issue #49588) @@ -1012,7 +1012,7 @@ impl UnreachablePub { let mut applicability = Applicability::MachineApplicable; match vis.node { hir::VisibilityKind::Public if !cx.access_levels.is_reachable(id) => { - if span.ctxt().outer_expn_info().is_some() { + if span.from_expansion() { applicability = Applicability::MaybeIncorrect; } let def_span = cx.tcx.sess.source_map().def_span(span); diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index 6a3dfdbe316..9cad8f58d41 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -518,7 +518,7 @@ impl EarlyLintPass for UnusedParens { // when a parenthesized token tree matched in one macro expansion is matched as // an expression in another and used as a fn/method argument (Issue #47775) if e.span.ctxt().outer_expn_info() - .map_or(false, |info| info.call_site.ctxt().outer_expn_info().is_some()) { + .map_or(false, |info| info.call_site.from_expansion()) { return; } let msg = format!("{} argument", call_kind); diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 8a4a60c16b0..f0916c2ff3e 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -274,7 +274,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder { ItemKind::Use(..) => { // don't suggest placing a use before the prelude // import or other generated ones - if item.span.ctxt().outer_expn_info().is_none() { + if !item.span.from_expansion() { self.span = Some(item.span.shrink_to_lo()); self.found_use = true; return; @@ -284,7 +284,7 @@ impl<'tcx> Visitor<'tcx> for UsePlacementFinder { ItemKind::ExternCrate(_) => {} // but place them before the first other item _ => if self.span.map_or(true, |span| item.span < span ) { - if item.span.ctxt().outer_expn_info().is_none() { + if !item.span.from_expansion() { // don't insert between attributes and an item if item.attrs.is_empty() { self.span = Some(item.span.shrink_to_lo()); diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 9da6cd80057..0bbbbb8249c 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -1156,7 +1156,7 @@ fn escape(s: String) -> String { // Helper function to determine if a span came from a // macro expansion or syntax extension. fn generated_code(span: Span) -> bool { - span.ctxt() != NO_EXPANSION || span.is_dummy() + span.from_expansion() || span.is_dummy() } // DefId::index is a newtype and so the JSON serialisation is ugly. Therefore diff --git a/src/librustc_typeck/check/demand.rs b/src/librustc_typeck/check/demand.rs index ed25601208a..c72966edc5a 100644 --- a/src/librustc_typeck/check/demand.rs +++ b/src/librustc_typeck/check/demand.rs @@ -347,9 +347,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { sp, ); - // Check the `expn_info()` to see if this is a macro; if so, it's hard to - // extract the text and make a good suggestion, so don't bother. - let is_macro = sp.ctxt().outer_expn_info().is_some(); + // If the span is from a macro, then it's hard to extract the text + // and make a good suggestion, so don't bother. 
+ let is_macro = sp.from_expansion(); match (&expr.node, &expected.sty, &checked_ty.sty) { (_, &ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (&exp.sty, &check.sty) { diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index 4a5eba1df88..53024d97c3b 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -985,7 +985,7 @@ impl hir::intravisit::Visitor<'tcx> for UsePlacementFinder<'tcx> { hir::ItemKind::Use(..) => { // Don't suggest placing a `use` before the prelude // import or other generated ones. - if item.span.ctxt().outer_expn_info().is_none() { + if !item.span.from_expansion() { self.span = Some(item.span.shrink_to_lo()); self.found_use = true; return; @@ -995,7 +995,7 @@ impl hir::intravisit::Visitor<'tcx> for UsePlacementFinder<'tcx> { hir::ItemKind::ExternCrate(_) => {} // ...but do place them before the first other item. _ => if self.span.map_or(true, |span| item.span < span ) { - if item.span.ctxt().outer_expn_info().is_none() { + if !item.span.from_expansion() { // Don't insert between attributes and an item. if item.attrs.is_empty() { self.span = Some(item.span.shrink_to_lo()); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 2286e74e633..3b0af88f651 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1101,7 +1101,7 @@ impl<'a> Parser<'a> { crate fn process_potential_macro_variable(&mut self) { self.token = match self.token.kind { - token::Dollar if self.token.span.ctxt() != SyntaxContext::empty() && + token::Dollar if self.token.span.from_expansion() && self.look_ahead(1, |t| t.is_ident()) => { self.bump(); let name = match self.token.kind { diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index f91a2291544..4132b99cf41 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -255,7 +255,7 @@ impl HygieneData { } fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span { - while span.ctxt() != crate::NO_EXPANSION && span.ctxt() != to { + while span.from_expansion() && span.ctxt() != to { if let Some(info) = self.expn_info(self.outer_expn(span.ctxt())) { span = info.call_site; } else { diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 02a7433d946..793710b453f 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -288,6 +288,12 @@ impl Span { span.lo.0 == 0 && span.hi.0 == 0 } + /// Returns `true` if this span comes from a macro or desugaring. 
+ #[inline] + pub fn from_expansion(self) -> bool { + self.ctxt() != SyntaxContext::empty() + } + /// Returns a new span representing an empty span at the beginning of this span #[inline] pub fn shrink_to_lo(self) -> Span { diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index 2d9556233d1..6f5a458a874 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -14,7 +14,6 @@ use std::fmt; use std::hash::{Hash, Hasher}; use std::str; -use crate::hygiene::SyntaxContext; use crate::{Span, DUMMY_SP, GLOBALS}; #[cfg(test)] @@ -851,7 +850,7 @@ impl fmt::Display for Ident { impl Encodable for Ident { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - if self.span.ctxt().modern() == SyntaxContext::empty() { + if !self.span.modern().from_expansion() { s.emit_str(&self.as_str()) } else { // FIXME(jseyfried): intercrate hygiene let mut string = "#".to_owned(); -- cgit 1.4.1-3-g733a5 From 67d6ce42063732d7c7b12d94f872dcafb5efb607 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sun, 11 Aug 2019 01:44:55 +0300 Subject: syntax_pos: `NO_EXPANSION`/`SyntaxContext::empty()` -> `SyntaxContext::root()` For consistency with `ExpnId::root`. Also introduce a helper `Span::with_root_ctxt` for creating spans with `SyntaxContext::root()` context --- src/librustc/ich/hcx.rs | 2 +- src/librustc/ty/query/on_disk_cache.rs | 4 ++-- src/librustc_errors/lib.rs | 5 ++--- src/librustc_metadata/cstore_impl.rs | 4 ++-- src/librustc_metadata/decoder.rs | 4 ++-- src/librustc_resolve/lib.rs | 2 +- src/libsyntax/ext/base.rs | 2 +- src/libsyntax/ext/expand.rs | 2 +- src/libsyntax/ext/proc_macro_server.rs | 2 +- src/libsyntax/parse/lexer/mod.rs | 4 ++-- src/libsyntax/parse/lexer/tests.rs | 6 +++--- src/libsyntax/parse/lexer/unicode_chars.rs | 7 +++---- src/libsyntax/parse/tests.rs | 4 ++-- src/libsyntax/source_map/tests.rs | 11 +++++------ src/libsyntax/tests.rs | 4 ++-- src/libsyntax/tokenstream/tests.rs | 4 ++-- src/libsyntax_ext/global_allocator.rs | 2 +- src/libsyntax_ext/test.rs | 4 ++-- src/libsyntax_pos/hygiene.rs | 12 ++++++------ src/libsyntax_pos/lib.rs | 21 ++++++++++++--------- 20 files changed, 53 insertions(+), 53 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index ae7d82c2020..39f6b0d4344 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -350,7 +350,7 @@ impl<'a> HashStable> for Span { let line_col_len = col | line | len; std_hash::Hash::hash(&line_col_len, hasher); - if span.ctxt == SyntaxContext::empty() { + if span.ctxt == SyntaxContext::root() { TAG_NO_EXPANSION.hash_stable(hcx, hasher); } else { TAG_EXPANSION.hash_stable(hcx, hasher); diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 00871a1cbf2..1c5baa638c2 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -592,7 +592,7 @@ impl<'a, 'tcx> SpecializedDecoder for CacheDecoder<'a, 'tcx> { // `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things // don't seem to be used after HIR lowering, so everything should be fine // as long as incremental compilation does not kick in before that. 
- let location = || Span::new(lo, hi, SyntaxContext::empty()); + let location = || Span::with_root_ctxt(lo, hi); let recover_from_expn_info = |this: &Self, expn_info, pos| { let span = location().fresh_expansion(ExpnId::root(), expn_info); this.synthetic_expansion_infos.borrow_mut().insert(pos, span.ctxt()); @@ -816,7 +816,7 @@ where col_lo.encode(self)?; len.encode(self)?; - if span_data.ctxt == SyntaxContext::empty() { + if span_data.ctxt == SyntaxContext::root() { TAG_NO_EXPANSION_INFO.encode(self) } else { let (expn_id, expn_info) = span_data.ctxt.outer_expn_with_info(); diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index f3e524152ff..4018a667bf2 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -43,8 +43,7 @@ use syntax_pos::{BytePos, SourceFile, FileName, MultiSpan, - Span, - NO_EXPANSION}; + Span}; /// Indicates the confidence in the correctness of a suggestion. /// @@ -189,7 +188,7 @@ impl CodeSuggestion { // Find the bounding span. let lo = substitution.parts.iter().map(|part| part.span.lo()).min().unwrap(); let hi = substitution.parts.iter().map(|part| part.span.hi()).min().unwrap(); - let bounding_span = Span::new(lo, hi, NO_EXPANSION); + let bounding_span = Span::with_root_ctxt(lo, hi); let lines = cm.span_to_lines(bounding_span).unwrap(); assert!(!lines.lines.is_empty()); diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index ee1175e798d..b46758abb5f 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -35,7 +35,7 @@ use syntax::ext::proc_macro::BangProcMacro; use syntax::parse::source_file_to_stream; use syntax::parse::parser::emit_unclosed_delims; use syntax::symbol::{Symbol, sym}; -use syntax_pos::{Span, NO_EXPANSION, FileName}; +use syntax_pos::{Span, FileName}; use rustc_data_structures::bit_set::BitSet; macro_rules! 
provide { @@ -443,7 +443,7 @@ impl cstore::CStore { let source_name = FileName::Macros(macro_full_name); let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body); - let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION); + let local_span = Span::with_root_ctxt(source_file.start_pos, source_file.end_pos); let (body, mut errors) = source_file_to_stream(&sess.parse_sess, source_file, None); emit_unclosed_delims(&mut errors, &sess.diagnostic()); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 935187dd066..3de9bf4da11 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -32,7 +32,7 @@ use syntax::source_map; use syntax::symbol::{Symbol, sym}; use syntax::ext::base::{MacroKind, SyntaxExtension}; use syntax::ext::hygiene::ExpnId; -use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION}; +use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP}; use log::debug; pub struct DecodeContext<'a, 'tcx> { @@ -344,7 +344,7 @@ impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { let hi = (hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos; - Ok(Span::new(lo, hi, NO_EXPANSION)) + Ok(Span::with_root_ctxt(lo, hi)) } } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index f0916c2ff3e..8b2e371f0f6 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -1430,7 +1430,7 @@ impl<'a> Resolver<'a> { } let (general_span, modern_span) = if ident.name == kw::SelfUpper { // FIXME(jseyfried) improve `Self` hygiene - let empty_span = ident.span.with_ctxt(SyntaxContext::empty()); + let empty_span = ident.span.with_ctxt(SyntaxContext::root()); (empty_span, empty_span) } else if ns == TypeNS { let modern_span = ident.span.modern(); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index edeca046c7b..d9fd31db4dd 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -762,7 +762,7 @@ impl<'a> ExtCtxt<'a> { } } pub fn backtrace(&self) -> SyntaxContext { - SyntaxContext::empty().apply_mark(self.current_expansion.id) + SyntaxContext::root().apply_mark(self.current_expansion.id) } /// Returns span for the macro which originally caused the current expansion to happen. diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 97983944931..aa409199afd 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -759,7 +759,7 @@ impl<'a> Parser<'a> { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); // Avoid emitting backtrace info twice. 
- let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty()); + let def_site_span = self.token.span.with_ctxt(SyntaxContext::root()); let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); err.span_label(span, "caused by the macro expansion here"); let msg = format!( diff --git a/src/libsyntax/ext/proc_macro_server.rs b/src/libsyntax/ext/proc_macro_server.rs index 36621ce7775..fd93910004e 100644 --- a/src/libsyntax/ext/proc_macro_server.rs +++ b/src/libsyntax/ext/proc_macro_server.rs @@ -365,7 +365,7 @@ impl<'a> Rustc<'a> { let location = cx.current_expansion.id.expn_info().unwrap().call_site; let to_span = |transparency| { location.with_ctxt( - SyntaxContext::empty() + SyntaxContext::root() .apply_mark_with_transparency(cx.current_expansion.id, transparency), ) }; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index e86d4c7fde6..17629d392cd 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -4,7 +4,7 @@ use crate::symbol::{sym, Symbol}; use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char}; use errors::{FatalError, DiagnosticBuilder}; -use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION}; +use syntax_pos::{BytePos, Pos, Span}; use rustc_lexer::Base; use rustc_lexer::unescape; @@ -84,7 +84,7 @@ impl<'a> StringReader<'a> { fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { - self.override_span.unwrap_or_else(|| Span::new(lo, hi, NO_EXPANSION)) + self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi)) } /// Returns the next token, including trivia like whitespace or comments. diff --git a/src/libsyntax/parse/lexer/tests.rs b/src/libsyntax/parse/lexer/tests.rs index fc47e4f0b18..1e4d9048b41 100644 --- a/src/libsyntax/parse/lexer/tests.rs +++ b/src/libsyntax/parse/lexer/tests.rs @@ -9,7 +9,7 @@ use crate::diagnostics::plugin::ErrorMap; use crate::with_default_globals; use std::io; use std::path::PathBuf; -use syntax_pos::{BytePos, Span, NO_EXPANSION, edition::Edition}; +use syntax_pos::{BytePos, Span, edition::Edition}; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use rustc_data_structures::sync::{Lock, Once}; @@ -61,7 +61,7 @@ fn t1() { let tok1 = string_reader.next_token(); let tok2 = Token::new( mk_ident("fn"), - Span::new(BytePos(21), BytePos(23), NO_EXPANSION), + Span::with_root_ctxt(BytePos(21), BytePos(23)), ); assert_eq!(tok1.kind, tok2.kind); assert_eq!(tok1.span, tok2.span); @@ -71,7 +71,7 @@ fn t1() { assert_eq!(string_reader.pos.clone(), BytePos(28)); let tok4 = Token::new( mk_ident("main"), - Span::new(BytePos(24), BytePos(28), NO_EXPANSION), + Span::with_root_ctxt(BytePos(24), BytePos(28)), ); assert_eq!(tok3.kind, tok4.kind); assert_eq!(tok3.span, tok4.span); diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs index eaa736c6a35..525b4215aff 100644 --- a/src/libsyntax/parse/lexer/unicode_chars.rs +++ b/src/libsyntax/parse/lexer/unicode_chars.rs @@ -3,7 +3,7 @@ use super::StringReader; use errors::{Applicability, DiagnosticBuilder}; -use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION, symbol::kw}; +use syntax_pos::{BytePos, Pos, Span, symbol::kw}; use crate::parse::token; #[rustfmt::skip] // for line breaks @@ -343,7 +343,7 @@ crate fn check_for_substitution<'a>( None => return None, }; - let span = Span::new(pos, pos + Pos::from_usize(ch.len_utf8()), NO_EXPANSION); + let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8())); let (ascii_name, token) = match 
ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) { Some((_ascii_char, ascii_name, token)) => (ascii_name, token), @@ -362,10 +362,9 @@ crate fn check_for_substitution<'a>( ascii_char, ascii_name ); err.span_suggestion( - Span::new( + Span::with_root_ctxt( pos, pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()), - NO_EXPANSION, ), &msg, format!("\"{}\"", s), diff --git a/src/libsyntax/parse/tests.rs b/src/libsyntax/parse/tests.rs index 9edc83a3594..6a789ef99d6 100644 --- a/src/libsyntax/parse/tests.rs +++ b/src/libsyntax/parse/tests.rs @@ -12,7 +12,7 @@ use crate::symbol::{kw, sym}; use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse}; use crate::tokenstream::{DelimSpan, TokenTree, TokenStream}; use crate::with_default_globals; -use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION}; +use syntax_pos::{Span, BytePos, Pos}; use std::path::PathBuf; @@ -27,7 +27,7 @@ fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess) // produce a syntax_pos::span fn sp(a: u32, b: u32) -> Span { - Span::new(BytePos(a), BytePos(b), NO_EXPANSION) + Span::with_root_ctxt(BytePos(a), BytePos(b)) } /// Parse a string, return an expr diff --git a/src/libsyntax/source_map/tests.rs b/src/libsyntax/source_map/tests.rs index 427e86b56e1..c7b8332c53e 100644 --- a/src/libsyntax/source_map/tests.rs +++ b/src/libsyntax/source_map/tests.rs @@ -91,7 +91,7 @@ fn t6() { fn t7() { // Test span_to_lines for a span ending at the end of source_file let sm = init_source_map(); - let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); + let span = Span::with_root_ctxt(BytePos(12), BytePos(23)); let file_lines = sm.span_to_lines(span).unwrap(); assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into()); @@ -107,7 +107,7 @@ fn span_from_selection(input: &str, selection: &str) -> Span { assert_eq!(input.len(), selection.len()); let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); - Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION) + Span::with_root_ctxt(BytePos(left_index), BytePos(right_index + 1)) } /// Tests span_to_snippet and span_to_lines for a span converting 3 @@ -137,7 +137,7 @@ fn span_to_snippet_and_lines_spanning_multiple_lines() { fn t8() { // Test span_to_snippet for a span ending at the end of source_file let sm = init_source_map(); - let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); + let span = Span::with_root_ctxt(BytePos(12), BytePos(23)); let snippet = sm.span_to_snippet(span); assert_eq!(snippet, Ok("second line".to_string())); @@ -147,7 +147,7 @@ fn t8() { fn t9() { // Test span_to_str for a span ending at the end of source_file let sm = init_source_map(); - let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION); + let span = Span::with_root_ctxt(BytePos(12), BytePos(23)); let sstr = sm.span_to_string(span); assert_eq!(sstr, "blork.rs:2:1: 2:12"); @@ -198,10 +198,9 @@ impl SourceMapExtension for SourceMap { let lo = hi + offset; hi = lo + substring.len(); if i == n { - let span = Span::new( + let span = Span::with_root_ctxt( BytePos(lo as u32 + file.start_pos.0), BytePos(hi as u32 + file.start_pos.0), - NO_EXPANSION, ); assert_eq!(&self.span_to_snippet(span).unwrap()[..], substring); diff --git a/src/libsyntax/tests.rs b/src/libsyntax/tests.rs index cff034fdeb1..4c0e1e3704d 100644 --- a/src/libsyntax/tests.rs +++ b/src/libsyntax/tests.rs @@ -9,7 +9,7 @@ use crate::with_default_globals; use 
errors::emitter::EmitterWriter; use errors::Handler; use rustc_data_structures::sync::Lrc; -use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan}; +use syntax_pos::{BytePos, Span, MultiSpan}; use std::io; use std::io::prelude::*; @@ -169,7 +169,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span { let start = make_pos(file_text, start); let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends assert!(start <= end); - Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION) + Span::with_root_ctxt(BytePos(start as u32), BytePos(end as u32)) } fn make_pos(file_text: &str, pos: &Position) -> usize { diff --git a/src/libsyntax/tokenstream/tests.rs b/src/libsyntax/tokenstream/tests.rs index 72e22a49876..5017e5f5424 100644 --- a/src/libsyntax/tokenstream/tests.rs +++ b/src/libsyntax/tokenstream/tests.rs @@ -3,14 +3,14 @@ use super::*; use crate::ast::Name; use crate::with_default_globals; use crate::tests::string_to_stream; -use syntax_pos::{Span, BytePos, NO_EXPANSION}; +use syntax_pos::{Span, BytePos}; fn string_to_ts(string: &str) -> TokenStream { string_to_stream(string.to_owned()) } fn sp(a: u32, b: u32) -> Span { - Span::new(BytePos(a), BytePos(b), NO_EXPANSION) + Span::with_root_ctxt(BytePos(a), BytePos(b)) } #[test] diff --git a/src/libsyntax_ext/global_allocator.rs b/src/libsyntax_ext/global_allocator.rs index f788b513804..b1f6f55732a 100644 --- a/src/libsyntax_ext/global_allocator.rs +++ b/src/libsyntax_ext/global_allocator.rs @@ -29,7 +29,7 @@ pub fn expand( }; // Generate a bunch of new items using the AllocFnFactory - let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id)); + let span = item.span.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id)); let f = AllocFnFactory { span, kind: AllocatorKind::Global, diff --git a/src/libsyntax_ext/test.rs b/src/libsyntax_ext/test.rs index 993ef257527..08582e714cc 100644 --- a/src/libsyntax_ext/test.rs +++ b/src/libsyntax_ext/test.rs @@ -29,7 +29,7 @@ pub fn expand_test_case( if !ecx.ecfg.should_test { return vec![]; } - let sp = attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id)); + let sp = attr_sp.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id)); let mut item = anno_item.expect_item(); item = item.map(|mut item| { item.vis = respan(item.vis.span, ast::VisibilityKind::Public); @@ -93,7 +93,7 @@ pub fn expand_test_or_bench( return vec![Annotatable::Item(item)]; } - let ctxt = SyntaxContext::empty().apply_mark(cx.current_expansion.id); + let ctxt = SyntaxContext::root().apply_mark(cx.current_expansion.id); let (sp, attr_sp) = (item.span.with_ctxt(ctxt), attr_sp.with_ctxt(ctxt)); // Gensym "test" so we can extern crate without conflicting with any local names diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index 4132b99cf41..c832e058cdf 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -246,7 +246,7 @@ impl HygieneData { fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> { let mut marks = Vec::new(); - while ctxt != SyntaxContext::empty() { + while ctxt != SyntaxContext::root() { marks.push((self.outer_expn(ctxt), self.outer_transparency(ctxt))); ctxt = self.parent_ctxt(ctxt); } @@ -286,14 +286,14 @@ impl HygieneData { } let call_site_ctxt = - self.expn_info(expn_id).map_or(SyntaxContext::empty(), |info| info.call_site.ctxt()); + self.expn_info(expn_id).map_or(SyntaxContext::root(), |info| 
info.call_site.ctxt()); let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { self.modern(call_site_ctxt) } else { self.modern_and_legacy(call_site_ctxt) }; - if call_site_ctxt == SyntaxContext::empty() { + if call_site_ctxt == SyntaxContext::root() { return self.apply_mark_internal(ctxt, expn_id, transparency); } @@ -400,7 +400,7 @@ pub fn update_dollar_crate_names(mut get_name: impl FnMut(SyntaxContext) -> Symb impl SyntaxContext { #[inline] - pub const fn empty() -> Self { + pub const fn root() -> Self { SyntaxContext(0) } @@ -615,7 +615,7 @@ impl Span { pub fn fresh_expansion(self, parent: ExpnId, expn_info: ExpnInfo) -> Span { HygieneData::with(|data| { let expn_id = data.fresh_expn(parent, Some(expn_info)); - self.with_ctxt(data.apply_mark(SyntaxContext::empty(), expn_id)) + self.with_ctxt(data.apply_mark(SyntaxContext::root(), expn_id)) }) } } @@ -775,6 +775,6 @@ impl Encodable for SyntaxContext { impl Decodable for SyntaxContext { fn decode(_: &mut D) -> Result { - Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene + Ok(SyntaxContext::root()) // FIXME(jseyfried) intercrate hygiene } } diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 793710b453f..7c8539198b9 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -291,7 +291,12 @@ impl Span { /// Returns `true` if this span comes from a macro or desugaring. #[inline] pub fn from_expansion(self) -> bool { - self.ctxt() != SyntaxContext::empty() + self.ctxt() != SyntaxContext::root() + } + + #[inline] + pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span { + Span::new(lo, hi, SyntaxContext::root()) } /// Returns a new span representing an empty span at the beginning of this span @@ -474,9 +479,9 @@ impl Span { // Return the macro span on its own to avoid weird diagnostic output. It is preferable to // have an incomplete span than a completely nonsensical one. if span_data.ctxt != end_data.ctxt { - if span_data.ctxt == SyntaxContext::empty() { + if span_data.ctxt == SyntaxContext::root() { return end; - } else if end_data.ctxt == SyntaxContext::empty() { + } else if end_data.ctxt == SyntaxContext::root() { return self; } // Both spans fall within a macro. @@ -485,7 +490,7 @@ impl Span { Span::new( cmp::min(span_data.lo, end_data.lo), cmp::max(span_data.hi, end_data.hi), - if span_data.ctxt == SyntaxContext::empty() { end_data.ctxt } else { span_data.ctxt }, + if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, ) } @@ -496,7 +501,7 @@ impl Span { Span::new( span.hi, end.lo, - if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt }, + if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt }, ) } @@ -507,7 +512,7 @@ impl Span { Span::new( span.lo, end.lo, - if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt }, + if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt }, ) } @@ -617,7 +622,7 @@ impl rustc_serialize::UseSpecializedDecodable for Span { d.read_struct("Span", 2, |d| { let lo = d.read_struct_field("lo", 0, Decodable::decode)?; let hi = d.read_struct_field("hi", 1, Decodable::decode)?; - Ok(Span::new(lo, hi, NO_EXPANSION)) + Ok(Span::with_root_ctxt(lo, hi)) }) } } @@ -761,8 +766,6 @@ impl From> for MultiSpan { } } -pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty(); - /// Identifies an offset of a multi-byte character in a `SourceFile`. 
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)] pub struct MultiByteChar { -- cgit 1.4.1-3-g733a5 From 6cb28b6617e25b74389f1cee2ec0335c2ccfb865 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sun, 11 Aug 2019 02:20:18 +0300 Subject: `Ident::with_empty_ctxt` -> `Ident::with_dummy_span` `Ident` has had a full span rather than just a `SyntaxContext` for a long time now. --- src/librustc/hir/lowering.rs | 8 ++++---- src/librustc/hir/lowering/expr.rs | 14 +++++++------- src/librustc/hir/mod.rs | 6 +++--- src/librustc/hir/print.rs | 2 +- src/librustc/traits/project.rs | 2 +- src/librustc_driver/lib.rs | 2 +- src/librustc_metadata/decoder.rs | 8 ++++---- src/librustc_resolve/diagnostics.rs | 2 +- src/librustc_resolve/late.rs | 16 ++++++++-------- src/librustc_resolve/lib.rs | 14 +++++++------- src/librustc_typeck/check/mod.rs | 2 +- src/librustdoc/clean/mod.rs | 2 +- src/libsyntax/attr/mod.rs | 4 ++-- src/libsyntax/diagnostics/plugin.rs | 2 +- src/libsyntax/ext/base.rs | 2 +- src/libsyntax/ext/build.rs | 2 +- src/libsyntax/ext/expand.rs | 8 ++++---- src/libsyntax/parse/parser/module.rs | 2 +- src/libsyntax/print/pprust.rs | 6 +++--- src/libsyntax_ext/deriving/clone.rs | 2 +- src/libsyntax_ext/deriving/debug.rs | 4 ++-- src/libsyntax_ext/deriving/generic/mod.rs | 2 +- src/libsyntax_ext/env.rs | 4 ++-- src/libsyntax_ext/global_allocator.rs | 8 ++++---- src/libsyntax_ext/lib.rs | 2 +- src/libsyntax_ext/plugin_macro_defs.rs | 2 +- src/libsyntax_ext/proc_macro_harness.rs | 4 ++-- src/libsyntax_ext/standard_library_imports.rs | 6 +++--- src/libsyntax_ext/test_harness.rs | 6 +++--- src/libsyntax_pos/symbol.rs | 10 +++++----- 30 files changed, 77 insertions(+), 77 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 9e5d6378c40..f942a0fb857 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -1224,7 +1224,7 @@ impl<'a> LoweringContext<'a> { P(hir::Path { res, segments: hir_vec![hir::PathSegment::from_ident( - Ident::with_empty_ctxt(kw::SelfUpper) + Ident::with_dummy_span(kw::SelfUpper) )], span: t.span, }), @@ -1558,7 +1558,7 @@ impl<'a> LoweringContext<'a> { let (name, kind) = match name { hir::LifetimeName::Underscore => ( - hir::ParamName::Plain(Ident::with_empty_ctxt(kw::UnderscoreLifetime)), + hir::ParamName::Plain(Ident::with_dummy_span(kw::UnderscoreLifetime)), hir::LifetimeParamKind::Elided, ), hir::LifetimeName::Param(param_name) => ( @@ -2002,7 +2002,7 @@ impl<'a> LoweringContext<'a> { bindings: hir_vec![ hir::TypeBinding { hir_id: this.next_id(), - ident: Ident::with_empty_ctxt(FN_OUTPUT_NAME), + ident: Ident::with_dummy_span(FN_OUTPUT_NAME), kind: hir::TypeBindingKind::Equality { ty: output .as_ref() @@ -2394,7 +2394,7 @@ impl<'a> LoweringContext<'a> { let future_params = P(hir::GenericArgs { args: hir_vec![], bindings: hir_vec![hir::TypeBinding { - ident: Ident::with_empty_ctxt(FN_OUTPUT_NAME), + ident: Ident::with_dummy_span(FN_OUTPUT_NAME), kind: hir::TypeBindingKind::Equality { ty: output_ty, }, diff --git a/src/librustc/hir/lowering/expr.rs b/src/librustc/hir/lowering/expr.rs index e3a5400942d..4ba61e9d4fd 100644 --- a/src/librustc/hir/lowering/expr.rs +++ b/src/librustc/hir/lowering/expr.rs @@ -552,7 +552,7 @@ impl LoweringContext<'_> { // let mut pinned = ; let expr = P(self.lower_expr(expr)); - let pinned_ident = Ident::with_empty_ctxt(sym::pinned); + let pinned_ident = Ident::with_dummy_span(sym::pinned); let (pinned_pat, pinned_pat_hid) = 
self.pat_ident_binding_mode( span, pinned_ident, @@ -593,7 +593,7 @@ impl LoweringContext<'_> { let loop_node_id = self.sess.next_node_id(); let loop_hir_id = self.lower_node_id(loop_node_id); let ready_arm = { - let x_ident = Ident::with_empty_ctxt(sym::result); + let x_ident = Ident::with_dummy_span(sym::result); let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident); let x_expr = P(self.expr_ident(span, x_ident, x_pat_hid)); let ready_pat = self.pat_std_enum( @@ -1070,9 +1070,9 @@ impl LoweringContext<'_> { ); head.span = desugared_span; - let iter = Ident::with_empty_ctxt(sym::iter); + let iter = Ident::with_dummy_span(sym::iter); - let next_ident = Ident::with_empty_ctxt(sym::__next); + let next_ident = Ident::with_dummy_span(sym::__next); let (next_pat, next_pat_hid) = self.pat_ident_binding_mode( desugared_span, next_ident, @@ -1081,7 +1081,7 @@ impl LoweringContext<'_> { // `::std::option::Option::Some(val) => __next = val` let pat_arm = { - let val_ident = Ident::with_empty_ctxt(sym::val); + let val_ident = Ident::with_dummy_span(sym::val); let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident); let val_expr = P(self.expr_ident(pat.span, val_ident, val_pat_hid)); let next_expr = P(self.expr_ident(pat.span, next_ident, next_pat_hid)); @@ -1247,7 +1247,7 @@ impl LoweringContext<'_> { // `Ok(val) => #[allow(unreachable_code)] val,` let ok_arm = { - let val_ident = Ident::with_empty_ctxt(sym::val); + let val_ident = Ident::with_dummy_span(sym::val); let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident); let val_expr = P(self.expr_ident_with_attrs( span, @@ -1263,7 +1263,7 @@ impl LoweringContext<'_> { // `Err(err) => #[allow(unreachable_code)] // return Try::from_error(From::from(err)),` let err_arm = { - let err_ident = Ident::with_empty_ctxt(sym::err); + let err_ident = Ident::with_dummy_span(sym::err); let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident); let from_expr = { let from_path = &[sym::convert, sym::From, sym::from]; diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 7c2f9907217..57fd0be77ec 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -202,7 +202,7 @@ impl ParamName { match *self { ParamName::Plain(ident) => ident, ParamName::Fresh(_) | - ParamName::Error => Ident::with_empty_ctxt(kw::UnderscoreLifetime), + ParamName::Error => Ident::with_dummy_span(kw::UnderscoreLifetime), } } @@ -237,8 +237,8 @@ impl LifetimeName { pub fn ident(&self) -> Ident { match *self { LifetimeName::Implicit | LifetimeName::Error => Ident::invalid(), - LifetimeName::Underscore => Ident::with_empty_ctxt(kw::UnderscoreLifetime), - LifetimeName::Static => Ident::with_empty_ctxt(kw::StaticLifetime), + LifetimeName::Underscore => Ident::with_dummy_span(kw::UnderscoreLifetime), + LifetimeName::Static => Ident::with_dummy_span(kw::StaticLifetime), LifetimeName::Param(param_name) => param_name.ident(), } } diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index caf8220bbf4..2fd683ed83c 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -1457,7 +1457,7 @@ impl<'a> State<'a> { } pub fn print_name(&mut self, name: ast::Name) { - self.print_ident(ast::Ident::with_empty_ctxt(name)) + self.print_ident(ast::Ident::with_dummy_span(name)) } pub fn print_for_decl(&mut self, loc: &hir::Local, coll: &hir::Expr) { diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index 38263f26a59..72df45df923 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ 
-1417,7 +1417,7 @@ fn confirm_callable_candidate<'cx, 'tcx>( projection_ty: ty::ProjectionTy::from_ref_and_name( tcx, trait_ref, - Ident::with_empty_ctxt(FN_OUTPUT_NAME), + Ident::with_dummy_span(FN_OUTPUT_NAME), ), ty: ret_type } diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index e9d85a53d1e..fdd0773b73a 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -678,7 +678,7 @@ impl RustcDefaultCalls { let mut cfgs = sess.parse_sess.config.iter().filter_map(|&(name, ref value)| { let gated_cfg = GatedCfg::gate(&ast::MetaItem { - path: ast::Path::from_ident(ast::Ident::with_empty_ctxt(name)), + path: ast::Path::from_ident(ast::Ident::with_dummy_span(name)), node: ast::MetaItemKind::Word, span: DUMMY_SP, }); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 3de9bf4da11..0bec31d7076 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -569,7 +569,7 @@ impl<'a, 'tcx> CrateMetadata { ty::VariantDef::new( tcx, - Ident::with_empty_ctxt(self.item_name(index)), + Ident::with_dummy_span(self.item_name(index)), variant_did, ctor_did, data.discr, @@ -577,7 +577,7 @@ impl<'a, 'tcx> CrateMetadata { let f = self.entry(index); ty::FieldDef { did: self.local_def_id(index), - ident: Ident::with_empty_ctxt(self.item_name(index)), + ident: Ident::with_dummy_span(self.item_name(index)), vis: f.visibility.decode(self) } }).collect(), @@ -741,7 +741,7 @@ impl<'a, 'tcx> CrateMetadata { DefKind::Macro(ext.macro_kind()), self.local_def_id(DefIndex::from_proc_macro_index(id)), ); - let ident = Ident::with_empty_ctxt(name); + let ident = Ident::with_dummy_span(name); callback(def::Export { ident: ident, res: res, @@ -783,7 +783,7 @@ impl<'a, 'tcx> CrateMetadata { if let Some(kind) = self.def_kind(child_index) { callback(def::Export { res: Res::Def(kind, self.local_def_id(child_index)), - ident: Ident::with_empty_ctxt(self.item_name(child_index)), + ident: Ident::with_dummy_span(self.item_name(child_index)), vis: self.get_visibility(child_index), span: self.entry(child_index).span.decode((self, sess)), }); diff --git a/src/librustc_resolve/diagnostics.rs b/src/librustc_resolve/diagnostics.rs index 1de67edb95c..f824dfe8e78 100644 --- a/src/librustc_resolve/diagnostics.rs +++ b/src/librustc_resolve/diagnostics.rs @@ -595,7 +595,7 @@ impl<'a> Resolver<'a> { where FilterFn: Fn(Res) -> bool { let mut suggestions = self.lookup_import_candidates_from_module( - lookup_ident, namespace, self.graph_root, Ident::with_empty_ctxt(kw::Crate), &filter_fn + lookup_ident, namespace, self.graph_root, Ident::with_dummy_span(kw::Crate), &filter_fn ); if lookup_ident.span.rust_2018() { diff --git a/src/librustc_resolve/late.rs b/src/librustc_resolve/late.rs index 358eaae11e7..8c15bff7101 100644 --- a/src/librustc_resolve/late.rs +++ b/src/librustc_resolve/late.rs @@ -352,7 +352,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type); } TyKind::ImplicitSelf => { - let self_ty = Ident::with_empty_ctxt(kw::SelfUpper); + let self_ty = Ident::with_dummy_span(kw::SelfUpper); let res = self.resolve_ident_in_lexical_scope(self_ty, TypeNS, Some(ty.id), ty.span) .map_or(Res::Err, |d| d.res()); self.r.record_partial_res(ty.id, PartialRes::new(res)); @@ -442,7 +442,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { GenericParamKind::Type { ref default, .. 
} => { found_default |= default.is_some(); if found_default { - Some((Ident::with_empty_ctxt(param.ident.name), Res::Err)) + Some((Ident::with_dummy_span(param.ident.name), Res::Err)) } else { None } @@ -459,7 +459,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { false } }) - .map(|param| (Ident::with_empty_ctxt(param.ident.name), Res::Err))); + .map(|param| (Ident::with_dummy_span(param.ident.name), Res::Err))); for param in &generics.params { match param.kind { @@ -476,7 +476,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LateResolutionVisitor<'a, '_> { } // Allow all following defaults to refer to this type parameter. - default_ban_rib.bindings.remove(&Ident::with_empty_ctxt(param.ident.name)); + default_ban_rib.bindings.remove(&Ident::with_dummy_span(param.ident.name)); } GenericParamKind::Const { ref ty } => { self.ribs[TypeNS].push(const_ty_param_ban_rib); @@ -965,7 +965,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { let mut self_type_rib = Rib::new(NormalRibKind); // Plain insert (no renaming, since types are not currently hygienic) - self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res); + self_type_rib.bindings.insert(Ident::with_dummy_span(kw::SelfUpper), self_res); self.ribs[TypeNS].push(self_type_rib); f(self); self.ribs[TypeNS].pop(); @@ -976,7 +976,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { { let self_res = Res::SelfCtor(impl_id); let mut self_type_rib = Rib::new(NormalRibKind); - self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res); + self_type_rib.bindings.insert(Ident::with_dummy_span(kw::SelfUpper), self_res); self.ribs[ValueNS].push(self_type_rib); f(self); self.ribs[ValueNS].pop(); @@ -1476,7 +1476,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { self.r.trait_map.insert(id, traits); } - let mut std_path = vec![Segment::from_ident(Ident::with_empty_ctxt(sym::std))]; + let mut std_path = vec![Segment::from_ident(Ident::with_dummy_span(sym::std))]; std_path.extend(path); if self.r.primitive_type_table.primitive_types.contains_key(&path[0].ident.name) { let cl = CrateLint::No; @@ -1507,7 +1507,7 @@ impl<'a, 'b> LateResolutionVisitor<'a, '_> { fn self_type_is_available(&mut self, span: Span) -> bool { let binding = self.resolve_ident_in_lexical_scope( - Ident::with_empty_ctxt(kw::SelfUpper), + Ident::with_dummy_span(kw::SelfUpper), TypeNS, None, span, diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 8b2e371f0f6..60a368fbb4b 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -985,11 +985,11 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> { } else { kw::Crate }; - let segments = iter::once(Ident::with_empty_ctxt(root)) + let segments = iter::once(Ident::with_dummy_span(root)) .chain( crate_root.into_iter() .chain(components.iter().cloned()) - .map(Ident::with_empty_ctxt) + .map(Ident::with_dummy_span) ).map(|i| self.new_ast_path_segment(i)).collect::>(); let path = ast::Path { @@ -1060,11 +1060,11 @@ impl<'a> Resolver<'a> { .collect(); if !attr::contains_name(&krate.attrs, sym::no_core) { - extern_prelude.insert(Ident::with_empty_ctxt(sym::core), Default::default()); + extern_prelude.insert(Ident::with_dummy_span(sym::core), Default::default()); if !attr::contains_name(&krate.attrs, sym::no_std) { - extern_prelude.insert(Ident::with_empty_ctxt(sym::std), Default::default()); + extern_prelude.insert(Ident::with_dummy_span(sym::std), Default::default()); if session.rust_2018() { - extern_prelude.insert(Ident::with_empty_ctxt(sym::meta), 
Default::default()); + extern_prelude.insert(Ident::with_dummy_span(sym::meta), Default::default()); } } } @@ -2624,7 +2624,7 @@ impl<'a> Resolver<'a> { let path = if path_str.starts_with("::") { ast::Path { span, - segments: iter::once(Ident::with_empty_ctxt(kw::PathRoot)) + segments: iter::once(Ident::with_dummy_span(kw::PathRoot)) .chain({ path_str.split("::").skip(1).map(Ident::from_str) }) @@ -2713,7 +2713,7 @@ fn module_to_string(module: Module<'_>) -> Option { fn collect_mod(names: &mut Vec, module: Module<'_>) { if let ModuleKind::Def(.., name) = module.kind { if let Some(parent) = module.parent { - names.push(Ident::with_empty_ctxt(name)); + names.push(Ident::with_dummy_span(name)); collect_mod(names, parent); } } else { diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 92f8fb30db8..fc1ee649e28 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -2943,7 +2943,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { (PlaceOp::Index, false) => (self.tcx.lang_items().index_trait(), sym::index), (PlaceOp::Index, true) => (self.tcx.lang_items().index_mut_trait(), sym::index_mut), }; - (tr, ast::Ident::with_empty_ctxt(name)) + (tr, ast::Ident::with_dummy_span(name)) } fn try_overloaded_place_op(&self, diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index d5becd2e1a9..fede9e93010 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -930,7 +930,7 @@ impl Attributes { if attr.check_name(sym::enable) { if let Some(feat) = attr.value_str() { let meta = attr::mk_name_value_item_str( - Ident::with_empty_ctxt(sym::target_feature), feat, DUMMY_SP + Ident::with_dummy_span(sym::target_feature), feat, DUMMY_SP ); if let Ok(feat_cfg) = Cfg::parse(&meta) { cfg &= feat_cfg; diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 85c661d320a..bcf03b5237a 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -327,7 +327,7 @@ impl Attribute { if self.is_sugared_doc { let comment = self.value_str().unwrap(); let meta = mk_name_value_item_str( - Ident::with_empty_ctxt(sym::doc), + Ident::with_dummy_span(sym::doc), Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())), DUMMY_SP, ); @@ -412,7 +412,7 @@ pub fn mk_sugared_doc_attr(text: Symbol, span: Span) -> Attribute { Attribute { id: mk_attr_id(), style, - path: Path::from_ident(Ident::with_empty_ctxt(sym::doc).with_span_pos(span)), + path: Path::from_ident(Ident::with_dummy_span(sym::doc).with_span_pos(span)), tokens: MetaItemKind::NameValue(lit).tokens(span), is_sugared_doc: true, span, diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 80591ad304d..9618b5acfb0 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -172,7 +172,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>, (descriptions.len(), ecx.expr_vec(span, descriptions)) }); - let static_ = ecx.lifetime(span, Ident::with_empty_ctxt(kw::StaticLifetime)); + let static_ = ecx.lifetime(span, Ident::with_dummy_span(kw::StaticLifetime)); let ty_str = ecx.ty_rptr( span, ecx.ty_ident(span, ecx.ident_of("str")), diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index d9fd31db4dd..fd6b9138fde 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -872,7 +872,7 @@ impl<'a> ExtCtxt<'a> { pub fn std_path(&self, components: &[Symbol]) -> Vec { let def_site = DUMMY_SP.apply_mark(self.current_expansion.id); 
iter::once(Ident::new(kw::DollarCrate, def_site)) - .chain(components.iter().map(|&s| Ident::with_empty_ctxt(s))) + .chain(components.iter().map(|&s| Ident::with_dummy_span(s))) .collect() } pub fn name_of(&self, st: &str) -> ast::Name { diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 38f46ee207c..e2ac4d573a1 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -340,7 +340,7 @@ impl<'a> ExtCtxt<'a> { self.expr_path(self.path_ident(span, id)) } pub fn expr_self(&self, span: Span) -> P { - self.expr_ident(span, Ident::with_empty_ctxt(kw::SelfLower)) + self.expr_ident(span, Ident::with_dummy_span(kw::SelfLower)) } pub fn expr_binary(&self, sp: Span, op: ast::BinOpKind, diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index aa409199afd..5f4074a217a 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1249,21 +1249,21 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { let include_info = vec![ ast::NestedMetaItem::MetaItem( attr::mk_name_value_item_str( - Ident::with_empty_ctxt(sym::file), + Ident::with_dummy_span(sym::file), file, DUMMY_SP, ), ), ast::NestedMetaItem::MetaItem( attr::mk_name_value_item_str( - Ident::with_empty_ctxt(sym::contents), + Ident::with_dummy_span(sym::contents), src_interned, DUMMY_SP, ), ), ]; - let include_ident = Ident::with_empty_ctxt(sym::include); + let include_ident = Ident::with_dummy_span(sym::include); let item = attr::mk_list_item(include_ident, include_info); items.push(ast::NestedMetaItem::MetaItem(item)); } @@ -1325,7 +1325,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { } } - let meta = attr::mk_list_item(Ident::with_empty_ctxt(sym::doc), items); + let meta = attr::mk_list_item(Ident::with_dummy_span(sym::doc), items); *at = attr::Attribute { span: at.span, id: at.id, diff --git a/src/libsyntax/parse/parser/module.rs b/src/libsyntax/parse/parser/module.rs index 58a7ffba948..3f6f87b1c44 100644 --- a/src/libsyntax/parse/parser/module.rs +++ b/src/libsyntax/parse/parser/module.rs @@ -60,7 +60,7 @@ impl<'a> Parser<'a> { // Record that we fetched the mod from an external file if warn { let attr = attr::mk_attr_outer( - attr::mk_word_item(Ident::with_empty_ctxt(sym::warn_directory_ownership))); + attr::mk_word_item(Ident::with_dummy_span(sym::warn_directory_ownership))); attr::mark_known(&attr); attrs.push(attr); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 5955b913842..8a7009828bc 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -123,13 +123,13 @@ pub fn print_crate<'a>(cm: &'a SourceMap, // of the feature gate, so we fake them up here. 
// #![feature(prelude_import)] - let pi_nested = attr::mk_nested_word_item(ast::Ident::with_empty_ctxt(sym::prelude_import)); - let list = attr::mk_list_item(ast::Ident::with_empty_ctxt(sym::feature), vec![pi_nested]); + let pi_nested = attr::mk_nested_word_item(ast::Ident::with_dummy_span(sym::prelude_import)); + let list = attr::mk_list_item(ast::Ident::with_dummy_span(sym::feature), vec![pi_nested]); let fake_attr = attr::mk_attr_inner(list); s.print_attribute(&fake_attr); // #![no_std] - let no_std_meta = attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::no_std)); + let no_std_meta = attr::mk_word_item(ast::Ident::with_dummy_span(sym::no_std)); let fake_attr = attr::mk_attr_inner(no_std_meta); s.print_attribute(&fake_attr); } diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index d80da566185..5a02ae0afb9 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -129,7 +129,7 @@ fn cs_clone_shallow(name: &str, if is_union { // let _: AssertParamIsCopy; let self_ty = - cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_empty_ctxt(kw::SelfUpper))); + cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_dummy_span(kw::SelfUpper))); assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy"); } else { match *substr.fields { diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index 15e93f2843a..1d5234a9b7b 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -82,7 +82,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> let expr = cx.expr_method_call(span, builder_expr.clone(), - Ident::with_empty_ctxt(sym::field), + Ident::with_dummy_span(sym::field), vec![field]); // Use `let _ = expr;` to avoid triggering the @@ -106,7 +106,7 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_> let field = cx.expr_addr_of(field.span, field); let expr = cx.expr_method_call(span, builder_expr.clone(), - Ident::with_empty_ctxt(sym::field), + Ident::with_dummy_span(sym::field), vec![name, field]); stmts.push(stmt_let_undescore(cx, span, expr)); } diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index d080dc37a92..4bf004a71e4 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -928,7 +928,7 @@ impl<'a> MethodDef<'a> { let args = { let self_args = explicit_self.map(|explicit_self| { - let ident = Ident::with_empty_ctxt(kw::SelfLower).with_span_pos(trait_.span); + let ident = Ident::with_dummy_span(kw::SelfLower).with_span_pos(trait_.span); ast::Arg::from_self(ThinVec::default(), explicit_self, ident) }); let nonself_args = arg_types.into_iter() diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs index 442f27c7821..9834130fa23 100644 --- a/src/libsyntax_ext/env.rs +++ b/src/libsyntax_ext/env.rs @@ -23,13 +23,13 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>, let sp = sp.apply_mark(cx.current_expansion.id); let e = match env::var(&*var.as_str()) { Err(..) 
=> { - let lt = cx.lifetime(sp, Ident::with_empty_ctxt(kw::StaticLifetime)); + let lt = cx.lifetime(sp, Ident::with_dummy_span(kw::StaticLifetime)); cx.expr_path(cx.path_all(sp, true, cx.std_path(&[sym::option, sym::Option, sym::None]), vec![GenericArg::Type(cx.ty_rptr(sp, cx.ty_ident(sp, - Ident::with_empty_ctxt(sym::str)), + Ident::with_dummy_span(sym::str)), Some(lt), ast::Mutability::Immutable))], vec![])) diff --git a/src/libsyntax_ext/global_allocator.rs b/src/libsyntax_ext/global_allocator.rs index b1f6f55732a..d2121abe3b4 100644 --- a/src/libsyntax_ext/global_allocator.rs +++ b/src/libsyntax_ext/global_allocator.rs @@ -44,7 +44,7 @@ pub fn expand( let const_ty = ecx.ty(span, TyKind::Tup(Vec::new())); let const_body = ecx.expr_block(ecx.block(span, stmts)); let const_item = - ecx.item_const(span, Ident::with_empty_ctxt(kw::Underscore), const_ty, const_body); + ecx.item_const(span, Ident::with_dummy_span(kw::Underscore), const_ty, const_body); // Return the original item and the new methods. vec![Annotatable::Item(item), Annotatable::Item(const_item)] @@ -120,7 +120,7 @@ impl AllocFnFactory<'_, '_> { ) -> P { match *ty { AllocatorTy::Layout => { - let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize)); + let usize = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::usize)); let ty_usize = self.cx.ty_path(usize); let size = ident(); let align = ident(); @@ -178,12 +178,12 @@ impl AllocFnFactory<'_, '_> { } fn usize(&self) -> P { - let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize)); + let usize = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::usize)); self.cx.ty_path(usize) } fn ptr_u8(&self) -> P { - let u8 = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::u8)); + let u8 = self.cx.path_ident(self.span, Ident::with_dummy_span(sym::u8)); let ty_u8 = self.cx.ty_path(u8); self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable) } diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 0f3f5c0cd0e..3ded808bb35 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -42,7 +42,7 @@ pub mod test_harness; pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, edition: Edition) { let mut register = |name, kind| resolver.register_builtin_macro( - Ident::with_empty_ctxt(name), SyntaxExtension { + Ident::with_dummy_span(name), SyntaxExtension { is_builtin: true, ..SyntaxExtension::default(kind, edition) }, ); diff --git a/src/libsyntax_ext/plugin_macro_defs.rs b/src/libsyntax_ext/plugin_macro_defs.rs index a725f5e46ad..15737314b22 100644 --- a/src/libsyntax_ext/plugin_macro_defs.rs +++ b/src/libsyntax_ext/plugin_macro_defs.rs @@ -48,7 +48,7 @@ pub fn inject( [sym::rustc_attrs][..].into(), )); for (name, ext) in named_exts { - resolver.register_builtin_macro(Ident::with_empty_ctxt(name), ext); + resolver.register_builtin_macro(Ident::with_dummy_span(name), ext); extra_items.push(plugin_macro_def(name, span)); } // The `macro_rules` items must be inserted before any other items. 
diff --git a/src/libsyntax_ext/proc_macro_harness.rs b/src/libsyntax_ext/proc_macro_harness.rs index 70325539f30..62c74b2b9c6 100644 --- a/src/libsyntax_ext/proc_macro_harness.rs +++ b/src/libsyntax_ext/proc_macro_harness.rs @@ -337,7 +337,7 @@ fn mk_decls( let doc = cx.meta_list(span, sym::doc, vec![hidden]); let doc_hidden = cx.attribute(doc); - let proc_macro = Ident::with_empty_ctxt(sym::proc_macro); + let proc_macro = Ident::with_dummy_span(sym::proc_macro); let krate = cx.item(span, proc_macro, Vec::new(), @@ -349,7 +349,7 @@ fn mk_decls( let custom_derive = Ident::from_str("custom_derive"); let attr = Ident::from_str("attr"); let bang = Ident::from_str("bang"); - let crate_kw = Ident::with_empty_ctxt(kw::Crate); + let crate_kw = Ident::with_dummy_span(kw::Crate); let decls = { let local_path = |sp: Span, name| { diff --git a/src/libsyntax_ext/standard_library_imports.rs b/src/libsyntax_ext/standard_library_imports.rs index 68b13bdd171..4382fb8af85 100644 --- a/src/libsyntax_ext/standard_library_imports.rs +++ b/src/libsyntax_ext/standard_library_imports.rs @@ -32,7 +32,7 @@ pub fn inject( // HACK(eddyb) gensym the injected crates on the Rust 2018 edition, // so they don't accidentally interfere with the new import paths. let orig_name_sym = Symbol::intern(orig_name_str); - let orig_name_ident = Ident::with_empty_ctxt(orig_name_sym); + let orig_name_ident = Ident::with_dummy_span(orig_name_sym); let (rename, orig_name) = if rust_2018 { (orig_name_ident.gensym(), Some(orig_name_sym)) } else { @@ -40,7 +40,7 @@ pub fn inject( }; krate.module.items.insert(0, P(ast::Item { attrs: vec![attr::mk_attr_outer( - attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::macro_use)) + attr::mk_word_item(ast::Ident::with_dummy_span(sym::macro_use)) )], vis: dummy_spanned(ast::VisibilityKind::Inherited), node: ast::ItemKind::ExternCrate(alt_std_name.or(orig_name)), @@ -66,7 +66,7 @@ pub fn inject( vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited), node: ast::ItemKind::Use(P(ast::UseTree { prefix: ast::Path { - segments: iter::once(ast::Ident::with_empty_ctxt(kw::PathRoot)) + segments: iter::once(ast::Ident::with_dummy_span(kw::PathRoot)) .chain( [name, "prelude", "v1"].iter().cloned() .map(ast::Ident::from_str) diff --git a/src/libsyntax_ext/test_harness.rs b/src/libsyntax_ext/test_harness.rs index 0267637e540..ab108290a93 100644 --- a/src/libsyntax_ext/test_harness.rs +++ b/src/libsyntax_ext/test_harness.rs @@ -150,7 +150,7 @@ impl MutVisitor for EntryPointCleaner { EntryPointType::MainAttr | EntryPointType::Start => item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| { - let allow_ident = Ident::with_empty_ctxt(sym::allow); + let allow_ident = Ident::with_dummy_span(sym::allow); let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code")); let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]); let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item); @@ -191,7 +191,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt<'_>, tests: Vec, tested_submods: Vec<(Ident, Ident)>) -> (P, Ident) { - let super_ = Ident::with_empty_ctxt(kw::Super); + let super_ = Ident::with_dummy_span(kw::Super); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public), @@ -274,7 +274,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P { [sym::main, sym::test, sym::rustc_attrs][..].into(), )); let ecx = &cx.ext_cx; - let test_id = Ident::with_empty_ctxt(sym::test); + let test_id = Ident::with_dummy_span(sym::test); // 
test::test_main_static(...) let mut test_runner = cx.test_runner.clone().unwrap_or( diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index 6f5a458a874..e2d1635f312 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -744,25 +744,25 @@ impl Ident { Ident { name, span } } - /// Constructs a new identifier with an empty syntax context. + /// Constructs a new identifier with a dummy span. #[inline] - pub const fn with_empty_ctxt(name: Symbol) -> Ident { + pub const fn with_dummy_span(name: Symbol) -> Ident { Ident::new(name, DUMMY_SP) } #[inline] pub fn invalid() -> Ident { - Ident::with_empty_ctxt(kw::Invalid) + Ident::with_dummy_span(kw::Invalid) } /// Maps an interned string to an identifier with an empty syntax context. pub fn from_interned_str(string: InternedString) -> Ident { - Ident::with_empty_ctxt(string.as_symbol()) + Ident::with_dummy_span(string.as_symbol()) } /// Maps a string to an identifier with an empty span. pub fn from_str(string: &str) -> Ident { - Ident::with_empty_ctxt(Symbol::intern(string)) + Ident::with_dummy_span(Symbol::intern(string)) } /// Maps a string and a span to an identifier. -- cgit 1.4.1-3-g733a5 From 73dee258c19a6e9e8249a0d7ff1db54014d0c7a1 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sun, 11 Aug 2019 03:00:05 +0300 Subject: hygiene: Remove `Option`s from functions returning `ExpnInfo` The expansion info is not optional and should always exist --- src/librustc/lint/internal.rs | 33 ++++++------------ src/librustc/lint/mod.rs | 19 ++++------ src/librustc/traits/error_reporting.rs | 8 ++--- src/librustc/ty/query/on_disk_cache.rs | 18 ++++------ src/librustc_codegen_ssa/back/write.rs | 5 +-- src/librustc_lint/unused.rs | 5 ++- src/librustc_resolve/macros.rs | 3 +- src/libsyntax/ext/base.rs | 21 ++++-------- src/libsyntax/ext/expand.rs | 2 +- src/libsyntax/ext/proc_macro_server.rs | 4 +-- src/libsyntax/parse/parser.rs | 1 - src/libsyntax/source_map.rs | 13 +++---- src/libsyntax_pos/hygiene.rs | 58 ++++++++++++------------------- src/libsyntax_pos/lib.rs | 63 +++++++++++++++------------------- 14 files changed, 98 insertions(+), 155 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/lint/internal.rs b/src/librustc/lint/internal.rs index d9ad34a5297..29106fe000b 100644 --- a/src/librustc/lint/internal.rs +++ b/src/librustc/lint/internal.rs @@ -9,7 +9,6 @@ use errors::Applicability; use rustc_data_structures::fx::FxHashMap; use syntax::ast::{Ident, Item, ItemKind}; use syntax::symbol::{sym, Symbol}; -use syntax_pos::ExpnInfo; declare_tool_lint! 
{ pub rustc::DEFAULT_HASH_TYPES, @@ -228,30 +227,20 @@ impl EarlyLintPass for LintPassImpl { if let ItemKind::Impl(_, _, _, _, Some(lint_pass), _, _) = &item.node { if let Some(last) = lint_pass.path.segments.last() { if last.ident.name == sym::LintPass { - match &lint_pass.path.span.ctxt().outer_expn_info() { - Some(info) if is_lint_pass_expansion(info) => {} - _ => { - cx.struct_span_lint( - LINT_PASS_IMPL_WITHOUT_MACRO, - lint_pass.path.span, - "implementing `LintPass` by hand", - ) - .help("try using `declare_lint_pass!` or `impl_lint_pass!` instead") - .emit(); - } + let expn_info = lint_pass.path.span.ctxt().outer_expn_info(); + let call_site = expn_info.call_site; + if expn_info.kind.descr() != sym::impl_lint_pass && + call_site.ctxt().outer_expn_info().kind.descr() != sym::declare_lint_pass { + cx.struct_span_lint( + LINT_PASS_IMPL_WITHOUT_MACRO, + lint_pass.path.span, + "implementing `LintPass` by hand", + ) + .help("try using `declare_lint_pass!` or `impl_lint_pass!` instead") + .emit(); } } } } } } - -fn is_lint_pass_expansion(expn_info: &ExpnInfo) -> bool { - if expn_info.kind.descr() == sym::impl_lint_pass { - true - } else if let Some(info) = expn_info.call_site.ctxt().outer_expn_info() { - info.kind.descr() == sym::declare_lint_pass - } else { - false - } -} diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 8cb5b1e26d9..3729ee81f5c 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -885,21 +885,16 @@ pub fn provide(providers: &mut Providers<'_>) { /// This is used to test whether a lint should not even begin to figure out whether it should /// be reported on the current node. pub fn in_external_macro(sess: &Session, span: Span) -> bool { - let info = match span.ctxt().outer_expn_info() { - Some(info) => info, - // no ExpnInfo means this span doesn't come from a macro - None => return false, - }; - - match info.kind { + let expn_info = span.ctxt().outer_expn_info(); + match expn_info.kind { ExpnKind::Root | ExpnKind::Desugaring(DesugaringKind::ForLoop) => false, ExpnKind::Desugaring(_) => true, // well, it's "external" ExpnKind::Macro(MacroKind::Bang, _) => { - if info.def_site.is_dummy() { + if expn_info.def_site.is_dummy() { // dummy span for the def_site means it's an external macro return true; } - match sess.source_map().span_to_snippet(info.def_site) { + match sess.source_map().span_to_snippet(expn_info.def_site) { Ok(code) => !code.starts_with("macro_rules"), // no snippet = external macro or compiler-builtin expansion Err(_) => true, @@ -911,10 +906,8 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool { /// Returns whether `span` originates in a derive macro's expansion pub fn in_derive_expansion(span: Span) -> bool { - if let Some(info) = span.ctxt().outer_expn_info() { - if let ExpnKind::Macro(MacroKind::Derive, _) = info.kind { - return true; - } + if let ExpnKind::Macro(MacroKind::Derive, _) = span.ctxt().outer_expn_info().kind { + return true; } false } diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 83bd5c56040..20568d4709b 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -36,7 +36,7 @@ use errors::{Applicability, DiagnosticBuilder}; use std::fmt; use syntax::ast; use syntax::symbol::sym; -use syntax_pos::{DUMMY_SP, Span, ExpnInfo, ExpnKind}; +use syntax_pos::{DUMMY_SP, Span, ExpnKind}; impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn report_fulfillment_errors(&self, @@ -61,9 +61,9 @@ impl<'a, 'tcx> InferCtxt<'a, 
'tcx> { // We want to ignore desugarings here: spans are equivalent even // if one is the result of a desugaring and the other is not. let mut span = error.obligation.cause.span; - if let Some(ExpnInfo { kind: ExpnKind::Desugaring(_), def_site, .. }) - = span.ctxt().outer_expn_info() { - span = def_site; + let expn_info = span.ctxt().outer_expn_info(); + if let ExpnKind::Desugaring(_) = expn_info.kind { + span = expn_info.call_site; } error_map.entry(span).or_default().push( diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 1c5baa638c2..2286271b9eb 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -820,18 +820,14 @@ where TAG_NO_EXPANSION_INFO.encode(self) } else { let (expn_id, expn_info) = span_data.ctxt.outer_expn_with_info(); - if let Some(expn_info) = expn_info { - if let Some(pos) = self.expn_info_shorthands.get(&expn_id).cloned() { - TAG_EXPANSION_INFO_SHORTHAND.encode(self)?; - pos.encode(self) - } else { - TAG_EXPANSION_INFO_INLINE.encode(self)?; - let pos = AbsoluteBytePos::new(self.position()); - self.expn_info_shorthands.insert(expn_id, pos); - expn_info.encode(self) - } + if let Some(pos) = self.expn_info_shorthands.get(&expn_id).cloned() { + TAG_EXPANSION_INFO_SHORTHAND.encode(self)?; + pos.encode(self) } else { - TAG_NO_EXPANSION_INFO.encode(self) + TAG_EXPANSION_INFO_INLINE.encode(self)?; + let pos = AbsoluteBytePos::new(self.position()); + self.expn_info_shorthands.insert(expn_id, pos); + expn_info.encode(self) } } } diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs index c9e4663fdbd..240264a9822 100644 --- a/src/librustc_codegen_ssa/back/write.rs +++ b/src/librustc_codegen_ssa/back/write.rs @@ -1775,10 +1775,7 @@ impl SharedEmitterMain { } } Ok(SharedEmitterMessage::InlineAsmError(cookie, msg)) => { - match ExpnId::from_u32(cookie).expn_info() { - Some(ei) => sess.span_err(ei.call_site, &msg), - None => sess.err(&msg), - } + sess.span_err(ExpnId::from_u32(cookie).expn_info().call_site, &msg) } Ok(SharedEmitterMessage::AbortIfErrors) => { sess.abort_if_errors(); diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index 9cad8f58d41..1bb05bda69f 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -517,9 +517,8 @@ impl EarlyLintPass for UnusedParens { // trigger in situations that macro authors shouldn't have to care about, e.g., // when a parenthesized token tree matched in one macro expansion is matched as // an expression in another and used as a fn/method argument (Issue #47775) - if e.span.ctxt().outer_expn_info() - .map_or(false, |info| info.call_site.from_expansion()) { - return; + if e.span.ctxt().outer_expn_info().call_site.from_expansion() { + return; } let msg = format!("{} argument", call_kind); for arg in args_to_check { diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 71e26dac57c..97b0f825ee9 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -346,8 +346,7 @@ impl<'a> Resolver<'a> { // Possibly apply the macro helper hack if kind == Some(MacroKind::Bang) && path.len() == 1 && - path[0].ident.span.ctxt().outer_expn_info() - .map_or(false, |info| info.local_inner_macros) { + path[0].ident.span.ctxt().outer_expn_info().local_inner_macros { let root = Ident::new(kw::DollarCrate, path[0].ident.span); path.insert(0, Segment::from_ident(root)); } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 
fd6b9138fde..8eacb96e3ff 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -756,10 +756,7 @@ impl<'a> ExtCtxt<'a> { pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn call_site(&self) -> Span { - match self.current_expansion.id.expn_info() { - Some(expn_info) => expn_info.call_site, - None => DUMMY_SP, - } + self.current_expansion.id.expn_info().call_site } pub fn backtrace(&self) -> SyntaxContext { SyntaxContext::root().apply_mark(self.current_expansion.id) @@ -772,17 +769,13 @@ impl<'a> ExtCtxt<'a> { let mut ctxt = self.backtrace(); let mut last_macro = None; loop { - if ctxt.outer_expn_info().map_or(None, |info| { - if info.kind.descr() == sym::include { - // Stop going up the backtrace once include! is encountered - return None; - } - ctxt = info.call_site.ctxt(); - last_macro = Some(info.call_site); - Some(()) - }).is_none() { - break + let expn_info = ctxt.outer_expn_info(); + // Stop going up the backtrace once include! is encountered + if expn_info.is_root() || expn_info.kind.descr() == sym::include { + break; } + ctxt = expn_info.call_site.ctxt(); + last_macro = Some(expn_info.call_site); } last_macro } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 5f4074a217a..6f3e8f14b0b 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -475,7 +475,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit { - let info = self.cx.current_expansion.id.expn_info().unwrap(); + let info = self.cx.current_expansion.id.expn_info(); let suggested_limit = self.cx.ecfg.recursion_limit * 2; let mut err = self.cx.struct_span_err(info.call_site, &format!("recursion limit reached while expanding the macro `{}`", diff --git a/src/libsyntax/ext/proc_macro_server.rs b/src/libsyntax/ext/proc_macro_server.rs index fd93910004e..d370431a5da 100644 --- a/src/libsyntax/ext/proc_macro_server.rs +++ b/src/libsyntax/ext/proc_macro_server.rs @@ -362,7 +362,7 @@ pub(crate) struct Rustc<'a> { impl<'a> Rustc<'a> { pub fn new(cx: &'a ExtCtxt<'_>) -> Self { // No way to determine def location for a proc macro right now, so use call location. 
- let location = cx.current_expansion.id.expn_info().unwrap().call_site; + let location = cx.current_expansion.id.expn_info().call_site; let to_span = |transparency| { location.with_ctxt( SyntaxContext::root() @@ -677,7 +677,7 @@ impl server::Span for Rustc<'_> { self.sess.source_map().lookup_char_pos(span.lo()).file } fn parent(&mut self, span: Self::Span) -> Option { - span.ctxt().outer_expn_info().map(|i| i.call_site) + span.parent() } fn source(&mut self, span: Self::Span) -> Self::Span { span.source_callsite() diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 3b0af88f651..89725d8b339 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -13,7 +13,6 @@ mod generics; use crate::ast::{self, AttrStyle, Attribute, Arg, BindingMode, StrStyle, SelfKind}; use crate::ast::{FnDecl, Ident, IsAsync, MacDelimiter, Mutability, TyKind}; use crate::ast::{Visibility, VisibilityKind, Unsafety, CrateSugar}; -use crate::ext::hygiene::SyntaxContext; use crate::source_map::{self, respan}; use crate::parse::{SeqSep, literal, token}; use crate::parse::lexer::UnmatchedBrace; diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs index 74cab00d3c1..da7eb6add41 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -31,12 +31,13 @@ mod tests; /// otherwise return the call site span up to the `enclosing_sp` by /// following the `expn_info` chain. pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { - let call_site1 = sp.ctxt().outer_expn_info().map(|ei| ei.call_site); - let call_site2 = enclosing_sp.ctxt().outer_expn_info().map(|ei| ei.call_site); - match (call_site1, call_site2) { - (None, _) => sp, - (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp, - (Some(call_site1), _) => original_sp(call_site1, enclosing_sp), + let expn_info1 = sp.ctxt().outer_expn_info(); + let expn_info2 = enclosing_sp.ctxt().outer_expn_info(); + if expn_info1.is_root() || + !expn_info2.is_root() && expn_info1.call_site == expn_info2.call_site { + sp + } else { + original_sp(expn_info1.call_site, enclosing_sp) } } diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index c832e058cdf..743bd437ee5 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -112,8 +112,8 @@ impl ExpnId { } #[inline] - pub fn expn_info(self) -> Option { - HygieneData::with(|data| data.expn_info(self).cloned()) + pub fn expn_info(self) -> ExpnInfo { + HygieneData::with(|data| data.expn_info(self).clone()) } #[inline] @@ -139,12 +139,9 @@ impl ExpnId { #[inline] pub fn looks_like_proc_macro_derive(self) -> bool { HygieneData::with(|data| { - if data.default_transparency(self) == Transparency::Opaque { - if let Some(expn_info) = data.expn_info(self) { - if let ExpnKind::Macro(MacroKind::Derive, _) = expn_info.kind { - return true; - } - } + let expn_info = data.expn_info(self); + if let ExpnKind::Macro(MacroKind::Derive, _) = expn_info.kind { + return expn_info.default_transparency == Transparency::Opaque; } false }) @@ -190,16 +187,9 @@ impl HygieneData { self.expn_data[expn_id.0 as usize].parent } - fn expn_info(&self, expn_id: ExpnId) -> Option<&ExpnInfo> { - if expn_id != ExpnId::root() { - Some(self.expn_data[expn_id.0 as usize].expn_info.as_ref() - .expect("no expansion info for an expansion ID")) - } else { - // FIXME: Some code relies on `expn_info().is_none()` meaning "no expansion". 
- // Introduce a method for checking for "no expansion" instead and always return - // `ExpnInfo` from this function instead of the `Option`. - None - } + fn expn_info(&self, expn_id: ExpnId) -> &ExpnInfo { + self.expn_data[expn_id.0 as usize].expn_info.as_ref() + .expect("no expansion info for an expansion ID") } fn is_descendant_of(&self, mut expn_id: ExpnId, ancestor: ExpnId) -> bool { @@ -212,12 +202,6 @@ impl HygieneData { true } - fn default_transparency(&self, expn_id: ExpnId) -> Transparency { - self.expn_info(expn_id).map_or( - Transparency::SemiTransparent, |einfo| einfo.default_transparency - ) - } - fn modern(&self, ctxt: SyntaxContext) -> SyntaxContext { self.syntax_context_data[ctxt.0 as usize].opaque } @@ -256,11 +240,7 @@ impl HygieneData { fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span { while span.from_expansion() && span.ctxt() != to { - if let Some(info) = self.expn_info(self.outer_expn(span.ctxt())) { - span = info.call_site; - } else { - break; - } + span = self.expn_info(self.outer_expn(span.ctxt())).call_site; } span } @@ -275,7 +255,9 @@ impl HygieneData { fn apply_mark(&mut self, ctxt: SyntaxContext, expn_id: ExpnId) -> SyntaxContext { assert_ne!(expn_id, ExpnId::root()); - self.apply_mark_with_transparency(ctxt, expn_id, self.default_transparency(expn_id)) + self.apply_mark_with_transparency( + ctxt, expn_id, self.expn_info(expn_id).default_transparency + ) } fn apply_mark_with_transparency(&mut self, ctxt: SyntaxContext, expn_id: ExpnId, @@ -285,8 +267,7 @@ impl HygieneData { return self.apply_mark_internal(ctxt, expn_id, transparency); } - let call_site_ctxt = - self.expn_info(expn_id).map_or(SyntaxContext::root(), |info| info.call_site.ctxt()); + let call_site_ctxt = self.expn_info(expn_id).call_site.ctxt(); let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { self.modern(call_site_ctxt) } else { @@ -581,17 +562,17 @@ impl SyntaxContext { /// `ctxt.outer_expn_info()` is equivalent to but faster than /// `ctxt.outer_expn().expn_info()`. #[inline] - pub fn outer_expn_info(self) -> Option { - HygieneData::with(|data| data.expn_info(data.outer_expn(self)).cloned()) + pub fn outer_expn_info(self) -> ExpnInfo { + HygieneData::with(|data| data.expn_info(data.outer_expn(self)).clone()) } /// `ctxt.outer_expn_with_info()` is equivalent to but faster than /// `{ let outer = ctxt.outer_expn(); (outer, outer.expn_info()) }`. #[inline] - pub fn outer_expn_with_info(self) -> (ExpnId, Option) { + pub fn outer_expn_with_info(self) -> (ExpnId, ExpnInfo) { HygieneData::with(|data| { let outer = data.outer_expn(self); - (outer, data.expn_info(outer).cloned()) + (outer, data.expn_info(outer).clone()) }) } @@ -681,6 +662,11 @@ impl ExpnInfo { ..ExpnInfo::default(kind, call_site, edition) } } + + #[inline] + pub fn is_root(&self) -> bool { + if let ExpnKind::Root = self.kind { true } else { false } + } } /// Expansion kind. diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 7c8539198b9..7af426eaa13 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -355,20 +355,20 @@ impl Span { /// Returns the source span -- this is either the supplied span, or the span for /// the macro callsite that expanded to it. 
pub fn source_callsite(self) -> Span { - self.ctxt().outer_expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self) + let expn_info = self.ctxt().outer_expn_info(); + if !expn_info.is_root() { expn_info.call_site.source_callsite() } else { self } } /// The `Span` for the tokens in the previous macro expansion from which `self` was generated, /// if any. pub fn parent(self) -> Option { - self.ctxt().outer_expn_info().map(|i| i.call_site) + let expn_info = self.ctxt().outer_expn_info(); + if !expn_info.is_root() { Some(expn_info.call_site) } else { None } } /// Edition of the crate from which this span came. pub fn edition(self) -> edition::Edition { - self.ctxt().outer_expn_info().map_or_else(|| { - Edition::from_session() - }, |einfo| einfo.edition) + self.ctxt().outer_expn_info().edition } #[inline] @@ -387,49 +387,39 @@ impl Span { /// else returns the `ExpnInfo` for the macro definition /// corresponding to the source callsite. pub fn source_callee(self) -> Option { - fn source_callee(info: ExpnInfo) -> ExpnInfo { - match info.call_site.ctxt().outer_expn_info() { - Some(info) => source_callee(info), - None => info, - } + fn source_callee(expn_info: ExpnInfo) -> ExpnInfo { + let next_expn_info = expn_info.call_site.ctxt().outer_expn_info(); + if !next_expn_info.is_root() { source_callee(next_expn_info) } else { expn_info } } - self.ctxt().outer_expn_info().map(source_callee) + let expn_info = self.ctxt().outer_expn_info(); + if !expn_info.is_root() { Some(source_callee(expn_info)) } else { None } } /// Checks if a span is "internal" to a macro in which `#[unstable]` /// items can be used (that is, a macro marked with /// `#[allow_internal_unstable]`). pub fn allows_unstable(&self, feature: Symbol) -> bool { - match self.ctxt().outer_expn_info() { - Some(info) => info - .allow_internal_unstable - .map_or(false, |features| features.iter().any(|&f| - f == feature || f == sym::allow_internal_unstable_backcompat_hack - )), - None => false, - } + self.ctxt().outer_expn_info().allow_internal_unstable.map_or(false, |features| { + features.iter().any(|&f| { + f == feature || f == sym::allow_internal_unstable_backcompat_hack + }) + }) } /// Checks if this span arises from a compiler desugaring of kind `kind`. pub fn is_desugaring(&self, kind: DesugaringKind) -> bool { - match self.ctxt().outer_expn_info() { - Some(info) => match info.kind { - ExpnKind::Desugaring(k) => k == kind, - _ => false, - }, - None => false, + match self.ctxt().outer_expn_info().kind { + ExpnKind::Desugaring(k) => k == kind, + _ => false, } } /// Returns the compiler desugaring that created this span, or `None` /// if this span is not from a desugaring. pub fn desugaring_kind(&self) -> Option { - match self.ctxt().outer_expn_info() { - Some(info) => match info.kind { - ExpnKind::Desugaring(k) => Some(k), - _ => None - }, - None => None + match self.ctxt().outer_expn_info().kind { + ExpnKind::Desugaring(k) => Some(k), + _ => None } } @@ -437,16 +427,17 @@ impl Span { /// can be used without triggering the `unsafe_code` lint // (that is, a macro marked with `#[allow_internal_unsafe]`). 
pub fn allows_unsafe(&self) -> bool { - match self.ctxt().outer_expn_info() { - Some(info) => info.allow_internal_unsafe, - None => false, - } + self.ctxt().outer_expn_info().allow_internal_unsafe } pub fn macro_backtrace(mut self) -> Vec { let mut prev_span = DUMMY_SP; let mut result = vec![]; - while let Some(info) = self.ctxt().outer_expn_info() { + loop { + let info = self.ctxt().outer_expn_info(); + if info.is_root() { + break; + } // Don't print recursive invocations. if !info.call_site.source_equal(&prev_span) { let (pre, post) = match info.kind { -- cgit 1.4.1-3-g733a5 From 1a447738b8a7ac8f0a47a134f9fa1a60a4621620 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 13 Aug 2019 03:34:46 +0300 Subject: hygiene: Merge `ExpnInfo` and `InternalExpnData` --- src/librustc/hir/lowering.rs | 2 +- src/librustc/ich/impls_syntax.rs | 3 +- src/librustc/ty/query/on_disk_cache.rs | 4 +- src/librustc_resolve/macros.rs | 11 +++-- src/libsyntax/ext/base.rs | 5 +- src/libsyntax/ext/expand.rs | 15 +++--- src/libsyntax_ext/deriving/clone.rs | 2 +- src/libsyntax_ext/deriving/cmp/eq.rs | 2 +- src/libsyntax_ext/deriving/cmp/partial_eq.rs | 2 +- src/libsyntax_ext/deriving/generic/mod.rs | 2 +- src/libsyntax_ext/plugin_macro_defs.rs | 4 +- src/libsyntax_ext/proc_macro_harness.rs | 3 +- src/libsyntax_ext/standard_library_imports.rs | 4 +- src/libsyntax_ext/test_harness.rs | 5 +- src/libsyntax_pos/hygiene.rs | 70 +++++++++++++-------------- 15 files changed, 67 insertions(+), 67 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index f942a0fb857..0b7dacf8383 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -704,7 +704,7 @@ impl<'a> LoweringContext<'a> { span: Span, allow_internal_unstable: Option>, ) -> Span { - span.fresh_expansion(ExpnId::root(), ExpnInfo { + span.fresh_expansion(ExpnInfo { def_site: span, allow_internal_unstable, ..ExpnInfo::default(ExpnKind::Desugaring(reason), span, self.sess.edition()) diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 5cc8324b316..22e2cff3595 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -398,8 +398,9 @@ impl_stable_hash_for!(enum ::syntax_pos::hygiene::Transparency { }); impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnInfo { - call_site, kind, + parent -> _, + call_site, def_site, default_transparency, allow_internal_unstable, diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 2286271b9eb..351b9988bb2 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -588,13 +588,13 @@ impl<'a, 'tcx> SpecializedDecoder for CacheDecoder<'a, 'tcx> { let expn_info_tag = u8::decode(self)?; - // FIXME(mw): This method does not restore `InternalExpnData::parent` or + // FIXME(mw): This method does not restore `ExpnInfo::parent` or // `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things // don't seem to be used after HIR lowering, so everything should be fine // as long as incremental compilation does not kick in before that. 
let location = || Span::with_root_ctxt(lo, hi); let recover_from_expn_info = |this: &Self, expn_info, pos| { - let span = location().fresh_expansion(ExpnId::root(), expn_info); + let span = location().fresh_expansion(expn_info); this.synthetic_expansion_infos.borrow_mut().insert(pos, span.ctxt()); span }; diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 2f9bee74908..58e785ab8c2 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -97,7 +97,7 @@ impl<'a> base::Resolver for Resolver<'a> { } fn get_module_scope(&mut self, id: ast::NodeId) -> ExpnId { - let expn_id = ExpnId::fresh(ExpnId::root(), Some(ExpnInfo::default( + let expn_id = ExpnId::fresh(Some(ExpnInfo::default( ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, self.session.edition() ))); let module = self.module_map[&self.definitions.local_def_id(id)]; @@ -120,7 +120,8 @@ impl<'a> base::Resolver for Resolver<'a> { &mut self, expansion: ExpnId, fragment: &AstFragment, derives: &[ExpnId] ) { // Fill in some data for derives if the fragment is from a derive container. - let parent_scope = self.invocation_parent_scopes[&expansion]; + // We are inside the `expansion` now, but other parent scope components are still the same. + let parent_scope = ParentScope { expansion, ..self.invocation_parent_scopes[&expansion] }; let parent_def = self.definitions.invocation_parent(expansion); self.invocation_parent_scopes.extend(derives.iter().map(|&derive| (derive, parent_scope))); for &derive_invoc_id in derives { @@ -130,9 +131,7 @@ impl<'a> base::Resolver for Resolver<'a> { parent_scope.module.unresolved_invocations.borrow_mut().extend(derives); // Integrate the new AST fragment into all the definition and module structures. - // We are inside the `expansion` new, but other parent scope components are still the same. 
fragment.visit_with(&mut DefCollector::new(&mut self.definitions, expansion)); - let parent_scope = ParentScope { expansion, ..parent_scope }; let output_legacy_scope = self.build_reduced_graph(fragment, parent_scope); self.output_legacy_scopes.insert(expansion, output_legacy_scope); } @@ -186,7 +185,9 @@ impl<'a> base::Resolver for Resolver<'a> { let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, force)?; let span = invoc.span(); - invoc.expansion_data.id.set_expn_info(ext.expn_info(span, fast_print_path(path))); + invoc.expansion_data.id.set_expn_info( + ext.expn_info(parent_scope.expansion, span, fast_print_path(path)) + ); if let Res::Def(_, def_id) = res { if after_derive { diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 8eacb96e3ff..734b566b3ad 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -640,10 +640,11 @@ impl SyntaxExtension { SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition) } - pub fn expn_info(&self, call_site: Span, descr: Symbol) -> ExpnInfo { + pub fn expn_info(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnInfo { ExpnInfo { - call_site, kind: ExpnKind::Macro(self.macro_kind(), descr), + parent, + call_site, def_site: self.span, default_transparency: self.default_transparency, allow_internal_unstable: self.allow_internal_unstable.clone(), diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 6f3e8f14b0b..4233d5c0a22 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -353,7 +353,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { derives.reserve(traits.len()); invocations.reserve(traits.len()); for path in traits { - let expn_id = ExpnId::fresh(self.cx.current_expansion.id, None); + let expn_id = ExpnId::fresh(None); derives.push(expn_id); invocations.push(Invocation { kind: InvocationKind::Derive { path, item: item.clone() }, @@ -800,13 +800,16 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { // with exception of the derive container case which is not resolved and can get // its expansion info immediately. let expn_info = match &kind { - InvocationKind::DeriveContainer { item, .. } => Some(ExpnInfo::default( - ExpnKind::Macro(MacroKind::Attr, sym::derive), - item.span(), self.cx.parse_sess.edition, - )), + InvocationKind::DeriveContainer { item, .. } => Some(ExpnInfo { + parent: self.cx.current_expansion.id, + ..ExpnInfo::default( + ExpnKind::Macro(MacroKind::Attr, sym::derive), + item.span(), self.cx.parse_sess.edition, + ) + }), _ => None, }; - let expn_id = ExpnId::fresh(self.cx.current_expansion.id, expn_info); + let expn_id = ExpnId::fresh(expn_info); self.invocations.push(Invocation { kind, fragment_kind, diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index 5a02ae0afb9..73df625d5ee 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -35,7 +35,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt<'_>, match annitem.node { ItemKind::Struct(_, Generics { ref params, .. }) | ItemKind::Enum(_, Generics { ref params, .. }) => { - let container_id = cx.current_expansion.id.parent(); + let container_id = cx.current_expansion.id.expn_info().parent; if cx.resolver.has_derives(container_id, SpecialDerives::COPY) && !params.iter().any(|param| match param.kind { ast::GenericParamKind::Type { .. 
} => true, diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 5d7c4a84389..1ef34a68004 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -13,7 +13,7 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt<'_>, mitem: &MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { - cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::EQ); + cx.resolver.add_derives(cx.current_expansion.id.expn_info().parent, SpecialDerives::EQ); let inline = cx.meta_word(span, sym::inline); let hidden = cx.meta_list_item_word(span, sym::hidden); diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index 7d7c4ae22a8..76befc98591 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -13,7 +13,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt<'_>, mitem: &MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { - cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::PARTIAL_EQ); + cx.resolver.add_derives(cx.current_expansion.id.expn_info().parent, SpecialDerives::PARTIAL_EQ); // structures are equal if all fields are equal, and non equal, if // any fields are not equal or if the enum variants are different diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 4bf004a71e4..6b739e27eee 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -425,7 +425,7 @@ impl<'a> TraitDef<'a> { return; } }; - let container_id = cx.current_expansion.id.parent(); + let container_id = cx.current_expansion.id.expn_info().parent; let is_always_copy = cx.resolver.has_derives(container_id, SpecialDerives::COPY) && has_no_type_params; diff --git a/src/libsyntax_ext/plugin_macro_defs.rs b/src/libsyntax_ext/plugin_macro_defs.rs index 15737314b22..b34a250881a 100644 --- a/src/libsyntax_ext/plugin_macro_defs.rs +++ b/src/libsyntax_ext/plugin_macro_defs.rs @@ -11,7 +11,7 @@ use syntax::source_map::respan; use syntax::symbol::sym; use syntax::tokenstream::*; use syntax_pos::{Span, DUMMY_SP}; -use syntax_pos::hygiene::{ExpnId, ExpnInfo, ExpnKind, MacroKind}; +use syntax_pos::hygiene::{ExpnInfo, ExpnKind, MacroKind}; use std::mem; @@ -43,7 +43,7 @@ pub fn inject( ) { if !named_exts.is_empty() { let mut extra_items = Vec::new(); - let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnInfo::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::plugin), DUMMY_SP, edition, [sym::rustc_attrs][..].into(), )); diff --git a/src/libsyntax_ext/proc_macro_harness.rs b/src/libsyntax_ext/proc_macro_harness.rs index 62c74b2b9c6..9d8a8c17ba2 100644 --- a/src/libsyntax_ext/proc_macro_harness.rs +++ b/src/libsyntax_ext/proc_macro_harness.rs @@ -6,7 +6,6 @@ use syntax::attr; use syntax::source_map::{ExpnInfo, ExpnKind, respan}; use syntax::ext::base::{ExtCtxt, MacroKind}; use syntax::ext::expand::{AstFragment, ExpansionConfig}; -use syntax::ext::hygiene::ExpnId; use syntax::ext::proc_macro::is_proc_macro_attr; use syntax::parse::ParseSess; use syntax::ptr::P; @@ -328,7 +327,7 @@ fn mk_decls( custom_attrs: &[ProcMacroDef], custom_macros: &[ProcMacroDef], ) -> P { - let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnInfo::allow_unstable( ExpnKind::Macro(MacroKind::Attr, 
sym::proc_macro), DUMMY_SP, cx.parse_sess.edition, [sym::rustc_attrs, sym::proc_macro_internals][..].into(), )); diff --git a/src/libsyntax_ext/standard_library_imports.rs b/src/libsyntax_ext/standard_library_imports.rs index 4382fb8af85..c0041248652 100644 --- a/src/libsyntax_ext/standard_library_imports.rs +++ b/src/libsyntax_ext/standard_library_imports.rs @@ -1,6 +1,6 @@ use syntax::{ast, attr}; use syntax::edition::Edition; -use syntax::ext::hygiene::{ExpnId, MacroKind}; +use syntax::ext::hygiene::MacroKind; use syntax::ptr::P; use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned, respan}; use syntax::symbol::{Ident, Symbol, kw, sym}; @@ -55,7 +55,7 @@ pub fn inject( // the prelude. let name = names[0]; - let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnInfo::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::std_inject), DUMMY_SP, edition, [sym::prelude_import][..].into(), )); diff --git a/src/libsyntax_ext/test_harness.rs b/src/libsyntax_ext/test_harness.rs index ab108290a93..3fb1c1bd022 100644 --- a/src/libsyntax_ext/test_harness.rs +++ b/src/libsyntax_ext/test_harness.rs @@ -5,9 +5,8 @@ use smallvec::{smallvec, SmallVec}; use syntax::ast::{self, Ident}; use syntax::attr; use syntax::entry::{self, EntryPointType}; -use syntax::ext::base::{ExtCtxt, Resolver}; +use syntax::ext::base::{ExtCtxt, MacroKind, Resolver}; use syntax::ext::expand::{AstFragment, ExpansionConfig}; -use syntax::ext::hygiene::{ExpnId, MacroKind}; use syntax::feature_gate::Features; use syntax::mut_visit::{*, ExpectOne}; use syntax::parse::ParseSess; @@ -269,7 +268,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P { // #![main] // test::test_main_static(&[..tests]); // } - let sp = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable( + let sp = DUMMY_SP.fresh_expansion(ExpnInfo::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, cx.ext_cx.parse_sess.edition, [sym::main, sym::test, sym::rustc_attrs][..].into(), )); diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index 743bd437ee5..1dba466625a 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -56,16 +56,6 @@ struct SyntaxContextData { #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub struct ExpnId(u32); -// FIXME: Find a way to merge this with `ExpnInfo`. -#[derive(Debug)] -struct InternalExpnData { - parent: ExpnId, - /// Each expansion should have an associated expansion info, but sometimes there's a delay - /// between creation of an expansion ID and obtaining its info (e.g. macros are collected - /// first and then resolved later), so we use an `Option` here. - expn_info: Option, -} - /// A property of a macro expansion that determines how identifiers /// produced by that expansion are resolved. #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug, RustcEncodable, RustcDecodable)] @@ -86,8 +76,8 @@ pub enum Transparency { } impl ExpnId { - pub fn fresh(parent: ExpnId, expn_info: Option) -> Self { - HygieneData::with(|data| data.fresh_expn(parent, expn_info)) + pub fn fresh(expn_info: Option) -> Self { + HygieneData::with(|data| data.fresh_expn(expn_info)) } /// The ID of the theoretical expansion that generates freshly parsed, unexpanded AST. 
@@ -106,11 +96,6 @@ impl ExpnId { ExpnId(raw) } - #[inline] - pub fn parent(self) -> ExpnId { - HygieneData::with(|data| data.parent_expn(self)) - } - #[inline] pub fn expn_info(self) -> ExpnInfo { HygieneData::with(|data| data.expn_info(self).clone()) @@ -119,7 +104,7 @@ impl ExpnId { #[inline] pub fn set_expn_info(self, info: ExpnInfo) { HygieneData::with(|data| { - let old_info = &mut data.expn_data[self.0 as usize].expn_info; + let old_info = &mut data.expn_data[self.0 as usize]; assert!(old_info.is_none(), "expansion info is reset for an expansion ID"); *old_info = Some(info); }) @@ -150,7 +135,10 @@ impl ExpnId { #[derive(Debug)] crate struct HygieneData { - expn_data: Vec, + /// Each expansion should have an associated expansion info, but sometimes there's a delay + /// between creation of an expansion ID and obtaining its info (e.g. macros are collected + /// first and then resolved later), so we use an `Option` here. + expn_data: Vec>, syntax_context_data: Vec, syntax_context_map: FxHashMap<(SyntaxContext, ExpnId, Transparency), SyntaxContext>, } @@ -158,10 +146,7 @@ crate struct HygieneData { impl HygieneData { crate fn new(edition: Edition) -> Self { HygieneData { - expn_data: vec![InternalExpnData { - parent: ExpnId::root(), - expn_info: Some(ExpnInfo::default(ExpnKind::Root, DUMMY_SP, edition)), - }], + expn_data: vec![Some(ExpnInfo::default(ExpnKind::Root, DUMMY_SP, edition))], syntax_context_data: vec![SyntaxContextData { outer_expn: ExpnId::root(), outer_transparency: Transparency::Opaque, @@ -178,17 +163,13 @@ impl HygieneData { GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut())) } - fn fresh_expn(&mut self, parent: ExpnId, expn_info: Option) -> ExpnId { - self.expn_data.push(InternalExpnData { parent, expn_info }); + fn fresh_expn(&mut self, expn_info: Option) -> ExpnId { + self.expn_data.push(expn_info); ExpnId(self.expn_data.len() as u32 - 1) } - fn parent_expn(&self, expn_id: ExpnId) -> ExpnId { - self.expn_data[expn_id.0 as usize].parent - } - fn expn_info(&self, expn_id: ExpnId) -> &ExpnInfo { - self.expn_data[expn_id.0 as usize].expn_info.as_ref() + self.expn_data[expn_id.0 as usize].as_ref() .expect("no expansion info for an expansion ID") } @@ -197,7 +178,7 @@ impl HygieneData { if expn_id == ExpnId::root() { return false; } - expn_id = self.parent_expn(expn_id); + expn_id = self.expn_info(expn_id).parent; } true } @@ -593,9 +574,9 @@ impl Span { /// other compiler-generated code to set per-span properties like allowed unstable features. /// The returned span belongs to the created expansion and has the new properties, /// but its location is inherited from the current span. - pub fn fresh_expansion(self, parent: ExpnId, expn_info: ExpnInfo) -> Span { + pub fn fresh_expansion(self, expn_info: ExpnInfo) -> Span { HygieneData::with(|data| { - let expn_id = data.fresh_expn(parent, Some(expn_info)); + let expn_id = data.fresh_expn(Some(expn_info)); self.with_ctxt(data.apply_mark(SyntaxContext::root(), expn_id)) }) } @@ -606,6 +587,10 @@ impl Span { #[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub struct ExpnInfo { // --- The part unique to each expansion. + /// The kind of this expansion - macro or compiler desugaring. + pub kind: ExpnKind, + /// The expansion that produced this expansion. + pub parent: ExpnId, /// The location of the actual macro invocation or syntax sugar , e.g. 
/// `let x = foo!();` or `if let Some(y) = x {}` /// @@ -616,8 +601,6 @@ pub struct ExpnInfo { /// call_site span would have its own ExpnInfo, with the call_site /// pointing to the `foo!` invocation. pub call_site: Span, - /// The kind of this expansion - macro or compiler desugaring. - pub kind: ExpnKind, // --- The part specific to the macro/desugaring definition. // --- FIXME: Share it between expansions with the same definition. @@ -644,8 +627,9 @@ impl ExpnInfo { /// Constructs an expansion info with default properties. pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnInfo { ExpnInfo { - call_site, kind, + parent: ExpnId::root(), + call_site, def_site: DUMMY_SP, default_transparency: Transparency::SemiTransparent, allow_internal_unstable: None, @@ -753,6 +737,18 @@ impl DesugaringKind { } } +impl Encodable for ExpnId { + fn encode(&self, _: &mut E) -> Result<(), E::Error> { + Ok(()) // FIXME(jseyfried) intercrate hygiene + } +} + +impl Decodable for ExpnId { + fn decode(_: &mut D) -> Result { + Ok(ExpnId::root()) // FIXME(jseyfried) intercrate hygiene + } +} + impl Encodable for SyntaxContext { fn encode(&self, _: &mut E) -> Result<(), E::Error> { Ok(()) // FIXME(jseyfried) intercrate hygiene @@ -760,7 +756,7 @@ impl Encodable for SyntaxContext { } impl Decodable for SyntaxContext { - fn decode(_: &mut D) -> Result { + fn decode(_: &mut D) -> Result { Ok(SyntaxContext::root()) // FIXME(jseyfried) intercrate hygiene } } -- cgit 1.4.1-3-g733a5 From 74190a5e1c7439b001296fbc41da67682fd1d9bf Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 13 Aug 2019 22:48:27 +0300 Subject: syntax_pos: Remove the duplicate global edition It was introduced to avoid going through `hygiene_data`, but now it's read only once, when `ParseSess` is created, so going through a lock is ok. 
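
The design point above — a value that had been duplicated into a second global purely to avoid lock traffic can instead be read through the lock a single time, at construction, and then cached on the consumer — can be sketched in isolation. This is a minimal illustration only; `SharedConfig` and `Session` are made-up stand-ins for the lock-protected `hygiene_data` and for `ParseSess`, not code from the patch itself:

    use std::sync::Mutex;

    // Stand-in for the lock-protected source of truth (`hygiene_data` in the patch).
    struct SharedConfig {
        edition: u32,
    }

    static CONFIG: Mutex<SharedConfig> = Mutex::new(SharedConfig { edition: 2018 });

    // Stand-in for `ParseSess`: the value is read through the lock exactly once,
    // when the session is created, and cached in the session afterwards.
    struct Session {
        edition: u32,
    }

    impl Session {
        fn new() -> Session {
            // The single locked read; no duplicate global copy is needed.
            let edition = CONFIG.lock().unwrap().edition;
            Session { edition }
        }
    }

    fn main() {
        let sess = Session::new();
        // Later reads use the per-session copy and never touch the lock again.
        assert_eq!(sess.edition, 2018);
    }
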
--- src/libsyntax/parse/lexer/tests.rs | 32 ++++---------------------------- src/libsyntax/parse/mod.rs | 3 ++- src/libsyntax_pos/edition.rs | 5 ----- src/libsyntax_pos/lib.rs | 2 -- 4 files changed, 6 insertions(+), 36 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/lexer/tests.rs b/src/libsyntax/parse/lexer/tests.rs index 1e4d9048b41..94570140996 100644 --- a/src/libsyntax/parse/lexer/tests.rs +++ b/src/libsyntax/parse/lexer/tests.rs @@ -1,41 +1,17 @@ use super::*; -use crate::ast::CrateConfig; use crate::symbol::Symbol; use crate::source_map::{SourceMap, FilePathMapping}; -use crate::feature_gate::UnstableFeatures; use crate::parse::token; -use crate::diagnostics::plugin::ErrorMap; use crate::with_default_globals; use std::io; use std::path::PathBuf; -use syntax_pos::{BytePos, Span, edition::Edition}; -use rustc_data_structures::fx::{FxHashSet, FxHashMap}; -use rustc_data_structures::sync::{Lock, Once}; +use errors::{Handler, emitter::EmitterWriter}; +use syntax_pos::{BytePos, Span}; fn mk_sess(sm: Lrc) -> ParseSess { - let emitter = errors::emitter::EmitterWriter::new(Box::new(io::sink()), - Some(sm.clone()), - false, - false, - false); - ParseSess { - span_diagnostic: errors::Handler::with_emitter(true, None, Box::new(emitter)), - unstable_features: UnstableFeatures::from_environment(), - config: CrateConfig::default(), - included_mod_stack: Lock::new(Vec::new()), - source_map: sm, - missing_fragment_specifiers: Lock::new(FxHashSet::default()), - raw_identifier_spans: Lock::new(Vec::new()), - registered_diagnostics: Lock::new(ErrorMap::new()), - buffered_lints: Lock::new(vec![]), - edition: Edition::from_session(), - ambiguous_block_expr_parse: Lock::new(FxHashMap::default()), - param_attr_spans: Lock::new(Vec::new()), - let_chains_spans: Lock::new(Vec::new()), - async_closure_spans: Lock::new(Vec::new()), - injected_crate_name: Once::new(), - } + let emitter = EmitterWriter::new(Box::new(io::sink()), Some(sm.clone()), false, false, false); + ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm) } // open a string reader for the given string diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 80aa7a35266..a1bcc455eb4 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -16,6 +16,7 @@ use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, use rustc_data_structures::sync::{Lrc, Lock, Once}; use syntax_pos::{Span, SourceFile, FileName, MultiSpan}; use syntax_pos::edition::Edition; +use syntax_pos::hygiene::ExpnId; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; use std::borrow::Cow; @@ -86,7 +87,7 @@ impl ParseSess { included_mod_stack: Lock::new(vec![]), source_map, buffered_lints: Lock::new(vec![]), - edition: Edition::from_session(), + edition: ExpnId::root().expn_info().edition, ambiguous_block_expr_parse: Lock::new(FxHashMap::default()), param_attr_spans: Lock::new(Vec::new()), let_chains_spans: Lock::new(Vec::new()), diff --git a/src/libsyntax_pos/edition.rs b/src/libsyntax_pos/edition.rs index 20216568426..00cd00f2837 100644 --- a/src/libsyntax_pos/edition.rs +++ b/src/libsyntax_pos/edition.rs @@ -1,7 +1,6 @@ use crate::symbol::{Symbol, sym}; use std::fmt; use std::str::FromStr; -use crate::GLOBALS; /// The edition of the compiler (RFC 2052) #[derive(Clone, Copy, Hash, PartialEq, PartialOrd, Debug, RustcEncodable, RustcDecodable, Eq)] @@ -39,10 +38,6 @@ impl fmt::Display for Edition { } impl Edition { - pub fn from_session() -> Edition { - 
GLOBALS.with(|globals| globals.edition) - } - pub fn lint_name(&self) -> &'static str { match *self { Edition::Edition2015 => "rust_2015_compatibility", diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 7af426eaa13..ae538677a3a 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -49,7 +49,6 @@ pub struct Globals { symbol_interner: Lock, span_interner: Lock, hygiene_data: Lock, - edition: Edition, } impl Globals { @@ -58,7 +57,6 @@ impl Globals { symbol_interner: Lock::new(symbol::Interner::fresh()), span_interner: Lock::new(span_encoding::SpanInterner::default()), hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), - edition, } } } -- cgit 1.4.1-3-g733a5 From 136db2235a754f91f8a0a6bf6d985d77fe97f8db Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Tue, 13 Aug 2019 23:56:42 +0300 Subject: hygiene: `ExpnInfo` -> `ExpnData` For naming consistency with everything else in this area --- src/librustc/hir/lowering.rs | 6 +- src/librustc/ich/hcx.rs | 2 +- src/librustc/ich/impls_syntax.rs | 2 +- src/librustc/lint/internal.rs | 8 +-- src/librustc/lint/mod.rs | 10 +-- src/librustc/traits/error_reporting.rs | 6 +- src/librustc/ty/query/on_disk_cache.rs | 68 ++++++++++---------- src/librustc_codegen_ssa/back/write.rs | 2 +- src/librustc_lint/unused.rs | 2 +- src/librustc_resolve/macros.rs | 10 +-- src/libsyntax/ext/base.rs | 18 +++--- src/libsyntax/ext/expand.rs | 20 +++--- src/libsyntax/ext/proc_macro_server.rs | 2 +- src/libsyntax/parse/mod.rs | 2 +- src/libsyntax/source_map.rs | 14 ++--- src/libsyntax_ext/deriving/clone.rs | 2 +- src/libsyntax_ext/deriving/cmp/eq.rs | 2 +- src/libsyntax_ext/deriving/cmp/partial_eq.rs | 2 +- src/libsyntax_ext/deriving/generic/mod.rs | 2 +- src/libsyntax_ext/plugin_macro_defs.rs | 4 +- src/libsyntax_ext/proc_macro_harness.rs | 4 +- src/libsyntax_ext/standard_library_imports.rs | 4 +- src/libsyntax_ext/test_harness.rs | 4 +- src/libsyntax_pos/hygiene.rs | 90 +++++++++++++-------------- src/libsyntax_pos/lib.rs | 50 +++++++-------- 25 files changed, 168 insertions(+), 168 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 8c1ce5983b8..0f6e834ca26 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -67,7 +67,7 @@ use syntax::errors; use syntax::ext::base::SpecialDerives; use syntax::ext::hygiene::ExpnId; use syntax::print::pprust; -use syntax::source_map::{respan, ExpnInfo, ExpnKind, DesugaringKind, Spanned}; +use syntax::source_map::{respan, ExpnData, ExpnKind, DesugaringKind, Spanned}; use syntax::symbol::{kw, sym, Symbol}; use syntax::tokenstream::{TokenStream, TokenTree}; use syntax::parse::token::{self, Token}; @@ -704,9 +704,9 @@ impl<'a> LoweringContext<'a> { span: Span, allow_internal_unstable: Option>, ) -> Span { - span.fresh_expansion(ExpnInfo { + span.fresh_expansion(ExpnData { allow_internal_unstable, - ..ExpnInfo::default(ExpnKind::Desugaring(reason), span, self.sess.edition()) + ..ExpnData::default(ExpnKind::Desugaring(reason), span, self.sess.edition()) }) } diff --git a/src/librustc/ich/hcx.rs b/src/librustc/ich/hcx.rs index 39f6b0d4344..e77faea1e4c 100644 --- a/src/librustc/ich/hcx.rs +++ b/src/librustc/ich/hcx.rs @@ -370,7 +370,7 @@ impl<'a> HashStable> for Span { } let mut hasher = StableHasher::new(); - expn_id.expn_info().hash_stable(hcx, &mut hasher); + expn_id.expn_data().hash_stable(hcx, &mut hasher); let sub_hash: Fingerprint = hasher.finish(); let sub_hash = sub_hash.to_smaller_hash(); 
cache.borrow_mut().insert(expn_id, sub_hash); diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 22e2cff3595..7003f71c8ba 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -397,7 +397,7 @@ impl_stable_hash_for!(enum ::syntax_pos::hygiene::Transparency { Opaque, }); -impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnInfo { +impl_stable_hash_for!(struct ::syntax_pos::hygiene::ExpnData { kind, parent -> _, call_site, diff --git a/src/librustc/lint/internal.rs b/src/librustc/lint/internal.rs index 29106fe000b..be73b305e2c 100644 --- a/src/librustc/lint/internal.rs +++ b/src/librustc/lint/internal.rs @@ -227,10 +227,10 @@ impl EarlyLintPass for LintPassImpl { if let ItemKind::Impl(_, _, _, _, Some(lint_pass), _, _) = &item.node { if let Some(last) = lint_pass.path.segments.last() { if last.ident.name == sym::LintPass { - let expn_info = lint_pass.path.span.ctxt().outer_expn_info(); - let call_site = expn_info.call_site; - if expn_info.kind.descr() != sym::impl_lint_pass && - call_site.ctxt().outer_expn_info().kind.descr() != sym::declare_lint_pass { + let expn_data = lint_pass.path.span.ctxt().outer_expn_data(); + let call_site = expn_data.call_site; + if expn_data.kind.descr() != sym::impl_lint_pass && + call_site.ctxt().outer_expn_data().kind.descr() != sym::declare_lint_pass { cx.struct_span_lint( LINT_PASS_IMPL_WITHOUT_MACRO, lint_pass.path.span, diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 3729ee81f5c..2b58627cdea 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -885,16 +885,16 @@ pub fn provide(providers: &mut Providers<'_>) { /// This is used to test whether a lint should not even begin to figure out whether it should /// be reported on the current node. pub fn in_external_macro(sess: &Session, span: Span) -> bool { - let expn_info = span.ctxt().outer_expn_info(); - match expn_info.kind { + let expn_data = span.ctxt().outer_expn_data(); + match expn_data.kind { ExpnKind::Root | ExpnKind::Desugaring(DesugaringKind::ForLoop) => false, ExpnKind::Desugaring(_) => true, // well, it's "external" ExpnKind::Macro(MacroKind::Bang, _) => { - if expn_info.def_site.is_dummy() { + if expn_data.def_site.is_dummy() { // dummy span for the def_site means it's an external macro return true; } - match sess.source_map().span_to_snippet(expn_info.def_site) { + match sess.source_map().span_to_snippet(expn_data.def_site) { Ok(code) => !code.starts_with("macro_rules"), // no snippet = external macro or compiler-builtin expansion Err(_) => true, @@ -906,7 +906,7 @@ pub fn in_external_macro(sess: &Session, span: Span) -> bool { /// Returns whether `span` originates in a derive macro's expansion pub fn in_derive_expansion(span: Span) -> bool { - if let ExpnKind::Macro(MacroKind::Derive, _) = span.ctxt().outer_expn_info().kind { + if let ExpnKind::Macro(MacroKind::Derive, _) = span.ctxt().outer_expn_data().kind { return true; } false diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 20568d4709b..ba92e851141 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -61,9 +61,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { // We want to ignore desugarings here: spans are equivalent even // if one is the result of a desugaring and the other is not. 
let mut span = error.obligation.cause.span; - let expn_info = span.ctxt().outer_expn_info(); - if let ExpnKind::Desugaring(_) = expn_info.kind { - span = expn_info.call_site; + let expn_data = span.ctxt().outer_expn_data(); + if let ExpnKind::Desugaring(_) = expn_data.kind { + span = expn_data.call_site; } error_map.entry(span).or_default().push( diff --git a/src/librustc/ty/query/on_disk_cache.rs b/src/librustc/ty/query/on_disk_cache.rs index 351b9988bb2..8bf01970eb5 100644 --- a/src/librustc/ty/query/on_disk_cache.rs +++ b/src/librustc/ty/query/on_disk_cache.rs @@ -23,16 +23,16 @@ use std::mem; use syntax::ast::NodeId; use syntax::source_map::{SourceMap, StableSourceFileId}; use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile}; -use syntax_pos::hygiene::{ExpnId, SyntaxContext, ExpnInfo}; +use syntax_pos::hygiene::{ExpnId, SyntaxContext, ExpnData}; const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE; const TAG_CLEAR_CROSS_CRATE_CLEAR: u8 = 0; const TAG_CLEAR_CROSS_CRATE_SET: u8 = 1; -const TAG_NO_EXPANSION_INFO: u8 = 0; -const TAG_EXPANSION_INFO_SHORTHAND: u8 = 1; -const TAG_EXPANSION_INFO_INLINE: u8 = 2; +const TAG_NO_EXPN_DATA: u8 = 0; +const TAG_EXPN_DATA_SHORTHAND: u8 = 1; +const TAG_EXPN_DATA_INLINE: u8 = 2; const TAG_VALID_SPAN: u8 = 0; const TAG_INVALID_SPAN: u8 = 1; @@ -58,7 +58,7 @@ pub struct OnDiskCache<'sess> { // These two fields caches that are populated lazily during decoding. file_index_to_file: Lock>>, - synthetic_expansion_infos: Lock>, + synthetic_syntax_contexts: Lock>, // A map from dep-node to the position of the cached query result in // `serialized_data`. @@ -135,7 +135,7 @@ impl<'sess> OnDiskCache<'sess> { current_diagnostics: Default::default(), query_result_index: footer.query_result_index.into_iter().collect(), prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(), - synthetic_expansion_infos: Default::default(), + synthetic_syntax_contexts: Default::default(), alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index), } } @@ -151,7 +151,7 @@ impl<'sess> OnDiskCache<'sess> { current_diagnostics: Default::default(), query_result_index: Default::default(), prev_diagnostics_index: Default::default(), - synthetic_expansion_infos: Default::default(), + synthetic_syntax_contexts: Default::default(), alloc_decoding_state: AllocDecodingState::new(Vec::new()), } } @@ -185,7 +185,7 @@ impl<'sess> OnDiskCache<'sess> { encoder, type_shorthands: Default::default(), predicate_shorthands: Default::default(), - expn_info_shorthands: Default::default(), + expn_data_shorthands: Default::default(), interpret_allocs: Default::default(), interpret_allocs_inverse: Vec::new(), source_map: CachingSourceMapView::new(tcx.sess.source_map()), @@ -383,7 +383,7 @@ impl<'sess> OnDiskCache<'sess> { cnum_map: self.cnum_map.get(), file_index_to_file: &self.file_index_to_file, file_index_to_stable_id: &self.file_index_to_stable_id, - synthetic_expansion_infos: &self.synthetic_expansion_infos, + synthetic_syntax_contexts: &self.synthetic_syntax_contexts, alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(), }; @@ -440,7 +440,7 @@ struct CacheDecoder<'a, 'tcx> { opaque: opaque::Decoder<'a>, source_map: &'a SourceMap, cnum_map: &'a IndexVec>, - synthetic_expansion_infos: &'a Lock>, + synthetic_syntax_contexts: &'a Lock>, file_index_to_file: &'a Lock>>, file_index_to_stable_id: &'a FxHashMap, alloc_decoding_session: AllocDecodingSession<'a>, @@ -586,37 +586,37 @@ impl<'a, 'tcx> SpecializedDecoder for CacheDecoder<'a, 'tcx> { let 
lo = file_lo.lines[line_lo - 1] + col_lo; let hi = lo + len; - let expn_info_tag = u8::decode(self)?; + let expn_data_tag = u8::decode(self)?; - // FIXME(mw): This method does not restore `ExpnInfo::parent` or + // FIXME(mw): This method does not restore `ExpnData::parent` or // `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things // don't seem to be used after HIR lowering, so everything should be fine // as long as incremental compilation does not kick in before that. let location = || Span::with_root_ctxt(lo, hi); - let recover_from_expn_info = |this: &Self, expn_info, pos| { - let span = location().fresh_expansion(expn_info); - this.synthetic_expansion_infos.borrow_mut().insert(pos, span.ctxt()); + let recover_from_expn_data = |this: &Self, expn_data, pos| { + let span = location().fresh_expansion(expn_data); + this.synthetic_syntax_contexts.borrow_mut().insert(pos, span.ctxt()); span }; - Ok(match expn_info_tag { - TAG_NO_EXPANSION_INFO => { + Ok(match expn_data_tag { + TAG_NO_EXPN_DATA => { location() } - TAG_EXPANSION_INFO_INLINE => { - let expn_info = Decodable::decode(self)?; - recover_from_expn_info( - self, expn_info, AbsoluteBytePos::new(self.opaque.position()) + TAG_EXPN_DATA_INLINE => { + let expn_data = Decodable::decode(self)?; + recover_from_expn_data( + self, expn_data, AbsoluteBytePos::new(self.opaque.position()) ) } - TAG_EXPANSION_INFO_SHORTHAND => { + TAG_EXPN_DATA_SHORTHAND => { let pos = AbsoluteBytePos::decode(self)?; - let cached_ctxt = self.synthetic_expansion_infos.borrow().get(&pos).cloned(); + let cached_ctxt = self.synthetic_syntax_contexts.borrow().get(&pos).cloned(); if let Some(ctxt) = cached_ctxt { Span::new(lo, hi, ctxt) } else { - let expn_info = - self.with_position(pos.to_usize(), |this| ExpnInfo::decode(this))?; - recover_from_expn_info(self, expn_info, pos) + let expn_data = + self.with_position(pos.to_usize(), |this| ExpnData::decode(this))?; + recover_from_expn_data(self, expn_data, pos) } } _ => { @@ -725,7 +725,7 @@ struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> { encoder: &'a mut E, type_shorthands: FxHashMap, usize>, predicate_shorthands: FxHashMap, usize>, - expn_info_shorthands: FxHashMap, + expn_data_shorthands: FxHashMap, interpret_allocs: FxHashMap, interpret_allocs_inverse: Vec, source_map: CachingSourceMapView<'tcx>, @@ -817,17 +817,17 @@ where len.encode(self)?; if span_data.ctxt == SyntaxContext::root() { - TAG_NO_EXPANSION_INFO.encode(self) + TAG_NO_EXPN_DATA.encode(self) } else { - let (expn_id, expn_info) = span_data.ctxt.outer_expn_with_info(); - if let Some(pos) = self.expn_info_shorthands.get(&expn_id).cloned() { - TAG_EXPANSION_INFO_SHORTHAND.encode(self)?; + let (expn_id, expn_data) = span_data.ctxt.outer_expn_with_data(); + if let Some(pos) = self.expn_data_shorthands.get(&expn_id).cloned() { + TAG_EXPN_DATA_SHORTHAND.encode(self)?; pos.encode(self) } else { - TAG_EXPANSION_INFO_INLINE.encode(self)?; + TAG_EXPN_DATA_INLINE.encode(self)?; let pos = AbsoluteBytePos::new(self.position()); - self.expn_info_shorthands.insert(expn_id, pos); - expn_info.encode(self) + self.expn_data_shorthands.insert(expn_id, pos); + expn_data.encode(self) } } } diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs index 240264a9822..eec09842623 100644 --- a/src/librustc_codegen_ssa/back/write.rs +++ b/src/librustc_codegen_ssa/back/write.rs @@ -1775,7 +1775,7 @@ impl SharedEmitterMain { } } Ok(SharedEmitterMessage::InlineAsmError(cookie, msg)) => { - 
sess.span_err(ExpnId::from_u32(cookie).expn_info().call_site, &msg) + sess.span_err(ExpnId::from_u32(cookie).expn_data().call_site, &msg) } Ok(SharedEmitterMessage::AbortIfErrors) => { sess.abort_if_errors(); diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index 1bb05bda69f..90e46771396 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -517,7 +517,7 @@ impl EarlyLintPass for UnusedParens { // trigger in situations that macro authors shouldn't have to care about, e.g., // when a parenthesized token tree matched in one macro expansion is matched as // an expression in another and used as a fn/method argument (Issue #47775) - if e.span.ctxt().outer_expn_info().call_site.from_expansion() { + if e.span.ctxt().outer_expn_data().call_site.from_expansion() { return; } let msg = format!("{} argument", call_kind); diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 58e785ab8c2..2ca8771cda6 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -17,7 +17,7 @@ use syntax::edition::Edition; use syntax::ext::base::{self, Indeterminate, SpecialDerives}; use syntax::ext::base::{MacroKind, SyntaxExtension}; use syntax::ext::expand::{AstFragment, Invocation, InvocationKind}; -use syntax::ext::hygiene::{self, ExpnId, ExpnInfo, ExpnKind}; +use syntax::ext::hygiene::{self, ExpnId, ExpnData, ExpnKind}; use syntax::ext::tt::macro_rules; use syntax::feature_gate::{emit_feature_err, is_builtin_attr_name}; use syntax::feature_gate::GateIssue; @@ -97,7 +97,7 @@ impl<'a> base::Resolver for Resolver<'a> { } fn get_module_scope(&mut self, id: ast::NodeId) -> ExpnId { - let expn_id = ExpnId::fresh(Some(ExpnInfo::default( + let expn_id = ExpnId::fresh(Some(ExpnData::default( ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, self.session.edition() ))); let module = self.module_map[&self.definitions.local_def_id(id)]; @@ -185,8 +185,8 @@ impl<'a> base::Resolver for Resolver<'a> { let (ext, res) = self.smart_resolve_macro_path(path, kind, parent_scope, force)?; let span = invoc.span(); - invoc.expansion_data.id.set_expn_info( - ext.expn_info(parent_scope.expansion, span, fast_print_path(path)) + invoc.expansion_data.id.set_expn_data( + ext.expn_data(parent_scope.expansion, span, fast_print_path(path)) ); if let Res::Def(_, def_id) = res { @@ -302,7 +302,7 @@ impl<'a> Resolver<'a> { // Possibly apply the macro helper hack if kind == Some(MacroKind::Bang) && path.len() == 1 && - path[0].ident.span.ctxt().outer_expn_info().local_inner_macros { + path[0].ident.span.ctxt().outer_expn_data().local_inner_macros { let root = Ident::new(kw::DollarCrate, path[0].ident.span); path.insert(0, Segment::from_ident(root)); } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 734b566b3ad..fb1bf4d7160 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -15,7 +15,7 @@ use crate::tokenstream::{self, TokenStream, TokenTree}; use errors::{DiagnosticBuilder, DiagnosticId}; use smallvec::{smallvec, SmallVec}; use syntax_pos::{FileName, Span, MultiSpan, DUMMY_SP}; -use syntax_pos::hygiene::{ExpnInfo, ExpnKind}; +use syntax_pos::hygiene::{ExpnData, ExpnKind}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::{self, Lrc}; @@ -640,8 +640,8 @@ impl SyntaxExtension { SyntaxExtension::default(SyntaxExtensionKind::NonMacroAttr { mark_used }, edition) } - pub fn expn_info(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnInfo { - ExpnInfo { + pub fn 
expn_data(&self, parent: ExpnId, call_site: Span, descr: Symbol) -> ExpnData { + ExpnData { kind: ExpnKind::Macro(self.macro_kind(), descr), parent, call_site, @@ -708,7 +708,7 @@ pub struct ExpansionData { /// One of these is made during expansion and incrementally updated as we go; /// when a macro expansion occurs, the resulting nodes have the `backtrace() -/// -> expn_info` of their expansion context stored into their span. +/// -> expn_data` of their expansion context stored into their span. pub struct ExtCtxt<'a> { pub parse_sess: &'a parse::ParseSess, pub ecfg: expand::ExpansionConfig<'a>, @@ -757,7 +757,7 @@ impl<'a> ExtCtxt<'a> { pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn call_site(&self) -> Span { - self.current_expansion.id.expn_info().call_site + self.current_expansion.id.expn_data().call_site } pub fn backtrace(&self) -> SyntaxContext { SyntaxContext::root().apply_mark(self.current_expansion.id) @@ -770,13 +770,13 @@ impl<'a> ExtCtxt<'a> { let mut ctxt = self.backtrace(); let mut last_macro = None; loop { - let expn_info = ctxt.outer_expn_info(); + let expn_data = ctxt.outer_expn_data(); // Stop going up the backtrace once include! is encountered - if expn_info.is_root() || expn_info.kind.descr() == sym::include { + if expn_data.is_root() || expn_data.kind.descr() == sym::include { break; } - ctxt = expn_info.call_site.ctxt(); - last_macro = Some(expn_info.call_site); + ctxt = expn_data.call_site.ctxt(); + last_macro = Some(expn_data.call_site); } last_macro } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 4233d5c0a22..e7deadbc9a0 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -5,7 +5,7 @@ use crate::source_map::respan; use crate::config::StripUnconfigured; use crate::ext::base::*; use crate::ext::proc_macro::collect_derives; -use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnInfo, ExpnKind}; +use crate::ext::hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind}; use crate::ext::tt::macro_rules::annotate_err_with_kind; use crate::ext::placeholders::{placeholder, PlaceholderExpander}; use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err}; @@ -475,11 +475,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } if self.cx.current_expansion.depth > self.cx.ecfg.recursion_limit { - let info = self.cx.current_expansion.id.expn_info(); + let expn_data = self.cx.current_expansion.id.expn_data(); let suggested_limit = self.cx.ecfg.recursion_limit * 2; - let mut err = self.cx.struct_span_err(info.call_site, + let mut err = self.cx.struct_span_err(expn_data.call_site, &format!("recursion limit reached while expanding the macro `{}`", - info.kind.descr())); + expn_data.kind.descr())); err.help(&format!( "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", suggested_limit)); @@ -796,20 +796,20 @@ struct InvocationCollector<'a, 'b> { impl<'a, 'b> InvocationCollector<'a, 'b> { fn collect(&mut self, fragment_kind: AstFragmentKind, kind: InvocationKind) -> AstFragment { - // Expansion info for all the collected invocations is set upon their resolution, + // Expansion data for all the collected invocations is set upon their resolution, // with exception of the derive container case which is not resolved and can get - // its expansion info immediately. - let expn_info = match &kind { - InvocationKind::DeriveContainer { item, .. } => Some(ExpnInfo { + // its expansion data immediately. 
+ let expn_data = match &kind { + InvocationKind::DeriveContainer { item, .. } => Some(ExpnData { parent: self.cx.current_expansion.id, - ..ExpnInfo::default( + ..ExpnData::default( ExpnKind::Macro(MacroKind::Attr, sym::derive), item.span(), self.cx.parse_sess.edition, ) }), _ => None, }; - let expn_id = ExpnId::fresh(expn_info); + let expn_id = ExpnId::fresh(expn_data); self.invocations.push(Invocation { kind, fragment_kind, diff --git a/src/libsyntax/ext/proc_macro_server.rs b/src/libsyntax/ext/proc_macro_server.rs index d370431a5da..1619fa69941 100644 --- a/src/libsyntax/ext/proc_macro_server.rs +++ b/src/libsyntax/ext/proc_macro_server.rs @@ -362,7 +362,7 @@ pub(crate) struct Rustc<'a> { impl<'a> Rustc<'a> { pub fn new(cx: &'a ExtCtxt<'_>) -> Self { // No way to determine def location for a proc macro right now, so use call location. - let location = cx.current_expansion.id.expn_info().call_site; + let location = cx.current_expansion.id.expn_data().call_site; let to_span = |transparency| { location.with_ctxt( SyntaxContext::root() diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index a1bcc455eb4..26f78b9c5c7 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -87,7 +87,7 @@ impl ParseSess { included_mod_stack: Lock::new(vec![]), source_map, buffered_lints: Lock::new(vec![]), - edition: ExpnId::root().expn_info().edition, + edition: ExpnId::root().expn_data().edition, ambiguous_block_expr_parse: Lock::new(FxHashMap::default()), param_attr_spans: Lock::new(Vec::new()), let_chains_spans: Lock::new(Vec::new()), diff --git a/src/libsyntax/source_map.rs b/src/libsyntax/source_map.rs index da7eb6add41..940687cb5d4 100644 --- a/src/libsyntax/source_map.rs +++ b/src/libsyntax/source_map.rs @@ -8,7 +8,7 @@ //! information, source code snippets, etc. pub use syntax_pos::*; -pub use syntax_pos::hygiene::{ExpnKind, ExpnInfo}; +pub use syntax_pos::hygiene::{ExpnKind, ExpnData}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::StableHasher; @@ -29,15 +29,15 @@ mod tests; /// Returns the span itself if it doesn't come from a macro expansion, /// otherwise return the call site span up to the `enclosing_sp` by -/// following the `expn_info` chain. +/// following the `expn_data` chain. pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span { - let expn_info1 = sp.ctxt().outer_expn_info(); - let expn_info2 = enclosing_sp.ctxt().outer_expn_info(); - if expn_info1.is_root() || - !expn_info2.is_root() && expn_info1.call_site == expn_info2.call_site { + let expn_data1 = sp.ctxt().outer_expn_data(); + let expn_data2 = enclosing_sp.ctxt().outer_expn_data(); + if expn_data1.is_root() || + !expn_data2.is_root() && expn_data1.call_site == expn_data2.call_site { sp } else { - original_sp(expn_info1.call_site, enclosing_sp) + original_sp(expn_data1.call_site, enclosing_sp) } } diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index 73df625d5ee..d030ea4a56e 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -35,7 +35,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt<'_>, match annitem.node { ItemKind::Struct(_, Generics { ref params, .. }) | ItemKind::Enum(_, Generics { ref params, .. 
}) => { - let container_id = cx.current_expansion.id.expn_info().parent; + let container_id = cx.current_expansion.id.expn_data().parent; if cx.resolver.has_derives(container_id, SpecialDerives::COPY) && !params.iter().any(|param| match param.kind { ast::GenericParamKind::Type { .. } => true, diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 1ef34a68004..54027c600b4 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -13,7 +13,7 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt<'_>, mitem: &MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { - cx.resolver.add_derives(cx.current_expansion.id.expn_info().parent, SpecialDerives::EQ); + cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::EQ); let inline = cx.meta_word(span, sym::inline); let hidden = cx.meta_list_item_word(span, sym::hidden); diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index 76befc98591..91e1e80e4fb 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -13,7 +13,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt<'_>, mitem: &MetaItem, item: &Annotatable, push: &mut dyn FnMut(Annotatable)) { - cx.resolver.add_derives(cx.current_expansion.id.expn_info().parent, SpecialDerives::PARTIAL_EQ); + cx.resolver.add_derives(cx.current_expansion.id.expn_data().parent, SpecialDerives::PARTIAL_EQ); // structures are equal if all fields are equal, and non equal, if // any fields are not equal or if the enum variants are different diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 6b739e27eee..55fb7677038 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -425,7 +425,7 @@ impl<'a> TraitDef<'a> { return; } }; - let container_id = cx.current_expansion.id.expn_info().parent; + let container_id = cx.current_expansion.id.expn_data().parent; let is_always_copy = cx.resolver.has_derives(container_id, SpecialDerives::COPY) && has_no_type_params; diff --git a/src/libsyntax_ext/plugin_macro_defs.rs b/src/libsyntax_ext/plugin_macro_defs.rs index b34a250881a..dbfd8fe98f3 100644 --- a/src/libsyntax_ext/plugin_macro_defs.rs +++ b/src/libsyntax_ext/plugin_macro_defs.rs @@ -11,7 +11,7 @@ use syntax::source_map::respan; use syntax::symbol::sym; use syntax::tokenstream::*; use syntax_pos::{Span, DUMMY_SP}; -use syntax_pos::hygiene::{ExpnInfo, ExpnKind, MacroKind}; +use syntax_pos::hygiene::{ExpnData, ExpnKind, MacroKind}; use std::mem; @@ -43,7 +43,7 @@ pub fn inject( ) { if !named_exts.is_empty() { let mut extra_items = Vec::new(); - let span = DUMMY_SP.fresh_expansion(ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::plugin), DUMMY_SP, edition, [sym::rustc_attrs][..].into(), )); diff --git a/src/libsyntax_ext/proc_macro_harness.rs b/src/libsyntax_ext/proc_macro_harness.rs index 9d8a8c17ba2..e772eaf8349 100644 --- a/src/libsyntax_ext/proc_macro_harness.rs +++ b/src/libsyntax_ext/proc_macro_harness.rs @@ -3,7 +3,7 @@ use std::mem; use smallvec::smallvec; use syntax::ast::{self, Ident}; use syntax::attr; -use syntax::source_map::{ExpnInfo, ExpnKind, respan}; +use syntax::source_map::{ExpnData, ExpnKind, respan}; use syntax::ext::base::{ExtCtxt, MacroKind}; use syntax::ext::expand::{AstFragment, ExpansionConfig}; use 
syntax::ext::proc_macro::is_proc_macro_attr; @@ -327,7 +327,7 @@ fn mk_decls( custom_attrs: &[ProcMacroDef], custom_macros: &[ProcMacroDef], ) -> P { - let span = DUMMY_SP.fresh_expansion(ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::proc_macro), DUMMY_SP, cx.parse_sess.edition, [sym::rustc_attrs, sym::proc_macro_internals][..].into(), )); diff --git a/src/libsyntax_ext/standard_library_imports.rs b/src/libsyntax_ext/standard_library_imports.rs index c0041248652..8ca376341fc 100644 --- a/src/libsyntax_ext/standard_library_imports.rs +++ b/src/libsyntax_ext/standard_library_imports.rs @@ -2,7 +2,7 @@ use syntax::{ast, attr}; use syntax::edition::Edition; use syntax::ext::hygiene::MacroKind; use syntax::ptr::P; -use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned, respan}; +use syntax::source_map::{ExpnData, ExpnKind, dummy_spanned, respan}; use syntax::symbol::{Ident, Symbol, kw, sym}; use syntax_pos::DUMMY_SP; @@ -55,7 +55,7 @@ pub fn inject( // the prelude. let name = names[0]; - let span = DUMMY_SP.fresh_expansion(ExpnInfo::allow_unstable( + let span = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::std_inject), DUMMY_SP, edition, [sym::prelude_import][..].into(), )); diff --git a/src/libsyntax_ext/test_harness.rs b/src/libsyntax_ext/test_harness.rs index 3fb1c1bd022..4a6ea0ebf85 100644 --- a/src/libsyntax_ext/test_harness.rs +++ b/src/libsyntax_ext/test_harness.rs @@ -11,7 +11,7 @@ use syntax::feature_gate::Features; use syntax::mut_visit::{*, ExpectOne}; use syntax::parse::ParseSess; use syntax::ptr::P; -use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned}; +use syntax::source_map::{ExpnData, ExpnKind, dummy_spanned}; use syntax::symbol::{kw, sym, Symbol}; use syntax_pos::{Span, DUMMY_SP}; @@ -268,7 +268,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P { // #![main] // test::test_main_static(&[..tests]); // } - let sp = DUMMY_SP.fresh_expansion(ExpnInfo::allow_unstable( + let sp = DUMMY_SP.fresh_expansion(ExpnData::allow_unstable( ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, cx.ext_cx.parse_sess.edition, [sym::main, sym::test, sym::rustc_attrs][..].into(), )); diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index 4d9496f94fb..87d930f897a 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -13,8 +13,8 @@ // // This explains why `HygieneData`, `SyntaxContext` and `ExpnId` have interfaces // with a certain amount of redundancy in them. For example, -// `SyntaxContext::outer_expn_info` combines `SyntaxContext::outer` and -// `ExpnId::expn_info` so that two `HygieneData` accesses can be performed within +// `SyntaxContext::outer_expn_data` combines `SyntaxContext::outer` and +// `ExpnId::expn_data` so that two `HygieneData` accesses can be performed within // a single `HygieneData::with` call. // // It also explains why many functions appear in `HygieneData` and again in @@ -76,8 +76,8 @@ pub enum Transparency { } impl ExpnId { - pub fn fresh(expn_info: Option) -> Self { - HygieneData::with(|data| data.fresh_expn(expn_info)) + pub fn fresh(expn_data: Option) -> Self { + HygieneData::with(|data| data.fresh_expn(expn_data)) } /// The ID of the theoretical expansion that generates freshly parsed, unexpanded AST. 
@@ -97,16 +97,16 @@ impl ExpnId { } #[inline] - pub fn expn_info(self) -> ExpnInfo { - HygieneData::with(|data| data.expn_info(self).clone()) + pub fn expn_data(self) -> ExpnData { + HygieneData::with(|data| data.expn_data(self).clone()) } #[inline] - pub fn set_expn_info(self, info: ExpnInfo) { + pub fn set_expn_data(self, expn_data: ExpnData) { HygieneData::with(|data| { - let old_info = &mut data.expn_data[self.0 as usize]; - assert!(old_info.is_none(), "expansion info is reset for an expansion ID"); - *old_info = Some(info); + let old_expn_data = &mut data.expn_data[self.0 as usize]; + assert!(old_expn_data.is_none(), "expansion data is reset for an expansion ID"); + *old_expn_data = Some(expn_data); }) } @@ -124,9 +124,9 @@ impl ExpnId { #[inline] pub fn looks_like_proc_macro_derive(self) -> bool { HygieneData::with(|data| { - let expn_info = data.expn_info(self); - if let ExpnKind::Macro(MacroKind::Derive, _) = expn_info.kind { - return expn_info.default_transparency == Transparency::Opaque; + let expn_data = data.expn_data(self); + if let ExpnKind::Macro(MacroKind::Derive, _) = expn_data.kind { + return expn_data.default_transparency == Transparency::Opaque; } false }) @@ -135,10 +135,10 @@ impl ExpnId { #[derive(Debug)] crate struct HygieneData { - /// Each expansion should have an associated expansion info, but sometimes there's a delay - /// between creation of an expansion ID and obtaining its info (e.g. macros are collected + /// Each expansion should have an associated expansion data, but sometimes there's a delay + /// between creation of an expansion ID and obtaining its data (e.g. macros are collected /// first and then resolved later), so we use an `Option` here. - expn_data: Vec>, + expn_data: Vec>, syntax_context_data: Vec, syntax_context_map: FxHashMap<(SyntaxContext, ExpnId, Transparency), SyntaxContext>, } @@ -146,7 +146,7 @@ crate struct HygieneData { impl HygieneData { crate fn new(edition: Edition) -> Self { HygieneData { - expn_data: vec![Some(ExpnInfo::default(ExpnKind::Root, DUMMY_SP, edition))], + expn_data: vec![Some(ExpnData::default(ExpnKind::Root, DUMMY_SP, edition))], syntax_context_data: vec![SyntaxContextData { outer_expn: ExpnId::root(), outer_transparency: Transparency::Opaque, @@ -163,14 +163,14 @@ impl HygieneData { GLOBALS.with(|globals| f(&mut *globals.hygiene_data.borrow_mut())) } - fn fresh_expn(&mut self, expn_info: Option) -> ExpnId { - self.expn_data.push(expn_info); + fn fresh_expn(&mut self, expn_data: Option) -> ExpnId { + self.expn_data.push(expn_data); ExpnId(self.expn_data.len() as u32 - 1) } - fn expn_info(&self, expn_id: ExpnId) -> &ExpnInfo { + fn expn_data(&self, expn_id: ExpnId) -> &ExpnData { self.expn_data[expn_id.0 as usize].as_ref() - .expect("no expansion info for an expansion ID") + .expect("no expansion data for an expansion ID") } fn is_descendant_of(&self, mut expn_id: ExpnId, ancestor: ExpnId) -> bool { @@ -178,7 +178,7 @@ impl HygieneData { if expn_id == ExpnId::root() { return false; } - expn_id = self.expn_info(expn_id).parent; + expn_id = self.expn_data(expn_id).parent; } true } @@ -221,7 +221,7 @@ impl HygieneData { fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span { while span.from_expansion() && span.ctxt() != to { - span = self.expn_info(self.outer_expn(span.ctxt())).call_site; + span = self.expn_data(self.outer_expn(span.ctxt())).call_site; } span } @@ -237,7 +237,7 @@ impl HygieneData { fn apply_mark(&mut self, ctxt: SyntaxContext, expn_id: ExpnId) -> SyntaxContext { assert_ne!(expn_id, 
ExpnId::root()); self.apply_mark_with_transparency( - ctxt, expn_id, self.expn_info(expn_id).default_transparency + ctxt, expn_id, self.expn_data(expn_id).default_transparency ) } @@ -248,7 +248,7 @@ impl HygieneData { return self.apply_mark_internal(ctxt, expn_id, transparency); } - let call_site_ctxt = self.expn_info(expn_id).call_site.ctxt(); + let call_site_ctxt = self.expn_data(expn_id).call_site.ctxt(); let mut call_site_ctxt = if transparency == Transparency::SemiTransparent { self.modern(call_site_ctxt) } else { @@ -540,20 +540,20 @@ impl SyntaxContext { HygieneData::with(|data| data.outer_expn(self)) } - /// `ctxt.outer_expn_info()` is equivalent to but faster than - /// `ctxt.outer_expn().expn_info()`. + /// `ctxt.outer_expn_data()` is equivalent to but faster than + /// `ctxt.outer_expn().expn_data()`. #[inline] - pub fn outer_expn_info(self) -> ExpnInfo { - HygieneData::with(|data| data.expn_info(data.outer_expn(self)).clone()) + pub fn outer_expn_data(self) -> ExpnData { + HygieneData::with(|data| data.expn_data(data.outer_expn(self)).clone()) } - /// `ctxt.outer_expn_with_info()` is equivalent to but faster than - /// `{ let outer = ctxt.outer_expn(); (outer, outer.expn_info()) }`. + /// `ctxt.outer_expn_with_data()` is equivalent to but faster than + /// `{ let outer = ctxt.outer_expn(); (outer, outer.expn_data()) }`. #[inline] - pub fn outer_expn_with_info(self) -> (ExpnId, ExpnInfo) { + pub fn outer_expn_with_data(self) -> (ExpnId, ExpnData) { HygieneData::with(|data| { let outer = data.outer_expn(self); - (outer, data.expn_info(outer).clone()) + (outer, data.expn_data(outer).clone()) }) } @@ -574,9 +574,9 @@ impl Span { /// other compiler-generated code to set per-span properties like allowed unstable features. /// The returned span belongs to the created expansion and has the new properties, /// but its location is inherited from the current span. - pub fn fresh_expansion(self, expn_info: ExpnInfo) -> Span { + pub fn fresh_expansion(self, expn_data: ExpnData) -> Span { HygieneData::with(|data| { - let expn_id = data.fresh_expn(Some(expn_info)); + let expn_id = data.fresh_expn(Some(expn_data)); self.with_ctxt(data.apply_mark(SyntaxContext::root(), expn_id)) }) } @@ -585,7 +585,7 @@ impl Span { /// A subset of properties from both macro definition and macro call available through global data. /// Avoid using this if you have access to the original definition or call structures. #[derive(Clone, Debug, RustcEncodable, RustcDecodable)] -pub struct ExpnInfo { +pub struct ExpnData { // --- The part unique to each expansion. /// The kind of this expansion - macro or compiler desugaring. pub kind: ExpnKind, @@ -598,7 +598,7 @@ pub struct ExpnInfo { /// `foo!()` invoked `bar!()` internally, and there was an /// expression inside `bar!`; the call_site of the expression in /// the expansion would point to the `bar!` invocation; that - /// call_site span would have its own ExpnInfo, with the call_site + /// call_site span would have its own ExpnData, with the call_site /// pointing to the `foo!` invocation. pub call_site: Span, @@ -609,7 +609,7 @@ pub struct ExpnInfo { /// The span of the macro definition (possibly dummy). /// This span serves only informational purpose and is not used for resolution. pub def_site: Span, - /// Transparency used by `apply_mark` for the expansion with this expansion info by default. + /// Transparency used by `apply_mark` for the expansion with this expansion data by default. 
pub default_transparency: Transparency, /// List of #[unstable]/feature-gated features that the macro is allowed to use /// internally without forcing the whole crate to opt-in @@ -625,10 +625,10 @@ pub struct ExpnInfo { pub edition: Edition, } -impl ExpnInfo { - /// Constructs an expansion info with default properties. - pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnInfo { - ExpnInfo { +impl ExpnData { + /// Constructs expansion data with default properties. + pub fn default(kind: ExpnKind, call_site: Span, edition: Edition) -> ExpnData { + ExpnData { kind, parent: ExpnId::root(), call_site, @@ -642,10 +642,10 @@ impl ExpnInfo { } pub fn allow_unstable(kind: ExpnKind, call_site: Span, edition: Edition, - allow_internal_unstable: Lrc<[Symbol]>) -> ExpnInfo { - ExpnInfo { + allow_internal_unstable: Lrc<[Symbol]>) -> ExpnData { + ExpnData { allow_internal_unstable: Some(allow_internal_unstable), - ..ExpnInfo::default(kind, call_site, edition) + ..ExpnData::default(kind, call_site, edition) } } diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index ae538677a3a..aa36fe27d8e 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -21,7 +21,7 @@ use rustc_serialize::{Encodable, Decodable, Encoder, Decoder}; pub mod edition; use edition::Edition; pub mod hygiene; -pub use hygiene::{ExpnId, SyntaxContext, ExpnInfo, ExpnKind, MacroKind, DesugaringKind}; +pub use hygiene::{ExpnId, SyntaxContext, ExpnData, ExpnKind, MacroKind, DesugaringKind}; mod span_encoding; pub use span_encoding::{Span, DUMMY_SP}; @@ -353,20 +353,20 @@ impl Span { /// Returns the source span -- this is either the supplied span, or the span for /// the macro callsite that expanded to it. pub fn source_callsite(self) -> Span { - let expn_info = self.ctxt().outer_expn_info(); - if !expn_info.is_root() { expn_info.call_site.source_callsite() } else { self } + let expn_data = self.ctxt().outer_expn_data(); + if !expn_data.is_root() { expn_data.call_site.source_callsite() } else { self } } /// The `Span` for the tokens in the previous macro expansion from which `self` was generated, /// if any. pub fn parent(self) -> Option { - let expn_info = self.ctxt().outer_expn_info(); - if !expn_info.is_root() { Some(expn_info.call_site) } else { None } + let expn_data = self.ctxt().outer_expn_data(); + if !expn_data.is_root() { Some(expn_data.call_site) } else { None } } /// Edition of the crate from which this span came. pub fn edition(self) -> edition::Edition { - self.ctxt().outer_expn_info().edition + self.ctxt().outer_expn_data().edition } #[inline] @@ -382,22 +382,22 @@ impl Span { /// Returns the source callee. /// /// Returns `None` if the supplied span has no expansion trace, - /// else returns the `ExpnInfo` for the macro definition + /// else returns the `ExpnData` for the macro definition /// corresponding to the source callsite. 
- pub fn source_callee(self) -> Option { - fn source_callee(expn_info: ExpnInfo) -> ExpnInfo { - let next_expn_info = expn_info.call_site.ctxt().outer_expn_info(); - if !next_expn_info.is_root() { source_callee(next_expn_info) } else { expn_info } + pub fn source_callee(self) -> Option { + fn source_callee(expn_data: ExpnData) -> ExpnData { + let next_expn_data = expn_data.call_site.ctxt().outer_expn_data(); + if !next_expn_data.is_root() { source_callee(next_expn_data) } else { expn_data } } - let expn_info = self.ctxt().outer_expn_info(); - if !expn_info.is_root() { Some(source_callee(expn_info)) } else { None } + let expn_data = self.ctxt().outer_expn_data(); + if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None } } /// Checks if a span is "internal" to a macro in which `#[unstable]` /// items can be used (that is, a macro marked with /// `#[allow_internal_unstable]`). pub fn allows_unstable(&self, feature: Symbol) -> bool { - self.ctxt().outer_expn_info().allow_internal_unstable.map_or(false, |features| { + self.ctxt().outer_expn_data().allow_internal_unstable.map_or(false, |features| { features.iter().any(|&f| { f == feature || f == sym::allow_internal_unstable_backcompat_hack }) @@ -406,7 +406,7 @@ impl Span { /// Checks if this span arises from a compiler desugaring of kind `kind`. pub fn is_desugaring(&self, kind: DesugaringKind) -> bool { - match self.ctxt().outer_expn_info().kind { + match self.ctxt().outer_expn_data().kind { ExpnKind::Desugaring(k) => k == kind, _ => false, } @@ -415,7 +415,7 @@ impl Span { /// Returns the compiler desugaring that created this span, or `None` /// if this span is not from a desugaring. pub fn desugaring_kind(&self) -> Option { - match self.ctxt().outer_expn_info().kind { + match self.ctxt().outer_expn_data().kind { ExpnKind::Desugaring(k) => Some(k), _ => None } @@ -425,20 +425,20 @@ impl Span { /// can be used without triggering the `unsafe_code` lint // (that is, a macro marked with `#[allow_internal_unsafe]`). pub fn allows_unsafe(&self) -> bool { - self.ctxt().outer_expn_info().allow_internal_unsafe + self.ctxt().outer_expn_data().allow_internal_unsafe } pub fn macro_backtrace(mut self) -> Vec { let mut prev_span = DUMMY_SP; let mut result = vec![]; loop { - let info = self.ctxt().outer_expn_info(); - if info.is_root() { + let expn_data = self.ctxt().outer_expn_data(); + if expn_data.is_root() { break; } // Don't print recursive invocations. - if !info.call_site.source_equal(&prev_span) { - let (pre, post) = match info.kind { + if !expn_data.call_site.source_equal(&prev_span) { + let (pre, post) = match expn_data.kind { ExpnKind::Root => break, ExpnKind::Desugaring(..) => ("desugaring of ", ""), ExpnKind::Macro(macro_kind, _) => match macro_kind { @@ -448,14 +448,14 @@ impl Span { } }; result.push(MacroBacktrace { - call_site: info.call_site, - macro_decl_name: format!("{}{}{}", pre, info.kind.descr(), post), - def_site_span: info.def_site, + call_site: expn_data.call_site, + macro_decl_name: format!("{}{}{}", pre, expn_data.kind.descr(), post), + def_site_span: expn_data.def_site, }); } prev_span = self; - self = info.call_site; + self = expn_data.call_site; } result } -- cgit 1.4.1-3-g733a5 From 4087fc583e543f2801bf2e8c3b8051b31d26a078 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Wed, 14 Aug 2019 05:44:32 +0200 Subject: Feature gate 'yield ?' pre-expansion. 
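This moves the `generators` feature gate check for `yield` expressions from
post-expansion AST visiting to a pre-expansion check: the parser records the
span of each parsed `yield` in `ParseSess::yield_spans`, and `check_crate`
gates those spans. As exercised by the updated test below, `yield` is now
rejected even in code that is later cfg'd away, unless `#![feature(generators)]`
is enabled:

    #[cfg(FALSE)]
    fn foo() {
        yield;   // ERROR yield syntax is experimental
        yield 0; // ERROR yield syntax is experimental
    }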
--- src/libsyntax/feature_gate.rs | 12 +++++++----- src/libsyntax/parse/mod.rs | 3 +++ src/libsyntax/parse/parser/expr.rs | 3 +++ src/test/ui/feature-gates/feature-gate-generators.rs | 6 ++++++ .../ui/feature-gates/feature-gate-generators.stderr | 20 +++++++++++++++++++- 5 files changed, 38 insertions(+), 6 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 8a56ae13b6f..9d1920dc410 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -2088,11 +2088,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { "type ascription is experimental"); } } - ast::ExprKind::Yield(..) => { - gate_feature_post!(&self, generators, - e.span, - "yield syntax is experimental"); - } ast::ExprKind::TryBlock(_) => { gate_feature_post!(&self, try_blocks, e.span, "`try` expression is experimental"); } @@ -2464,6 +2459,13 @@ pub fn check_crate(krate: &ast::Crate, "async closures are unstable" )); + for_each_in_lock(&sess.yield_spans, |span| gate_feature!( + &ctx, + generators, + *span, + "yield syntax is experimental" + )); + let visitor = &mut PostExpansionVisitor { context: &ctx, builtin_attributes: &*BUILTIN_ATTRIBUTE_MAP, diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 26f78b9c5c7..9088f929372 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -63,6 +63,8 @@ pub struct ParseSess { pub let_chains_spans: Lock>, // Places where `async || ..` exprs were used and should be feature gated. pub async_closure_spans: Lock>, + // Places where `yield e?` exprs were used and should be feature gated. + pub yield_spans: Lock>, pub injected_crate_name: Once, } @@ -92,6 +94,7 @@ impl ParseSess { param_attr_spans: Lock::new(Vec::new()), let_chains_spans: Lock::new(Vec::new()), async_closure_spans: Lock::new(Vec::new()), + yield_spans: Lock::new(Vec::new()), injected_crate_name: Once::new(), } } diff --git a/src/libsyntax/parse/parser/expr.rs b/src/libsyntax/parse/parser/expr.rs index f4b6a926734..ccc6bd15067 100644 --- a/src/libsyntax/parse/parser/expr.rs +++ b/src/libsyntax/parse/parser/expr.rs @@ -997,6 +997,9 @@ impl<'a> Parser<'a> { } else { ex = ExprKind::Yield(None); } + + let span = lo.to(hi); + self.sess.yield_spans.borrow_mut().push(span); } else if self.eat_keyword(kw::Let) { return self.parse_let_expr(attrs); } else if is_span_rust_2018 && self.eat_keyword(kw::Await) { diff --git a/src/test/ui/feature-gates/feature-gate-generators.rs b/src/test/ui/feature-gates/feature-gate-generators.rs index cee930fd785..382d891feed 100644 --- a/src/test/ui/feature-gates/feature-gate-generators.rs +++ b/src/test/ui/feature-gates/feature-gate-generators.rs @@ -2,3 +2,9 @@ fn main() { yield true; //~ ERROR yield syntax is experimental //~^ ERROR yield statement outside of generator literal } + +#[cfg(FALSE)] +fn foo() { + yield; //~ ERROR yield syntax is experimental + yield 0; //~ ERROR yield syntax is experimental +} diff --git a/src/test/ui/feature-gates/feature-gate-generators.stderr b/src/test/ui/feature-gates/feature-gate-generators.stderr index cdb05601254..24b814b410c 100644 --- a/src/test/ui/feature-gates/feature-gate-generators.stderr +++ b/src/test/ui/feature-gates/feature-gate-generators.stderr @@ -7,12 +7,30 @@ LL | yield true; = note: for more information, see https://github.com/rust-lang/rust/issues/43122 = help: add `#![feature(generators)]` to the crate attributes to enable +error[E0658]: yield syntax is experimental + --> $DIR/feature-gate-generators.rs:8:5 + | +LL | 
yield; + | ^^^^^ + | + = note: for more information, see https://github.com/rust-lang/rust/issues/43122 + = help: add `#![feature(generators)]` to the crate attributes to enable + +error[E0658]: yield syntax is experimental + --> $DIR/feature-gate-generators.rs:9:5 + | +LL | yield 0; + | ^^^^^^^ + | + = note: for more information, see https://github.com/rust-lang/rust/issues/43122 + = help: add `#![feature(generators)]` to the crate attributes to enable + error[E0627]: yield statement outside of generator literal --> $DIR/feature-gate-generators.rs:2:5 | LL | yield true; | ^^^^^^^^^^ -error: aborting due to 2 previous errors +error: aborting due to 4 previous errors For more information about this error, try `rustc --explain E0658`. -- cgit 1.4.1-3-g733a5 From 20661f18df695058224427fb84d21a0eeec8c657 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Wed, 14 Aug 2019 06:09:11 +0200 Subject: Simplify pre-expansion gating in general. --- src/libsyntax/feature_gate.rs | 42 ++++++++++-------------------------------- 1 file changed, 10 insertions(+), 32 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 9d1920dc410..1a87a903156 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -30,7 +30,6 @@ use crate::tokenstream::TokenTree; use errors::{Applicability, DiagnosticBuilder, Handler}; use rustc_data_structures::fx::FxHashMap; -use rustc_data_structures::sync::Lock; use rustc_target::spec::abi::Abi; use syntax_pos::{Span, DUMMY_SP, MultiSpan}; use log::debug; @@ -2422,10 +2421,6 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute], features } -fn for_each_in_lock(vec: &Lock>, f: impl Fn(&T)) { - vec.borrow().iter().for_each(f); -} - pub fn check_crate(krate: &ast::Crate, sess: &ParseSess, features: &Features, @@ -2438,33 +2433,16 @@ pub fn check_crate(krate: &ast::Crate, plugin_attributes, }; - for_each_in_lock(&sess.param_attr_spans, |span| gate_feature!( - &ctx, - param_attrs, - *span, - "attributes on function parameters are unstable" - )); - - for_each_in_lock(&sess.let_chains_spans, |span| gate_feature!( - &ctx, - let_chains, - *span, - "`let` expressions in this position are experimental" - )); - - for_each_in_lock(&sess.async_closure_spans, |span| gate_feature!( - &ctx, - async_closure, - *span, - "async closures are unstable" - )); - - for_each_in_lock(&sess.yield_spans, |span| gate_feature!( - &ctx, - generators, - *span, - "yield syntax is experimental" - )); + macro_rules! gate_all { + ($spans:ident, $gate:ident, $msg:literal) => { + for span in &*sess.$spans.borrow() { gate_feature!(&ctx, $gate, *span, $msg); } + } + } + + gate_all!(param_attr_spans, param_attrs, "attributes on function parameters are unstable"); + gate_all!(let_chains_spans, let_chains, "`let` expressions in this position are experimental"); + gate_all!(async_closure_spans, async_closure, "async closures are unstable"); + gate_all!(yield_spans, generators, "yield syntax is experimental"); let visitor = &mut PostExpansionVisitor { context: &ctx, -- cgit 1.4.1-3-g733a5 From d04af194fc24d4577a6e5eeb0b52f4358e17771f Mon Sep 17 00:00:00 2001 From: Matthew Jasper Date: Mon, 12 Aug 2019 22:12:53 +0100 Subject: Remove SyntaxContext from {ast, hir}::{GlobalAsm, InlineAsm} We now store it in the `Span` of the expression or item. 
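The `ctxt: SyntaxContext` fields on `InlineAsm` and `GlobalAsm` were only used
for error reporting. The `asm!`/`global_asm!` expanders now attach that context
to the span of the expression or item they build, and codegen recovers it from
the span it is handed. A rough sketch of the two sides, condensed from the
hunks below (not additional code to apply):

    // libsyntax_ext: put the backtrace context on the node's span
    span: sp.with_ctxt(cx.backtrace()),

    // librustc_codegen_llvm: read it back from the span now passed to
    // codegen_inline_asm, for the inline-asm error-reporting cookie
    let val: &'ll Value = self.const_i32(span.ctxt().outer_expn().as_u32() as i32);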
--- src/librustc/hir/lowering/expr.rs | 1 - src/librustc/hir/lowering/item.rs | 5 +---- src/librustc/hir/mod.rs | 5 ----- src/librustc_codegen_llvm/asm.rs | 7 ++++--- src/librustc_codegen_ssa/mir/statement.rs | 7 ++++++- src/librustc_codegen_ssa/traits/asm.rs | 2 ++ src/libsyntax/ast.rs | 4 +--- src/libsyntax/mut_visit.rs | 2 +- src/libsyntax_ext/asm.rs | 3 +-- src/libsyntax_ext/global_asm.rs | 7 ++----- 10 files changed, 18 insertions(+), 25 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/hir/lowering/expr.rs b/src/librustc/hir/lowering/expr.rs index 4ba61e9d4fd..ff0c44a2387 100644 --- a/src/librustc/hir/lowering/expr.rs +++ b/src/librustc/hir/lowering/expr.rs @@ -984,7 +984,6 @@ impl LoweringContext<'_> { volatile: asm.volatile, alignstack: asm.alignstack, dialect: asm.dialect, - ctxt: asm.ctxt, }; let outputs = asm.outputs diff --git a/src/librustc/hir/lowering/item.rs b/src/librustc/hir/lowering/item.rs index 51a0c4184f9..4f9a9ed5673 100644 --- a/src/librustc/hir/lowering/item.rs +++ b/src/librustc/hir/lowering/item.rs @@ -750,10 +750,7 @@ impl LoweringContext<'_> { } fn lower_global_asm(&mut self, ga: &GlobalAsm) -> P { - P(hir::GlobalAsm { - asm: ga.asm, - ctxt: ga.ctxt, - }) + P(hir::GlobalAsm { asm: ga.asm }) } fn lower_variant(&mut self, v: &Variant) -> hir::Variant { diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 57fd0be77ec..e5ada1fb9ae 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -23,7 +23,6 @@ use rustc_target::spec::abi::Abi; use syntax::ast::{self, CrateSugar, Ident, Name, NodeId, AsmDialect}; use syntax::ast::{Attribute, Label, LitKind, StrStyle, FloatTy, IntTy, UintTy}; use syntax::attr::{InlineAttr, OptimizeAttr}; -use syntax::ext::hygiene::SyntaxContext; use syntax::symbol::{Symbol, kw}; use syntax::tokenstream::TokenStream; use syntax::util::parser::ExprPrecedence; @@ -2004,8 +2003,6 @@ pub struct InlineAsm { pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, - #[stable_hasher(ignore)] // This is used for error reporting - pub ctxt: SyntaxContext, } /// Represents an argument in a function header. 
@@ -2184,8 +2181,6 @@ pub struct ForeignMod { #[derive(RustcEncodable, RustcDecodable, Debug, HashStable)] pub struct GlobalAsm { pub asm: Symbol, - #[stable_hasher(ignore)] // This is used for error reporting - pub ctxt: SyntaxContext, } #[derive(RustcEncodable, RustcDecodable, Debug, HashStable)] diff --git a/src/librustc_codegen_llvm/asm.rs b/src/librustc_codegen_llvm/asm.rs index 9763d523a2a..b68ee2cb44d 100644 --- a/src/librustc_codegen_llvm/asm.rs +++ b/src/librustc_codegen_llvm/asm.rs @@ -6,9 +6,9 @@ use crate::value::Value; use rustc::hir; use rustc_codegen_ssa::traits::*; - use rustc_codegen_ssa::mir::place::PlaceRef; use rustc_codegen_ssa::mir::operand::OperandValue; +use syntax_pos::Span; use std::ffi::{CStr, CString}; use libc::{c_uint, c_char}; @@ -19,7 +19,8 @@ impl AsmBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { &mut self, ia: &hir::InlineAsm, outputs: Vec>, - mut inputs: Vec<&'ll Value> + mut inputs: Vec<&'ll Value>, + span: Span, ) -> bool { let mut ext_constraints = vec![]; let mut output_types = vec![]; @@ -102,7 +103,7 @@ impl AsmBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { let kind = llvm::LLVMGetMDKindIDInContext(self.llcx, key.as_ptr() as *const c_char, key.len() as c_uint); - let val: &'ll Value = self.const_i32(ia.ctxt.outer_expn().as_u32() as i32); + let val: &'ll Value = self.const_i32(span.ctxt().outer_expn().as_u32() as i32); llvm::LLVMSetMetadata(r, kind, llvm::LLVMMDNodeInContext(self.llcx, &val, 1)); diff --git a/src/librustc_codegen_ssa/mir/statement.rs b/src/librustc_codegen_ssa/mir/statement.rs index 3717be4b417..3617f3afaae 100644 --- a/src/librustc_codegen_ssa/mir/statement.rs +++ b/src/librustc_codegen_ssa/mir/statement.rs @@ -89,7 +89,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { }); if input_vals.len() == asm.inputs.len() { - let res = bx.codegen_inline_asm(&asm.asm, outputs, input_vals); + let res = bx.codegen_inline_asm( + &asm.asm, + outputs, + input_vals, + statement.source_info.span, + ); if !res { span_err!(bx.sess(), statement.source_info.span, E0668, "malformed inline assembly"); diff --git a/src/librustc_codegen_ssa/traits/asm.rs b/src/librustc_codegen_ssa/traits/asm.rs index fd3c868bbc5..c9e1ed86e97 100644 --- a/src/librustc_codegen_ssa/traits/asm.rs +++ b/src/librustc_codegen_ssa/traits/asm.rs @@ -1,6 +1,7 @@ use super::BackendTypes; use crate::mir::place::PlaceRef; use rustc::hir::{GlobalAsm, InlineAsm}; +use syntax_pos::Span; pub trait AsmBuilderMethods<'tcx>: BackendTypes { /// Take an inline assembly expression and splat it out via LLVM @@ -9,6 +10,7 @@ pub trait AsmBuilderMethods<'tcx>: BackendTypes { ia: &InlineAsm, outputs: Vec>, inputs: Vec, + span: Span, ) -> bool; } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 3ae37f734b7..9091607629e 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -5,7 +5,7 @@ pub use UnsafeSource::*; pub use crate::symbol::{Ident, Symbol as Name}; pub use crate::util::parser::ExprPrecedence; -use crate::ext::hygiene::{ExpnId, SyntaxContext}; +use crate::ext::hygiene::ExpnId; use crate::parse::token::{self, DelimToken}; use crate::print::pprust; use crate::ptr::P; @@ -1782,7 +1782,6 @@ pub struct InlineAsm { pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, - pub ctxt: SyntaxContext, } /// An argument in a function header. 
@@ -2030,7 +2029,6 @@ pub struct ForeignMod { #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)] pub struct GlobalAsm { pub asm: Symbol, - pub ctxt: SyntaxContext, } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index acafe327640..18d4a646355 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -1182,7 +1182,7 @@ pub fn noop_visit_expr(Expr { node, id, span, attrs }: &mut Expr, } ExprKind::InlineAsm(asm) => { let InlineAsm { asm: _, asm_str_style: _, outputs, inputs, clobbers: _, volatile: _, - alignstack: _, dialect: _, ctxt: _ } = asm.deref_mut(); + alignstack: _, dialect: _ } = asm.deref_mut(); for out in outputs { let InlineAsmOutput { constraint: _, expr, is_rw: _, is_indirect: _ } = out; vis.visit_expr(expr); diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 950166f9260..644a44f1989 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -63,7 +63,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>, MacEager::expr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::InlineAsm(P(inline_asm)), - span: sp, + span: sp.with_ctxt(cx.backtrace()), attrs: ThinVec::new(), })) } @@ -277,6 +277,5 @@ fn parse_inline_asm<'a>( volatile, alignstack, dialect, - ctxt: cx.backtrace(), })) } diff --git a/src/libsyntax_ext/global_asm.rs b/src/libsyntax_ext/global_asm.rs index 112192fac5d..73ebeaec454 100644 --- a/src/libsyntax_ext/global_asm.rs +++ b/src/libsyntax_ext/global_asm.rs @@ -30,7 +30,7 @@ pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>, id: ast::DUMMY_NODE_ID, node: ast::ItemKind::GlobalAsm(P(global_asm)), vis: respan(sp.shrink_to_lo(), ast::VisibilityKind::Inherited), - span: sp, + span: sp.with_ctxt(cx.backtrace()), tokens: None, })]) } @@ -61,8 +61,5 @@ fn parse_global_asm<'a>( None => return Ok(None), }; - Ok(Some(ast::GlobalAsm { - asm, - ctxt: cx.backtrace(), - })) + Ok(Some(ast::GlobalAsm { asm })) } -- cgit 1.4.1-3-g733a5 From 1713ac4bf5c992f40d667c929c1e1ce9c3a51204 Mon Sep 17 00:00:00 2001 From: varkor Date: Fri, 19 Oct 2018 15:40:07 +0100 Subject: Initial implementation of or patterns --- src/librustc/cfg/construct.rs | 5 ++++ src/librustc/hir/intravisit.rs | 1 + src/librustc/hir/lowering.rs | 3 ++ src/librustc/hir/mod.rs | 4 +++ src/librustc/hir/print.rs | 6 +++- src/librustc/middle/mem_categorization.rs | 6 ++++ src/librustc_mir/build/matches/mod.rs | 6 ++++ src/librustc_mir/build/matches/simplify.rs | 4 +++ src/librustc_mir/build/matches/test.rs | 2 ++ src/librustc_mir/hair/pattern/_match.rs | 10 +++++-- src/librustc_mir/hair/pattern/mod.rs | 47 +++++++++++++++++++++++------- src/librustc_typeck/check/_match.rs | 8 +++++ src/librustdoc/clean/mod.rs | 3 ++ src/libsyntax/ast.rs | 10 +++++-- src/libsyntax/mut_visit.rs | 5 ++-- src/libsyntax/print/pprust.rs | 33 +++++++++++++++++++-- src/libsyntax/visit.rs | 7 ++--- 17 files changed, 134 insertions(+), 26 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs index 7ada56cfa76..0dad2dda837 100644 --- a/src/librustc/cfg/construct.rs +++ b/src/librustc/cfg/construct.rs @@ -140,6 +140,11 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.add_ast_node(pat.hir_id.local_id, &[pats_exit]) } + PatKind::Or(ref pats) => { + let branches: Vec<_> = pats.iter().map(|p| self.pat(p, pred)).collect(); + self.add_ast_node(pat.hir_id.local_id, &branches) + } + PatKind::Slice(ref pre, ref vec, ref post) => { let pre_exit = 
self.pats_all(pre.iter(), pred); let vec_exit = self.pats_all(vec.iter(), pre_exit); diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index 99fe9f1682f..2c6373bdfa4 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -709,6 +709,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { visitor.visit_pat(&field.pat) } } + PatKind::Or(ref pats) => walk_list!(visitor, visit_pat, pats), PatKind::Tuple(ref tuple_elements, _) => { walk_list!(visitor, visit_pat, tuple_elements); } diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 0f6e834ca26..d2ea485b5db 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -2669,6 +2669,9 @@ impl<'a> LoweringContext<'a> { let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct"); hir::PatKind::TupleStruct(qpath, pats, ddpos) } + PatKind::Or(ref pats) => { + hir::PatKind::Or(pats.iter().map(|x| self.lower_pat(x)).collect()) + } PatKind::Path(ref qself, ref path) => { let qpath = self.lower_qpath( p.id, diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 57fd0be77ec..2ae08568b7f 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -882,6 +882,7 @@ impl Pat { PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => { s.iter().all(|p| p.walk_(it)) } + PatKind::Or(ref pats) => pats.iter().all(|p| p.walk_(it)), PatKind::Box(ref s) | PatKind::Ref(ref s, _) => { s.walk_(it) } @@ -976,6 +977,9 @@ pub enum PatKind { /// `0 <= position <= subpats.len()` TupleStruct(QPath, HirVec>, Option), + /// An or-pattern `A | B | C`. + Or(Vec>), + /// A path pattern for an unit struct/variant or a (maybe-associated) constant. Path(QPath), diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 2fd683ed83c..157b7c07a9b 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -4,7 +4,7 @@ use syntax::source_map::{SourceMap, Spanned}; use syntax::parse::ParseSess; use syntax::print::pp::{self, Breaks}; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; -use syntax::print::pprust::{self, Comments, PrintState}; +use syntax::print::pprust::{self, Comments, PrintState, SeparatorSpacing}; use syntax::symbol::kw; use syntax::util::parser::{self, AssocOp, Fixity}; use syntax_pos::{self, BytePos, FileName}; @@ -1687,6 +1687,10 @@ impl<'a> State<'a> { self.s.space(); self.s.word("}"); } + PatKind::Or(ref pats) => { + let spacing = SeparatorSpacing::Both; + self.strsep("|", spacing, Inconsistent, &pats[..], |s, p| s.print_pat(&p))?; + } PatKind::Tuple(ref elts, ddpos) => { self.popen(); if let Some(ddpos) = ddpos { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index a55803e255b..73ca981bbe8 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -1290,6 +1290,12 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> { } } + PatKind::Or(ref pats) => { + for pat in pats { + self.cat_pattern_(cmt.clone(), &pat, op)?; + } + } + PatKind::Binding(.., Some(ref subpat)) => { self.cat_pattern_(cmt, &subpat, op)?; } diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index d72b0addae9..0dec7ef4f00 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -657,6 +657,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.visit_bindings(&subpattern.pattern, subpattern_user_ty, f); } } + PatternKind::Or { ref pats } => { + 
// FIXME(#47184): extract or handle `pattern_user_ty` somehow + for pat in pats { + self.visit_bindings(&pat, &pattern_user_ty.clone(), f); + } + } } } } diff --git a/src/librustc_mir/build/matches/simplify.rs b/src/librustc_mir/build/matches/simplify.rs index 3473155a3ea..8d049b53988 100644 --- a/src/librustc_mir/build/matches/simplify.rs +++ b/src/librustc_mir/build/matches/simplify.rs @@ -195,6 +195,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { candidate.match_pairs.push(MatchPair::new(place, subpattern)); Ok(()) } + + PatternKind::Or { .. } => { + Err(match_pair) + } } } } diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs index 65e92d422b0..ec85daccd47 100644 --- a/src/librustc_mir/build/matches/test.rs +++ b/src/librustc_mir/build/matches/test.rs @@ -87,6 +87,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { PatternKind::AscribeUserType { .. } | PatternKind::Array { .. } | PatternKind::Wild | + PatternKind::Or { .. } | PatternKind::Binding { .. } | PatternKind::Leaf { .. } | PatternKind::Deref { .. } => { @@ -130,6 +131,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { PatternKind::Slice { .. } | PatternKind::Array { .. } | PatternKind::Wild | + PatternKind::Or { .. } | PatternKind::Binding { .. } | PatternKind::AscribeUserType { .. } | PatternKind::Leaf { .. } | diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs index 1833ee30624..ae59244d37f 100644 --- a/src/librustc_mir/hair/pattern/_match.rs +++ b/src/librustc_mir/hair/pattern/_match.rs @@ -75,9 +75,6 @@ /// D((r_1, p_(i,2), .., p_(i,n))) /// D((r_2, p_(i,2), .., p_(i,n))) /// -/// Note that the OR-patterns are not always used directly in Rust, but are used to derive -/// the exhaustive integer matching rules, so they're written here for posterity. -/// /// The algorithm for computing `U` /// ------------------------------- /// The algorithm is inductive (on the number of columns: i.e., components of tuple patterns). @@ -1359,6 +1356,9 @@ fn pat_constructors<'tcx>(cx: &mut MatchCheckCtxt<'_, 'tcx>, Some(vec![Slice(pat_len)]) } } + PatternKind::Or { .. } => { + bug!("support for or-patterns has not been fully implemented yet."); + } } } @@ -1884,6 +1884,10 @@ fn specialize<'p, 'a: 'p, 'tcx>( "unexpected ctor {:?} for slice pat", constructor) } } + + PatternKind::Or { .. } => { + bug!("support for or-patterns has not been fully implemented yet."); + } }; debug!("specialize({:#?}, {:#?}) = {:#?}", r[0], wild_patterns, head); diff --git a/src/librustc_mir/hair/pattern/mod.rs b/src/librustc_mir/hair/pattern/mod.rs index bebb0719af8..d2a5793e703 100644 --- a/src/librustc_mir/hair/pattern/mod.rs +++ b/src/librustc_mir/hair/pattern/mod.rs @@ -175,6 +175,11 @@ pub enum PatternKind<'tcx> { slice: Option>, suffix: Vec>, }, + + /// or-pattern + Or { + pats: Vec>, + }, } #[derive(Copy, Clone, Debug, PartialEq)] @@ -186,6 +191,18 @@ pub struct PatternRange<'tcx> { impl<'tcx> fmt::Display for Pattern<'tcx> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // Printing lists is a chore. + let mut first = true; + let mut start_or_continue = |s| { + if first { + first = false; + "" + } else { + s + } + }; + let mut start_or_comma = || start_or_continue(", "); + match *self.kind { PatternKind::Wild => write!(f, "_"), PatternKind::AscribeUserType { ref subpattern, .. 
} => @@ -224,9 +241,6 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { } }; - let mut first = true; - let mut start_or_continue = || if first { first = false; "" } else { ", " }; - if let Some(variant) = variant { write!(f, "{}", variant.ident)?; @@ -241,12 +255,12 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { continue; } let name = variant.fields[p.field.index()].ident; - write!(f, "{}{}: {}", start_or_continue(), name, p.pattern)?; + write!(f, "{}{}: {}", start_or_comma(), name, p.pattern)?; printed += 1; } if printed < variant.fields.len() { - write!(f, "{}..", start_or_continue())?; + write!(f, "{}..", start_or_comma())?; } return write!(f, " }}"); @@ -257,7 +271,7 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { if num_fields != 0 || variant.is_none() { write!(f, "(")?; for i in 0..num_fields { - write!(f, "{}", start_or_continue())?; + write!(f, "{}", start_or_comma())?; // Common case: the field is where we expect it. if let Some(p) = subpatterns.get(i) { @@ -305,14 +319,12 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { } PatternKind::Slice { ref prefix, ref slice, ref suffix } | PatternKind::Array { ref prefix, ref slice, ref suffix } => { - let mut first = true; - let mut start_or_continue = || if first { first = false; "" } else { ", " }; write!(f, "[")?; for p in prefix { - write!(f, "{}{}", start_or_continue(), p)?; + write!(f, "{}{}", start_or_comma(), p)?; } if let Some(ref slice) = *slice { - write!(f, "{}", start_or_continue())?; + write!(f, "{}", start_or_comma())?; match *slice.kind { PatternKind::Wild => {} _ => write!(f, "{}", slice)? @@ -320,10 +332,16 @@ impl<'tcx> fmt::Display for Pattern<'tcx> { write!(f, "..")?; } for p in suffix { - write!(f, "{}{}", start_or_continue(), p)?; + write!(f, "{}{}", start_or_comma(), p)?; } write!(f, "]") } + PatternKind::Or { ref pats } => { + for pat in pats { + write!(f, "{}{}", start_or_continue(" | "), pat)?; + } + Ok(()) + } } } } @@ -655,6 +673,12 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> { self.lower_variant_or_leaf(res, pat.hir_id, pat.span, ty, subpatterns) } + + PatKind::Or(ref pats) => { + PatternKind::Or { + pats: pats.iter().map(|p| self.lower_pattern(p)).collect(), + } + } }; Pattern { @@ -1436,6 +1460,7 @@ impl<'tcx> PatternFoldable<'tcx> for PatternKind<'tcx> { slice: slice.fold_with(folder), suffix: suffix.fold_with(folder) }, + PatternKind::Or { ref pats } => PatternKind::Or { pats: pats.fold_with(folder) }, } } } diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 99ae777bb63..2e22fb76675 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -53,6 +53,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let is_non_ref_pat = match pat.node { PatKind::Struct(..) | PatKind::TupleStruct(..) | + PatKind::Or(_) | PatKind::Tuple(..) | PatKind::Box(_) | PatKind::Range(..) 
| @@ -309,6 +310,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { PatKind::Struct(ref qpath, ref fields, etc) => { self.check_pat_struct(pat, qpath, fields, etc, expected, def_bm, discrim_span) } + PatKind::Or(ref pats) => { + let expected_ty = self.structurally_resolved_type(pat.span, expected); + for pat in pats { + self.check_pat_walk(pat, expected, def_bm, false); + } + expected_ty + } PatKind::Tuple(ref elements, ddpos) => { let mut expected_len = elements.len(); if ddpos.is_some() { diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index fede9e93010..023d22861de 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -4107,6 +4107,9 @@ fn name_from_pat(p: &hir::Pat) -> String { if etc { ", .." } else { "" } ) } + PatKind::Or(ref pats) => { + pats.iter().map(|p| name_from_pat(&**p)).collect::>().join(" | ") + } PatKind::Tuple(ref elts, _) => format!("({})", elts.iter().map(|p| name_from_pat(&**p)) .collect::>().join(", ")), PatKind::Box(ref p) => name_from_pat(&**p), diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 3ae37f734b7..0136c4ff5f9 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -572,9 +572,10 @@ impl Pat { match &self.node { PatKind::Ident(_, _, Some(p)) => p.walk(it), PatKind::Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk(it)), - PatKind::TupleStruct(_, s) | PatKind::Tuple(s) | PatKind::Slice(s) => { - s.iter().all(|p| p.walk(it)) - } + PatKind::TupleStruct(_, s) + | PatKind::Tuple(s) + | PatKind::Slice(s) + | PatKind::Or(s) => s.iter().all(|p| p.walk(it)), PatKind::Box(s) | PatKind::Ref(s, _) | PatKind::Paren(s) => s.walk(it), PatKind::Wild | PatKind::Rest @@ -648,6 +649,9 @@ pub enum PatKind { /// A tuple struct/variant pattern (`Variant(x, y, .., z)`). TupleStruct(Path, Vec>), + /// An or-pattern `A | B | C`. + Or(Vec>), + /// A possibly qualified path pattern. /// Unqualified path patterns `A::B::C` can legally refer to variants, structs, constants /// or associated constants. 
Qualified path patterns `::B::C`/`::B::C` can diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index acafe327640..b67b4619d7f 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -1050,7 +1050,6 @@ pub fn noop_visit_pat(pat: &mut P, vis: &mut T) { vis.visit_span(span); }; } - PatKind::Tuple(elems) => visit_vec(elems, |elem| vis.visit_pat(elem)), PatKind::Box(inner) => vis.visit_pat(inner), PatKind::Ref(inner, _mutbl) => vis.visit_pat(inner), PatKind::Range(e1, e2, Spanned { span: _, node: _ }) => { @@ -1058,7 +1057,9 @@ pub fn noop_visit_pat(pat: &mut P, vis: &mut T) { vis.visit_expr(e2); vis.visit_span(span); } - PatKind::Slice(elems) => visit_vec(elems, |elem| vis.visit_pat(elem)), + PatKind::Tuple(elems) + | PatKind::Slice(elems) + | PatKind::Or(elems) => visit_vec(elems, |elem| vis.visit_pat(elem)), PatKind::Paren(inner) => vis.visit_pat(inner), PatKind::Mac(mac) => vis.visit_mac(mac), } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 8a7009828bc..8dcb7ecf881 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -431,23 +431,48 @@ impl std::ops::DerefMut for State<'_> { } } +pub enum SeparatorSpacing { + After, + Both, +} + pub trait PrintState<'a>: std::ops::Deref + std::ops::DerefMut { fn comments(&mut self) -> &mut Option>; fn print_ident(&mut self, ident: ast::Ident); fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool); - fn commasep(&mut self, b: Breaks, elts: &[T], mut op: F) + fn strsep( + &mut self, + sep: &'static str, + spacing: SeparatorSpacing, + b: Breaks, + elts: &[T], + mut op: F + ) -> io::Result<()> where F: FnMut(&mut Self, &T), { self.rbox(0, b); let mut first = true; for elt in elts { - if first { first = false; } else { self.word_space(","); } + if first { + first = false; + } else { + if let SeparatorSpacing::Both = spacing { + self.writer().space(); + } + self.word_space(sep); + } op(self, elt); } self.end(); } + fn commasep(&mut self, b: Breaks, elts: &[T], mut op: F) + where F: FnMut(&mut Self, &T), + { + self.strsep(",", SeparatorSpacing::After, b, elts, op) + } + fn maybe_print_comment(&mut self, pos: BytePos) { while let Some(ref cmnt) = self.next_comment() { if cmnt.pos < pos { @@ -2353,6 +2378,10 @@ impl<'a> State<'a> { self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p)); self.pclose(); } + PatKind::Or(ref pats) => { + let spacing = SeparatorSpacing::Both; + self.strsep("|", spacing, Inconsistent, &pats[..], |s, p| s.print_pat(p))?; + } PatKind::Path(None, ref path) => { self.print_path(path, true, 0); } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 6648347d4ae..ce679a5db63 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -447,9 +447,6 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) { visitor.visit_pat(&field.pat) } } - PatKind::Tuple(ref elems) => { - walk_list!(visitor, visit_pat, elems); - } PatKind::Box(ref subpattern) | PatKind::Ref(ref subpattern, _) | PatKind::Paren(ref subpattern) => { @@ -465,7 +462,9 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) { visitor.visit_expr(upper_bound); } PatKind::Wild | PatKind::Rest => {}, - PatKind::Slice(ref elems) => { + PatKind::Tuple(ref elems) => { + | PatKind::Slice(ref elems) + | PatKind::Or(ref elems) => { walk_list!(visitor, visit_pat, elems); } PatKind::Mac(ref mac) => visitor.visit_mac(mac), -- cgit 1.4.1-3-g733a5 From 1870537f2701e5aa47080a879b63a4d6b391553b Mon Sep 
17 00:00:00 2001 From: Dan Robertson Date: Sun, 14 Jul 2019 01:05:52 +0000 Subject: initial implementation of or-pattern parsing Initial implementation of parsing or-patterns e.g., `Some(Foo | Bar)`. This is a partial implementation of RFC 2535. --- .../src/language-features/or-patterns.md | 36 +++++++++++++++++ src/librustc/hir/mod.rs | 3 +- src/librustc/hir/print.rs | 5 +-- src/librustc_mir/build/matches/mod.rs | 3 +- src/librustc_mir/hair/pattern/_match.rs | 3 ++ src/librustc_mir/hair/pattern/mod.rs | 3 +- src/librustc_typeck/check/_match.rs | 2 +- src/libsyntax/ast.rs | 1 + src/libsyntax/feature_gate.rs | 5 +++ src/libsyntax/parse/mod.rs | 3 ++ src/libsyntax/parse/parser/pat.rs | 41 +++++++++++++++++-- src/libsyntax/print/pprust.rs | 47 ++++++---------------- src/libsyntax/visit.rs | 2 +- src/libsyntax_pos/symbol.rs | 1 + .../ui/feature-gate/feature-gate-or_patterns.rs | 9 +++++ .../feature-gate/feature-gate-or_patterns.stderr | 12 ++++++ src/test/ui/parser/pat-lt-bracket-6.rs | 5 ++- src/test/ui/parser/pat-lt-bracket-6.stderr | 6 +-- src/test/ui/parser/pat-lt-bracket-7.rs | 3 +- src/test/ui/parser/pat-lt-bracket-7.stderr | 6 +-- .../parser/recover-for-loop-parens-around-head.rs | 2 +- .../recover-for-loop-parens-around-head.stderr | 4 +- 22 files changed, 142 insertions(+), 60 deletions(-) create mode 100644 src/doc/unstable-book/src/language-features/or-patterns.md create mode 100644 src/test/ui/feature-gate/feature-gate-or_patterns.rs create mode 100644 src/test/ui/feature-gate/feature-gate-or_patterns.stderr (limited to 'src/libsyntax') diff --git a/src/doc/unstable-book/src/language-features/or-patterns.md b/src/doc/unstable-book/src/language-features/or-patterns.md new file mode 100644 index 00000000000..8ebacb44d37 --- /dev/null +++ b/src/doc/unstable-book/src/language-features/or-patterns.md @@ -0,0 +1,36 @@ +# `or_patterns` + +The tracking issue for this feature is: [#54883] + +[#54883]: https://github.com/rust-lang/rust/issues/54883 + +------------------------ + +The `or_patterns` language feature allows `|` to be arbitrarily nested within +a pattern, for example, `Some(A(0) | B(1 | 2))` becomes a valid pattern. + +## Examples + +```rust,ignore +#![feature(or_patterns)] + +pub enum Foo { + Bar, + Baz, + Quux, +} + +pub fn example(maybe_foo: Option<Foo>) { + match maybe_foo { + Some(Foo::Bar | Foo::Baz) => { + println!("The value contained `Bar` or `Baz`"); + } + Some(_) => { + println!("The value did not contain `Bar` or `Baz`"); + } + None => { + println!("The value was `None`"); + } + } +} +``` diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 2ae08568b7f..5b15cf9a6c9 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -978,7 +978,8 @@ pub enum PatKind { TupleStruct(QPath, HirVec<P<Pat>>, Option<usize>), /// An or-pattern `A | B | C`. - Or(Vec<P<Pat>>), + /// Invariant: `pats.len() >= 2`. + Or(HirVec<P<Pat>>), /// A path pattern for an unit struct/variant or a (maybe-associated) constant.
Path(QPath), diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 157b7c07a9b..632a13f9183 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -4,7 +4,7 @@ use syntax::source_map::{SourceMap, Spanned}; use syntax::parse::ParseSess; use syntax::print::pp::{self, Breaks}; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; -use syntax::print::pprust::{self, Comments, PrintState, SeparatorSpacing}; +use syntax::print::pprust::{self, Comments, PrintState}; use syntax::symbol::kw; use syntax::util::parser::{self, AssocOp, Fixity}; use syntax_pos::{self, BytePos, FileName}; @@ -1688,8 +1688,7 @@ impl<'a> State<'a> { self.s.word("}"); } PatKind::Or(ref pats) => { - let spacing = SeparatorSpacing::Both; - self.strsep("|", spacing, Inconsistent, &pats[..], |s, p| s.print_pat(&p))?; + self.strsep("|", true, Inconsistent, &pats[..], |s, p| s.print_pat(&p)); } PatKind::Tuple(ref elts, ddpos) => { self.popen(); diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index 0dec7ef4f00..94323b15b69 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -658,9 +658,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } PatternKind::Or { ref pats } => { - // FIXME(#47184): extract or handle `pattern_user_ty` somehow for pat in pats { - self.visit_bindings(&pat, &pattern_user_ty.clone(), f); + self.visit_bindings(&pat, pattern_user_ty.clone(), f); } } } diff --git a/src/librustc_mir/hair/pattern/_match.rs b/src/librustc_mir/hair/pattern/_match.rs index ae59244d37f..222750e602d 100644 --- a/src/librustc_mir/hair/pattern/_match.rs +++ b/src/librustc_mir/hair/pattern/_match.rs @@ -75,6 +75,9 @@ /// D((r_1, p_(i,2), .., p_(i,n))) /// D((r_2, p_(i,2), .., p_(i,n))) /// +/// Note that the OR-patterns are not always used directly in Rust, but are used to derive +/// the exhaustive integer matching rules, so they're written here for posterity. +/// /// The algorithm for computing `U` /// ------------------------------- /// The algorithm is inductive (on the number of columns: i.e., components of tuple patterns). diff --git a/src/librustc_mir/hair/pattern/mod.rs b/src/librustc_mir/hair/pattern/mod.rs index d2a5793e703..6caccfddfa4 100644 --- a/src/librustc_mir/hair/pattern/mod.rs +++ b/src/librustc_mir/hair/pattern/mod.rs @@ -176,7 +176,8 @@ pub enum PatternKind<'tcx> { suffix: Vec>, }, - /// or-pattern + /// An or-pattern, e.g. `p | q`. + /// Invariant: `pats.len() >= 2`. Or { pats: Vec>, }, diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 2e22fb76675..fc25eb44cbd 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -313,7 +313,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { PatKind::Or(ref pats) => { let expected_ty = self.structurally_resolved_type(pat.span, expected); for pat in pats { - self.check_pat_walk(pat, expected, def_bm, false); + self.check_pat_walk(pat, expected, def_bm, discrim_span); } expected_ty } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 0136c4ff5f9..3d15782df34 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -650,6 +650,7 @@ pub enum PatKind { TupleStruct(Path, Vec>), /// An or-pattern `A | B | C`. + /// Invariant: `pats.len() >= 2`. Or(Vec>), /// A possibly qualified path pattern. 
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 1a87a903156..bbc3ae28225 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -559,6 +559,9 @@ declare_features! ( // Allows `impl Trait` to be used inside type aliases (RFC 2515). (active, type_alias_impl_trait, "1.38.0", Some(63063), None), + // Allows the use of or-patterns, e.g. `0 | 1`. + (active, or_patterns, "1.38.0", Some(54883), None), + // ------------------------------------------------------------------------- // feature-group-end: actual feature gates // ------------------------------------------------------------------------- @@ -571,6 +574,7 @@ pub const INCOMPLETE_FEATURES: &[Symbol] = &[ sym::impl_trait_in_bindings, sym::generic_associated_types, sym::const_generics, + sym::or_patterns, sym::let_chains, ]; @@ -2443,6 +2447,7 @@ pub fn check_crate(krate: &ast::Crate, gate_all!(let_chains_spans, let_chains, "`let` expressions in this position are experimental"); gate_all!(async_closure_spans, async_closure, "async closures are unstable"); gate_all!(yield_spans, generators, "yield syntax is experimental"); + gate_all!(or_pattern_spans, or_patterns, "or-patterns syntax is experimental"); let visitor = &mut PostExpansionVisitor { context: &ctx, diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 9088f929372..b1f3612a839 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -66,6 +66,8 @@ pub struct ParseSess { // Places where `yield e?` exprs were used and should be feature gated. pub yield_spans: Lock>, pub injected_crate_name: Once, + // Places where or-patterns e.g. `Some(Foo | Bar)` were used and should be feature gated. + pub or_pattern_spans: Lock>, } impl ParseSess { @@ -96,6 +98,7 @@ impl ParseSess { async_closure_spans: Lock::new(Vec::new()), yield_spans: Lock::new(Vec::new()), injected_crate_name: Once::new(), + or_pattern_spans: Lock::new(Vec::new()), } } diff --git a/src/libsyntax/parse/parser/pat.rs b/src/libsyntax/parse/parser/pat.rs index c3079d2da0c..fd458aec743 100644 --- a/src/libsyntax/parse/parser/pat.rs +++ b/src/libsyntax/parse/parser/pat.rs @@ -14,7 +14,10 @@ use errors::{Applicability, DiagnosticBuilder}; impl<'a> Parser<'a> { /// Parses a pattern. - pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P> { + pub fn parse_pat( + &mut self, + expected: Option<&'static str> + ) -> PResult<'a, P> { self.parse_pat_with_range_pat(true, expected) } @@ -97,6 +100,34 @@ impl<'a> Parser<'a> { Ok(()) } + /// Parses a pattern, that may be a or-pattern (e.g. `Some(Foo | Bar)`). + fn parse_pat_with_or(&mut self, expected: Option<&'static str>) -> PResult<'a, P> { + // Parse the first pattern. + let first_pat = self.parse_pat(expected)?; + + // If the next token is not a `|`, this is not an or-pattern and + // we should exit here. + if !self.check(&token::BinOp(token::Or)) { + return Ok(first_pat) + } + + let lo = first_pat.span; + + let mut pats = vec![first_pat]; + + while self.eat(&token::BinOp(token::Or)) { + pats.push(self.parse_pat_with_range_pat( + true, expected + )?); + } + + let or_pattern_span = lo.to(self.prev_span); + + self.sess.or_pattern_spans.borrow_mut().push(or_pattern_span); + + Ok(self.mk_pat(or_pattern_span, PatKind::Or(pats))) + } + /// Parses a pattern, with a setting whether modern range patterns (e.g., `a..=b`, `a..b` are /// allowed). fn parse_pat_with_range_pat( @@ -240,7 +271,9 @@ impl<'a> Parser<'a> { /// Parse a tuple or parenthesis pattern. 
fn parse_pat_tuple_or_parens(&mut self) -> PResult<'a, PatKind> { - let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?; + let (fields, trailing_comma) = self.parse_paren_comma_seq(|p| { + p.parse_pat_with_or(None) + })?; // Here, `(pat,)` is a tuple pattern. // For backward compatibility, `(..)` is a tuple pattern as well. @@ -483,7 +516,7 @@ impl<'a> Parser<'a> { err.span_label(self.token.span, msg); return Err(err); } - let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat(None))?; + let (fields, _) = self.parse_paren_comma_seq(|p| p.parse_pat_with_or(None))?; Ok(PatKind::TupleStruct(path, fields)) } @@ -627,7 +660,7 @@ impl<'a> Parser<'a> { // Parsing a pattern of the form "fieldname: pat" let fieldname = self.parse_field_name()?; self.bump(); - let pat = self.parse_pat(None)?; + let pat = self.parse_pat_with_or(None)?; hi = pat.span; (pat, fieldname, false) } else { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 8dcb7ecf881..4dc00af4860 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -431,46 +431,33 @@ impl std::ops::DerefMut for State<'_> { } } -pub enum SeparatorSpacing { - After, - Both, -} - pub trait PrintState<'a>: std::ops::Deref + std::ops::DerefMut { fn comments(&mut self) -> &mut Option>; fn print_ident(&mut self, ident: ast::Ident); fn print_generic_args(&mut self, args: &ast::GenericArgs, colons_before_params: bool); - fn strsep( - &mut self, - sep: &'static str, - spacing: SeparatorSpacing, - b: Breaks, - elts: &[T], - mut op: F - ) -> io::Result<()> + fn strsep(&mut self, sep: &'static str, space_before: bool, + b: Breaks, elts: &[T], mut op: F) where F: FnMut(&mut Self, &T), { self.rbox(0, b); - let mut first = true; - for elt in elts { - if first { - first = false; - } else { - if let SeparatorSpacing::Both = spacing { - self.writer().space(); + if let Some((first, rest)) = elts.split_first() { + op(self, first); + for elt in rest { + if space_before { + self.space(); } self.word_space(sep); + op(self, elt); } - op(self, elt); } self.end(); } - fn commasep(&mut self, b: Breaks, elts: &[T], mut op: F) + fn commasep(&mut self, b: Breaks, elts: &[T], op: F) where F: FnMut(&mut Self, &T), { - self.strsep(",", SeparatorSpacing::After, b, elts, op) + self.strsep(",", false, b, elts, op) } fn maybe_print_comment(&mut self, pos: BytePos) { @@ -2379,8 +2366,7 @@ impl<'a> State<'a> { self.pclose(); } PatKind::Or(ref pats) => { - let spacing = SeparatorSpacing::Both; - self.strsep("|", spacing, Inconsistent, &pats[..], |s, p| s.print_pat(p))?; + self.strsep("|", true, Inconsistent, &pats[..], |s, p| s.print_pat(p)); } PatKind::Path(None, ref path) => { self.print_path(path, true, 0); @@ -2458,16 +2444,7 @@ impl<'a> State<'a> { } fn print_pats(&mut self, pats: &[P]) { - let mut first = true; - for p in pats { - if first { - first = false; - } else { - self.s.space(); - self.word_space("|"); - } - self.print_pat(p); - } + self.strsep("|", true, Inconsistent, pats, |s, p| s.print_pat(p)); } fn print_arm(&mut self, arm: &ast::Arm) { diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index ce679a5db63..91b92d84a81 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -462,7 +462,7 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) { visitor.visit_expr(upper_bound); } PatKind::Wild | PatKind::Rest => {}, - PatKind::Tuple(ref elems) => { + PatKind::Tuple(ref elems) | PatKind::Slice(ref elems) | PatKind::Or(ref elems) => { 
walk_list!(visitor, visit_pat, elems); diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index 27fc66d3b09..361e01781b1 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -469,6 +469,7 @@ symbols! { option_env, opt_out_copy, or, + or_patterns, Ord, Ordering, Output, diff --git a/src/test/ui/feature-gate/feature-gate-or_patterns.rs b/src/test/ui/feature-gate/feature-gate-or_patterns.rs new file mode 100644 index 00000000000..036a6095965 --- /dev/null +++ b/src/test/ui/feature-gate/feature-gate-or_patterns.rs @@ -0,0 +1,9 @@ +#![crate_type="lib"] + +pub fn example(x: Option) { + match x { + Some(0 | 1 | 2) => {} + //~^ ERROR: or-patterns syntax is experimental + _ => {} + } +} diff --git a/src/test/ui/feature-gate/feature-gate-or_patterns.stderr b/src/test/ui/feature-gate/feature-gate-or_patterns.stderr new file mode 100644 index 00000000000..aaabb54c1f0 --- /dev/null +++ b/src/test/ui/feature-gate/feature-gate-or_patterns.stderr @@ -0,0 +1,12 @@ +error[E0658]: or-patterns syntax is experimental + --> $DIR/feature-gate-or_patterns.rs:5:14 + | +LL | Some(0 | 1 | 2) => {} + | ^^^^^^^^^ + | + = note: for more information, see https://github.com/rust-lang/rust/issues/54883 + = help: add `#![feature(or_patterns)]` to the crate attributes to enable + +error: aborting due to previous error + +For more information about this error, try `rustc --explain E0658`. diff --git a/src/test/ui/parser/pat-lt-bracket-6.rs b/src/test/ui/parser/pat-lt-bracket-6.rs index 7b972183099..f27caa5d78c 100644 --- a/src/test/ui/parser/pat-lt-bracket-6.rs +++ b/src/test/ui/parser/pat-lt-bracket-6.rs @@ -2,8 +2,9 @@ fn main() { struct Test(&'static u8, [u8; 0]); let x = Test(&0, []); - let Test(&desc[..]) = x; //~ ERROR: expected one of `)`, `,`, or `@`, found `[` - //~^ ERROR subslice patterns are unstable + let Test(&desc[..]) = x; + //~^ ERROR: expected one of `)`, `,`, `@`, or `|`, found `[` + //~^^ ERROR subslice patterns are unstable } const RECOVERY_WITNESS: () = 0; //~ ERROR mismatched types diff --git a/src/test/ui/parser/pat-lt-bracket-6.stderr b/src/test/ui/parser/pat-lt-bracket-6.stderr index 201465b2c85..6f08f0a9d95 100644 --- a/src/test/ui/parser/pat-lt-bracket-6.stderr +++ b/src/test/ui/parser/pat-lt-bracket-6.stderr @@ -1,8 +1,8 @@ -error: expected one of `)`, `,`, or `@`, found `[` +error: expected one of `)`, `,`, `@`, or `|`, found `[` --> $DIR/pat-lt-bracket-6.rs:5:19 | LL | let Test(&desc[..]) = x; - | ^ expected one of `)`, `,`, or `@` here + | ^ expected one of `)`, `,`, `@`, or `|` here error[E0658]: subslice patterns are unstable --> $DIR/pat-lt-bracket-6.rs:5:20 @@ -14,7 +14,7 @@ LL | let Test(&desc[..]) = x; = help: add `#![feature(slice_patterns)]` to the crate attributes to enable error[E0308]: mismatched types - --> $DIR/pat-lt-bracket-6.rs:9:30 + --> $DIR/pat-lt-bracket-6.rs:10:30 | LL | const RECOVERY_WITNESS: () = 0; | ^ expected (), found integer diff --git a/src/test/ui/parser/pat-lt-bracket-7.rs b/src/test/ui/parser/pat-lt-bracket-7.rs index 020fdb845e8..327aef5ad15 100644 --- a/src/test/ui/parser/pat-lt-bracket-7.rs +++ b/src/test/ui/parser/pat-lt-bracket-7.rs @@ -2,7 +2,8 @@ fn main() { struct Thing(u8, [u8; 0]); let foo = core::iter::empty(); - for Thing(x[]) in foo {} //~ ERROR: expected one of `)`, `,`, or `@`, found `[` + for Thing(x[]) in foo {} + //~^ ERROR: expected one of `)`, `,`, `@`, or `|`, found `[` } const RECOVERY_WITNESS: () = 0; //~ ERROR mismatched types diff --git a/src/test/ui/parser/pat-lt-bracket-7.stderr 
b/src/test/ui/parser/pat-lt-bracket-7.stderr index 17557efa49e..196f1c0ae91 100644 --- a/src/test/ui/parser/pat-lt-bracket-7.stderr +++ b/src/test/ui/parser/pat-lt-bracket-7.stderr @@ -1,11 +1,11 @@ -error: expected one of `)`, `,`, or `@`, found `[` +error: expected one of `)`, `,`, `@`, or `|`, found `[` --> $DIR/pat-lt-bracket-7.rs:5:16 | LL | for Thing(x[]) in foo {} - | ^ expected one of `)`, `,`, or `@` here + | ^ expected one of `)`, `,`, `@`, or `|` here error[E0308]: mismatched types - --> $DIR/pat-lt-bracket-7.rs:8:30 + --> $DIR/pat-lt-bracket-7.rs:9:30 | LL | const RECOVERY_WITNESS: () = 0; | ^ expected (), found integer diff --git a/src/test/ui/parser/recover-for-loop-parens-around-head.rs b/src/test/ui/parser/recover-for-loop-parens-around-head.rs index e6c59fcf22d..c6be2c90667 100644 --- a/src/test/ui/parser/recover-for-loop-parens-around-head.rs +++ b/src/test/ui/parser/recover-for-loop-parens-around-head.rs @@ -8,7 +8,7 @@ fn main() { let vec = vec![1, 2, 3]; for ( elem in vec ) { - //~^ ERROR expected one of `)`, `,`, or `@`, found `in` + //~^ ERROR expected one of `)`, `,`, `@`, or `|`, found `in` //~| ERROR unexpected closing `)` const RECOVERY_WITNESS: () = 0; //~ ERROR mismatched types } diff --git a/src/test/ui/parser/recover-for-loop-parens-around-head.stderr b/src/test/ui/parser/recover-for-loop-parens-around-head.stderr index c160e646c28..1b5b6cca092 100644 --- a/src/test/ui/parser/recover-for-loop-parens-around-head.stderr +++ b/src/test/ui/parser/recover-for-loop-parens-around-head.stderr @@ -1,8 +1,8 @@ -error: expected one of `)`, `,`, or `@`, found `in` +error: expected one of `)`, `,`, `@`, or `|`, found `in` --> $DIR/recover-for-loop-parens-around-head.rs:10:16 | LL | for ( elem in vec ) { - | ^^ expected one of `)`, `,`, or `@` here + | ^^ expected one of `)`, `,`, `@`, or `|` here error: unexpected closing `)` --> $DIR/recover-for-loop-parens-around-head.rs:10:23 -- cgit 1.4.1-3-g733a5 From 1064d41c96047650897be96190e018be9bbd818a Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sat, 17 Aug 2019 20:49:00 +0300 Subject: resolve/expand: Rename some things for clarity --- src/librustc_resolve/build_reduced_graph.rs | 14 +++++++------- src/librustc_resolve/lib.rs | 4 ++-- src/librustc_resolve/macros.rs | 22 +++++++++++----------- src/librustc_resolve/resolve_imports.rs | 4 ++-- src/libsyntax/ext/base.rs | 8 ++++---- src/libsyntax/ext/expand.rs | 25 ++++++++++++++----------- src/libsyntax/ext/placeholders.rs | 7 +++---- 7 files changed, 43 insertions(+), 41 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index fe5c85d3a95..42428456b6e 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -160,23 +160,23 @@ impl<'a> Resolver<'a> { Some(ext) } + // FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders. 
crate fn build_reduced_graph( &mut self, fragment: &AstFragment, - extra_placeholders: &[ExpnId], + extra_placeholders: &[NodeId], parent_scope: ParentScope<'a>, ) -> LegacyScope<'a> { let mut def_collector = DefCollector::new(&mut self.definitions, parent_scope.expansion); fragment.visit_with(&mut def_collector); for placeholder in extra_placeholders { - def_collector.visit_macro_invoc(NodeId::placeholder_from_expn_id(*placeholder)); + def_collector.visit_macro_invoc(*placeholder); } let mut visitor = BuildReducedGraphVisitor { r: self, parent_scope }; fragment.visit_with(&mut visitor); for placeholder in extra_placeholders { - visitor.parent_scope.legacy = - visitor.visit_invoc(NodeId::placeholder_from_expn_id(*placeholder)); + visitor.parent_scope.legacy = visitor.visit_invoc(*placeholder); } visitor.parent_scope.legacy @@ -884,7 +884,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { } /// Builds the reduced graph for a single item in an external crate. - fn build_reduced_graph_for_external_crate_res(&mut self, child: Export) { + fn build_reduced_graph_for_external_crate_res(&mut self, child: Export) { let parent = self.parent_scope.module; let Export { ident, res, vis, span } = child; // FIXME: We shouldn't create the gensym here, it should come from metadata, @@ -1073,10 +1073,10 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { false } - fn visit_invoc(&mut self, id: ast::NodeId) -> LegacyScope<'a> { + fn visit_invoc(&mut self, id: NodeId) -> LegacyScope<'a> { let invoc_id = id.placeholder_to_expn_id(); - self.parent_scope.module.unresolved_invocations.borrow_mut().insert(invoc_id); + self.parent_scope.module.unexpanded_invocations.borrow_mut().insert(invoc_id); let old_parent_scope = self.r.invocation_parent_scopes.insert(invoc_id, self.parent_scope); assert!(old_parent_scope.is_none(), "invocation data is reset for an invocation"); diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 984473d781e..2dd0ad13c52 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -448,7 +448,7 @@ pub struct ModuleData<'a> { populate_on_access: Cell, // Macro invocations that can expand into items in this module. - unresolved_invocations: RefCell>, + unexpanded_invocations: RefCell>, no_implicit_prelude: bool, @@ -478,7 +478,7 @@ impl<'a> ModuleData<'a> { normal_ancestor_id, lazy_resolutions: Default::default(), populate_on_access: Cell::new(!normal_ancestor_id.is_local()), - unresolved_invocations: Default::default(), + unexpanded_invocations: Default::default(), no_implicit_prelude: false, glob_importers: RefCell::new(Vec::new()), globs: RefCell::new(Vec::new()), diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index ff0c14f9cd9..01ad67252a3 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -10,7 +10,7 @@ use crate::resolve_imports::ImportResolver; use rustc::hir::def::{self, DefKind, NonMacroAttrKind}; use rustc::middle::stability; use rustc::{ty, lint, span_bug}; -use syntax::ast::{self, Ident}; +use syntax::ast::{self, NodeId, Ident}; use syntax::attr::StabilityLevel; use syntax::edition::Edition; use syntax::ext::base::{self, Indeterminate, SpecialDerives}; @@ -26,7 +26,7 @@ use syntax_pos::{Span, DUMMY_SP}; use std::{mem, ptr}; use rustc_data_structures::sync::Lrc; -type Res = def::Res; +type Res = def::Res; /// Binding produced by a `macro_rules` item. /// Not modularized, can shadow previous legacy bindings, etc. 
@@ -91,11 +91,11 @@ fn fast_print_path(path: &ast::Path) -> Symbol { } impl<'a> base::Resolver for Resolver<'a> { - fn next_node_id(&mut self) -> ast::NodeId { + fn next_node_id(&mut self) -> NodeId { self.session.next_node_id() } - fn get_module_scope(&mut self, id: ast::NodeId) -> ExpnId { + fn get_module_scope(&mut self, id: NodeId) -> ExpnId { let expn_id = ExpnId::fresh(Some(ExpnData::default( ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, self.session.edition() ))); @@ -115,18 +115,18 @@ impl<'a> base::Resolver for Resolver<'a> { }); } - - + // FIXME: `extra_placeholders` should be included into the `fragment` as regular placeholders. fn visit_ast_fragment_with_placeholders( - &mut self, expansion: ExpnId, fragment: &AstFragment, derives: &[ExpnId] + &mut self, expansion: ExpnId, fragment: &AstFragment, extra_placeholders: &[NodeId] ) { // Integrate the new AST fragment into all the definition and module structures. // We are inside the `expansion` now, but other parent scope components are still the same. let parent_scope = ParentScope { expansion, ..self.invocation_parent_scopes[&expansion] }; - let output_legacy_scope = self.build_reduced_graph(fragment, derives, parent_scope); + let output_legacy_scope = + self.build_reduced_graph(fragment, extra_placeholders, parent_scope); self.output_legacy_scopes.insert(expansion, output_legacy_scope); - parent_scope.module.unresolved_invocations.borrow_mut().remove(&expansion); + parent_scope.module.unexpanded_invocations.borrow_mut().remove(&expansion); } fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension) { @@ -480,7 +480,7 @@ impl<'a> Resolver<'a> { Scope::MacroUsePrelude => match this.macro_use_prelude.get(&ident.name).cloned() { Some(binding) => Ok((binding, Flags::PRELUDE | Flags::MISC_FROM_PRELUDE)), None => Err(Determinacy::determined( - this.graph_root.unresolved_invocations.borrow().is_empty() + this.graph_root.unexpanded_invocations.borrow().is_empty() )) } Scope::BuiltinAttrs => if is_builtin_attr_name(ident.name) { @@ -503,7 +503,7 @@ impl<'a> Resolver<'a> { Scope::ExternPrelude => match this.extern_prelude_get(ident, !record_used) { Some(binding) => Ok((binding, Flags::PRELUDE)), None => Err(Determinacy::determined( - this.graph_root.unresolved_invocations.borrow().is_empty() + this.graph_root.unexpanded_invocations.borrow().is_empty() )), } Scope::ToolPrelude => if KNOWN_TOOLS.contains(&ident.name) { diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index b49f1868706..fd222a132a3 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -202,7 +202,7 @@ impl<'a> Resolver<'a> { Err((Determined, Weak::No)) } else if let Some(binding) = self.extern_prelude_get(ident, !record_used) { Ok(binding) - } else if !self.graph_root.unresolved_invocations.borrow().is_empty() { + } else if !self.graph_root.unexpanded_invocations.borrow().is_empty() { // Macro-expanded `extern crate` items can add names to extern prelude. Err((Undetermined, Weak::No)) } else { @@ -348,7 +348,7 @@ impl<'a> Resolver<'a> { // progress, we have to ignore those potential unresolved invocations from other modules // and prohibit access to macro-expanded `macro_export` macros instead (unless restricted // shadowing is enabled, see `macro_expanded_macro_export_errors`). 
- let unexpanded_macros = !module.unresolved_invocations.borrow().is_empty(); + let unexpanded_macros = !module.unexpanded_invocations.borrow().is_empty(); if let Some(binding) = resolution.binding { if !unexpanded_macros || ns == MacroNS || restricted_shadowing { return check_usable(self, binding); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index fb1bf4d7160..b0a4a6af983 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -1,4 +1,4 @@ -use crate::ast::{self, Attribute, Name, PatKind}; +use crate::ast::{self, NodeId, Attribute, Name, PatKind}; use crate::attr::{HasAttrs, Stability, Deprecation}; use crate::source_map::SourceMap; use crate::edition::Edition; @@ -671,13 +671,13 @@ bitflags::bitflags! { } pub trait Resolver { - fn next_node_id(&mut self) -> ast::NodeId; + fn next_node_id(&mut self) -> NodeId; - fn get_module_scope(&mut self, id: ast::NodeId) -> ExpnId; + fn get_module_scope(&mut self, id: NodeId) -> ExpnId; fn resolve_dollar_crates(&mut self); fn visit_ast_fragment_with_placeholders(&mut self, expn_id: ExpnId, fragment: &AstFragment, - derives: &[ExpnId]); + extra_placeholders: &[NodeId]); fn register_builtin_macro(&mut self, ident: ast::Ident, ext: SyntaxExtension); fn resolve_imports(&mut self); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index dac402921b9..c1d52c97455 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -291,7 +291,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { // Unresolved macros produce dummy outputs as a recovery measure. invocations.reverse(); let mut expanded_fragments = Vec::new(); - let mut derives: FxHashMap> = FxHashMap::default(); + let mut all_derive_placeholders: FxHashMap> = FxHashMap::default(); let mut undetermined_invocations = Vec::new(); let (mut progress, mut force) = (false, !self.monotonic); loop { @@ -347,13 +347,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let mut item = self.fully_configure(item); item.visit_attrs(|attrs| attrs.retain(|a| a.path != sym::derive)); - let derives = derives.entry(invoc.expansion_data.id).or_default(); + let derive_placeholders = + all_derive_placeholders.entry(invoc.expansion_data.id).or_default(); - derives.reserve(traits.len()); + derive_placeholders.reserve(traits.len()); invocations.reserve(traits.len()); for path in traits { let expn_id = ExpnId::fresh(None); - derives.push(expn_id); + derive_placeholders.push(NodeId::placeholder_from_expn_id(expn_id)); invocations.push(Invocation { kind: InvocationKind::Derive { path, item: item.clone() }, fragment_kind: invoc.fragment_kind, @@ -365,7 +366,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } let fragment = invoc.fragment_kind .expect_from_annotatables(::std::iter::once(item)); - self.collect_invocations(fragment, derives) + self.collect_invocations(fragment, derive_placeholders) } else { unreachable!() }; @@ -384,10 +385,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> { // Finally incorporate all the expanded macros into the input AST fragment. 
let mut placeholder_expander = PlaceholderExpander::new(self.cx, self.monotonic); while let Some(expanded_fragments) = expanded_fragments.pop() { - for (mark, expanded_fragment) in expanded_fragments.into_iter().rev() { - let derives = derives.remove(&mark).unwrap_or_else(Vec::new); - placeholder_expander.add(NodeId::placeholder_from_expn_id(mark), - expanded_fragment, derives); + for (expn_id, expanded_fragment) in expanded_fragments.into_iter().rev() { + let derive_placeholders = + all_derive_placeholders.remove(&expn_id).unwrap_or_else(Vec::new); + placeholder_expander.add(NodeId::placeholder_from_expn_id(expn_id), + expanded_fragment, derive_placeholders); } } fragment_with_placeholders.mut_visit_with(&mut placeholder_expander); @@ -404,7 +406,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { /// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s. /// Then call into resolver that builds a skeleton ("reduced graph") of the fragment and /// prepares data for resolving paths of macro invocations. - fn collect_invocations(&mut self, mut fragment: AstFragment, derives: &[ExpnId]) + fn collect_invocations(&mut self, mut fragment: AstFragment, extra_placeholders: &[NodeId]) -> (AstFragment, Vec) { // Resolve `$crate`s in the fragment for pretty-printing. self.cx.resolver.resolve_dollar_crates(); @@ -423,9 +425,10 @@ impl<'a, 'b> MacroExpander<'a, 'b> { collector.invocations }; + // FIXME: Merge `extra_placeholders` into the `fragment` as regular placeholders. if self.monotonic { self.cx.resolver.visit_ast_fragment_with_placeholders( - self.cx.current_expansion.id, &fragment, derives); + self.cx.current_expansion.id, &fragment, extra_placeholders); } (fragment, invocations) diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index 2d05f8f0b00..d800cfedcfb 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -2,7 +2,6 @@ use crate::ast::{self, NodeId}; use crate::source_map::{DUMMY_SP, dummy_spanned}; use crate::ext::base::ExtCtxt; use crate::ext::expand::{AstFragment, AstFragmentKind}; -use crate::ext::hygiene::ExpnId; use crate::tokenstream::TokenStream; use crate::mut_visit::*; use crate::ptr::P; @@ -86,11 +85,11 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> { } } - pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment, derives: Vec) { + pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment, placeholders: Vec) { fragment.mut_visit_with(self); if let AstFragment::Items(mut items) = fragment { - for derive in derives { - match self.remove(NodeId::placeholder_from_expn_id(derive)) { + for placeholder in placeholders { + match self.remove(placeholder) { AstFragment::Items(derived_items) => items.extend(derived_items), _ => unreachable!(), } -- cgit 1.4.1-3-g733a5 From 8b932dfda77f8a48f0d134c31c4b33382724a69c Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Mon, 19 Aug 2019 19:00:24 +0300 Subject: remove composite tokens support from the lexer --- src/librustc_lexer/src/lib.rs | 176 +++---------------------------------- src/libsyntax/parse/lexer/mod.rs | 25 ------ src/libsyntax/parse/lexer/tests.rs | 32 ++++--- 3 files changed, 34 insertions(+), 199 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc_lexer/src/lib.rs b/src/librustc_lexer/src/lib.rs index afef307a0ed..41b47befaf1 100644 --- a/src/librustc_lexer/src/lib.rs +++ b/src/librustc_lexer/src/lib.rs @@ -23,9 +23,6 @@ pub enum TokenKind { Lifetime { starts_with_number: bool }, Semi, Comma, - DotDotDot, - 
DotDotEq, - DotDot, Dot, OpenParen, CloseParen, @@ -37,41 +34,19 @@ pub enum TokenKind { Pound, Tilde, Question, - ColonColon, Colon, Dollar, - EqEq, Eq, - FatArrow, - Ne, Not, - Le, - LArrow, Lt, - ShlEq, - Shl, - Ge, Gt, - ShrEq, - Shr, - RArrow, Minus, - MinusEq, And, - AndAnd, - AndEq, Or, - OrOr, - OrEq, - PlusEq, Plus, - StarEq, Star, - SlashEq, Slash, - CaretEq, Caret, - PercentEq, Percent, Unknown, } @@ -135,13 +110,7 @@ impl Cursor<'_> { '/' => match self.nth_char(0) { '/' => self.line_comment(), '*' => self.block_comment(), - _ => { - if self.eat_assign() { - SlashEq - } else { - Slash - } - } + _ => Slash, }, c if character_properties::is_whitespace(c) => self.whitespace(), 'r' => match (self.nth_char(0), self.nth_char(1)) { @@ -199,22 +168,7 @@ impl Cursor<'_> { } ';' => Semi, ',' => Comma, - '.' => { - if self.nth_char(0) == '.' { - self.bump(); - if self.nth_char(0) == '.' { - self.bump(); - DotDotDot - } else if self.nth_char(0) == '=' { - self.bump(); - DotDotEq - } else { - DotDot - } - } else { - Dot - } - } + '.' => Dot, '(' => OpenParen, ')' => CloseParen, '{' => OpenBrace, @@ -225,112 +179,19 @@ impl Cursor<'_> { '#' => Pound, '~' => Tilde, '?' => Question, - ':' => { - if self.nth_char(0) == ':' { - self.bump(); - ColonColon - } else { - Colon - } - } + ':' => Colon, '$' => Dollar, - '=' => { - if self.nth_char(0) == '=' { - self.bump(); - EqEq - } else if self.nth_char(0) == '>' { - self.bump(); - FatArrow - } else { - Eq - } - } - '!' => { - if self.nth_char(0) == '=' { - self.bump(); - Ne - } else { - Not - } - } - '<' => match self.nth_char(0) { - '=' => { - self.bump(); - Le - } - '<' => { - self.bump(); - if self.eat_assign() { ShlEq } else { Shl } - } - '-' => { - self.bump(); - LArrow - } - _ => Lt, - }, - '>' => match self.nth_char(0) { - '=' => { - self.bump(); - Ge - } - '>' => { - self.bump(); - if self.eat_assign() { ShrEq } else { Shr } - } - _ => Gt, - }, - '-' => { - if self.nth_char(0) == '>' { - self.bump(); - RArrow - } else { - if self.eat_assign() { MinusEq } else { Minus } - } - } - '&' => { - if self.nth_char(0) == '&' { - self.bump(); - AndAnd - } else { - if self.eat_assign() { AndEq } else { And } - } - } - '|' => { - if self.nth_char(0) == '|' { - self.bump(); - OrOr - } else { - if self.eat_assign() { OrEq } else { Or } - } - } - '+' => { - if self.eat_assign() { - PlusEq - } else { - Plus - } - } - '*' => { - if self.eat_assign() { - StarEq - } else { - Star - } - } - '^' => { - if self.eat_assign() { - CaretEq - } else { - Caret - } - } - '%' => { - if self.eat_assign() { - PercentEq - } else { - Percent - } - } + '=' => Eq, + '!' 
=> Not, + '<' => Lt, + '>' => Gt, + '-' => Minus, + '&' => And, + '|' => Or, + '+' => Plus, + '*' => Star, + '^' => Caret, + '%' => Percent, '\'' => self.lifetime_or_char(), '"' => { let terminated = self.double_quoted_string(); @@ -643,15 +504,6 @@ impl Cursor<'_> { self.bump(); } } - - fn eat_assign(&mut self) -> bool { - if self.nth_char(0) == '=' { - self.bump(); - true - } else { - false - } - } } pub mod character_properties { diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index bdf468a52bb..66add869359 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -273,9 +273,6 @@ impl<'a> StringReader<'a> { } rustc_lexer::TokenKind::Semi => token::Semi, rustc_lexer::TokenKind::Comma => token::Comma, - rustc_lexer::TokenKind::DotDotDot => token::DotDotDot, - rustc_lexer::TokenKind::DotDotEq => token::DotDotEq, - rustc_lexer::TokenKind::DotDot => token::DotDot, rustc_lexer::TokenKind::Dot => token::Dot, rustc_lexer::TokenKind::OpenParen => token::OpenDelim(token::Paren), rustc_lexer::TokenKind::CloseParen => token::CloseDelim(token::Paren), @@ -287,42 +284,20 @@ impl<'a> StringReader<'a> { rustc_lexer::TokenKind::Pound => token::Pound, rustc_lexer::TokenKind::Tilde => token::Tilde, rustc_lexer::TokenKind::Question => token::Question, - rustc_lexer::TokenKind::ColonColon => token::ModSep, rustc_lexer::TokenKind::Colon => token::Colon, rustc_lexer::TokenKind::Dollar => token::Dollar, - rustc_lexer::TokenKind::EqEq => token::EqEq, rustc_lexer::TokenKind::Eq => token::Eq, - rustc_lexer::TokenKind::FatArrow => token::FatArrow, - rustc_lexer::TokenKind::Ne => token::Ne, rustc_lexer::TokenKind::Not => token::Not, - rustc_lexer::TokenKind::Le => token::Le, - rustc_lexer::TokenKind::LArrow => token::LArrow, rustc_lexer::TokenKind::Lt => token::Lt, - rustc_lexer::TokenKind::ShlEq => token::BinOpEq(token::Shl), - rustc_lexer::TokenKind::Shl => token::BinOp(token::Shl), - rustc_lexer::TokenKind::Ge => token::Ge, rustc_lexer::TokenKind::Gt => token::Gt, - rustc_lexer::TokenKind::ShrEq => token::BinOpEq(token::Shr), - rustc_lexer::TokenKind::Shr => token::BinOp(token::Shr), - rustc_lexer::TokenKind::RArrow => token::RArrow, rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus), - rustc_lexer::TokenKind::MinusEq => token::BinOpEq(token::Minus), rustc_lexer::TokenKind::And => token::BinOp(token::And), - rustc_lexer::TokenKind::AndEq => token::BinOpEq(token::And), - rustc_lexer::TokenKind::AndAnd => token::AndAnd, rustc_lexer::TokenKind::Or => token::BinOp(token::Or), - rustc_lexer::TokenKind::OrEq => token::BinOpEq(token::Or), - rustc_lexer::TokenKind::OrOr => token::OrOr, rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus), - rustc_lexer::TokenKind::PlusEq => token::BinOpEq(token::Plus), rustc_lexer::TokenKind::Star => token::BinOp(token::Star), - rustc_lexer::TokenKind::StarEq => token::BinOpEq(token::Star), rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash), - rustc_lexer::TokenKind::SlashEq => token::BinOpEq(token::Slash), rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret), - rustc_lexer::TokenKind::CaretEq => token::BinOpEq(token::Caret), rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent), - rustc_lexer::TokenKind::PercentEq => token::BinOpEq(token::Percent), rustc_lexer::TokenKind::Unknown => { let c = self.str_from(start).chars().next().unwrap(); diff --git a/src/libsyntax/parse/lexer/tests.rs b/src/libsyntax/parse/lexer/tests.rs index 94570140996..a915aa42fd1 100644 --- 
a/src/libsyntax/parse/lexer/tests.rs +++ b/src/libsyntax/parse/lexer/tests.rs @@ -75,42 +75,50 @@ fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind } #[test] -fn doublecolonparsing() { +fn doublecolon_parsing() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - check_tokenization(setup(&sm, &sh, "a b".to_string()), - vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); + check_tokenization( + setup(&sm, &sh, "a b".to_string()), + vec![mk_ident("a"), token::Whitespace, mk_ident("b")], + ); }) } #[test] -fn dcparsing_2() { +fn doublecolon_parsing_2() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - check_tokenization(setup(&sm, &sh, "a::b".to_string()), - vec![mk_ident("a"), token::ModSep, mk_ident("b")]); + check_tokenization( + setup(&sm, &sh, "a::b".to_string()), + vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")], + ); }) } #[test] -fn dcparsing_3() { +fn doublecolon_parsing_3() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - check_tokenization(setup(&sm, &sh, "a ::b".to_string()), - vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); + check_tokenization( + setup(&sm, &sh, "a ::b".to_string()), + vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")], + ); }) } #[test] -fn dcparsing_4() { +fn doublecolon_parsing_4() { with_default_globals(|| { let sm = Lrc::new(SourceMap::new(FilePathMapping::empty())); let sh = mk_sess(sm.clone()); - check_tokenization(setup(&sm, &sh, "a:: b".to_string()), - vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); + check_tokenization( + setup(&sm, &sh, "a:: b".to_string()), + vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")], + ); }) } -- cgit 1.4.1-3-g733a5 From 914e1f456415eae0ae095dd39dc51c115c1ffb5a Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Mon, 19 Aug 2019 19:30:44 +0300 Subject: glue tokens when building token stream --- src/libsyntax/parse/lexer/tokentrees.rs | 40 ++++++++++++++++++++++++++------- src/libsyntax/parse/token.rs | 2 +- src/libsyntax/tokenstream.rs | 2 +- 3 files changed, 34 insertions(+), 10 deletions(-) (limited to 'src/libsyntax') diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 37e67a2729e..e5ba7e45309 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -39,29 +39,29 @@ struct TokenTreesReader<'a> { impl<'a> TokenTreesReader<'a> { // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`. fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { - let mut tts = Vec::new(); + let mut buf = TokenStreamBuilder::default(); self.real_token(); while self.token != token::Eof { - tts.push(self.parse_token_tree()?); + buf.push(self.parse_token_tree()?); } - Ok(TokenStream::new(tts)) + Ok(buf.into_token_stream()) } // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`. fn parse_token_trees_until_close_delim(&mut self) -> TokenStream { - let mut tts = vec![]; + let mut buf = TokenStreamBuilder::default(); loop { if let token::CloseDelim(..) 
= self.token.kind { - return TokenStream::new(tts); + return buf.into_token_stream(); } match self.parse_token_tree() { - Ok(tree) => tts.push(tree), + Ok(tree) => buf.push(tree), Err(mut e) => { e.emit(); - return TokenStream::new(tts); + return buf.into_token_stream(); } } } @@ -223,8 +223,32 @@ impl<'a> TokenTreesReader<'a> { _ => { self.token = token; return; - }, + } + } + } + } +} + +#[derive(Default)] +struct TokenStreamBuilder { + buf: Vec, +} + +impl TokenStreamBuilder { + fn push(&mut self, (tree, joint): TreeAndJoint) { + if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last() { + if let TokenTree::Token(token) = &tree { + if let Some(glued) = prev_token.glue(token) { + self.buf.pop(); + self.buf.push((TokenTree::Token(glued), joint)); + return; + } } } + self.buf.push((tree, joint)) + } + + fn into_token_stream(self) -> TokenStream { + TokenStream::new(self.buf) } } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index be800b4de66..1865f925165 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -551,7 +551,7 @@ impl Token { } } - crate fn glue(self, joint: Token) -> Option { + crate fn glue(&self, joint: &Token) -> Option { let kind = match self.kind { Eq => match joint.kind { Eq => EqEq, diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 6ff8898fe21..09a1b93c7bb 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -414,7 +414,7 @@ impl TokenStreamBuilder { let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint); if let Some(TokenTree::Token(last_token)) = last_tree_if_joint { if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() { - if let Some(glued_tok) = last_token.glue(token) { + if let Some(glued_tok) = last_token.glue(&token) { let last_stream = self.0.pop().unwrap(); self.push_all_but_last_tree(&last_stream); let glued_tt = TokenTree::Token(glued_tok); -- cgit 1.4.1-3-g733a5 From b0d47829489d44512c01868d6985cf1e86de3673 Mon Sep 17 00:00:00 2001 From: Mazdak Farrokhzad Date: Fri, 2 Aug 2019 03:44:00 +0200 Subject: Stabilize 'async_await'. --- src/librustc/error_codes.rs | 2 -- src/librustc_typeck/check/mod.rs | 2 -- src/librustc_typeck/error_codes.rs | 7 ++----- src/libstd/keyword_docs.rs | 2 -- src/libsyntax/feature_gate.rs | 19 ++----------------- 5 files changed, 4 insertions(+), 28 deletions(-) (limited to 'src/libsyntax') diff --git a/src/librustc/error_codes.rs b/src/librustc/error_codes.rs index b3eee7c3464..a200a058f4f 100644 --- a/src/librustc/error_codes.rs +++ b/src/librustc/error_codes.rs @@ -2088,7 +2088,6 @@ generator can be constructed. 
 Erroneous code example:
 ```edition2018,compile-fail,E0698
-#![feature(async_await)]
 async fn bar() -> () {}
 async fn foo() {
@@ -2101,7 +2100,6 @@ To fix this you must bind `T` to a concrete type such as `String` so that a
 generator can then be constructed:
 ```edition2018
-#![feature(async_await)]
 async fn bar() -> () {}
 async fn foo() {
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs
index fc1ee649e28..9c7ac83e82e 100644
--- a/src/librustc_typeck/check/mod.rs
+++ b/src/librustc_typeck/check/mod.rs
@@ -4197,8 +4197,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
     /// A possible error is to forget to add `.await` when using futures:
     ///
     /// ```
-    /// #![feature(async_await)]
-    ///
     /// async fn make_u32() -> u32 {
     ///     22
     /// }
diff --git a/src/librustc_typeck/error_codes.rs b/src/librustc_typeck/error_codes.rs
index ca9ce3d22b5..b52183d4b1b 100644
--- a/src/librustc_typeck/error_codes.rs
+++ b/src/librustc_typeck/error_codes.rs
@@ -4751,7 +4751,6 @@ E0733: r##"
 Recursion in an `async fn` requires boxing. For example, this will not compile:
 ```edition2018,compile_fail,E0733
-#![feature(async_await)]
 async fn foo(n: usize) {
     if n > 0 {
         foo(n - 1).await;
@@ -4763,12 +4762,11 @@ To achieve async recursion, the `async fn` needs to be desugared
 such that the `Future` is explicit in the return type:
 ```edition2018,compile_fail,E0720
-# #![feature(async_await)]
 use std::future::Future;
-fn foo_desugered(n: usize) -> impl Future<Output = ()> {
+fn foo_desugared(n: usize) -> impl Future<Output = ()> {
     async move {
         if n > 0 {
-            foo_desugered(n - 1).await;
+            foo_desugared(n - 1).await;
         }
     }
 }
@@ -4777,7 +4775,6 @@ fn foo_desugered(n: usize) -> impl Future<Output = ()> {
 Finally, the future is wrapped in a pinned box:
 ```edition2018
-# #![feature(async_await)]
 use std::future::Future;
 use std::pin::Pin;
 fn foo_recursive(n: usize) -> Pin<Box<dyn Future<Output = ()>>> {
diff --git a/src/libstd/keyword_docs.rs b/src/libstd/keyword_docs.rs
index f5018485ef7..85a9dea09ed 100644
--- a/src/libstd/keyword_docs.rs
+++ b/src/libstd/keyword_docs.rs
@@ -984,7 +984,6 @@ mod where_keyword { }
 // 2018 Edition keywords
-#[unstable(feature = "async_await", issue = "50547")]
 #[doc(keyword = "async")]
 //
 /// Return a [`Future`] instead of blocking the current thread.
@@ -995,7 +994,6 @@ mod where_keyword { }
 /// [not yet complete]: https://github.com/rust-lang/rust/issues/34601
 mod async_keyword { }
-#[unstable(feature = "async_await", issue = "50547")]
 #[doc(keyword = "await")]
 //
 /// Suspend execution until the result of a [`Future`] is ready.
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index bbc3ae28225..bce0b07db1c 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -461,9 +461,6 @@ declare_features! (
     // Allows using `#[doc(keyword = "...")]`.
     (active, doc_keyword, "1.28.0", Some(51315), None),
-    // Allows async and await syntax.
-    (active, async_await, "1.28.0", Some(50547), None),
-
     // Allows reinterpretation of the bits of a value of one type as another type during const eval.
     (active, const_transmute, "1.29.0", Some(53605), None),
@@ -857,6 +854,8 @@ declare_features! (
     (accepted, repr_align_enum, "1.37.0", Some(57996), None),
     // Allows `const _: TYPE = VALUE`.
     (accepted, underscore_const_names, "1.37.0", Some(54912), None),
+    // Allows free and inherent `async fn`s, `async` blocks, and `.await` expressions.
+ (accepted, async_await, "1.38.0", Some(50547), None), // ------------------------------------------------------------------------- // feature-group-end: accepted features @@ -2100,12 +2099,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { "labels on blocks are unstable"); } } - ast::ExprKind::Async(..) => { - gate_feature_post!(&self, async_await, e.span, "async blocks are unstable"); - } - ast::ExprKind::Await(_) => { - gate_feature_post!(&self, async_await, e.span, "async/await is unstable"); - } _ => {} } visit::walk_expr(self, e) @@ -2154,11 +2147,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { span: Span, _node_id: NodeId) { if let Some(header) = fn_kind.header() { - // Check for const fn and async fn declarations. - if header.asyncness.node.is_async() { - gate_feature_post!(&self, async_await, span, "async fn is unstable"); - } - // Stability of const fn methods are covered in // `visit_trait_item` and `visit_impl_item` below; this is // because default methods don't pass through this point. @@ -2198,9 +2186,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { if block.is_none() { self.check_abi(sig.header.abi, ti.span); } - if sig.header.asyncness.node.is_async() { - gate_feature_post!(&self, async_await, ti.span, "async fn is unstable"); - } if sig.decl.c_variadic { gate_feature_post!(&self, c_variadic, ti.span, "C-variadic functions are unstable"); -- cgit 1.4.1-3-g733a5
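
The commit "glue tokens when building token stream" above moves multi-character punctuation assembly out of the lexer: the string reader now reports the pieces (for example `:` `:` for `a::b`) together with their spacing, and `TokenStreamBuilder::push` fuses a token onto its predecessor only when that predecessor was marked `Joint`. The sketch below is a minimal, self-contained illustration of that idea; `Tok`, `Spacing`, and `StreamBuilder` are simplified stand-ins invented for this example, not the real `libsyntax` `TokenKind`, `TreeAndJoint`, or `TokenStreamBuilder` types.

```rust
// Illustrative sketch of spacing-driven token gluing (simplified stand-in types).

#[derive(Clone, Copy, Debug, PartialEq)]
enum Tok {
    Ident(&'static str),
    Colon,  // `:` as emitted by the lexer
    ModSep, // `::` produced by gluing two adjacent colons
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum Spacing {
    Joint, // immediately followed by the next token, so gluing is allowed
    Alone, // separated from the next token (e.g. by whitespace); never glue
}

/// Try to combine two adjacent tokens into one (cf. `Token::glue`).
fn glue(prev: Tok, next: Tok) -> Option<Tok> {
    match (prev, next) {
        (Tok::Colon, Tok::Colon) => Some(Tok::ModSep),
        _ => None,
    }
}

/// Mirrors the builder added in the patch: tokens are pushed one by one, and a
/// token glues onto its predecessor only when the predecessor was `Joint`.
#[derive(Default)]
struct StreamBuilder {
    buf: Vec<(Tok, Spacing)>,
}

impl StreamBuilder {
    fn push(&mut self, (tok, spacing): (Tok, Spacing)) {
        if let Some(&(prev, Spacing::Joint)) = self.buf.last() {
            if let Some(glued) = glue(prev, tok) {
                self.buf.pop();
                self.buf.push((glued, spacing));
                return;
            }
        }
        self.buf.push((tok, spacing));
    }

    fn tokens(&self) -> Vec<Tok> {
        self.buf.iter().map(|&(tok, _)| tok).collect()
    }
}

fn main() {
    // `a::b`: the lexer emits `a`, `:`, `:`, `b`; the two adjacent (Joint)
    // colons are reassembled into `::` while the stream is built.
    let mut glued = StreamBuilder::default();
    for t in vec![
        (Tok::Ident("a"), Spacing::Joint),
        (Tok::Colon, Spacing::Joint),
        (Tok::Colon, Spacing::Joint),
        (Tok::Ident("b"), Spacing::Alone),
    ] {
        glued.push(t);
    }
    assert_eq!(glued.tokens(), vec![Tok::Ident("a"), Tok::ModSep, Tok::Ident("b")]);

    // A colon marked `Alone` (separated from what follows) is never glued,
    // so two colons with space between them stay as two `:` tokens.
    let mut separate = StreamBuilder::default();
    separate.push((Tok::Colon, Spacing::Alone));
    separate.push((Tok::Colon, Spacing::Joint));
    assert_eq!(separate.tokens(), vec![Tok::Colon, Tok::Colon]);

    println!("token gluing sketch: ok");
}
```

Seen through this sketch, the lexer tests above can assert the unglued form (`token::Colon, token::Colon`) while consumers of the built token stream still see `::` (and other glued tokens such as `==`) wherever the pieces are adjacent.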