diff options
| author | Taiki Endo <te316e89@gmail.com> | 2019-02-07 02:33:01 +0900 |
|---|---|---|
| committer | Taiki Endo <te316e89@gmail.com> | 2019-02-07 02:33:01 +0900 |
| commit | 7bb082d27fe472f52b103de0ae9fc6fa7e6546cc (patch) | |
| tree | dfed08e00fc6e88022fd7249bd5017e5d57110a7 /src/libsyntax/ext/tt | |
| parent | 2596bc1368d1e3d34c9a7841ad87a3100f01cbad (diff) | |
| download | rust-7bb082d27fe472f52b103de0ae9fc6fa7e6546cc.tar.gz rust-7bb082d27fe472f52b103de0ae9fc6fa7e6546cc.zip | |
libsyntax => 2018
Diffstat (limited to 'src/libsyntax/ext/tt')
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 31 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 58 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/quoted.rs | 19 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 23 |
4 files changed, 68 insertions, 63 deletions
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index b4003ac729a..a9000b89fb4 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -70,21 +70,22 @@ //! eof: [a $( a )* a b ·] //! ``` -pub use self::NamedMatch::*; -pub use self::ParseResult::*; -use self::TokenTreeOrTokenTreeSlice::*; - -use ast::Ident; +pub use NamedMatch::*; +pub use ParseResult::*; +use TokenTreeOrTokenTreeSlice::*; + +use crate::ast::Ident; +use crate::errors::FatalError; +use crate::ext::tt::quoted::{self, TokenTree}; +use crate::parse::{Directory, ParseSess}; +use crate::parse::parser::{Parser, PathStyle}; +use crate::parse::token::{self, DocComment, Nonterminal, Token}; +use crate::print::pprust; +use crate::symbol::keywords; +use crate::tokenstream::{DelimSpan, TokenStream}; + +use smallvec::{smallvec, SmallVec}; use syntax_pos::{self, Span}; -use errors::FatalError; -use ext::tt::quoted::{self, TokenTree}; -use parse::{Directory, ParseSess}; -use parse::parser::{Parser, PathStyle}; -use parse::token::{self, DocComment, Nonterminal, Token}; -use print::pprust; -use smallvec::SmallVec; -use symbol::keywords; -use tokenstream::{DelimSpan, TokenStream}; use rustc_data_structures::fx::FxHashMap; use std::collections::hash_map::Entry::{Occupied, Vacant}; @@ -649,7 +650,7 @@ pub fn parse( sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], - directory: Option<Directory>, + directory: Option<Directory<'_>>, recurse_into_modules: bool, ) -> NamedParseResult { // Create a parser that can be used for the "black box" parts. 
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 176575b67ea..b3ecaeaedbb 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -1,29 +1,31 @@ -use {ast, attr}; +use crate::{ast, attr}; +use crate::edition::Edition; +use crate::errors::FatalError; +use crate::ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; +use crate::ext::base::{NormalTT, TTMacroExpander}; +use crate::ext::expand::{AstFragment, AstFragmentKind}; +use crate::ext::tt::macro_parser::{Success, Error, Failure}; +use crate::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; +use crate::ext::tt::macro_parser::{parse, parse_failure_msg}; +use crate::ext::tt::quoted; +use crate::ext::tt::transcribe::transcribe; +use crate::feature_gate::Features; +use crate::parse::{Directory, ParseSess}; +use crate::parse::parser::Parser; +use crate::parse::token::{self, NtTT}; +use crate::parse::token::Token::*; +use crate::symbol::Symbol; +use crate::tokenstream::{DelimSpan, TokenStream, TokenTree}; + use syntax_pos::{Span, DUMMY_SP}; -use edition::Edition; -use errors::FatalError; -use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; -use ext::base::{NormalTT, TTMacroExpander}; -use ext::expand::{AstFragment, AstFragmentKind}; -use ext::tt::macro_parser::{Success, Error, Failure}; -use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; -use ext::tt::macro_parser::{parse, parse_failure_msg}; -use ext::tt::quoted; -use ext::tt::transcribe::transcribe; -use feature_gate::Features; -use parse::{Directory, ParseSess}; -use parse::parser::Parser; -use parse::token::{self, NtTT}; -use parse::token::Token::*; -use symbol::Symbol; -use tokenstream::{DelimSpan, TokenStream, TokenTree}; +use log::debug; use rustc_data_structures::fx::FxHashMap; use std::borrow::Cow; use std::collections::hash_map::Entry; use rustc_data_structures::sync::Lrc; -use errors::Applicability; +use crate::errors::Applicability; 
const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \ `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, \ @@ -91,7 +93,7 @@ struct MacroRulesMacroExpander { impl TTMacroExpander for MacroRulesMacroExpander { fn expand<'cx>( &self, - cx: &'cx mut ExtCtxt, + cx: &'cx mut ExtCtxt<'_>, sp: Span, input: TokenStream, def_span: Option<Span>, @@ -109,13 +111,13 @@ impl TTMacroExpander for MacroRulesMacroExpander { } } -fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) { +fn trace_macros_note(cx: &mut ExtCtxt<'_>, sp: Span, message: String) { let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp); cx.expansions.entry(sp).or_default().push(message); } /// Given `lhses` and `rhses`, this is the new macro we create -fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, +fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, def_span: Option<Span>, name: ast::Ident, @@ -423,7 +425,7 @@ fn check_lhs_nt_follows(sess: &ParseSess, /// Check that the lhs contains no repetition which could match an empty token /// tree, because then the matcher would hang indefinitely. fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { - use self::quoted::TokenTree; + use quoted::TokenTree; for tt in tts { match *tt { TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (), @@ -497,7 +499,7 @@ struct FirstSets { impl FirstSets { fn new(tts: &[quoted::TokenTree]) -> FirstSets { - use self::quoted::TokenTree; + use quoted::TokenTree; let mut sets = FirstSets { first: FxHashMap::default() }; build_recur(&mut sets, tts); @@ -567,7 +569,7 @@ impl FirstSets { // walks forward over `tts` until all potential FIRST tokens are // identified. 
fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet { - use self::quoted::TokenTree; + use quoted::TokenTree; let mut first = TokenSet::empty(); for tt in tts.iter() { @@ -721,7 +723,7 @@ fn check_matcher_core(sess: &ParseSess, first_sets: &FirstSets, matcher: &[quoted::TokenTree], follow: &TokenSet) -> TokenSet { - use self::quoted::TokenTree; + use quoted::TokenTree; let mut last = TokenSet::empty(); @@ -940,7 +942,7 @@ enum IsInFollow { /// separator. // when changing this do not forget to update doc/book/macros.md! fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { - use self::quoted::TokenTree; + use quoted::TokenTree; if let TokenTree::Token(_, token::CloseDelim(_)) = *tok { // closing a token tree can never be matched by any fragment; @@ -1072,7 +1074,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess, fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String { match *tt { - quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), + quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok), quoted::TokenTree::MetaVar(_, name) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index b56871a1885..6c3cf3e6312 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -1,13 +1,14 @@ -use ast::NodeId; -use early_buffered_lints::BufferedEarlyLintId; -use ext::tt::macro_parser; -use feature_gate::Features; -use parse::{token, ParseSess}; -use print::pprust; -use symbol::keywords; +use crate::ast::NodeId; +use crate::early_buffered_lints::BufferedEarlyLintId; +use crate::ext::tt::macro_parser; +use crate::feature_gate::Features; +use crate::parse::{token, ParseSess}; +use crate::print::pprust; +use crate::tokenstream::{self, DelimSpan}; +use crate::ast; +use crate::symbol::keywords; 
+ use syntax_pos::{edition::Edition, BytePos, Span}; -use tokenstream::{self, DelimSpan}; -use ast; use rustc_data_structures::sync::Lrc; use std::iter::Peekable; diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 08f34b22328..b9a50cc6488 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -1,13 +1,14 @@ -use ast::Ident; -use ext::base::ExtCtxt; -use ext::expand::Marker; -use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; -use ext::tt::quoted; -use mut_visit::noop_visit_tt; -use parse::token::{self, Token, NtTT}; -use smallvec::SmallVec; +use crate::ast::Ident; +use crate::ext::base::ExtCtxt; +use crate::ext::expand::Marker; +use crate::ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; +use crate::ext::tt::quoted; +use crate::mut_visit::noop_visit_tt; +use crate::parse::token::{self, Token, NtTT}; +use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; + +use smallvec::{smallvec, SmallVec}; use syntax_pos::DUMMY_SP; -use tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint}; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync::Lrc; @@ -56,7 +57,7 @@ impl Iterator for Frame { /// This can do Macro-By-Example transcription. On the other hand, if /// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can /// (and should) be None. -pub fn transcribe(cx: &ExtCtxt, +pub fn transcribe(cx: &ExtCtxt<'_>, interp: Option<FxHashMap<Ident, Rc<NamedMatch>>>, src: Vec<quoted::TokenTree>) -> TokenStream { @@ -230,7 +231,7 @@ fn lockstep_iter_size(tree: &quoted::TokenTree, interpolations: &FxHashMap<Ident, Rc<NamedMatch>>, repeats: &[(usize, usize)]) -> LockstepIterSize { - use self::quoted::TokenTree; + use quoted::TokenTree; match *tree { TokenTree::Delimited(_, ref delimed) => { delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| { |
