about summary refs log tree commit diff
path: root/src/libsyntax/ext
diff options
context:
space:
mode:
author:    Mazdak Farrokhzad <twingoow@gmail.com>  2019-02-23 09:25:26 +0100
committer: GitHub <noreply@github.com>             2019-02-23 09:25:26 +0100
commit:    585d4d29d91065c14fb823b8044495a6e5e857c1 (patch)
tree:      baa24cd750a1bcb6a9939d0430bc36b0c11a5d19 /src/libsyntax/ext
parent:    4f99061874f60fa04eb6868ddd70cfea25995fec (diff)
parent:    895a79423bf5298e13a177ee6317f43380d437bc (diff)
download:  rust-585d4d29d91065c14fb823b8044495a6e5e857c1.tar.gz
download:  rust-585d4d29d91065c14fb823b8044495a6e5e857c1.zip
Rollup merge of #58476 - nnethercote:rm-LazyTokenStream, r=petrochenkov
Remove `LazyTokenStream`.

`LazyTokenStream` was added in #40939. Perhaps it was an effective optimization then, but no longer. This PR removes it, making the code both simpler and faster.

r? @alexcrichton
Diffstat (limited to 'src/libsyntax/ext')
-rw-r--r--  src/libsyntax/ext/base.rs             |  2
-rw-r--r--  src/libsyntax/ext/expand.rs           |  5
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs  | 29
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs    |  2
4 files changed, 20 insertions(+), 18 deletions(-)
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 5980261593d..452cc2f2c65 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -266,7 +266,7 @@ impl<F> TTMacroExpander for F
         impl MutVisitor for AvoidInterpolatedIdents {
             fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
                 if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-                    if let token::NtIdent(ident, is_raw) = nt.0 {
+                    if let token::NtIdent(ident, is_raw) = **nt {
                         *tt = tokenstream::TokenTree::Token(ident.span,
                                                             token::Ident(ident, is_raw));
                     }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index f50663f9785..b805213bb1a 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -25,6 +25,7 @@ use syntax_pos::{Span, DUMMY_SP, FileName};
 use syntax_pos::hygiene::ExpnFormat;
 
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
 use std::fs;
 use std::io::ErrorKind;
 use std::{iter, mem};
@@ -584,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item {
+                let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                     Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                     Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                     Annotatable::Expr(expr) => token::NtExpr(expr),
-                })).into();
+                }))).into();
                 let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                 let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                 let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 5de1ccec860..fe1cffb092b 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -88,6 +88,7 @@ use smallvec::{smallvec, SmallVec};
 use syntax_pos::Span;
 
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::mem;
 use std::ops::{Deref, DerefMut};
@@ -179,7 +180,7 @@ struct MatcherPos<'root, 'tt: 'root> {
     /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
     /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
     /// wants the shared `matches`, one should use `up.matches`.
-    matches: Box<[Rc<NamedMatchVec>]>,
+    matches: Box<[Lrc<NamedMatchVec>]>,
     /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
     /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
     /// to `matches[match_lo]`.
@@ -218,7 +219,7 @@ struct MatcherPos<'root, 'tt: 'root> {
 impl<'root, 'tt> MatcherPos<'root, 'tt> {
     /// Adds `m` as a named match for the `idx`-th metavar.
     fn push_match(&mut self, idx: usize, m: NamedMatch) {
-        let matches = Rc::make_mut(&mut self.matches[idx]);
+        let matches = Lrc::make_mut(&mut self.matches[idx]);
         matches.push(m);
     }
 }
@@ -295,11 +296,11 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
 }
 
 /// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
-fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> {
+fn create_matches(len: usize) -> Box<[Lrc<NamedMatchVec>]> {
     if len == 0 {
         vec![]
     } else {
-        let empty_matches = Rc::new(SmallVec::new());
+        let empty_matches = Lrc::new(SmallVec::new());
         vec![empty_matches; len]
     }.into_boxed_slice()
 }
@@ -353,8 +354,8 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP
 /// token tree it was derived from.
 #[derive(Debug, Clone)]
 pub enum NamedMatch {
-    MatchedSeq(Rc<NamedMatchVec>, DelimSpan),
-    MatchedNonterminal(Rc<Nonterminal>),
+    MatchedSeq(Lrc<NamedMatchVec>, DelimSpan),
+    MatchedNonterminal(Lrc<Nonterminal>),
 }
 
 /// Takes a sequence of token trees `ms` representing a matcher which successfully matched input
@@ -561,7 +562,7 @@ fn inner_parse_loop<'root, 'tt>(
                         new_item.match_cur += seq.num_captures;
                         new_item.idx += 1;
                         for idx in item.match_cur..item.match_cur + seq.num_captures {
-                            new_item.push_match(idx, MatchedSeq(Rc::new(smallvec![]), sp));
+                            new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![]), sp));
                         }
                         cur_items.push(new_item);
                     }
@@ -707,7 +708,7 @@ pub fn parse(
                 let matches = eof_items[0]
                     .matches
                     .iter_mut()
-                    .map(|dv| Rc::make_mut(dv).pop().unwrap());
+                    .map(|dv| Lrc::make_mut(dv).pop().unwrap());
                 return nameize(sess, ms, matches);
             } else if eof_items.len() > 1 {
                 return Error(
@@ -780,7 +781,7 @@ pub fn parse(
                 let match_cur = item.match_cur;
                 item.push_match(
                     match_cur,
-                    MatchedNonterminal(Rc::new(parse_nt(&mut parser, span, &ident.as_str()))),
+                    MatchedNonterminal(Lrc::new(parse_nt(&mut parser, span, &ident.as_str()))),
                 );
                 item.idx += 1;
                 item.match_cur += 1;
@@ -829,7 +830,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
         },
         "block" => match *token {
             Token::OpenDelim(token::Brace) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtItem(_)
                 | token::NtPat(_)
                 | token::NtTy(_)
@@ -843,9 +844,9 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
         },
         "path" | "meta" => match *token {
             Token::ModSep | Token::Ident(..) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,
-                _ => may_be_ident(&nt.0),
+                _ => may_be_ident(&nt),
             },
             _ => false,
         },
@@ -862,12 +863,12 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
             Token::ModSep |                     // path
             Token::Lt |                         // path (UFCS constant)
             Token::BinOp(token::Shl) => true,   // path (double UFCS)
-            Token::Interpolated(ref nt) => may_be_ident(&nt.0),
+            Token::Interpolated(ref nt) => may_be_ident(nt),
             _ => false,
         },
         "lifetime" => match *token {
             Token::Lifetime(_) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,
                 _ => false,
             },
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index b9a50cc6488..bd2adb5ac13 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -149,7 +149,7 @@ pub fn transcribe(cx: &ExtCtxt<'_>,
                             result.push(tt.clone().into());
                         } else {
                             sp = sp.apply_mark(cx.current_expansion.mark);
-                            let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
+                            let token = TokenTree::Token(sp, Token::Interpolated(nt.clone()));
                             result.push(token.into());
                         }
                     } else {