about summary refs log tree commit diff
path: root/src/libsyntax/parse
diff options
context:
space:
mode:
author    Mazdak Farrokhzad <twingoow@gmail.com>  2019-02-23 09:25:26 +0100
committer GitHub <noreply@github.com>             2019-02-23 09:25:26 +0100
commit    585d4d29d91065c14fb823b8044495a6e5e857c1 (patch)
tree      baa24cd750a1bcb6a9939d0430bc36b0c11a5d19 /src/libsyntax/parse
parent    4f99061874f60fa04eb6868ddd70cfea25995fec (diff)
parent    895a79423bf5298e13a177ee6317f43380d437bc (diff)
download  rust-585d4d29d91065c14fb823b8044495a6e5e857c1.tar.gz
          rust-585d4d29d91065c14fb823b8044495a6e5e857c1.zip
Rollup merge of #58476 - nnethercote:rm-LazyTokenStream, r=petrochenkov
Remove `LazyTokenStream`.

`LazyTokenStream` was added in #40939. Perhaps it was an effective optimization then, but no longer. This PR removes it, making the code both simpler and faster.

r? @alexcrichton
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/attr.rs     4
-rw-r--r--  src/libsyntax/parse/parser.rs  16
-rw-r--r--  src/libsyntax/parse/token.rs  248
3 files changed, 102 insertions, 166 deletions
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs
index b36ca0574cb..9020c8c6a2d 100644
--- a/src/libsyntax/parse/attr.rs
+++ b/src/libsyntax/parse/attr.rs
@@ -141,7 +141,7 @@ impl<'a> Parser<'a> {
     /// The delimiters or `=` are still put into the resulting token stream.
     crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
         let meta = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
                 _ => None,
             },
@@ -227,7 +227,7 @@ impl<'a> Parser<'a> {
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
                 _ => None,
             },
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index e6a48912c48..bb2e9d8ed59 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -119,7 +119,7 @@ enum BlockMode {
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
         if let token::Interpolated(nt) = $p.token.clone() {
-            match nt.0 {
+            match *nt {
                 token::NtExpr(ref e) | token::NtLiteral(ref e) => {
                     $p.bump();
                     return Ok((*e).clone());
@@ -146,7 +146,7 @@ macro_rules! maybe_whole_expr {
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
         if let token::Interpolated(nt) = $p.token.clone() {
-            if let token::$constructor($x) = nt.0.clone() {
+            if let token::$constructor($x) = (*nt).clone() {
                 $p.bump();
                 return Ok($e);
             }
@@ -1570,7 +1570,7 @@ impl<'a> Parser<'a> {
                     Some(body)
                 }
                 token::Interpolated(ref nt) => {
-                    match &nt.0 {
+                    match **nt {
                         token::NtBlock(..) => {
                             *at_end = true;
                             let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
@@ -1913,7 +1913,7 @@ impl<'a> Parser<'a> {
 
     fn is_named_argument(&mut self) -> bool {
         let offset = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             }
@@ -2099,7 +2099,7 @@ impl<'a> Parser<'a> {
     /// Matches `token_lit = LIT_INTEGER | ...`.
     fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
         let out = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
                     ExprKind::Lit(ref lit) => { lit.node.clone() }
                     _ => { return self.unexpected_last(&self.token); }
@@ -2299,7 +2299,7 @@ impl<'a> Parser<'a> {
     /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
         let meta_ident = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref meta) => match meta.node {
                     ast::MetaItemKind::Word => Some(meta.ident.clone()),
                     _ => None,
@@ -3271,7 +3271,7 @@ impl<'a> Parser<'a> {
                 self.meta_var_span = Some(self.span);
                 // Interpolated identifier and lifetime tokens are replaced with usual identifier
                 // and lifetime tokens, so the former are never encountered during normal parsing.
-                match nt.0 {
+                match **nt {
                     token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
                     token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
                     _ => return,
@@ -3403,7 +3403,7 @@ impl<'a> Parser<'a> {
                     // can't continue an expression after an ident
                     token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
                     token::Literal(..) | token::Pound => true,
-                    token::Interpolated(ref nt) => match nt.0 {
+                    token::Interpolated(ref nt) => match **nt {
                         token::NtIdent(..) | token::NtExpr(..) |
                         token::NtBlock(..) | token::NtPath(..) => true,
                         _ => false,
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index ff7f3e0bfae..eec422d6266 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -13,16 +13,15 @@ use crate::syntax::parse::parse_stream_from_source_str;
 use crate::syntax::parse::parser::emit_unclosed_delims;
 use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
 
-use serialize::{Decodable, Decoder, Encodable, Encoder};
 use syntax_pos::symbol::{self, Symbol};
 use syntax_pos::{self, Span, FileName};
 use log::info;
 
-use std::{cmp, fmt};
+use std::fmt;
 use std::mem;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert;
-use rustc_data_structures::sync::{Lrc, Lock};
+use rustc_data_structures::sync::Lrc;
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum BinOpToken {
@@ -87,7 +86,7 @@ impl Lit {
         }
     }
 
-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
         mem::discriminant(self) == mem::discriminant(other)
@@ -184,9 +183,8 @@ pub enum Token {
     Ident(ast::Ident, /* is_raw */ bool),
     Lifetime(ast::Ident),
 
-    // The `LazyTokenStream` is a pure function of the `Nonterminal`,
-    // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
-    Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
+    Interpolated(Lrc<Nonterminal>),
+
     // Can be expanded into several tokens.
     /// A doc comment.
     DocComment(ast::Name),
@@ -209,10 +207,6 @@ pub enum Token {
 static_assert!(MEM_SIZE_OF_STATEMENT: mem::size_of::<Token>() == 16);
 
 impl Token {
-    pub fn interpolated(nt: Nonterminal) -> Token {
-        Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
-    }
-
     /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
     pub fn from_ast_ident(ident: ast::Ident) -> Token {
         Ident(ident, ident.is_raw_guess())
@@ -244,7 +238,7 @@ impl Token {
             ModSep                            | // global path
             Lifetime(..)                      | // labeled loop
             Pound                             => true, // expression attributes
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLiteral(..) |
                 NtIdent(..)   |
                 NtExpr(..)    |
@@ -272,7 +266,7 @@ impl Token {
             Lifetime(..)                | // lifetime bound in trait object
             Lt | BinOp(Shl)             | // associated path
             ModSep                      => true, // global path
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtTy(..) | NtPath(..) | NtLifetime(..) => true,
                 _ => false,
             },
@@ -284,7 +278,7 @@ impl Token {
     pub fn can_begin_const_arg(&self) -> bool {
         match self {
             OpenDelim(Brace) => true,
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtExpr(..) => true,
                 NtBlock(..) => true,
                 NtLiteral(..) => true,
@@ -316,7 +310,7 @@ impl Token {
             BinOp(Minus) => true,
             Ident(ident, false) if ident.name == keywords::True.name() => true,
             Ident(ident, false) if ident.name == keywords::False.name() => true,
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLiteral(..) => true,
                 _             => false,
             },
@@ -328,7 +322,7 @@ impl Token {
     pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
         match *self {
             Ident(ident, is_raw) => Some((ident, is_raw)),
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtIdent(ident, is_raw) => Some((ident, is_raw)),
                 _ => None,
             },
@@ -339,7 +333,7 @@ impl Token {
     pub fn lifetime(&self) -> Option<ast::Ident> {
         match *self {
             Lifetime(ident) => Some(ident),
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLifetime(ident) => Some(ident),
                 _ => None,
             },
@@ -367,7 +361,7 @@ impl Token {
     /// Returns `true` if the token is an interpolated path.
     fn is_path(&self) -> bool {
         if let Interpolated(ref nt) = *self {
-            if let NtPath(..) = nt.0 {
+            if let NtPath(..) = **nt {
                 return true;
             }
         }
@@ -508,98 +502,7 @@ impl Token {
         }
     }
 
-    pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
-        -> TokenStream
-    {
-        let nt = match *self {
-            Token::Interpolated(ref nt) => nt,
-            _ => panic!("only works on interpolated tokens"),
-        };
-
-        // An `Interpolated` token means that we have a `Nonterminal`
-        // which is often a parsed AST item. At this point we now need
-        // to convert the parsed AST to an actual token stream, e.g.
-        // un-parse it basically.
-        //
-        // Unfortunately there's not really a great way to do that in a
-        // guaranteed lossless fashion right now. The fallback here is
-        // to just stringify the AST node and reparse it, but this loses
-        // all span information.
-        //
-        // As a result, some AST nodes are annotated with the token
-        // stream they came from. Here we attempt to extract these
-        // lossless token streams before we fall back to the
-        // stringification.
-        let mut tokens = None;
-
-        match nt.0 {
-            Nonterminal::NtItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
-            }
-            Nonterminal::NtTraitItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
-            }
-            Nonterminal::NtImplItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
-            }
-            Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Token::Ident(ident, is_raw);
-                tokens = Some(TokenTree::Token(ident.span, token).into());
-            }
-            Nonterminal::NtLifetime(ident) => {
-                let token = Token::Lifetime(ident);
-                tokens = Some(TokenTree::Token(ident.span, token).into());
-            }
-            Nonterminal::NtTT(ref tt) => {
-                tokens = Some(tt.clone().into());
-            }
-            _ => {}
-        }
-
-        let tokens_for_real = nt.1.force(|| {
-            // FIXME(#43081): Avoid this pretty-print + reparse hack
-            let source = pprust::token_to_string(self);
-            let filename = FileName::macro_expansion_source_code(&source);
-            let (tokens, errors) = parse_stream_from_source_str(
-                filename, source, sess, Some(span));
-            emit_unclosed_delims(&errors, &sess.span_diagnostic);
-            tokens
-        });
-
-        // During early phases of the compiler the AST could get modified
-        // directly (e.g., attributes added or removed) and the internal cache
-        // of tokens my not be invalidated or updated. Consequently if the
-        // "lossless" token stream disagrees with our actual stringification
-        // (which has historically been much more battle-tested) then we go
-        // with the lossy stream anyway (losing span information).
-        //
-        // Note that the comparison isn't `==` here to avoid comparing spans,
-        // but it *also* is a "probable" equality which is a pretty weird
-        // definition. We mostly want to catch actual changes to the AST
-        // like a `#[cfg]` being processed or some weird `macro_rules!`
-        // expansion.
-        //
-        // What we *don't* want to catch is the fact that a user-defined
-        // literal like `0xf` is stringified as `15`, causing the cached token
-        // stream to not be literal `==` token-wise (ignoring spans) to the
-        // token stream we got from stringification.
-        //
-        // Instead the "probably equal" check here is "does each token
-        // recursively have the same discriminant?" We basically don't look at
-        // the token values here and assume that such fine grained token stream
-        // modifications, including adding/removing typically non-semantic
-        // tokens such as extra braces and commas, don't happen.
-        if let Some(tokens) = tokens {
-            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
-                return tokens
-            }
-            info!("cached tokens found, but they're not \"probably equal\", \
-                   going with stringified version");
-        }
-        return tokens_for_real
-    }
-
-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
         if mem::discriminant(self) != mem::discriminant(other) {
@@ -731,61 +634,94 @@ impl fmt::Debug for Nonterminal {
     }
 }
 
-crate fn is_op(tok: &Token) -> bool {
-    match *tok {
-        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
-        Ident(..) | Lifetime(..) | Interpolated(..) |
-        Whitespace | Comment | Shebang(..) | Eof => false,
-        _ => true,
-    }
-}
-
-#[derive(Clone)]
-pub struct LazyTokenStream(Lock<Option<TokenStream>>);
-
-impl cmp::Eq for LazyTokenStream {}
-impl PartialEq for LazyTokenStream {
-    fn eq(&self, _other: &LazyTokenStream) -> bool {
-        true
-    }
-}
-
-impl fmt::Debug for LazyTokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Debug::fmt(&self.clone().0.into_inner(), f)
-    }
-}
+impl Nonterminal {
+    pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
+        // A `Nonterminal` is often a parsed AST item. At this point we now
+        // need to convert the parsed AST to an actual token stream, e.g.
+        // un-parse it basically.
+        //
+        // Unfortunately there's not really a great way to do that in a
+        // guaranteed lossless fashion right now. The fallback here is to just
+        // stringify the AST node and reparse it, but this loses all span
+        // information.
+        //
+        // As a result, some AST nodes are annotated with the token stream they
+        // came from. Here we attempt to extract these lossless token streams
+        // before we fall back to the stringification.
+        let tokens = match *self {
+            Nonterminal::NtItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtTraitItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtImplItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtIdent(ident, is_raw) => {
+                let token = Token::Ident(ident, is_raw);
+                Some(TokenTree::Token(ident.span, token).into())
+            }
+            Nonterminal::NtLifetime(ident) => {
+                let token = Token::Lifetime(ident);
+                Some(TokenTree::Token(ident.span, token).into())
+            }
+            Nonterminal::NtTT(ref tt) => {
+                Some(tt.clone().into())
+            }
+            _ => None,
+        };
 
-impl LazyTokenStream {
-    pub fn new() -> Self {
-        LazyTokenStream(Lock::new(None))
-    }
+        // FIXME(#43081): Avoid this pretty-print + reparse hack
+        let source = pprust::nonterminal_to_string(self);
+        let filename = FileName::macro_expansion_source_code(&source);
+        let (tokens_for_real, errors) =
+            parse_stream_from_source_str(filename, source, sess, Some(span));
+        emit_unclosed_delims(&errors, &sess.span_diagnostic);
 
-    fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
-        let mut opt_stream = self.0.lock();
-        if opt_stream.is_none() {
-            *opt_stream = Some(f());
+        // During early phases of the compiler the AST could get modified
+        // directly (e.g., attributes added or removed) and the internal cache
+        // of tokens my not be invalidated or updated. Consequently if the
+        // "lossless" token stream disagrees with our actual stringification
+        // (which has historically been much more battle-tested) then we go
+        // with the lossy stream anyway (losing span information).
+        //
+        // Note that the comparison isn't `==` here to avoid comparing spans,
+        // but it *also* is a "probable" equality which is a pretty weird
+        // definition. We mostly want to catch actual changes to the AST
+        // like a `#[cfg]` being processed or some weird `macro_rules!`
+        // expansion.
+        //
+        // What we *don't* want to catch is the fact that a user-defined
+        // literal like `0xf` is stringified as `15`, causing the cached token
+        // stream to not be literal `==` token-wise (ignoring spans) to the
+        // token stream we got from stringification.
+        //
+        // Instead the "probably equal" check here is "does each token
+        // recursively have the same discriminant?" We basically don't look at
+        // the token values here and assume that such fine grained token stream
+        // modifications, including adding/removing typically non-semantic
+        // tokens such as extra braces and commas, don't happen.
+        if let Some(tokens) = tokens {
+            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
+                return tokens
+            }
+            info!("cached tokens found, but they're not \"probably equal\", \
+                   going with stringified version");
         }
-        opt_stream.clone().unwrap()
-    }
-}
-
-impl Encodable for LazyTokenStream {
-    fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> {
-        Ok(())
+        return tokens_for_real
     }
 }
 
-impl Decodable for LazyTokenStream {
-    fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> {
-        Ok(LazyTokenStream::new())
+crate fn is_op(tok: &Token) -> bool {
+    match *tok {
+        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
+        Ident(..) | Lifetime(..) | Interpolated(..) |
+        Whitespace | Comment | Shebang(..) | Eof => false,
+        _ => true,
     }
 }
 
-impl ::std::hash::Hash for LazyTokenStream {
-    fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {}
-}
-
 fn prepend_attrs(sess: &ParseSess,
                  attrs: &[ast::Attribute],
                  tokens: Option<&tokenstream::TokenStream>,