Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs         34
-rw-r--r--  src/libsyntax/parse/lexer/tokentrees.rs  23
-rw-r--r--  src/libsyntax/parse/token.rs              2
3 files changed, 42 insertions, 17 deletions
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index bbece1ee5e3..89bf10244da 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -34,7 +34,10 @@ pub struct TokenAndSpan {
 
 impl Default for TokenAndSpan {
     fn default() -> Self {
-        TokenAndSpan { tok: token::Whitespace, sp: syntax_pos::DUMMY_SP }
+        TokenAndSpan {
+            tok: token::Whitespace,
+            sp: syntax_pos::DUMMY_SP,
+        }
     }
 }
 
@@ -54,8 +57,9 @@ pub struct StringReader<'a> {
     /// If part of a filemap is being re-lexed, this should be set to false.
     pub save_new_lines_and_multibyte: bool,
     // cached:
-    pub peek_tok: token::Token,
-    pub peek_span: Span,
+    peek_tok: token::Token,
+    peek_span: Span,
+    peek_span_src_raw: Span,
     pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
@@ -63,13 +67,20 @@ pub struct StringReader<'a> {
     /// Stack of open delimiters and their spans. Used for error message.
     token: token::Token,
     span: Span,
+    /// The raw source span which *does not* take `override_span` into account
+    span_src_raw: Span,
     open_braces: Vec<(token::DelimToken, Span)>,
     pub override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
     fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
-        unwrap_or!(self.override_span, Span::new(lo, hi, NO_EXPANSION))
+        self.mk_sp_and_raw(lo, hi).0
+    }
+    fn mk_sp_and_raw(&self, lo: BytePos, hi: BytePos) -> (Span, Span) {
+        let raw = Span::new(lo, hi, NO_EXPANSION);
+        let real = unwrap_or!(self.override_span, raw);
+        (real, raw)
     }
     fn mk_ident(&self, string: &str) -> Ident {
         let mut ident = Ident::from_str(string);
@@ -121,6 +132,7 @@ impl<'a> StringReader<'a> {
             sp: self.peek_span,
         };
         self.advance_token()?;
+        self.span_src_raw = self.peek_span_src_raw;
         Ok(ret_val)
     }
 
@@ -180,10 +192,12 @@ impl<'a> StringReader<'a> {
             // dummy values; not read
             peek_tok: token::Eof,
             peek_span: syntax_pos::DUMMY_SP,
+            peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
             token: token::Eof,
             span: syntax_pos::DUMMY_SP,
+            span_src_raw: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
             override_span: None,
         }
@@ -325,17 +339,25 @@ impl<'a> StringReader<'a> {
     fn advance_token(&mut self) -> Result<(), ()> {
         match self.scan_whitespace_or_comment() {
             Some(comment) => {
+                self.peek_span_src_raw = comment.sp;
                 self.peek_span = comment.sp;
                 self.peek_tok = comment.tok;
             }
             None => {
                 if self.is_eof() {
                     self.peek_tok = token::Eof;
-                    self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos);
+                    let (real, raw) = self.mk_sp_and_raw(
+                        self.filemap.end_pos,
+                        self.filemap.end_pos,
+                    );
+                    self.peek_span = real;
+                    self.peek_span_src_raw = raw;
                 } else {
                     let start_bytepos = self.pos;
                     self.peek_tok = self.next_token_inner()?;
-                    self.peek_span = self.mk_sp(start_bytepos, self.pos);
+                    let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos);
+                    self.peek_span = real;
+                    self.peek_span_src_raw = raw;
                 };
             }
         }
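
The lexer hunks above thread a second, "raw" span through `StringReader`: `mk_sp_and_raw` returns both the span the rest of the parser should see (which may be replaced by `override_span`) and the span of the bytes that were actually lexed, cached in `peek_span_src_raw`/`span_src_raw`. A minimal standalone sketch of that split, using simplified hypothetical types rather than the compiler's `Span`/`BytePos`:

```rust
// Hypothetical simplified types; a sketch of the "real vs. raw span" idea,
// not the compiler's implementation.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
}

struct Lexer {
    /// When set, every produced span is replaced by this one (e.g. when
    /// re-lexing a snippet whose diagnostics should point elsewhere).
    override_span: Option<Span>,
}

impl Lexer {
    /// Returns (real, raw): `real` honours `override_span`, `raw` never does.
    fn mk_sp_and_raw(&self, lo: u32, hi: u32) -> (Span, Span) {
        let raw = Span { lo, hi };
        let real = self.override_span.unwrap_or(raw);
        (real, raw)
    }
}

fn main() {
    let lexer = Lexer { override_span: Some(Span { lo: 100, hi: 105 }) };
    let (real, raw) = lexer.mk_sp_and_raw(0, 3);
    assert_eq!(real, Span { lo: 100, hi: 105 }); // what diagnostics see
    assert_eq!(raw, Span { lo: 0, hi: 3 });      // actual source positions
    println!("real = {:?}, raw = {:?}", real, raw);
}
```

With an override in place, token positions and diagnostics come from `real`, while adjacency checks (see the tokentrees.rs hunks below) can still rely on `raw`.
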
diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs
index a2c81e24754..278b8c991f7 100644
--- a/src/libsyntax/parse/lexer/tokentrees.rs
+++ b/src/libsyntax/parse/lexer/tokentrees.rs
@@ -18,9 +18,7 @@ impl<'a> StringReader<'a> {
     pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
-            let tree = self.parse_token_tree()?;
-            let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
-            tts.push(if is_joint { tree.joint() } else { tree.into() });
+            tts.push(self.parse_token_tree()?);
         }
         Ok(TokenStream::concat(tts))
     }
@@ -32,19 +30,17 @@ impl<'a> StringReader<'a> {
             if let token::CloseDelim(..) = self.token {
                 return TokenStream::concat(tts);
             }
-            let tree = match self.parse_token_tree() {
-                Ok(tree) => tree,
+            match self.parse_token_tree() {
+                Ok(tree) => tts.push(tree),
                 Err(mut e) => {
                     e.emit();
                     return TokenStream::concat(tts);
                 }
-            };
-            let is_joint = tree.span().hi() == self.span.lo() && token::is_op(&self.token);
-            tts.push(if is_joint { tree.joint() } else { tree.into() });
+            }
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
+    fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
         match self.token {
             token::Eof => {
                 let msg = "this file contains an un-closed delimiter";
@@ -115,7 +111,7 @@ impl<'a> StringReader<'a> {
                 Ok(TokenTree::Delimited(span, Delimited {
                     delim,
                     tts: tts.into(),
-                }))
+                }).into())
             },
             token::CloseDelim(_) => {
                 // An unexpected closing delimiter (i.e., there is no
@@ -127,8 +123,13 @@ impl<'a> StringReader<'a> {
             },
             _ => {
                 let tt = TokenTree::Token(self.span, self.token.clone());
+                // Note that testing for joint-ness here is done via the raw
+                // source span as the joint-ness is a property of the raw source
+                // rather than wanting to take `override_span` into account.
+                let raw = self.span_src_raw;
                 self.real_token();
-                Ok(tt)
+                let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
+                Ok(if is_joint { tt.joint() } else { tt.into() })
             }
         }
     }
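
The tokentrees.rs hunks move the joint-ness test out of the tree-collecting loops and into `parse_token_tree`, basing it on the raw spans: adjacency is a property of the source text, and an `override_span` would give every token the same position, making unrelated operators look adjacent. A standalone sketch of that test, again with hypothetical simplified types:

```rust
// Sketch of the joint-ness check using raw source spans; hypothetical types.
#[derive(Clone, Copy)]
struct Span {
    lo: u32,
    hi: u32,
}

#[derive(Clone, Copy, PartialEq)]
enum Tok {
    Gt,    // `>`
    Ident, // some identifier
}

fn is_op(tok: Tok) -> bool {
    tok == Tok::Gt
}

/// `prev_raw` is the raw span of the token just consumed; `next_raw`/`next`
/// describe the token now under the cursor. They glue together ("joint")
/// only if they touch in the raw source and the next token is an operator.
fn is_joint(prev_raw: Span, next_raw: Span, next: Tok) -> bool {
    prev_raw.hi == next_raw.lo && is_op(next)
}

fn main() {
    // `>>` lexed as two `>` tokens at bytes 0..1 and 1..2: joint.
    assert!(is_joint(Span { lo: 0, hi: 1 }, Span { lo: 1, hi: 2 }, Tok::Gt));
    // `> >` with a space in between: not joint.
    assert!(!is_joint(Span { lo: 0, hi: 1 }, Span { lo: 2, hi: 3 }, Tok::Gt));
    // Adjacent but not an operator: not joint.
    assert!(!is_joint(Span { lo: 0, hi: 1 }, Span { lo: 1, hi: 2 }, Tok::Ident));
}
```

Here `>>` lexed as two `>` tokens is joint while `> >` is not, which is what lets downstream consumers reassemble multi-character operators.
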
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 5575614a4d4..034be6a6864 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -581,6 +581,8 @@ impl Token {
             if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
                 return tokens
             }
+            info!("cached tokens found, but they're not \"probably equal\", \
+                   going with stringified version");
         }
         return tokens_for_real
     }
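
The token.rs hunk only adds logging, but it sits inside a cache-or-recompute fallback: cached tokens are returned when they are "probably equal" to the re-stringified ones, otherwise the stringified version wins and the mismatch is now recorded. A generic sketch of that pattern, with hypothetical names and plain `Vec<String>` standing in for token streams:

```rust
// Generic fallback pattern: prefer the cache when it still matches the
// recomputed value, otherwise note the mismatch and use the recomputation.
fn pick_tokens(cached: Option<Vec<String>>, recomputed: Vec<String>) -> Vec<String> {
    if let Some(cached) = cached {
        if cached == recomputed {
            return cached;
        }
        eprintln!(
            "cached tokens found, but they're not \"probably equal\", \
             going with stringified version"
        );
    }
    recomputed
}

fn main() {
    let recomputed: Vec<String> = vec!["a".into(), "+".into(), "b".into()];
    // Stale cache: the recomputed tokens are used and the mismatch is logged.
    assert_eq!(pick_tokens(Some(vec!["a".into()]), recomputed.clone()), recomputed);
    // No cache at all: fall straight through to the recomputed tokens.
    assert_eq!(pick_tokens(None, recomputed.clone()), recomputed);
}
```
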