path: root/src/libsyntax/parse
author     Nick Cameron <ncameron@mozilla.com>    2016-08-29 16:16:43 +1200
committer  Nick Cameron <ncameron@mozilla.com>    2016-09-22 08:47:57 +1200
commit     6a2d2c949581c710eeb505000e56ffa1e5a860b5 (patch)
tree       053157b72ce58708786d8356c07aaafbef95e5c1 /src/libsyntax/parse
parent     c772948b687488a087356cb91432425662e034b9 (diff)
Adds a `ProcMacro` form of syntax extension
This commit adds syntax extension forms matching the types for procedural macros 2.0 (RFC #1566). These still require the usual syntax extension boilerplate, but this is a first step towards a proper implementation and should be useful for the macros 1.1 work too.

Supports both attribute-like and function-like macros.
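
For reference, the two user-facing forms this maps to are the ones described by RFC 1566. A minimal sketch of what such a macro crate looks like, using the attribute names and the `proc_macro::TokenStream` type as they later stabilized (whether this exact surface syntax works at this commit is an assumption; the names `answer` and `noop` are purely illustrative):

    // Must be compiled as a proc-macro crate (crate-type "proc-macro").
    extern crate proc_macro;
    use proc_macro::TokenStream;

    // Function-like macro: invoked as `answer!( ... )`.
    #[proc_macro]
    pub fn answer(_input: TokenStream) -> TokenStream {
        "fn answer() -> u32 { 42 }".parse().unwrap()
    }

    // Attribute-like macro: invoked as `#[noop]` on an item.
    #[proc_macro_attribute]
    pub fn noop(_attr: TokenStream, item: TokenStream) -> TokenStream {
        // Pass the annotated item through unchanged.
        item
    }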
Diffstat (limited to 'src/libsyntax/parse')
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs  58
-rw-r--r--  src/libsyntax/parse/mod.rs         7
2 files changed, 59 insertions, 6 deletions
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 9e9ea096460..53294e78710 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -85,6 +85,12 @@ pub struct StringReader<'a> {
     /// The last character to be read
     pub curr: Option<char>,
     pub filemap: Rc<syntax_pos::FileMap>,
+    /// If Some, stop reading the source at this position (inclusive).
+    pub terminator: Option<BytePos>,
+    /// Whether to record new-lines in filemap. This is only necessary the first
+    /// time a filemap is lexed. If part of a filemap is being re-lexed, this
+    /// should be set to false.
+    pub save_new_lines: bool,
     // cached:
     pub peek_tok: token::Token,
     pub peek_span: Span,
@@ -96,7 +102,15 @@ pub struct StringReader<'a> {
 
 impl<'a> Reader for StringReader<'a> {
     fn is_eof(&self) -> bool {
-        self.curr.is_none()
+        if self.curr.is_none() {
+            return true;
+        }
+
+        match self.terminator {
+            Some(t) => self.pos > t,
+            None => false,
+        }
+
     }
     /// Return the next token. EFFECT: advances the string_reader.
     fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
@@ -164,6 +178,14 @@ impl<'a> StringReader<'a> {
     pub fn new_raw<'b>(span_diagnostic: &'b Handler,
                        filemap: Rc<syntax_pos::FileMap>)
                        -> StringReader<'b> {
+        let mut sr = StringReader::new_raw_internal(span_diagnostic, filemap);
+        sr.bump();
+        sr
+    }
+
+    fn new_raw_internal<'b>(span_diagnostic: &'b Handler,
+                            filemap: Rc<syntax_pos::FileMap>)
+                            -> StringReader<'b> {
         if filemap.src.is_none() {
             span_diagnostic.bug(&format!("Cannot lex filemap \
                                           without source: {}",
@@ -172,21 +194,21 @@ impl<'a> StringReader<'a> {
 
         let source_text = (*filemap.src.as_ref().unwrap()).clone();
 
-        let mut sr = StringReader {
+        StringReader {
             span_diagnostic: span_diagnostic,
             pos: filemap.start_pos,
             last_pos: filemap.start_pos,
             col: CharPos(0),
             curr: Some('\n'),
             filemap: filemap,
+            terminator: None,
+            save_new_lines: true,
             // dummy values; not read
             peek_tok: token::Eof,
             peek_span: syntax_pos::DUMMY_SP,
             source_text: source_text,
             fatal_errs: Vec::new(),
-        };
-        sr.bump();
-        sr
+        }
     }
 
     pub fn new<'b>(span_diagnostic: &'b Handler,
@@ -200,6 +222,28 @@ impl<'a> StringReader<'a> {
         sr
     }
 
+    pub fn from_span<'b>(span_diagnostic: &'b Handler,
+                         span: Span,
+                         codemap: &CodeMap)
+                         -> StringReader<'b> {
+        let start_pos = codemap.lookup_byte_offset(span.lo);
+        let last_pos = codemap.lookup_byte_offset(span.hi);
+        assert!(start_pos.fm.name == last_pos.fm.name, "Attempt to lex span which crosses files");
+        let mut sr = StringReader::new_raw_internal(span_diagnostic, start_pos.fm.clone());
+        sr.pos = span.lo;
+        sr.last_pos = span.lo;
+        sr.terminator = Some(span.hi);
+        sr.save_new_lines = false;
+
+        sr.bump();
+
+        if let Err(_) = sr.advance_token() {
+            sr.emit_fatal_errors();
+            panic!(FatalError);
+        }
+        sr
+    }
+
     pub fn curr_is(&self, c: char) -> bool {
         self.curr == Some(c)
     }
@@ -405,7 +449,9 @@ impl<'a> StringReader<'a> {
             self.curr = Some(ch);
             self.col = self.col + CharPos(1);
             if last_char == '\n' {
-                self.filemap.next_line(self.last_pos);
+                if self.save_new_lines {
+                    self.filemap.next_line(self.last_pos);
+                }
                 self.col = CharPos(0);
             }
 
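
The lexer changes above boil down to two new knobs on StringReader: an optional `terminator` position past which `is_eof` reports end-of-input, and a `save_new_lines` flag so that re-lexing part of an already-lexed filemap does not record its line starts a second time. A self-contained sketch of the terminator logic only (the struct and field names here are made up; only the shape of `is_eof` mirrors the hunk above):

    struct MiniReader {
        src: Vec<char>,
        pos: usize,                // index of the next char to read
        terminator: Option<usize>, // if Some, stop reading past this position (inclusive)
    }

    impl MiniReader {
        fn is_eof(&self) -> bool {
            if self.pos >= self.src.len() {
                return true;
            }
            match self.terminator {
                Some(t) => self.pos > t,
                None => false,
            }
        }
    }

    fn main() {
        let r = MiniReader {
            src: "fn main() {}".chars().collect(),
            pos: 3,
            terminator: Some(2),
        };
        // Past the terminator, so the reader reports EOF even though source remains.
        assert!(r.is_eof());
    }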
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 5aa0efdec11..4ad8e227cbb 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -258,6 +258,13 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     }
 }
 
+pub fn span_to_tts(sess: &ParseSess, span: Span) -> Vec<tokenstream::TokenTree> {
+    let cfg = Vec::new();
+    let srdr = lexer::StringReader::from_span(&sess.span_diagnostic, span, &sess.code_map);
+    let mut p1 = Parser::new(sess, cfg, Box::new(srdr));
+    panictry!(p1.parse_all_token_trees())
+}
+
 /// Given a filemap, produce a sequence of token-trees
 pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
     -> Vec<tokenstream::TokenTree> {
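
Taken together, `StringReader::from_span` and `span_to_tts` let the parser re-lex an arbitrary span of an existing filemap into token trees, which is what a procedural macro needs in order to see the tokens of the code it was applied to. A hedged, compiler-internal sketch of a caller (this helper is hypothetical and not part of the diff; it assumes a `ParseSess` is in scope, as it is throughout libsyntax):

    use syntax::parse::{self, ParseSess};
    use syntax::tokenstream::TokenTree;
    use syntax_pos::Span;

    // Hypothetical helper: recover the token trees for the item an attribute
    // macro was applied to, given the span of that item.
    fn tts_for_span(sess: &ParseSess, item_span: Span) -> Vec<TokenTree> {
        // Internally this builds a StringReader via `from_span`, which sets
        // `terminator = span.hi` and `save_new_lines = false`, then parses
        // all token trees up to that terminator.
        parse::span_to_tts(sess, item_span)
    }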