diff options
| author | cgswords <cswords@mozilla.com> | 2016-06-20 08:49:33 -0700 |
|---|---|---|
| committer | cgswords <cameronswords@gmail.com> | 2016-06-21 11:12:36 -0700 |
| commit | d59accfb065843d12db9180a4f504664e3d23ef1 (patch) | |
| tree | d13f16af4af9a0a05d6545bb88aa2ea32b479cd2 /src/libsyntax/ext | |
| parent | 5522e678bcefe14cc2ab3d0ab329b7059ce52b36 (diff) | |
| download | rust-d59accfb065843d12db9180a4f504664e3d23ef1.tar.gz rust-d59accfb065843d12db9180a4f504664e3d23ef1.zip | |
Refactored token trees into their own files in preparation for token streams; modified tests to point to the new files.
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/base.rs | 28 | ||||
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 21 | ||||
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 17 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 9 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 16 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 14 |
7 files changed, 59 insertions, 48 deletions
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 5da81a269ab..98b6e0762c5 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -31,6 +31,7 @@ use fold::Folder; use std::collections::{HashMap, HashSet}; use std::rc::Rc; use std::default::Default; +use tokenstream; #[derive(Debug,Clone)] @@ -163,20 +164,22 @@ pub trait TTMacroExpander { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, - token_tree: &[ast::TokenTree]) + token_tree: &[tokenstream::TokenTree]) -> Box<MacResult+'cx>; } pub type MacroExpanderFn = - for<'cx> fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box<MacResult+'cx>; + for<'cx> fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) + -> Box<MacResult+'cx>; impl<F> TTMacroExpander for F - where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[ast::TokenTree]) -> Box<MacResult+'cx> + where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) + -> Box<MacResult+'cx> { fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, - token_tree: &[ast::TokenTree]) + token_tree: &[tokenstream::TokenTree]) -> Box<MacResult+'cx> { (*self)(ecx, span, token_tree) } @@ -187,22 +190,23 @@ pub trait IdentMacroExpander { cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: Vec<ast::TokenTree> ) + token_tree: Vec<tokenstream::TokenTree> ) -> Box<MacResult+'cx>; } pub type IdentMacroExpanderFn = - for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<ast::TokenTree>) -> Box<MacResult+'cx>; + for<'cx> fn(&'cx mut ExtCtxt, Span, ast::Ident, Vec<tokenstream::TokenTree>) + -> Box<MacResult+'cx>; impl<F> IdentMacroExpander for F where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, ast::Ident, - Vec<ast::TokenTree>) -> Box<MacResult+'cx> + Vec<tokenstream::TokenTree>) -> Box<MacResult+'cx> { fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: Vec<ast::TokenTree> ) + token_tree: Vec<tokenstream::TokenTree> ) -> Box<MacResult+'cx> { (*self)(cx, sp, ident, token_tree) @@ -607,7 
+611,7 @@ impl<'a> ExtCtxt<'a> { expand::MacroExpander::new(self) } - pub fn new_parser_from_tts(&self, tts: &[ast::TokenTree]) + pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> { parse::tts_to_parser(self.parse_sess, tts.to_vec(), self.cfg()) } @@ -806,7 +810,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt, expr: P<ast::Expr>, err_msg: &str) /// done as rarely as possible). pub fn check_zero_tts(cx: &ExtCtxt, sp: Span, - tts: &[ast::TokenTree], + tts: &[tokenstream::TokenTree], name: &str) { if !tts.is_empty() { cx.span_err(sp, &format!("{} takes no arguments", name)); @@ -817,7 +821,7 @@ pub fn check_zero_tts(cx: &ExtCtxt, /// is not a string literal, emit an error and return None. pub fn get_single_str_from_tts(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree], + tts: &[tokenstream::TokenTree], name: &str) -> Option<String> { let mut p = cx.new_parser_from_tts(tts); @@ -838,7 +842,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt, /// parsing error, emit a non-fatal error and return None. 
pub fn get_exprs_from_tts(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) -> Option<Vec<P<ast::Expr>>> { + tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> { let mut p = cx.new_parser_from_tts(tts); let mut es = Vec::new(); while p.token != token::Eof { diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index d63411568dc..0ac72b90c42 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -11,7 +11,6 @@ use ast::{Block, Crate, DeclKind, PatKind}; use ast::{Local, Ident, Mac_, Name, SpannedIdent}; use ast::{MacStmtStyle, Mrk, Stmt, StmtKind, ItemKind}; -use ast::TokenTree; use ast; use ext::mtwt; use ext::build::AstBuilder; @@ -27,6 +26,7 @@ use fold::*; use util::move_map::MoveMap; use parse::token::{fresh_mark, fresh_name, intern, keywords}; use ptr::P; +use tokenstream::TokenTree; use util::small_vector::SmallVector; use visit; use visit::Visitor; diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 871b0d4b1c0..85527963b64 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, TokenTree, Ty}; +use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty}; use codemap::Span; use ext::base::ExtCtxt; use ext::base; @@ -17,6 +17,7 @@ use parse::parser::{Parser, PathStyle}; use parse::token::*; use parse::token; use ptr::P; +use tokenstream::{self, TokenTree}; /// Quasiquoting works via token trees. 
/// @@ -33,7 +34,7 @@ pub mod rt { use ptr::P; use std::rc::Rc; - use ast::TokenTree; + use tokenstream::{self, TokenTree}; pub use parse::new_parser_from_tts; pub use codemap::{BytePos, Span, dummy_spanned, DUMMY_SP}; @@ -214,7 +215,7 @@ pub mod rt { if self.node.style == ast::AttrStyle::Inner { r.push(TokenTree::Token(self.span, token::Not)); } - r.push(TokenTree::Delimited(self.span, Rc::new(ast::Delimited { + r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited { delim: token::Bracket, open_span: self.span, tts: self.node.value.to_tokens(cx), @@ -234,7 +235,7 @@ pub mod rt { impl ToTokens for () { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> { - vec![TokenTree::Delimited(DUMMY_SP, Rc::new(ast::Delimited { + vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited { delim: token::Paren, open_span: DUMMY_SP, tts: vec![], @@ -548,7 +549,7 @@ fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P<ast::Expr> { } fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P<ast::Expr> { - let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext("TokenTree"), id_ext(name)); + let idents = vec!(id_ext("syntax"), id_ext("tokenstream"), id_ext("TokenTree"), id_ext(name)); cx.expr_path(cx.path_global(sp, idents)) } @@ -772,12 +773,12 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm None => cx.expr_none(sp), }; let e_op = match seq.op { - ast::KleeneOp::ZeroOrMore => "ZeroOrMore", - ast::KleeneOp::OneOrMore => "OneOrMore", + tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore", + tokenstream::KleeneOp::OneOrMore => "OneOrMore", }; let e_op_idents = vec![ id_ext("syntax"), - id_ext("ast"), + id_ext("tokenstream"), id_ext("KleeneOp"), id_ext(e_op), ]; @@ -787,7 +788,9 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm cx.field_imm(sp, id_ext("op"), e_op), cx.field_imm(sp, id_ext("num_captures"), cx.expr_usize(sp, seq.num_captures))]; - let seq_path = vec![id_ext("syntax"), 
id_ext("ast"), id_ext("SequenceRepetition")]; + let seq_path = vec![id_ext("syntax"), + id_ext("tokenstream"), + id_ext("SequenceRepetition")]; let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields); let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"), id_ext("rc"), diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index fd229d77966..b97e15e9260 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -18,6 +18,7 @@ use parse::token; use parse; use print::pprust; use ptr::P; +use tokenstream; use util::small_vector::SmallVector; use std::fs::File; @@ -30,7 +31,7 @@ use std::rc::Rc; // a given file into the current one. /// line!(): expands to the current line number -pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<base::MacResult+'static> { base::check_zero_tts(cx, sp, tts, "line!"); @@ -41,7 +42,7 @@ pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } /* column!(): expands to the current column number */ -pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<base::MacResult+'static> { base::check_zero_tts(cx, sp, tts, "column!"); @@ -54,7 +55,7 @@ pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) /// file!(): expands to the current filename */ /// The filemap (`loc.file`) contains a bunch more information we could spit /// out if we wanted. 
-pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<base::MacResult+'static> { base::check_zero_tts(cx, sp, tts, "file!"); @@ -64,14 +65,14 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) base::MacEager::expr(cx.expr_str(topmost, filename)) } -pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<base::MacResult+'static> { let s = pprust::tts_to_string(tts); base::MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&s[..]))) } -pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<base::MacResult+'static> { base::check_zero_tts(cx, sp, tts, "module_path!"); let string = cx.mod_path() @@ -87,7 +88,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) /// include! : parse the given file as an expr /// This is generally a bad idea because it's going to behave /// unhygienically. -pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<base::MacResult+'cx> { let file = match get_single_str_from_tts(cx, sp, tts, "include!") { Some(f) => f, @@ -130,7 +131,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree } // include_str! 
: read the given file, insert it as a literal string expr -pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<base::MacResult+'static> { let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") { Some(f) => f, @@ -167,7 +168,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } -pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) +pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box<base::MacResult+'static> { let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") { Some(f) => f, diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index ca5eb8f8003..5f4bf8042f9 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -79,7 +79,7 @@ pub use self::ParseResult::*; use self::TokenTreeOrTokenTreeVec::*; use ast; -use ast::{TokenTree, Name, Ident}; +use ast::{Name, Ident}; use codemap::{BytePos, mk_sp, Span, Spanned}; use codemap; use errors::FatalError; @@ -91,6 +91,7 @@ use parse::token::{Token, Nonterminal}; use parse::token; use print::pprust; use ptr::P; +use tokenstream::{self, TokenTree}; use std::mem; use std::rc::Rc; @@ -102,8 +103,8 @@ use std::collections::hash_map::Entry::{Vacant, Occupied}; #[derive(Clone)] enum TokenTreeOrTokenTreeVec { - Tt(ast::TokenTree), - TtSeq(Rc<Vec<ast::TokenTree>>), + Tt(tokenstream::TokenTree), + TtSeq(Rc<Vec<tokenstream::TokenTree>>), } impl TokenTreeOrTokenTreeVec { @@ -374,7 +375,7 @@ pub fn parse(sess: &ParseSess, match ei.top_elts.get_tt(idx) { /* need to descend into sequence */ TokenTree::Sequence(sp, seq) => { - if seq.op == ast::KleeneOp::ZeroOrMore { + if seq.op == tokenstream::KleeneOp::ZeroOrMore { let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; new_ei.idx += 1; diff --git 
a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index bbe989b0f40..eb354392e7d 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{self, TokenTree}; +use ast; use codemap::{Span, DUMMY_SP}; use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; use ext::base::{NormalTT, TTMacroExpander}; @@ -21,6 +21,7 @@ use parse::token::{self, gensym_ident, NtTT, Token}; use parse::token::Token::*; use print; use ptr::P; +use tokenstream::{self, TokenTree}; use util::small_vector::SmallVector; @@ -248,22 +249,22 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt")); let argument_gram = vec!( TokenTree::Sequence(DUMMY_SP, - Rc::new(ast::SequenceRepetition { + Rc::new(tokenstream::SequenceRepetition { tts: vec![ TokenTree::Token(DUMMY_SP, match_lhs_tok), TokenTree::Token(DUMMY_SP, token::FatArrow), TokenTree::Token(DUMMY_SP, match_rhs_tok)], separator: Some(token::Semi), - op: ast::KleeneOp::OneOrMore, + op: tokenstream::KleeneOp::OneOrMore, num_captures: 2 })), //to phase into semicolon-termination instead of //semicolon-separation TokenTree::Sequence(DUMMY_SP, - Rc::new(ast::SequenceRepetition { + Rc::new(tokenstream::SequenceRepetition { tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, - op: ast::KleeneOp::ZeroOrMore, + op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0 }))); @@ -427,7 +428,7 @@ impl FirstSets { } // Reverse scan: Sequence comes before `first`. - if subfirst.maybe_empty || seq_rep.op == ast::KleeneOp::ZeroOrMore { + if subfirst.maybe_empty || seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { // If sequence is potentially empty, then // union them (preserving first emptiness). 
first.add_all(&TokenSet { maybe_empty: true, ..subfirst }); @@ -474,7 +475,8 @@ impl FirstSets { assert!(first.maybe_empty); first.add_all(subfirst); - if subfirst.maybe_empty || seq_rep.op == ast::KleeneOp::ZeroOrMore { + if subfirst.maybe_empty || + seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { // continue scanning for more first // tokens, but also make sure we // restore empty-tracking state diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 6b3b5ce9de9..dbe6ca8c683 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -9,8 +9,7 @@ // except according to those terms. use self::LockstepIterSize::*; -use ast; -use ast::{TokenTree, Ident, Name}; +use ast::{Ident, Name}; use codemap::{Span, DUMMY_SP}; use errors::{Handler, DiagnosticBuilder}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; @@ -18,6 +17,7 @@ use parse::token::{DocComment, MatchNt, SubstNt}; use parse::token::{Token, NtIdent, SpecialMacroVar}; use parse::token; use parse::lexer::TokenAndSpan; +use tokenstream::{self, TokenTree}; use std::rc::Rc; use std::ops::Add; @@ -59,7 +59,7 @@ pub struct TtReader<'a> { pub fn new_tt_reader(sp_diag: &Handler, interp: Option<HashMap<Name, Rc<NamedMatch>>>, imported_from: Option<Ident>, - src: Vec<ast::TokenTree>) + src: Vec<tokenstream::TokenTree>) -> TtReader { new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false) } @@ -73,16 +73,16 @@ pub fn new_tt_reader(sp_diag: &Handler, pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler, interp: Option<HashMap<Name, Rc<NamedMatch>>>, imported_from: Option<Ident>, - src: Vec<ast::TokenTree>, + src: Vec<tokenstream::TokenTree>, desugar_doc_comments: bool) -> TtReader { let mut r = TtReader { sp_diag: sp_diag, stack: vec!(TtFrame { - forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { + forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { tts: src, // 
doesn't matter. This merely holds the root unzipping. - separator: None, op: ast::KleeneOp::ZeroOrMore, num_captures: 0 + separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0 })), idx: 0, dotdotdoted: false, @@ -259,7 +259,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisConstraint(len, _) => { if len == 0 { - if seq.op == ast::KleeneOp::OneOrMore { + if seq.op == tokenstream::KleeneOp::OneOrMore { // FIXME #2887 blame invoker panic!(r.sp_diag.span_fatal(sp.clone(), "this must repeat at least once")); |
