diff options
| author | Jeffrey Seyfried <jeffrey.seyfried@gmail.com> | 2017-01-29 08:38:44 +0000 |
|---|---|---|
| committer | Jeffrey Seyfried <jeffrey.seyfried@gmail.com> | 2017-02-28 22:14:29 +0000 |
| commit | d8b34e9a74a4e91c4283ba4002a050ac0150cec6 (patch) | |
| tree | fc62b9e970fd9120e078856dd6c9727bcb55ac89 /src/libsyntax | |
| parent | 247188803356234ae5d6ecf947ffb2308688dc90 (diff) | |
| download | rust-d8b34e9a74a4e91c4283ba4002a050ac0150cec6.tar.gz rust-d8b34e9a74a4e91c4283ba4002a050ac0150cec6.zip | |
Add `syntax::ext::tt::quoted::{TokenTree, ..}` and remove `tokenstream::TokenTree::Sequence`.
Diffstat (limited to 'src/libsyntax')
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 150 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 44 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 76 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/quoted.rs | 230 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 51 | ||||
| -rw-r--r-- | src/libsyntax/fold.rs | 7 | ||||
| -rw-r--r-- | src/libsyntax/lib.rs | 1 | ||||
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 10 | ||||
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 161 | ||||
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 14 | ||||
| -rw-r--r-- | src/libsyntax/tokenstream.rs | 62 |
11 files changed, 380 insertions, 426 deletions
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 6c46f90f3d4..b1b69c80f4d 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -14,10 +14,9 @@ use ext::base::ExtCtxt; use ext::base; use ext::build::AstBuilder; use parse::parser::{Parser, PathStyle}; -use parse::token::*; use parse::token; use ptr::P; -use tokenstream::{self, TokenTree}; +use tokenstream::TokenTree; /// Quasiquoting works via token trees. @@ -356,14 +355,35 @@ pub mod rt { } fn parse_tts(&self, s: String) -> Vec<TokenTree> { - panictry!(parse::parse_tts_from_source_str( - "<quote expansion>".to_string(), - s, - self.parse_sess())) + parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess()) } } } +// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`. +pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> { + use std::rc::Rc; + use tokenstream::Delimited; + + let mut results = Vec::new(); + let mut result = Vec::new(); + for tree in tts { + match tree { + TokenTree::Token(_, token::OpenDelim(..)) => { + results.push(::std::mem::replace(&mut result, Vec::new())); + } + TokenTree::Token(span, token::CloseDelim(delim)) => { + let tree = + TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result })); + result = results.pop().unwrap(); + result.push(tree); + } + tree @ _ => result.push(tree), + } + } + result +} + // These panicking parsing functions are used by the quote_*!() syntax extensions, // but shouldn't be used otherwise. 
pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> { @@ -510,20 +530,6 @@ pub fn expand_quote_path(cx: &mut ExtCtxt, base::MacEager::expr(expanded) } -pub fn expand_quote_matcher(cx: &mut ExtCtxt, - sp: Span, - tts: &[TokenTree]) - -> Box<base::MacResult+'static> { - let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); - let mut vector = mk_stmts_let(cx, sp); - vector.extend(statements_mk_tts(cx, &tts[..], true)); - vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); - let block = cx.expr_block(cx.block(sp, vector)); - - let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]); - base::MacEager::expr(expanded) -} - fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> { strs.iter().map(|s| ast::Ident::from_str(s)).collect() } @@ -669,12 +675,6 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> { vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]); } - token::MatchNt(name, kind) => { - return cx.expr_call(sp, - mk_token_path(cx, sp, "MatchNt"), - vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]); - } - token::Interpolated(_) => panic!("quote! 
with interpolated token"), _ => () @@ -712,9 +712,9 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> { mk_token_path(cx, sp, name) } -fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> { +fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> { match *tt { - TokenTree::Token(sp, SubstNt(ident)) => { + TokenTree::Token(sp, token::Ident(ident)) if quoted => { // tt.extend($ident.to_tokens(ext_cx)) let e_to_toks = @@ -733,13 +733,6 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm vec![cx.stmt_expr(e_push)] } - ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => { - let mut seq = vec![]; - for i in 0..tt.len() { - seq.push(tt.get_tt(i)); - } - statements_mk_tts(cx, &seq[..], matcher) - } TokenTree::Token(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call(sp, @@ -753,77 +746,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm vec![cx.stmt_expr(e_push)] }, TokenTree::Delimited(span, ref delimed) => { - statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter() - .chain(delimed.tts.iter() - .flat_map(|tt| statements_mk_tt(cx, tt, matcher))) - .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher)) - .collect() - }, - TokenTree::Sequence(sp, ref seq) => { - if !matcher { - panic!("TokenTree::Sequence in quote!"); - } - - let e_sp = cx.expr_ident(sp, id_ext("_sp")); - - let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); - let mut tts_stmts = vec![stmt_let_tt]; - tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher)); - tts_stmts.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); - let e_tts = cx.expr_block(cx.block(sp, tts_stmts)); - - let e_separator = match seq.separator { - Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)), - None => cx.expr_none(sp), - }; - let e_op = match seq.op { - 
tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore", - tokenstream::KleeneOp::OneOrMore => "OneOrMore", - }; - let e_op_idents = vec![ - id_ext("syntax"), - id_ext("tokenstream"), - id_ext("KleeneOp"), - id_ext(e_op), - ]; - let e_op = cx.expr_path(cx.path_global(sp, e_op_idents)); - let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts), - cx.field_imm(sp, id_ext("separator"), e_separator), - cx.field_imm(sp, id_ext("op"), e_op), - cx.field_imm(sp, id_ext("num_captures"), - cx.expr_usize(sp, seq.num_captures))]; - let seq_path = vec![id_ext("syntax"), - id_ext("tokenstream"), - id_ext("SequenceRepetition")]; - let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields); - let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"), - id_ext("rc"), - id_ext("Rc"), - id_ext("new")], - vec![e_seq_struct]); - let e_tok = cx.expr_call(sp, - mk_tt_path(cx, sp, "Sequence"), - vec![e_sp, e_rc_new]); - let e_push = - cx.expr_method_call(sp, - cx.expr_ident(sp, id_ext("tt")), - id_ext("push"), - vec![e_tok]); - vec![cx.stmt_expr(e_push)] + let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false); + stmts.extend(statements_mk_tts(cx, &delimed.tts)); + stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false)); + stmts } } } fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree]) -> (P<ast::Expr>, Vec<TokenTree>) { - // NB: It appears that the main parser loses its mind if we consider - // $foo as a SubstNt during the main parse, so we have to re-parse - // under quote_depth > 0. This is silly and should go away; the _guess_ is - // it has to do with transition away from supporting old-style macros, so - // try removing it when enough of them are gone. 
- let mut p = cx.new_parser_from_tts(tts); - p.quote_depth += 1; let cx_expr = panictry!(p.parse_expr()); if !p.eat(&token::Comma) { @@ -877,24 +810,31 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> { vec![stmt_let_sp, stmt_let_tt] } -fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<ast::Stmt> { +fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> { let mut ss = Vec::new(); + let mut quoted = false; for tt in tts { - ss.extend(statements_mk_tt(cx, tt, matcher)); + quoted = match *tt { + TokenTree::Token(_, token::Dollar) if !quoted => true, + _ => { + ss.extend(statements_mk_tt(cx, tt, quoted)); + false + } + } } ss } -fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) - -> (P<ast::Expr>, P<ast::Expr>) { +fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) { let (cx_expr, tts) = parse_arguments_to_quote(cx, tts); let mut vector = mk_stmts_let(cx, sp); - vector.extend(statements_mk_tts(cx, &tts[..], false)); + vector.extend(statements_mk_tts(cx, &tts[..])); vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt")))); let block = cx.expr_block(cx.block(sp, vector)); + let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")]; - (cx_expr, block) + (cx_expr, cx.expr_call_global(sp, unflatten, vec![block])) } fn expand_wrapper(cx: &ExtCtxt, diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 089c35c694a..5761a61342b 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -82,13 +82,14 @@ use ast::Ident; use syntax_pos::{self, BytePos, mk_sp, Span}; use codemap::Spanned; use errors::FatalError; +use ext::tt::quoted; use parse::{Directory, ParseSess}; use parse::parser::{PathStyle, Parser}; -use parse::token::{DocComment, MatchNt, SubstNt}; +use parse::token::{DocComment, MatchNt}; use parse::token::{Token, Nonterminal}; use parse::token; use 
print::pprust; -use tokenstream::{self, TokenTree}; +use tokenstream::TokenTree; use util::small_vector::SmallVector; use std::mem; @@ -101,8 +102,8 @@ use std::collections::hash_map::Entry::{Vacant, Occupied}; #[derive(Clone)] enum TokenTreeOrTokenTreeVec { - Tt(tokenstream::TokenTree), - TtSeq(Vec<tokenstream::TokenTree>), + Tt(quoted::TokenTree), + TtSeq(Vec<quoted::TokenTree>), } impl TokenTreeOrTokenTreeVec { @@ -113,7 +114,7 @@ impl TokenTreeOrTokenTreeVec { } } - fn get_tt(&self, index: usize) -> TokenTree { + fn get_tt(&self, index: usize) -> quoted::TokenTree { match *self { TtSeq(ref v) => v[index].clone(), Tt(ref tt) => tt.get_tt(index), @@ -144,7 +145,9 @@ struct MatcherPos { pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>; -pub fn count_names(ms: &[TokenTree]) -> usize { +pub fn count_names(ms: &[quoted::TokenTree]) -> usize { + use self::quoted::TokenTree; + ms.iter().fold(0, |count, elt| { count + match *elt { TokenTree::Sequence(_, ref seq) => { @@ -161,7 +164,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize { }) } -fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> { +fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> { let match_idx_hi = count_names(&ms[..]); let matches = create_matches(match_idx_hi); Box::new(MatcherPos { @@ -200,7 +203,10 @@ pub enum NamedMatch { MatchedNonterminal(Rc<Nonterminal>) } -fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> NamedParseResult { +fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[quoted::TokenTree], mut res: I) + -> NamedParseResult { + use self::quoted::TokenTree; + fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I, ret_val: &mut HashMap<Ident, Rc<NamedMatch>>) -> Result<(), (syntax_pos::Span, String)> { @@ -225,9 +231,6 @@ fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> Na } } } - TokenTree::Token(sp, SubstNt(..)) => { - return Err((sp, 
"missing fragment specifier".to_string())) - } TokenTree::Token(..) => (), } @@ -281,6 +284,8 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, eof_eis: &mut SmallVector<Box<MatcherPos>>, bb_eis: &mut SmallVector<Box<MatcherPos>>, token: &Token, span: &syntax_pos::Span) -> ParseResult<()> { + use self::quoted::TokenTree; + while let Some(mut ei) = cur_eis.pop() { // When unzipped trees end, remove them while ei.idx >= ei.top_elts.len() { @@ -346,7 +351,7 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, match ei.top_elts.get_tt(idx) { /* need to descend into sequence */ TokenTree::Sequence(sp, seq) => { - if seq.op == tokenstream::KleeneOp::ZeroOrMore { + if seq.op == quoted::KleeneOp::ZeroOrMore { // Examine the case where there are 0 matches of this sequence let mut new_ei = ei.clone(); new_ei.match_cur += seq.num_captures; @@ -380,9 +385,6 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, _ => bb_eis.push(ei), } } - TokenTree::Token(sp, SubstNt(..)) => { - return Error(sp, "missing fragment specifier".to_string()) - } seq @ TokenTree::Delimited(..) 
| seq @ TokenTree::Token(_, DocComment(..)) => { let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq)); let idx = ei.idx; @@ -406,8 +408,13 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>, Success(()) } -pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: Option<Directory>) +pub fn parse(sess: &ParseSess, + tts: Vec<TokenTree>, + ms: &[quoted::TokenTree], + directory: Option<Directory>) -> NamedParseResult { + use self::quoted::TokenTree; + let mut parser = Parser::new(sess, tts, directory, true); let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo)); let mut next_eis = Vec::new(); // or proceed normally @@ -479,10 +486,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { match name { "tt" => { - p.quote_depth += 1; //but in theory, non-quoted tts might be useful - let tt = panictry!(p.parse_token_tree()); - p.quote_depth -= 1; - return token::NtTT(tt); + return token::NtTT(panictry!(p.parse_token_tree())); } _ => {} } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index d0c1c0efea7..5da401d48ee 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -16,6 +16,7 @@ use ext::expand::{Expansion, ExpansionKind}; use ext::tt::macro_parser::{Success, Error, Failure}; use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{parse, parse_failure_msg}; +use ext::tt::quoted; use ext::tt::transcribe::transcribe; use parse::{Directory, ParseSess}; use parse::parser::Parser; @@ -23,7 +24,7 @@ use parse::token::{self, NtTT, Token}; use parse::token::Token::*; use print; use symbol::Symbol; -use tokenstream::{self, TokenTree}; +use tokenstream::TokenTree; use std::collections::{HashMap}; use std::collections::hash_map::{Entry}; @@ -58,8 +59,8 @@ impl<'a> ParserAnyMacro<'a> { struct 
MacroRulesMacroExpander { name: ast::Ident, - lhses: Vec<TokenTree>, - rhses: Vec<TokenTree>, + lhses: Vec<quoted::TokenTree>, + rhses: Vec<quoted::TokenTree>, valid: bool, } @@ -86,8 +87,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, sp: Span, name: ast::Ident, arg: &[TokenTree], - lhses: &[TokenTree], - rhses: &[TokenTree]) + lhses: &[quoted::TokenTree], + rhses: &[quoted::TokenTree]) -> Box<MacResult+'cx> { if cx.trace_macros() { println!("{}! {{ {} }}", @@ -101,7 +102,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers let lhs_tt = match *lhs { - TokenTree::Delimited(_, ref delim) => &delim.tts[..], + quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..], _ => cx.span_bug(sp, "malformed macro lhs") }; @@ -109,7 +110,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, Success(named_matches) => { let rhs = match rhses[i] { // ignore delimiters - TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(), + quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(), _ => cx.span_bug(sp, "malformed macro rhs"), }; // rhs has holes ( `$id` and `$(...)` that need filled) @@ -167,21 +168,21 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt")); let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt")); let argument_gram = vec![ - TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { + quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition { tts: vec![ - TokenTree::Token(DUMMY_SP, match_lhs_tok), - TokenTree::Token(DUMMY_SP, token::FatArrow), - TokenTree::Token(DUMMY_SP, match_rhs_tok), + quoted::TokenTree::Token(DUMMY_SP, match_lhs_tok), + quoted::TokenTree::Token(DUMMY_SP, token::FatArrow), + quoted::TokenTree::Token(DUMMY_SP, match_rhs_tok), ], separator: Some(token::Semi), - op: tokenstream::KleeneOp::OneOrMore, + op: quoted::KleeneOp::OneOrMore, 
num_captures: 2, })), // to phase into semicolon-termination instead of semicolon-separation - TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { - tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, - op: tokenstream::KleeneOp::ZeroOrMore, + quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition { + tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)], separator: None, + op: quoted::KleeneOp::ZeroOrMore, num_captures: 0 })), ]; @@ -206,12 +207,13 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = **m { if let NtTT(ref tt) = **nt { - valid &= check_lhs_nt_follows(sess, tt); - return (*tt).clone(); + let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap(); + valid &= check_lhs_nt_follows(sess, &tt); + return tt; } } sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") - }).collect::<Vec<TokenTree>>() + }).collect::<Vec<quoted::TokenTree>>() } _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") }; @@ -221,11 +223,11 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { s.iter().map(|m| { if let MatchedNonterminal(ref nt) = **m { if let NtTT(ref tt) = **nt { - return (*tt).clone(); + return quoted::parse(&[tt.clone()], false, sess).pop().unwrap(); } } sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") - }).collect() + }).collect::<Vec<quoted::TokenTree>>() } _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs") }; @@ -249,14 +251,14 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable")) } -fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool { +fn check_lhs_nt_follows(sess: &ParseSess, lhs: &quoted::TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts.
Or, it can be a "bare sequence", not wrapped in parens. match lhs { - &TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts), + &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts), _ => { let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; - sess.span_diagnostic.span_err(lhs.get_span(), msg); + sess.span_diagnostic.span_err(lhs.span(), msg); false } } @@ -266,7 +268,8 @@ fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool { /// Check that the lhs contains no repetition which could match an empty token /// tree, because then the matcher would hang indefinitely. -fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { +fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { + use self::quoted::TokenTree; for tt in tts { match *tt { TokenTree::Token(_, _) => (), @@ -278,7 +281,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { if seq.tts.iter().all(|seq_tt| { match *seq_tt { TokenTree::Sequence(_, ref sub_seq) => - sub_seq.op == tokenstream::KleeneOp::ZeroOrMore, + sub_seq.op == quoted::KleeneOp::ZeroOrMore, _ => false, } }) { @@ -296,15 +299,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { true } -fn check_rhs(sess: &ParseSess, rhs: &TokenTree) -> bool { +fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool { match *rhs { - TokenTree::Delimited(..) => return true, - _ => sess.span_diagnostic.span_err(rhs.get_span(), "macro rhs must be delimited") + quoted::TokenTree::Delimited(..) 
=> return true, + _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited") } false } -fn check_matcher(sess: &ParseSess, matcher: &[TokenTree]) -> bool { +fn check_matcher(sess: &ParseSess, matcher: &[quoted::TokenTree]) -> bool { let first_sets = FirstSets::new(matcher); let empty_suffix = TokenSet::empty(); let err = sess.span_diagnostic.err_count(); @@ -335,7 +338,9 @@ struct FirstSets { } impl FirstSets { - fn new(tts: &[TokenTree]) -> FirstSets { + fn new(tts: &[quoted::TokenTree]) -> FirstSets { + use self::quoted::TokenTree; + let mut sets = FirstSets { first: HashMap::new() }; build_recur(&mut sets, tts); return sets; @@ -382,7 +387,7 @@ impl FirstSets { } // Reverse scan: Sequence comes before `first`. - if subfirst.maybe_empty || seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { + if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore { // If sequence is potentially empty, then // union them (preserving first emptiness). first.add_all(&TokenSet { maybe_empty: true, ..subfirst }); @@ -401,7 +406,9 @@ impl FirstSets { // walks forward over `tts` until all potential FIRST tokens are // identified. - fn first(&self, tts: &[TokenTree]) -> TokenSet { + fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet { + use self::quoted::TokenTree; + let mut first = TokenSet::empty(); for tt in tts.iter() { assert!(first.maybe_empty); @@ -430,7 +437,7 @@ impl FirstSets { assert!(first.maybe_empty); first.add_all(subfirst); if subfirst.maybe_empty || - seq_rep.op == tokenstream::KleeneOp::ZeroOrMore { + seq_rep.op == quoted::KleeneOp::ZeroOrMore { // continue scanning for more first // tokens, but also make sure we // restore empty-tracking state @@ -549,9 +556,10 @@ impl TokenSet { // see `FirstSets::new`. 
fn check_matcher_core(sess: &ParseSess, first_sets: &FirstSets, - matcher: &[TokenTree], + matcher: &[quoted::TokenTree], follow: &TokenSet) -> TokenSet { use print::pprust::token_to_string; + use self::quoted::TokenTree; let mut last = TokenSet::empty(); diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs new file mode 100644 index 00000000000..1170bcabb77 --- /dev/null +++ b/src/libsyntax/ext/tt/quoted.rs @@ -0,0 +1,230 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use ast; +use ext::tt::macro_parser; +use parse::{ParseSess, token}; +use print::pprust; +use symbol::{keywords, Symbol}; +use syntax_pos::{DUMMY_SP, Span, BytePos}; +use tokenstream; + +use std::rc::Rc; + +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct Delimited { + pub delim: token::DelimToken, + pub tts: Vec<TokenTree>, +} + +impl Delimited { + pub fn open_token(&self) -> token::Token { + token::OpenDelim(self.delim) + } + + pub fn close_token(&self) -> token::Token { + token::CloseDelim(self.delim) + } + + pub fn open_tt(&self, span: Span) -> TokenTree { + let open_span = match span { + DUMMY_SP => DUMMY_SP, + _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span }, + }; + TokenTree::Token(open_span, self.open_token()) + } + + pub fn close_tt(&self, span: Span) -> TokenTree { + let close_span = match span { + DUMMY_SP => DUMMY_SP, + _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span }, + }; + TokenTree::Token(close_span, self.close_token()) + } +} + +#[derive(Clone, PartialEq, 
Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct SequenceRepetition { + /// The sequence of token trees + pub tts: Vec<TokenTree>, + /// The optional separator + pub separator: Option<token::Token>, + /// Whether the sequence can be repeated zero (*), or one or more times (+) + pub op: KleeneOp, + /// The number of `MatchNt`s that appear in the sequence (and subsequences) + pub num_captures: usize, +} + +/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) +/// for token sequences. +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] +pub enum KleeneOp { + ZeroOrMore, + OneOrMore, +} + +/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)` +/// are "first-class" token trees. +#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] +pub enum TokenTree { + Token(Span, token::Token), + Delimited(Span, Rc<Delimited>), + /// A kleene-style repetition sequence with a span + Sequence(Span, Rc<SequenceRepetition>), +} + +impl TokenTree { + pub fn len(&self) -> usize { + match *self { + TokenTree::Delimited(_, ref delimed) => match delimed.delim { + token::NoDelim => delimed.tts.len(), + _ => delimed.tts.len() + 2, + }, + TokenTree::Sequence(_, ref seq) => seq.tts.len(), + TokenTree::Token(..) => 0, + } + } + + pub fn get_tt(&self, index: usize) -> TokenTree { + match (self, index) { + (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { + delimed.tts[index].clone() + } + (&TokenTree::Delimited(span, ref delimed), _) => { + if index == 0 { + return delimed.open_tt(span); + } + if index == delimed.tts.len() + 1 { + return delimed.close_tt(span); + } + delimed.tts[index - 1].clone() + } + (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(), + _ => panic!("Cannot expand a token tree"), + } + } + + /// Retrieve the TokenTree's span. 
+ pub fn span(&self) -> Span { + match *self { + TokenTree::Token(sp, _) | + TokenTree::Delimited(sp, _) | + TokenTree::Sequence(sp, _) => sp, + } + } +} + +pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess) + -> Vec<TokenTree> { + let mut result = Vec::new(); + let mut trees = input.iter().cloned(); + while let Some(tree) = trees.next() { + let tree = parse_tree(tree, &mut trees, expect_matchers, sess); + match tree { + TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => { + let span = match trees.next() { + Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { + Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => { + let span = Span { lo: start_sp.lo, ..end_sp }; + result.push(TokenTree::Token(span, token::MatchNt(ident, kind))); + continue + } + tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + }, + tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), + }; + sess.span_diagnostic.span_err(span, "missing fragment specifier"); + } + _ => result.push(tree), + } + } + result +} + +fn parse_tree<I>(tree: tokenstream::TokenTree, + trees: &mut I, + expect_matchers: bool, + sess: &ParseSess) + -> TokenTree + where I: Iterator<Item = tokenstream::TokenTree>, +{ + match tree { + tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() { + Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => { + if delimited.delim != token::Paren { + let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim)); + let msg = format!("expected `(`, found `{}`", tok); + sess.span_diagnostic.span_err(span, &msg); + } + let sequence = parse(&delimited.tts, expect_matchers, sess); + let (separator, op) = parse_sep_and_kleene_op(trees, span, sess); + let name_captures = macro_parser::count_names(&sequence); + TokenTree::Sequence(span, Rc::new(SequenceRepetition { + tts: sequence, + separator: separator, + op: 
op, + num_captures: name_captures, + })) + } + Some(tokenstream::TokenTree::Token(ident_span, token::Ident(ident))) => { + let span = Span { lo: span.lo, ..ident_span }; + if ident.name == keywords::Crate.name() { + let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident }; + TokenTree::Token(span, token::Ident(ident)) + } else { + TokenTree::Token(span, token::SubstNt(ident)) + } + } + Some(tokenstream::TokenTree::Token(span, tok)) => { + let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok)); + sess.span_diagnostic.span_err(span, &msg); + TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident())) + } + None => TokenTree::Token(span, token::Dollar), + }, + tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok), + tokenstream::TokenTree::Delimited(span, delimited) => { + TokenTree::Delimited(span, Rc::new(Delimited { + delim: delimited.delim, + tts: parse(&delimited.tts, expect_matchers, sess), + })) + } + } +} + +fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess) + -> (Option<token::Token>, KleeneOp) + where I: Iterator<Item = tokenstream::TokenTree>, +{ + fn kleene_op(token: &token::Token) -> Option<KleeneOp> { + match *token { + token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore), + token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore), + _ => None, + } + } + + let span = match input.next() { + Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) { + Some(op) => return (None, op), + None => match input.next() { + Some(tokenstream::TokenTree::Token(span, tok2)) => match kleene_op(&tok2) { + Some(op) => return (Some(tok), op), + None => span, + }, + tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + } + }, + tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + }; + + sess.span_diagnostic.span_err(span, "expected `*` or `+`"); + (None, KleeneOp::ZeroOrMore) +} diff --git 
a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 46bc1dc8b76..856294433a8 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -11,9 +11,10 @@ use ast::Ident; use errors::Handler; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; +use ext::tt::quoted; use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT}; use syntax_pos::{Span, DUMMY_SP}; -use tokenstream::{self, TokenTree, Delimited, SequenceRepetition}; +use tokenstream::{TokenTree, Delimited}; use util::small_vector::SmallVector; use std::rc::Rc; @@ -24,34 +25,28 @@ use std::collections::HashMap; // An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`). enum Frame { Delimited { - forest: Rc<Delimited>, - idx: usize, - span: Span, - }, - MatchNt { - name: Ident, - kind: Ident, + forest: Rc<quoted::Delimited>, idx: usize, span: Span, }, Sequence { - forest: Rc<SequenceRepetition>, + forest: Rc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token>, }, } impl Frame { - fn new(tts: Vec<TokenTree>) -> Frame { - let forest = Rc::new(tokenstream::Delimited { delim: token::NoDelim, tts: tts }); + fn new(tts: Vec<quoted::TokenTree>) -> Frame { + let forest = Rc::new(quoted::Delimited { delim: token::NoDelim, tts: tts }); Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP } } } impl Iterator for Frame { - type Item = TokenTree; + type Item = quoted::TokenTree; - fn next(&mut self) -> Option<TokenTree> { + fn next(&mut self) -> Option<quoted::TokenTree> { match *self { Frame::Delimited { ref forest, ref mut idx, .. 
} => { *idx += 1; @@ -61,15 +56,6 @@ impl Iterator for Frame { *idx += 1; forest.tts.get(*idx - 1).cloned() } - Frame::MatchNt { ref mut idx, name, kind, span } => { - *idx += 1; - match *idx { - 1 => Some(TokenTree::Token(span, token::SubstNt(name))), - 2 => Some(TokenTree::Token(span, token::Colon)), - 3 => Some(TokenTree::Token(span, token::Ident(kind))), - _ => None, - } - } } } } @@ -79,7 +65,7 @@ impl Iterator for Frame { /// (and should) be None. pub fn transcribe(sp_diag: &Handler, interp: Option<HashMap<Ident, Rc<NamedMatch>>>, - src: Vec<tokenstream::TokenTree>) + src: Vec<quoted::TokenTree>) -> Vec<TokenTree> { let mut stack = SmallVector::one(Frame::new(src)); let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */ @@ -121,15 +107,14 @@ pub fn transcribe(sp_diag: &Handler, result = result_stack.pop().unwrap(); result.push(tree); } - _ => {} } continue }; match tree { - TokenTree::Sequence(sp, seq) => { + quoted::TokenTree::Sequence(sp, seq) => { // FIXME(pcwalton): Bad copy. 
- match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()), + match lockstep_iter_size(&quoted::TokenTree::Sequence(sp, seq.clone()), &interpolations, &repeat_idx) { LockstepIterSize::Unconstrained => { @@ -145,7 +130,7 @@ pub fn transcribe(sp_diag: &Handler, } LockstepIterSize::Constraint(len, _) => { if len == 0 { - if seq.op == tokenstream::KleeneOp::OneOrMore { + if seq.op == quoted::KleeneOp::OneOrMore { // FIXME #2887 blame invoker panic!(sp_diag.span_fatal(sp.clone(), "this must repeat at least once")); @@ -163,7 +148,7 @@ pub fn transcribe(sp_diag: &Handler, } } // FIXME #2887: think about span stuff here - TokenTree::Token(sp, SubstNt(ident)) => { + quoted::TokenTree::Token(sp, SubstNt(ident)) => { match lookup_cur_matched(ident, &interpolations, &repeat_idx) { None => result.push(TokenTree::Token(sp, SubstNt(ident))), Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched { @@ -187,14 +172,11 @@ pub fn transcribe(sp_diag: &Handler, } } } - TokenTree::Delimited(span, delimited) => { + quoted::TokenTree::Delimited(span, delimited) => { stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span }); result_stack.push(mem::replace(&mut result, Vec::new())); } - TokenTree::Token(span, MatchNt(name, kind)) => { - stack.push(Frame::MatchNt { name: name, kind: kind, idx: 0, span: span }); - } - tt @ TokenTree::Token(..) 
=> result.push(tt), + quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)), } } } @@ -245,10 +227,11 @@ impl Add for LockstepIterSize { } } -fn lockstep_iter_size(tree: &TokenTree, +fn lockstep_iter_size(tree: &quoted::TokenTree, interpolations: &HashMap<Ident, Rc<NamedMatch>>, repeat_idx: &[usize]) -> LockstepIterSize { + use self::quoted::TokenTree; match *tree { TokenTree::Delimited(_, ref delimed) => { delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 1ee070cb92d..c33d945d60e 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -551,13 +551,6 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree { } )) }, - TokenTree::Sequence(span, ref seq) => - TokenTree::Sequence(fld.new_span(span), - Rc::new(SequenceRepetition { - tts: fld.fold_tts(&seq.tts), - separator: seq.separator.clone().map(|tok| fld.fold_token(tok)), - ..**seq - })), } } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 87a03adf6b7..39a9aff48bf 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -139,6 +139,7 @@ pub mod ext { pub mod transcribe; pub mod macro_parser; pub mod macro_rules; + pub mod quoted; } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 20e80afc115..78fd706b27a 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -139,13 +139,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa new_parser_from_source_str(sess, name, source).parse_stmt() } -// Warning: This parses with quote_depth > 0, which is not the default. pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, Vec<tokenstream::TokenTree>> { - let mut p = new_parser_from_source_str(sess, name, source); - p.quote_depth += 1; - // right now this is re-creating the token trees from ... token trees. 
- p.parse_all_token_trees() + -> Vec<tokenstream::TokenTree> { + filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source)) } // Create a new parser from a source string @@ -986,7 +982,7 @@ mod tests { _ => panic!("not a macro"), }; - let span = tts.iter().rev().next().unwrap().get_span(); + let span = tts.iter().rev().next().unwrap().span(); match sess.codemap().span_to_snippet(span) { Ok(s) => assert_eq!(&s[..], "{ body }"), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 3a3c20dfb64..ab965e27633 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -43,19 +43,16 @@ use {ast, attr}; use codemap::{self, CodeMap, Spanned, spanned, respan}; use syntax_pos::{self, Span, Pos, BytePos, mk_sp}; use errors::{self, DiagnosticBuilder}; -use ext::tt::macro_parser; -use parse; -use parse::classify; +use parse::{self, classify, token}; use parse::common::SeqSep; use parse::lexer::TokenAndSpan; use parse::obsolete::ObsoleteSyntax; -use parse::token::{self, MatchNt, SubstNt}; use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership}; use util::parser::{AssocOp, Fixity}; use print::pprust; use ptr::P; use parse::PResult; -use tokenstream::{self, Delimited, SequenceRepetition, TokenTree}; +use tokenstream::{Delimited, TokenTree}; use symbol::{Symbol, keywords}; use util::ThinVec; @@ -168,8 +165,6 @@ pub struct Parser<'a> { /// the previous token kind prev_token_kind: PrevTokenKind, pub restrictions: Restrictions, - pub quote_depth: usize, // not (yet) related to the quasiquoter - parsing_token_tree: bool, /// The set of seen errors about obsolete syntax. 
Used to suppress /// extra detail when the same error is seen twice pub obsolete_set: HashSet<ObsoleteSyntax>, @@ -329,8 +324,6 @@ impl<'a> Parser<'a> { prev_span: syntax_pos::DUMMY_SP, prev_token_kind: PrevTokenKind::Other, restrictions: Restrictions::empty(), - quote_depth: 0, - parsing_token_tree: false, obsolete_set: HashSet::new(), directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned }, root_module_name: None, @@ -359,20 +352,11 @@ impl<'a> Parser<'a> { if i + 1 < tts.len() { self.tts.push((tts, i + 1)); } - // FIXME(jseyfried): remove after fixing #39390 in #39419. - if self.quote_depth > 0 { - if let TokenTree::Sequence(sp, _) = tt { - self.span_err(sp, "attempted to repeat an expression containing no \ - syntax variables matched as repeating at this depth"); - } - } - match tt { - TokenTree::Token(sp, tok) => TokenAndSpan { tok: tok, sp: sp }, - _ if tt.len() > 0 => { - self.tts.push((tt, 0)); - continue - } - _ => continue, + if let TokenTree::Token(sp, tok) = tt { + TokenAndSpan { tok: tok, sp: sp } + } else { + self.tts.push((tt, 0)); + continue } } else { TokenAndSpan { tok: token::Eof, sp: self.span } @@ -997,7 +981,6 @@ impl<'a> Parser<'a> { tok = match tts.get_tt(i) { TokenTree::Token(_, tok) => tok, TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim), - TokenTree::Sequence(..) 
=> token::Dollar, }; } } @@ -2586,139 +2569,21 @@ impl<'a> Parser<'a> { return Ok(e); } - // Parse unquoted tokens after a `$` in a token tree - fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> { - let mut sp = self.span; - let name = match self.token { - token::Dollar => { - self.bump(); - - if self.token == token::OpenDelim(token::Paren) { - let Spanned { node: seq, span: seq_span } = self.parse_seq( - &token::OpenDelim(token::Paren), - &token::CloseDelim(token::Paren), - SeqSep::none(), - |p| p.parse_token_tree() - )?; - let (sep, repeat) = self.parse_sep_and_kleene_op()?; - let name_num = macro_parser::count_names(&seq); - return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi), - Rc::new(SequenceRepetition { - tts: seq, - separator: sep, - op: repeat, - num_captures: name_num - }))); - } else if self.token.is_keyword(keywords::Crate) { - let ident = match self.token { - token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id }, - _ => unreachable!(), - }; - self.bump(); - return Ok(TokenTree::Token(sp, token::Ident(ident))); - } else { - sp = mk_sp(sp.lo, self.span.hi); - self.parse_ident().unwrap_or_else(|mut e| { - e.emit(); - keywords::Invalid.ident() - }) - } - } - token::SubstNt(name) => { - self.bump(); - name - } - _ => unreachable!() - }; - // continue by trying to parse the `:ident` after `$name` - if self.token == token::Colon && - self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) { - self.bump(); - sp = mk_sp(sp.lo, self.span.hi); - let nt_kind = self.parse_ident()?; - Ok(TokenTree::Token(sp, MatchNt(name, nt_kind))) - } else { - Ok(TokenTree::Token(sp, SubstNt(name))) - } - } - pub fn check_unknown_macro_variable(&mut self) { - if self.quote_depth == 0 && !self.parsing_token_tree { - match self.token { - token::SubstNt(name) => - self.fatal(&format!("unknown macro variable `{}`", name)).emit(), - _ => {} - } - } - } - - /// Parse an optional separator followed by a Kleene-style - /// repetition token (+ or *). 
- pub fn parse_sep_and_kleene_op(&mut self) - -> PResult<'a, (Option<token::Token>, tokenstream::KleeneOp)> { - fn parse_kleene_op<'a>(parser: &mut Parser<'a>) -> - PResult<'a, Option<tokenstream::KleeneOp>> { - match parser.token { - token::BinOp(token::Star) => { - parser.bump(); - Ok(Some(tokenstream::KleeneOp::ZeroOrMore)) - }, - token::BinOp(token::Plus) => { - parser.bump(); - Ok(Some(tokenstream::KleeneOp::OneOrMore)) - }, - _ => Ok(None) - } - }; - - if let Some(kleene_op) = parse_kleene_op(self)? { - return Ok((None, kleene_op)); - } - - let separator = match self.token { - token::CloseDelim(..) => None, - _ => Some(self.bump_and_get()), - }; - match parse_kleene_op(self)? { - Some(zerok) => Ok((separator, zerok)), - None => return Err(self.fatal("expected `*` or `+`")) + if let token::SubstNt(name) = self.token { + self.fatal(&format!("unknown macro variable `{}`", name)).emit() } } /// parse a single token tree from the input. pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> { - // FIXME #6994: currently, this is too eager. It - // parses token trees but also identifies TokenType::Sequence's - // and token::SubstNt's; it's too early to know yet - // whether something will be a nonterminal or a seq - // yet. match self.token { - token::OpenDelim(delim) => { - if self.quote_depth == 0 { - let tt = self.tts.pop().unwrap().0; - self.bump(); - return Ok(tt); - } - - let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true); - let lo = self.span.lo; - self.bump(); - let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace), - &token::CloseDelim(token::Paren), - &token::CloseDelim(token::Bracket)], - SeqSep::none(), - |p| p.parse_token_tree(), - |mut e| e.emit()); - self.parsing_token_tree = parsing_token_tree; + token::OpenDelim(..) 
=> { + let tt = self.tts.pop().unwrap().0; self.bump(); - - Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited { - delim: delim, - tts: tts, - }))) + return Ok(tt); }, - token::CloseDelim(..) | token::Eof => Ok(TokenTree::Token(self.span, token::Eof)), - token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => self.parse_unquoted(), + token::CloseDelim(_) | token::Eof => unreachable!(), _ => Ok(TokenTree::Token(self.span, self.bump_and_get())), } } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index f8f1820d0b9..593d551046b 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -1475,20 +1475,6 @@ impl<'a> State<'a> { space(&mut self.s)?; word(&mut self.s, &token_to_string(&delimed.close_token())) }, - TokenTree::Sequence(_, ref seq) => { - word(&mut self.s, "$(")?; - for tt_elt in &seq.tts { - self.print_tt(tt_elt)?; - } - word(&mut self.s, ")")?; - if let Some(ref tk) = seq.separator { - word(&mut self.s, &token_to_string(tk))?; - } - match seq.op { - tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"), - tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"), - } - } } } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index bd63e9f39e5..66654046721 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -12,9 +12,7 @@ //! //! TokenStreams represent syntactic objects before they are converted into ASTs. //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s, -//! which are themselves either a single Token, a Delimited subsequence of tokens, -//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro -//! expansion). +//! which are themselves a single `Token` or a `Delimited` subsequence of tokens. //! //! ## Ownership //! 
TokenStreams are persistent data structures constructed as ropes with reference @@ -28,10 +26,10 @@ use ast::{self, AttrStyle, LitKind}; use syntax_pos::{BytePos, Span, DUMMY_SP}; use codemap::Spanned; use ext::base; -use ext::tt::macro_parser; +use ext::tt::{macro_parser, quoted}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::{self, Directory}; -use parse::token::{self, Token, Lit, Nonterminal}; +use parse::token::{self, Token, Lit}; use print::pprust; use serialize::{Decoder, Decodable, Encoder, Encodable}; use symbol::Symbol; @@ -84,27 +82,6 @@ impl Delimited { } } -/// A sequence of token trees -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub struct SequenceRepetition { - /// The sequence of token trees - pub tts: Vec<TokenTree>, - /// The optional separator - pub separator: Option<token::Token>, - /// Whether the sequence can be repeated zero (*), or one or more times (+) - pub op: KleeneOp, - /// The number of `MatchNt`s that appear in the sequence (and subsequences) - pub num_captures: usize, -} - -/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star) -/// for token sequences. -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] -pub enum KleeneOp { - ZeroOrMore, - OneOrMore, -} - /// When the main rust parser encounters a syntax-extension invocation, it /// parses the arguments to the invocation as a token-tree. This is a very /// loose structure, such that all sorts of different AST-fragments can @@ -123,10 +100,6 @@ pub enum TokenTree { Token(Span, token::Token), /// A delimited sequence of token trees Delimited(Span, Rc<Delimited>), - - // This only makes sense in MBE macros. 
- /// A kleene-style repetition sequence with a span - Sequence(Span, Rc<SequenceRepetition>), } impl TokenTree { @@ -138,15 +111,10 @@ impl TokenTree { AttrStyle::Inner => 3, } } - TokenTree::Token(_, token::Interpolated(ref nt)) => { - if let Nonterminal::NtTT(..) = **nt { 1 } else { 0 } - }, - TokenTree::Token(_, token::MatchNt(..)) => 3, TokenTree::Delimited(_, ref delimed) => match delimed.delim { token::NoDelim => delimed.tts.len(), _ => delimed.tts.len() + 2, }, - TokenTree::Sequence(_, ref seq) => seq.tts.len(), TokenTree::Token(..) => 0, } } @@ -197,30 +165,12 @@ impl TokenTree { } delimed.tts[index - 1].clone() } - (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => { - let v = [TokenTree::Token(sp, token::SubstNt(name)), - TokenTree::Token(sp, token::Colon), - TokenTree::Token(sp, token::Ident(kind))]; - v[index].clone() - } - (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(), _ => panic!("Cannot expand a token tree"), } } - /// Returns the `Span` corresponding to this token tree. - pub fn get_span(&self) -> Span { - match *self { - TokenTree::Token(span, _) => span, - TokenTree::Delimited(span, _) => span, - TokenTree::Sequence(span, _) => span, - } - } - /// Use this token tree as a matcher to parse given tts. - pub fn parse(cx: &base::ExtCtxt, - mtch: &[TokenTree], - tts: &[TokenTree]) + pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree]) -> macro_parser::NamedParseResult { // `None` is because we're not interpolating let directory = Directory { @@ -252,9 +202,7 @@ impl TokenTree { /// Retrieve the TokenTree's span. pub fn span(&self) -> Span { match *self { - TokenTree::Token(sp, _) | - TokenTree::Delimited(sp, _) | - TokenTree::Sequence(sp, _) => sp, + TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp, } } |
