| field | value | date |
|---|---|---|
| author | bors <bors@rust-lang.org> | 2014-10-27 23:02:55 +0000 |
| committer | bors <bors@rust-lang.org> | 2014-10-27 23:02:55 +0000 |
| commit | bd7138dd698dde29fb4d7fd34529a863b85d947e (patch) | |
| tree | b0d85f6266675501dce79b4802325e0d60b147e3 /src/libsyntax/parse | |
| parent | e05c3b7799b45b78baf49f05763865be838f5b43 (diff) | |
| parent | 4dc06dceb2bbb7ced9ea137b5280f7ce413b4d01 (diff) | |
| download | rust-bd7138dd698dde29fb4d7fd34529a863b85d947e.tar.gz, rust-bd7138dd698dde29fb4d7fd34529a863b85d947e.zip | |
auto merge of #18368 : alexcrichton/rust/rollup, r=alexcrichton
Diffstat (limited to 'src/libsyntax/parse')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 228 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 73 |
2 files changed, 148 insertions, 153 deletions
```diff
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 2d7d32cd9ea..2965094f236 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -788,65 +788,57 @@ mod test {
     }
 
     // check the token-tree-ization of macros
-    #[test] fn string_to_tts_macro () {
+    #[test]
+    fn string_to_tts_macro () {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
         let tts: &[ast::TokenTree] = tts.as_slice();
         match tts {
-            [ast::TTTok(_,_),
-             ast::TTTok(_,token::NOT),
-             ast::TTTok(_,_),
-             ast::TTDelim(ref delim_elts)] => {
-                let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
-                match delim_elts {
-                    [ast::TTTok(_,token::LPAREN),
-                     ast::TTDelim(ref first_set),
-                     ast::TTTok(_,token::FAT_ARROW),
-                     ast::TTDelim(ref second_set),
-                     ast::TTTok(_,token::RPAREN)] => {
-                        let first_set: &[ast::TokenTree] =
-                            first_set.as_slice();
-                        match first_set {
-                            [ast::TTTok(_,token::LPAREN),
-                             ast::TTTok(_,token::DOLLAR),
-                             ast::TTTok(_,_),
-                             ast::TTTok(_,token::RPAREN)] => {
-                                let second_set: &[ast::TokenTree] =
-                                    second_set.as_slice();
-                                match second_set {
-                                    [ast::TTTok(_,token::LPAREN),
-                                     ast::TTTok(_,token::DOLLAR),
-                                     ast::TTTok(_,_),
-                                     ast::TTTok(_,token::RPAREN)] => {
-                                        assert_eq!("correct","correct")
-                                    }
-                                    _ => assert_eq!("wrong 4","correct")
-                                }
-                            },
-                            _ => {
-                                error!("failing value 3: {}",first_set);
-                                assert_eq!("wrong 3","correct")
-                            }
+            [ast::TtToken(_, token::IDENT(name_macro_rules, false)),
+             ast::TtToken(_, token::NOT),
+             ast::TtToken(_, token::IDENT(name_zip, false)),
+             ast::TtDelimited(_, ref macro_delimed)]
+            if name_macro_rules.as_str() == "macro_rules"
+               && name_zip.as_str() == "zip" => {
+                let (ref macro_open, ref macro_tts, ref macro_close) = **macro_delimed;
+                match (macro_open, macro_tts.as_slice(), macro_close) {
+                    (&ast::Delimiter { token: token::LPAREN, .. },
+                     [ast::TtDelimited(_, ref first_delimed),
+                      ast::TtToken(_, token::FAT_ARROW),
+                      ast::TtDelimited(_, ref second_delimed)],
+                     &ast::Delimiter { token: token::RPAREN, .. }) => {
+                        let (ref first_open, ref first_tts, ref first_close) = **first_delimed;
+                        match (first_open, first_tts.as_slice(), first_close) {
+                            (&ast::Delimiter { token: token::LPAREN, .. },
+                             [ast::TtToken(_, token::DOLLAR),
+                              ast::TtToken(_, token::IDENT(name, false))],
+                             &ast::Delimiter { token: token::RPAREN, .. })
+                            if name.as_str() == "a" => {},
+                            _ => fail!("value 3: {}", **first_delimed),
+                        }
+                        let (ref second_open, ref second_tts, ref second_close) = **second_delimed;
+                        match (second_open, second_tts.as_slice(), second_close) {
+                            (&ast::Delimiter { token: token::LPAREN, .. },
+                             [ast::TtToken(_, token::DOLLAR),
+                              ast::TtToken(_, token::IDENT(name, false))],
+                             &ast::Delimiter { token: token::RPAREN, .. })
+                            if name.as_str() == "a" => {},
+                            _ => fail!("value 4: {}", **second_delimed),
                         }
                     },
-                    _ => {
-                        error!("failing value 2: {}",delim_elts);
-                        assert_eq!("wrong","correct");
-                    }
+                    _ => fail!("value 2: {}", **macro_delimed),
                 }
             },
-            _ => {
-                error!("failing value: {}",tts);
-                assert_eq!("wrong 1","correct");
-            }
+            _ => fail!("value: {}",tts),
         }
     }
 
-    #[test] fn string_to_tts_1 () {
+    #[test]
+    fn string_to_tts_1 () {
         let tts = string_to_tts("fn a (b : int) { b; }".to_string());
         assert_eq!(json::encode(&tts),
     "[\
     {\
-    \"variant\":\"TTTok\",\
+    \"variant\":\"TtToken\",\
     \"fields\":[\
     null,\
     {\
@@ -859,7 +851,7 @@ mod test {
     ]\
     },\
     {\
-    \"variant\":\"TTTok\",\
+    \"variant\":\"TtToken\",\
     \"fields\":[\
     null,\
     {\
@@ -872,96 +864,90 @@ mod test {
     ]\
     },\
     {\
-    \"variant\":\"TTDelim\",\
+    \"variant\":\"TtDelimited\",\
     \"fields\":[\
+    null,\
     [\
     {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    \"LPAREN\"\
-    ]\
-    },\
-    {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    {\
-    \"variant\":\"IDENT\",\
-    \"fields\":[\
-    \"b\",\
-    false\
-    ]\
-    }\
-    ]\
+    \"span\":null,\
+    \"token\":\"LPAREN\"\
     },\
+    [\
+    {\
+    \"variant\":\"TtToken\",\
+    \"fields\":[\
+    null,\
+    {\
+    \"variant\":\"IDENT\",\
+    \"fields\":[\
+    \"b\",\
+    false\
+    ]\
+    }\
+    ]\
+    },\
+    {\
+    \"variant\":\"TtToken\",\
+    \"fields\":[\
+    null,\
+    \"COLON\"\
+    ]\
+    },\
+    {\
+    \"variant\":\"TtToken\",\
+    \"fields\":[\
+    null,\
+    {\
+    \"variant\":\"IDENT\",\
+    \"fields\":[\
+    \"int\",\
+    false\
+    ]\
+    }\
+    ]\
+    }\
+    ],\
     {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    \"COLON\"\
-    ]\
-    },\
-    {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    {\
-    \"variant\":\"IDENT\",\
-    \"fields\":[\
-    \"int\",\
-    false\
-    ]\
-    }\
-    ]\
-    },\
-    {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    \"RPAREN\"\
-    ]\
+    \"span\":null,\
+    \"token\":\"RPAREN\"\
     }\
     ]\
     ]\
     },\
     {\
-    \"variant\":\"TTDelim\",\
+    \"variant\":\"TtDelimited\",\
     \"fields\":[\
+    null,\
     [\
     {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    \"LBRACE\"\
-    ]\
-    },\
-    {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    {\
-    \"variant\":\"IDENT\",\
-    \"fields\":[\
-    \"b\",\
-    false\
-    ]\
-    }\
-    ]\
-    },\
-    {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    \"SEMI\"\
-    ]\
+    \"span\":null,\
+    \"token\":\"LBRACE\"\
     },\
+    [\
+    {\
+    \"variant\":\"TtToken\",\
+    \"fields\":[\
+    null,\
+    {\
+    \"variant\":\"IDENT\",\
+    \"fields\":[\
+    \"b\",\
+    false\
+    ]\
+    }\
+    ]\
+    },\
+    {\
+    \"variant\":\"TtToken\",\
+    \"fields\":[\
+    null,\
+    \"SEMI\"\
+    ]\
+    }\
+    ],\
     {\
-    \"variant\":\"TTTok\",\
-    \"fields\":[\
-    null,\
-    \"RBRACE\"\
-    ]\
+    \"span\":null,\
+    \"token\":\"RBRACE\"\
     }\
     ]\
     ]\
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 5abf79836f5..7bf751c2d5e 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -48,8 +48,8 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
 use ast::{StructVariantKind, BiSub};
 use ast::StrStyle;
 use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
-use ast::{TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok};
-use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
+use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken};
+use ast::{TtNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
 use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
 use ast::{TyTypeof, TyInfer, TypeMethod};
 use ast::{TyNil, TyParam, TyParamBound, TyParen, TyPath, TyPtr, TyQPath};
@@ -2497,27 +2497,30 @@ impl<'a> Parser<'a> {
         return e;
     }
 
-    /// Parse an optional separator followed by a kleene-style
+    /// Parse an optional separator followed by a Kleene-style
     /// repetition token (+ or *).
-    pub fn parse_sep_and_zerok(&mut self) -> (Option<token::Token>, bool) {
-        fn parse_zerok(parser: &mut Parser) -> Option<bool> {
+    pub fn parse_sep_and_kleene_op(&mut self) -> (Option<token::Token>, ast::KleeneOp) {
+        fn parse_kleene_op(parser: &mut Parser) -> Option<ast::KleeneOp> {
             match parser.token {
-                token::BINOP(token::STAR) | token::BINOP(token::PLUS) => {
-                    let zerok = parser.token == token::BINOP(token::STAR);
+                token::BINOP(token::STAR) => {
                     parser.bump();
-                    Some(zerok)
+                    Some(ast::ZeroOrMore)
+                },
+                token::BINOP(token::PLUS) => {
+                    parser.bump();
+                    Some(ast::OneOrMore)
                 },
                 _ => None
             }
         };
 
-        match parse_zerok(self) {
-            Some(zerok) => return (None, zerok),
+        match parse_kleene_op(self) {
+            Some(kleene_op) => return (None, kleene_op),
             None => {}
         }
 
         let separator = self.bump_and_get();
-        match parse_zerok(self) {
+        match parse_kleene_op(self) {
             Some(zerok) => (Some(separator), zerok),
             None => self.fatal("expected `*` or `+`")
         }
@@ -2526,8 +2529,8 @@ impl<'a> Parser<'a> {
     /// parse a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> TokenTree {
         // FIXME #6994: currently, this is too eager. It
-        // parses token trees but also identifies TTSeq's
-        // and TTNonterminal's; it's too early to know yet
+        // parses token trees but also identifies TtSequence's
+        // and TtNonterminal's; it's too early to know yet
         // whether something will be a nonterminal or a seq
         // yet.
         maybe_whole!(deref self, NtTT);
@@ -2564,26 +2567,21 @@ impl<'a> Parser<'a> {
                             seq_sep_none(),
                             |p| p.parse_token_tree()
                         );
-                        let (s, z) = p.parse_sep_and_zerok();
+                        let (sep, repeat) = p.parse_sep_and_kleene_op();
                         let seq = match seq {
                             Spanned { node, .. } => node,
                         };
-                        TTSeq(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
+                        TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat)
                     } else {
-                        TTNonterminal(sp, p.parse_ident())
+                        TtNonterminal(sp, p.parse_ident())
                     }
                 }
                 _ => {
-                    parse_any_tt_tok(p)
+                    TtToken(p.span, p.bump_and_get())
                 }
             }
         }
 
-        // turn the next token into a TTTok:
-        fn parse_any_tt_tok(p: &mut Parser) -> TokenTree {
-            TTTok(p.span, p.bump_and_get())
-        }
-
         match (&self.token, token::close_delimiter_for(&self.token)) {
             (&token::EOF, _) => {
                 let open_braces = self.open_braces.clone();
@@ -2595,21 +2593,32 @@ impl<'a> Parser<'a> {
                 self.fatal("this file contains an un-closed delimiter ");
             }
             (_, Some(close_delim)) => {
+                // The span for beginning of the delimited section
+                let pre_span = self.span;
+
                 // Parse the open delimiter.
                 self.open_braces.push(self.span);
-                let mut result = vec!(parse_any_tt_tok(self));
+                let open = Delimiter {
+                    span: self.span,
+                    token: self.bump_and_get(),
+                };
 
-                let trees =
-                    self.parse_seq_to_before_end(&close_delim,
-                                                 seq_sep_none(),
-                                                 |p| p.parse_token_tree());
-                result.extend(trees.into_iter());
+                // Parse the token trees within the delimeters
+                let tts = self.parse_seq_to_before_end(
+                    &close_delim, seq_sep_none(), |p| p.parse_token_tree()
+                );
 
                 // Parse the close delimiter.
-                result.push(parse_any_tt_tok(self));
+                let close = Delimiter {
+                    span: self.span,
+                    token: self.bump_and_get(),
+                };
                 self.open_braces.pop().unwrap();
 
-                TTDelim(Rc::new(result))
+                // Expand to cover the entire delimited token tree
+                let span = Span { hi: self.span.hi, ..pre_span };
+
+                TtDelimited(span, Rc::new((open, tts, close)))
             }
             _ => parse_non_delim_tt_tok(self)
        }
@@ -2673,8 +2682,8 @@ impl<'a> Parser<'a> {
                 if ms.len() == 0u {
                     self.fatal("repetition body must be nonempty");
                 }
-                let (sep, zerok) = self.parse_sep_and_zerok();
-                MatchSeq(ms, sep, zerok, name_idx_lo, *name_idx)
+                let (sep, kleene_op) = self.parse_sep_and_kleene_op();
+                MatchSeq(ms, sep, kleene_op, name_idx_lo, *name_idx)
             } else {
                 let bound_to = self.parse_ident();
                 self.expect(&token::COLON);
```
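For readers unfamiliar with the renamed `ast::TokenTree` variants touched above: `TtDelimited` now carries a span plus an `Rc<(Delimiter, Vec<TokenTree>, Delimiter)>`, so the open delimiter, the enclosed token trees, and the close delimiter travel together instead of being flattened into one `Vec`, and repetition is described by `ast::KleeneOp` (`ZeroOrMore` for `*`, `OneOrMore` for `+`) rather than a bare `bool`. The sketch below is illustrative only and is not part of this commit; the helper name `count_leaves` is hypothetical, and it assumes it lives somewhere inside libsyntax of this era (pre-1.0 Rust, hence `uint`) where the `ast` module is in scope.

```rust
// Hypothetical helper, not part of this commit: walk a token tree using the
// renamed variants from this diff. TtToken is a single token; TtDelimited is
// a delimited group whose Rc'd triple keeps the open delimiter, the inner
// token trees, and the close delimiter together.
fn count_leaves(tt: &ast::TokenTree) -> uint {
    match *tt {
        // A single token counts as one leaf.
        ast::TtToken(_, _) => 1,
        ast::TtDelimited(_, ref delimed) => {
            // Same destructuring the updated string_to_tts_macro test uses.
            let (_, ref tts, _) = **delimed;
            // The two delimiter tokens plus everything between them.
            2 + tts.iter().fold(0, |acc, sub| acc + count_leaves(sub))
        }
        // TtSequence and TtNonterminal only appear inside macro definitions;
        // treat them as opaque single leaves in this sketch.
        _ => 1,
    }
}
```

In the same spirit, callers of the new `parse_sep_and_kleene_op` match on `ast::ZeroOrMore` / `ast::OneOrMore` directly instead of decoding a boolean, as the `MatchSeq` change in the final hunk shows.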
