diff options
| author | bors <bors@rust-lang.org> | 2015-04-06 22:08:01 +0000 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2015-04-06 22:08:01 +0000 |
| commit | b49a5ef003fedcbb0d78aebda62ba30dfdd17a20 (patch) | |
| tree | d99b438e04f810e098c79b634ed6d730d2dbcb4a /src/libsyntax/ext | |
| parent | aab8669ddad0432ef7279cc7f7b0b20d32785314 (diff) | |
| parent | e3427c3c341fcd15cbac783bf8dad7276422c97a (diff) | |
| download | rust-b49a5ef003fedcbb0d78aebda62ba30dfdd17a20.tar.gz rust-b49a5ef003fedcbb0d78aebda62ba30dfdd17a20.zip | |
Auto merge of #23857 - phildawes:libsyntax_nopanic, r=nikomatsakis
Hello! I've been working towards a libsyntax without panics. See: http://internals.rust-lang.org/t/changing-libsyntax-to-use-result-instead-of-panic/1670 This patch changes the internals of parser.rs to use Result<> rather than panicking. It keeps the following old-style panicking functions as a facade: parse_expr, parse_item, parse_pat, parse_arm, parse_ty, parse_stmt. I left these functions because I wasn't sure what to do about the quote_* macros or how many syntax-extensions would break if these and quoting macros returned Result. The gist of the rest of the patch is: - Functions in parse/parser.rs return PResult<> rather than panicking - Other functions in libsyntax call panic! explicitly if they rely on panicking behaviour. - I added a macro 'panictry!()' to act as scaffolding for callers while converting panicking functions. (This does the same as 'unwrap()' but is easier to grep for and turn into try!()). Am I on the right track? I'd quite like to get something merged soon as keeping this rebased in the face of libsyntax changes is a lot of work. Please let me know what changes you'd like to see to make this happen. Thanks!, Phil
Diffstat (limited to 'src/libsyntax/ext')
| -rw-r--r-- | src/libsyntax/ext/asm.rs | 28 | ||||
| -rw-r--r-- | src/libsyntax/ext/base.rs | 8 | ||||
| -rw-r--r-- | src/libsyntax/ext/cfg.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/ext/expand.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/ext/format.rs | 6 | ||||
| -rw-r--r-- | src/libsyntax/ext/quote.rs | 6 | ||||
| -rw-r--r-- | src/libsyntax/ext/source_util.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_parser.rs | 30 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/macro_rules.rs | 14 | ||||
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 14 |
10 files changed, 57 insertions, 57 deletions
diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index d256698b885..f2b45d89f73 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -91,16 +91,16 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) p.token != token::ModSep { if outputs.len() != 0 { - p.eat(&token::Comma); + panictry!(p.eat(&token::Comma)); } - let (constraint, _str_style) = p.parse_str(); + let (constraint, _str_style) = panictry!(p.parse_str()); let span = p.last_span; - p.expect(&token::OpenDelim(token::Paren)); + panictry!(p.expect(&token::OpenDelim(token::Paren))); let out = p.parse_expr(); - p.expect(&token::CloseDelim(token::Paren)); + panictry!(p.expect(&token::CloseDelim(token::Paren))); // Expands a read+write operand into two operands. // @@ -131,10 +131,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) p.token != token::ModSep { if inputs.len() != 0 { - p.eat(&token::Comma); + panictry!(p.eat(&token::Comma)); } - let (constraint, _str_style) = p.parse_str(); + let (constraint, _str_style) = panictry!(p.parse_str()); if constraint.starts_with("=") { cx.span_err(p.last_span, "input operand constraint contains '='"); @@ -142,9 +142,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(p.last_span, "input operand constraint contains '+'"); } - p.expect(&token::OpenDelim(token::Paren)); + panictry!(p.expect(&token::OpenDelim(token::Paren))); let input = p.parse_expr(); - p.expect(&token::CloseDelim(token::Paren)); + panictry!(p.expect(&token::CloseDelim(token::Paren))); inputs.push((constraint, input)); } @@ -155,10 +155,10 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) p.token != token::ModSep { if clobs.len() != 0 { - p.eat(&token::Comma); + panictry!(p.eat(&token::Comma)); } - let (s, _str_style) = p.parse_str(); + let (s, _str_style) = panictry!(p.parse_str()); if OPTIONS.iter().any(|&opt| s == opt) { 
cx.span_warn(p.last_span, "expected a clobber, found an option"); @@ -167,7 +167,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } Options => { - let (option, _str_style) = p.parse_str(); + let (option, _str_style) = panictry!(p.parse_str()); if option == "volatile" { // Indicates that the inline assembly has side effects @@ -182,7 +182,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } if p.token == token::Comma { - p.eat(&token::Comma); + panictry!(p.eat(&token::Comma)); } } StateNone => () @@ -194,12 +194,12 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) match (&p.token, state.next(), state.next().next()) { (&token::Colon, StateNone, _) | (&token::ModSep, _, StateNone) => { - p.bump(); + panictry!(p.bump()); break 'statement; } (&token::Colon, st, _) | (&token::ModSep, _, st) => { - p.bump(); + panictry!(p.bump()); state = st; } (&token::Eof, _, _) => break 'statement, diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 71fba789ff8..80ee92608a5 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -652,9 +652,9 @@ impl<'a> ExtCtxt<'a> { pub fn bt_push(&mut self, ei: ExpnInfo) { self.recursion_count += 1; if self.recursion_count > self.ecfg.recursion_limit { - self.span_fatal(ei.call_site, + panic!(self.span_fatal(ei.call_site, &format!("recursion limit reached while expanding the macro `{}`", - ei.callee.name)); + ei.callee.name))); } let mut call_site = ei.call_site; @@ -699,7 +699,7 @@ impl<'a> ExtCtxt<'a> { /// value doesn't have to match anything) pub fn span_fatal(&self, sp: Span, msg: &str) -> ! 
{ self.print_backtrace(); - self.parse_sess.span_diagnostic.span_fatal(sp, msg); + panic!(self.parse_sess.span_diagnostic.span_fatal(sp, msg)); } /// Emit `msg` attached to `sp`, without immediately stopping @@ -817,7 +817,7 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt, let mut es = Vec::new(); while p.token != token::Eof { es.push(cx.expander().fold_expr(p.parse_expr())); - if p.eat(&token::Comma) { + if panictry!(p.eat(&token::Comma)){ continue; } if p.token != token::Eof { diff --git a/src/libsyntax/ext/cfg.rs b/src/libsyntax/ext/cfg.rs index 6a2209bf0ae..8af7fb7b268 100644 --- a/src/libsyntax/ext/cfg.rs +++ b/src/libsyntax/ext/cfg.rs @@ -29,7 +29,7 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt, let mut p = cx.new_parser_from_tts(tts); let cfg = p.parse_meta_item(); - if !p.eat(&token::Eof) { + if !panictry!(p.eat(&token::Eof)){ cx.span_err(sp, "expected 1 cfg-pattern"); return DummyResult::expr(sp); } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 94abc0b34bc..f13047d3725 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1684,7 +1684,7 @@ mod test { fn expand_crate_str(crate_str: String) -> ast::Crate { let ps = parse::new_parse_sess(); - let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod(); + let crate_ast = panictry!(string_to_parser(&ps, crate_str).parse_crate_mod()); // the cfg argument actually does matter, here... 
expand_crate(&ps,test_ecfg(),vec!(),vec!(),crate_ast) } diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 2fe77bf7a54..1d99a475b32 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -92,7 +92,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let fmtstr = p.parse_expr(); let mut named = false; while p.token != token::Eof { - if !p.eat(&token::Comma) { + if !panictry!(p.eat(&token::Comma)) { ecx.span_err(sp, "expected token: `,`"); return None; } @@ -101,7 +101,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) named = true; let ident = match p.token { token::Ident(i, _) => { - p.bump(); + panictry!(p.bump()); i } _ if named => { @@ -120,7 +120,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let interned_name = token::get_ident(ident); let name = &interned_name[..]; - p.expect(&token::Eq); + panictry!(p.expect(&token::Eq)); let e = p.parse_expr(); match names.get(name) { None => {} diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 87299721fb3..5776fa99740 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -781,11 +781,11 @@ fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[ast::TokenTree]) p.quote_depth += 1; let cx_expr = p.parse_expr(); - if !p.eat(&token::Comma) { - p.fatal("expected token `,`"); + if !panictry!(p.eat(&token::Comma)) { + panic!(p.fatal("expected token `,`")); } - let tts = p.parse_all_token_trees(); + let tts = panictry!(p.parse_all_token_trees()); p.abort_if_errors(); (cx_expr, tts) diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 31d8b207bb9..a4c2d2dc030 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -117,11 +117,11 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree while self.p.token != token::Eof { match self.p.parse_item() { Some(item) => ret.push(item), - None => 
self.p.span_fatal( + None => panic!(self.p.span_fatal( self.p.span, &format!("expected item, found `{}`", self.p.this_token_to_string()) - ) + )) } } Some(ret) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index b7d40a46f3e..4e0b74401a2 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -226,10 +226,10 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) } Occupied(..) => { let string = token::get_ident(bind_name); - p_s.span_diagnostic + panic!(p_s.span_diagnostic .span_fatal(sp, &format!("duplicated bind name: {}", - &string)) + &string))) } } } @@ -260,10 +260,10 @@ pub fn parse_or_else(sess: &ParseSess, match parse(sess, cfg, rdr, &ms[..]) { Success(m) => m, Failure(sp, str) => { - sess.span_diagnostic.span_fatal(sp, &str[..]) + panic!(sess.span_diagnostic.span_fatal(sp, &str[..])) } Error(sp, str) => { - sess.span_diagnostic.span_fatal(sp, &str[..]) + panic!(sess.span_diagnostic.span_fatal(sp, &str[..])) } } } @@ -512,46 +512,46 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal { match name { "tt" => { p.quote_depth += 1; //but in theory, non-quoted tts might be useful - let res = token::NtTT(P(p.parse_token_tree())); + let res = token::NtTT(P(panictry!(p.parse_token_tree()))); p.quote_depth -= 1; return res; } _ => {} } // check at the beginning and the parser checks after each bump - p.check_unknown_macro_variable(); + panictry!(p.check_unknown_macro_variable()); match name { "item" => match p.parse_item() { Some(i) => token::NtItem(i), - None => p.fatal("expected an item keyword") + None => panic!(p.fatal("expected an item keyword")) }, - "block" => token::NtBlock(p.parse_block()), + "block" => token::NtBlock(panictry!(p.parse_block())), "stmt" => match p.parse_stmt() { Some(s) => token::NtStmt(s), - None => p.fatal("expected a statement") + None => panic!(p.fatal("expected a statement")) }, "pat" => 
token::NtPat(p.parse_pat()), "expr" => token::NtExpr(p.parse_expr()), "ty" => token::NtTy(p.parse_ty()), // this could be handled like a token, since it is one "ident" => match p.token { - token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) } + token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(box sn,b) } _ => { let token_str = pprust::token_to_string(&p.token); - p.fatal(&format!("expected ident, found {}", - &token_str[..])) + panic!(p.fatal(&format!("expected ident, found {}", + &token_str[..]))) } }, "path" => { - token::NtPath(box p.parse_path(LifetimeAndTypesWithoutColons)) + token::NtPath(box panictry!(p.parse_path(LifetimeAndTypesWithoutColons))) } "meta" => token::NtMeta(p.parse_meta_item()), _ => { - p.span_fatal_help(sp, + panic!(p.span_fatal_help(sp, &format!("invalid fragment specifier `{}`", name), "valid fragment specifiers are `ident`, `block`, \ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \ - and `item`") + and `item`")) } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 1e53db60301..250ba0442ba 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -41,7 +41,7 @@ impl<'a> ParserAnyMacro<'a> { fn ensure_complete_parse(&self, allow_semi: bool) { let mut parser = self.parser.borrow_mut(); if allow_semi && parser.token == token::Semi { - parser.bump() + panictry!(parser.bump()) } if parser.token != token::Eof { let token_str = parser.this_token_to_string(); @@ -81,7 +81,7 @@ impl<'a> MacResult for ParserAnyMacro<'a> { let mut parser = self.parser.borrow_mut(); match parser.token { token::Eof => break, - _ => ret.push(parser.parse_impl_item()) + _ => ret.push(panictry!(parser.parse_impl_item())) } } self.ensure_complete_parse(false); @@ -142,7 +142,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { TtDelimited(_, ref delim) => &delim.tts[..], - _ => cx.span_fatal(sp, 
"malformed macro lhs") + _ => panic!(cx.span_fatal(sp, "malformed macro lhs")) }; match TokenTree::parse(cx, lhs_tt, arg) { @@ -153,7 +153,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **tt { // ignore delimiters TtDelimited(_, ref delimed) => delimed.tts.clone(), - _ => cx.span_fatal(sp, "macro rhs must be delimited"), + _ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")), } }, _ => cx.span_bug(sp, "bad thing in rhs") @@ -164,7 +164,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, imported_from, rhs); let mut p = Parser::new(cx.parse_sess(), cx.cfg(), Box::new(trncbr)); - p.check_unknown_macro_variable(); + panictry!(p.check_unknown_macro_variable()); // Let the context choose how to interpret the result. // Weird, but useful for X-macros. return box ParserAnyMacro { @@ -175,13 +175,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, best_fail_spot = sp; best_fail_msg = (*msg).clone(); }, - Error(sp, ref msg) => cx.span_fatal(sp, &msg[..]) + Error(sp, ref msg) => panic!(cx.span_fatal(sp, &msg[..])) } } _ => cx.bug("non-matcher found in parsed lhses") } } - cx.span_fatal(best_fail_spot, &best_fail_msg[..]); + panic!(cx.span_fatal(best_fail_spot, &best_fail_msg[..])); } // Note that macro-by-example's input is also matched against a token tree: diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 0d92bd761b4..e39b46a2d3e 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -247,22 +247,22 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { match lockstep_iter_size(&TtSequence(sp, seq.clone()), r) { LisUnconstrained => { - r.sp_diag.span_fatal( + panic!(r.sp_diag.span_fatal( sp.clone(), /* blame macro writer */ "attempted to repeat an expression \ containing no syntax \ - variables matched as repeating at this depth"); + variables matched as repeating at this depth")); } LisContradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - 
r.sp_diag.span_fatal(sp.clone(), &msg[..]); + panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..])); } LisConstraint(len, _) => { if len == 0 { if seq.op == ast::OneOrMore { // FIXME #2887 blame invoker - r.sp_diag.span_fatal(sp.clone(), - "this must repeat at least once"); + panic!(r.sp_diag.span_fatal(sp.clone(), + "this must repeat at least once")); } r.stack.last_mut().unwrap().idx += 1; @@ -306,10 +306,10 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { return ret_val; } MatchedSeq(..) => { - r.sp_diag.span_fatal( + panic!(r.sp_diag.span_fatal( r.cur_span, /* blame the macro writer */ &format!("variable '{:?}' is still repeating at this depth", - token::get_ident(ident))); + token::get_ident(ident)))); } } } |
