about summary refs log tree commit diff
path: root/src/libsyntax/parse/mod.rs
diff options
context:
space:
mode:
authorbors <bors@rust-lang.org>2015-04-06 22:08:01 +0000
committerbors <bors@rust-lang.org>2015-04-06 22:08:01 +0000
commitb49a5ef003fedcbb0d78aebda62ba30dfdd17a20 (patch)
treed99b438e04f810e098c79b634ed6d730d2dbcb4a /src/libsyntax/parse/mod.rs
parentaab8669ddad0432ef7279cc7f7b0b20d32785314 (diff)
parente3427c3c341fcd15cbac783bf8dad7276422c97a (diff)
downloadrust-b49a5ef003fedcbb0d78aebda62ba30dfdd17a20.tar.gz
rust-b49a5ef003fedcbb0d78aebda62ba30dfdd17a20.zip
Auto merge of #23857 - phildawes:libsyntax_nopanic, r=nikomatsakis
Hello! 

I've been working towards a libsyntax without panics. See:
http://internals.rust-lang.org/t/changing-libsyntax-to-use-result-instead-of-panic/1670

This patch changes the internals of parser.rs to use Result<> rather than panicking. It keeps the following old-style panicking functions as a facade:
parse_expr, parse_item, parse_pat, parse_arm, parse_ty, parse_stmt

I left these functions because I wasn't sure what to do about the quote_* macros or how many syntax-extensions would break if these and quoting macros returned Result.

The gist of the rest of the patch is:

 - Functions in parse/parser.rs return PResult<> rather than panicking
 - Other functions in libsyntax call panic! explicitly if they rely on panicking behaviour.
 - I added a macro 'panictry!()' to act as scaffolding for callers while converting panicking functions. (This does the same as 'unwrap()' but is easier to grep for and turn into try!()).

Am I on the right track?  I'd quite like to get something merged soon as keeping this rebased in the face of libsyntax changes is a lot of work. Please let me know what changes you'd like to see to make this happen.

Thanks!, Phil
Diffstat (limited to 'src/libsyntax/parse/mod.rs')
-rw-r--r--src/libsyntax/parse/mod.rs23
1 files changed, 13 insertions, 10 deletions
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index f59e1d8214a..226e2037b2f 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -12,11 +12,12 @@
 
 use ast;
 use codemap::{Span, CodeMap, FileMap};
-use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto};
+use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto, FatalError};
 use parse::attr::ParserAttr;
 use parse::parser::Parser;
 use ptr::P;
 
+
 use std::cell::{Cell, RefCell};
 use std::fs::File;
 use std::io::Read;
@@ -27,6 +28,8 @@ use std::path::{Path, PathBuf};
 use std::rc::Rc;
 use std::str;
 
+pub type PResult<T> = Result<T, FatalError>;
+
 #[macro_use]
 pub mod parser;
 
@@ -88,7 +91,7 @@ pub fn parse_crate_from_file(
     cfg: ast::CrateConfig,
     sess: &ParseSess
 ) -> ast::Crate {
-    new_parser_from_file(sess, cfg, input).parse_crate_mod()
+    panictry!(new_parser_from_file(sess, cfg, input).parse_crate_mod())
     // why is there no p.abort_if_errors here?
 }
 
@@ -109,7 +112,7 @@ pub fn parse_crate_from_source_str(name: String,
                                            cfg,
                                            name,
                                            source);
-    maybe_aborted(p.parse_crate_mod(),p)
+    maybe_aborted(panictry!(p.parse_crate_mod()),p)
 }
 
 pub fn parse_crate_attrs_from_source_str(name: String,
@@ -182,7 +185,7 @@ pub fn parse_tts_from_source_str(name: String,
     );
     p.quote_depth += 1;
     // right now this is re-creating the token trees from ... token trees.
-    maybe_aborted(p.parse_all_token_trees(),p)
+    maybe_aborted(panictry!(p.parse_all_token_trees()),p)
 }
 
 // Note: keep in sync with `with_hygiene::new_parser_from_source_str`
@@ -245,7 +248,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     -> Rc<FileMap> {
     let err = |msg: &str| {
         match spanopt {
-            Some(sp) => sess.span_diagnostic.span_fatal(sp, msg),
+            Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, msg)),
             None => sess.span_diagnostic.handler().fatal(msg),
         }
     };
@@ -286,7 +289,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
     let cfg = Vec::new();
     let srdr = lexer::StringReader::new(&sess.span_diagnostic, filemap);
     let mut p1 = Parser::new(sess, cfg, box srdr);
-    p1.parse_all_token_trees()
+    panictry!(p1.parse_all_token_trees())
 }
 
 /// Given tts and cfg, produce a parser
@@ -295,7 +298,7 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess,
                          cfg: ast::CrateConfig) -> Parser<'a> {
     let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts);
     let mut p = Parser::new(sess, cfg, box trdr);
-    p.check_unknown_macro_variable();
+    panictry!(p.check_unknown_macro_variable());
     p
 }
 
@@ -325,7 +328,7 @@ pub mod with_hygiene {
         );
         p.quote_depth += 1;
         // right now this is re-creating the token trees from ... token trees.
-        maybe_aborted(p.parse_all_token_trees(),p)
+        maybe_aborted(panictry!(p.parse_all_token_trees()),p)
     }
 
     // Note: keep this in sync with `super::new_parser_from_source_str` until
@@ -358,7 +361,7 @@ pub mod with_hygiene {
         let cfg = Vec::new();
         let srdr = make_reader(&sess.span_diagnostic, filemap);
         let mut p1 = Parser::new(sess, cfg, box srdr);
-        p1.parse_all_token_trees()
+        panictry!(p1.parse_all_token_trees())
     }
 }
 
@@ -964,7 +967,7 @@ mod test {
     #[test] fn parse_ident_pat () {
         let sess = new_parse_sess();
         let mut parser = string_to_parser(&sess, "b".to_string());
-        assert!(parser.parse_pat()
+        assert!(panictry!(parser.parse_pat_nopanic())
                 == P(ast::Pat{
                 id: ast::DUMMY_NODE_ID,
                 node: ast::PatIdent(ast::BindByValue(ast::MutImmutable),