Diffstat (limited to 'src/libsyntax')
-rw-r--r--   src/libsyntax/ast.rs                    |  2
-rw-r--r--   src/libsyntax/ast_map/blocks.rs         | 10
-rw-r--r--   src/libsyntax/ast_map/mod.rs            | 36
-rw-r--r--   src/libsyntax/ast_util.rs               | 10
-rw-r--r--   src/libsyntax/codemap.rs                |  8
-rw-r--r--   src/libsyntax/diagnostic.rs             | 14
-rw-r--r--   src/libsyntax/ext/expand.rs             |  6
-rw-r--r--   src/libsyntax/ext/mtwt.rs               |  8
-rw-r--r--   src/libsyntax/ext/quote.rs              |  8
-rw-r--r--   src/libsyntax/ext/tt/macro_parser.rs    |  4
-rw-r--r--   src/libsyntax/ext/tt/macro_rules.rs     |  2
-rw-r--r--   src/libsyntax/fold.rs                   |  4
-rw-r--r--   src/libsyntax/parse/lexer/comments.rs   |  4
-rw-r--r--   src/libsyntax/parse/lexer/mod.rs        | 10
-rw-r--r--   src/libsyntax/parse/mod.rs              | 32
-rw-r--r--   src/libsyntax/parse/parser.rs           |  4
-rw-r--r--   src/libsyntax/print/pp.rs               |  2
-rw-r--r--   src/libsyntax/print/pprust.rs           |  6
-rw-r--r--   src/libsyntax/util/small_vector.rs      |  6
-rw-r--r--   src/libsyntax/visit.rs                  |  2
20 files changed, 89 insertions, 89 deletions
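The diff below renames libsyntax's failure macro from fail! to panic! and updates the comments that mention it; each hunk keeps the message and arguments unchanged. As a hedged illustration only (this shim is hypothetical and not part of this commit), a rename like this can be staged with a thin forwarding alias so old call sites keep compiling while they migrate:

    // Hypothetical compatibility shim, not from this commit: forward the old
    // spelling to the new macro so callers can migrate incrementally.
    macro_rules! fail {
        ($($arg:tt)*) => { panic!($($arg)*) };
    }

    fn main() {
        let id = 42u32;
        // Expands to panic!(...) and aborts the thread with the same message.
        fail!("couldn't find node id {} in the AST map", id);
    }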
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index a4f060cd9fc..3bd25d245e1 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -84,7 +84,7 @@ impl PartialEq for Ident {
         // one example and its non-hygienic counterpart would be:
         // syntax::parse::token::Token::mtwt_eq
         // syntax::ext::tt::macro_parser::token_name_eq
-        fail!("not allowed to compare these idents: {}, {}. \
+        panic!("not allowed to compare these idents: {}, {}. \
                Probably related to issue \\#6993", self, other);
     }
 }
diff --git a/src/libsyntax/ast_map/blocks.rs b/src/libsyntax/ast_map/blocks.rs
index 8280f34615f..187d94d1fa7 100644
--- a/src/libsyntax/ast_map/blocks.rs
+++ b/src/libsyntax/ast_map/blocks.rs
@@ -198,17 +198,17 @@ impl<'a> FnLikeNode<'a> {
                     ident: i.ident, decl: &**decl, style: style, body: &**block,
                     generics: generics, abi: abi, id: i.id, span: i.span
                 }),
-                _ => fail!("item FnLikeNode that is not fn-like"),
+                _ => panic!("item FnLikeNode that is not fn-like"),
             },
             ast_map::NodeTraitItem(t) => match *t {
                 ast::ProvidedMethod(ref m) => method(&**m),
-                _ => fail!("trait method FnLikeNode that is not fn-like"),
+                _ => panic!("trait method FnLikeNode that is not fn-like"),
             },
             ast_map::NodeImplItem(ii) => {
                 match *ii {
                     ast::MethodImplItem(ref m) => method(&**m),
                     ast::TypeImplItem(_) => {
-                        fail!("impl method FnLikeNode that is not fn-like")
+                        panic!("impl method FnLikeNode that is not fn-like")
                     }
                 }
             }
@@ -217,9 +217,9 @@ impl<'a> FnLikeNode<'a> {
                 closure(ClosureParts::new(&**decl, &**block, e.id, e.span)),
             ast::ExprProc(ref decl, ref block) =>
                 closure(ClosureParts::new(&**decl, &**block, e.id, e.span)),
-            _ => fail!("expr FnLikeNode that is not fn-like"),
+            _ => panic!("expr FnLikeNode that is not fn-like"),
             },
-            _ => fail!("other FnLikeNode that is not fn-like"),
+            _ => panic!("other FnLikeNode that is not fn-like"),
         }
     }
 }
diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs
index b82a4a0b997..915c2d1b318 100644
--- a/src/libsyntax/ast_map/mod.rs
+++ b/src/libsyntax/ast_map/mod.rs
@@ -263,12 +263,12 @@ impl<'ast> Map<'ast> {
         &self.forest.krate
     }
 
-    /// Retrieve the Node corresponding to `id`, failing if it cannot
+    /// Retrieve the Node corresponding to `id`, panicking if it cannot
     /// be found.
     pub fn get(&self, id: NodeId) -> Node<'ast> {
         match self.find(id) {
             Some(node) => node,
-            None => fail!("couldn't find node id {} in the AST map", id)
+            None => panic!("couldn't find node id {} in the AST map", id)
         }
     }
 
@@ -308,7 +308,7 @@ impl<'ast> Map<'ast> {
         };
         match abi {
             Some(abi) => abi,
-            None => fail!("expected foreign mod or inlined parent, found {}",
+            None => panic!("expected foreign mod or inlined parent, found {}",
                           self.node_to_string(parent))
         }
     }
@@ -324,7 +324,7 @@ impl<'ast> Map<'ast> {
     pub fn expect_item(&self, id: NodeId) -> &'ast Item {
         match self.find(id) {
             Some(NodeItem(item)) => item,
-            _ => fail!("expected item, found {}", self.node_to_string(id))
+            _ => panic!("expected item, found {}", self.node_to_string(id))
         }
     }
 
@@ -333,37 +333,37 @@ impl<'ast> Map<'ast> {
             Some(NodeItem(i)) => {
                 match i.node {
                     ItemStruct(ref struct_def, _) => &**struct_def,
-                    _ => fail!("struct ID bound to non-struct")
+                    _ => panic!("struct ID bound to non-struct")
                 }
             }
             Some(NodeVariant(variant)) => {
                 match variant.node.kind {
                     StructVariantKind(ref struct_def) => &**struct_def,
-                    _ => fail!("struct ID bound to enum variant that isn't struct-like"),
+                    _ => panic!("struct ID bound to enum variant that isn't struct-like"),
                 }
             }
-            _ => fail!(format!("expected struct, found {}", self.node_to_string(id))),
+            _ => panic!(format!("expected struct, found {}", self.node_to_string(id))),
         }
     }
 
     pub fn expect_variant(&self, id: NodeId) -> &'ast Variant {
         match self.find(id) {
             Some(NodeVariant(variant)) => variant,
-            _ => fail!(format!("expected variant, found {}", self.node_to_string(id))),
+            _ => panic!(format!("expected variant, found {}", self.node_to_string(id))),
         }
     }
 
     pub fn expect_foreign_item(&self, id: NodeId) -> &'ast ForeignItem {
         match self.find(id) {
             Some(NodeForeignItem(item)) => item,
-            _ => fail!("expected foreign item, found {}", self.node_to_string(id))
+            _ => panic!("expected foreign item, found {}", self.node_to_string(id))
         }
     }
 
     pub fn expect_expr(&self, id: NodeId) -> &'ast Expr {
         match self.find(id) {
             Some(NodeExpr(expr)) => expr,
-            _ => fail!("expected expr, found {}", self.node_to_string(id))
+            _ => panic!("expected expr, found {}", self.node_to_string(id))
         }
     }
 
@@ -388,7 +388,7 @@ impl<'ast> Map<'ast> {
                         PathName(ident.name)
                     }
                     MethMac(_) => {
-                        fail!("no path elem for {}", node)
+                        panic!("no path elem for {}", node)
                     }
                 }
             }
@@ -402,13 +402,13 @@ impl<'ast> Map<'ast> {
                         MethDecl(ident, _, _, _, _, _, _, _) => {
                             PathName(ident.name)
                         }
-                        MethMac(_) => fail!("no path elem for {}", node),
+                        MethMac(_) => panic!("no path elem for {}", node),
                     }
                 }
                 TypeTraitItem(ref m) => PathName(m.ident.name),
             },
             NodeVariant(v) => PathName(v.node.name.name),
-            _ => fail!("no path elem for {}", node)
+            _ => panic!("no path elem for {}", node)
         }
     }
 
@@ -533,7 +533,7 @@ impl<'ast> Map<'ast> {
 
     pub fn span(&self, id: NodeId) -> Span {
         self.opt_span(id)
-            .unwrap_or_else(|| fail!("AstMap.span: could not find span for id {}", id))
+            .unwrap_or_else(|| panic!("AstMap.span: could not find span for id {}", id))
     }
 
     pub fn def_id_span(&self, def_id: DefId, fallback: Span) -> Span {
@@ -666,7 +666,7 @@ impl Named for Method {
     fn name(&self) -> Name {
         match self.node {
             MethDecl(i, _, _, _, _, _, _, _) => i.name,
-            MethMac(_) => fail!("encountered unexpanded method macro."),
+            MethMac(_) => panic!("encountered unexpanded method macro."),
         }
     }
 }
@@ -1018,9 +1018,9 @@ impl<'a> NodePrinter for pprust::State<'a> {
             // these cases do not carry enough information in the
             // ast_map to reconstruct their full structure for pretty
            // printing.
-            NodeLocal(_) => fail!("cannot print isolated Local"),
-            NodeArg(_) => fail!("cannot print isolated Arg"),
-            NodeStructCtor(_) => fail!("cannot print isolated StructCtor"),
+            NodeLocal(_) => panic!("cannot print isolated Local"),
+            NodeArg(_) => panic!("cannot print isolated Arg"),
+            NodeStructCtor(_) => panic!("cannot print isolated StructCtor"),
         }
     }
 }
diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs
index d5a460dc9db..7e1716e6b18 100644
--- a/src/libsyntax/ast_util.rs
+++ b/src/libsyntax/ast_util.rs
@@ -43,7 +43,7 @@ pub fn stmt_id(s: &Stmt) -> NodeId {
       StmtDecl(_, id) => id,
       StmtExpr(_, id) => id,
       StmtSemi(_, id) => id,
-      StmtMac(..) => fail!("attempted to analyze unexpanded stmt")
+      StmtMac(..) => panic!("attempted to analyze unexpanded stmt")
     }
 }
 
@@ -233,7 +233,7 @@ pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod {
                 abi: abi,
             }
         },
-        MethMac(_) => fail!("expected non-macro method declaration")
+        MethMac(_) => panic!("expected non-macro method declaration")
     }
 }
 
@@ -246,7 +246,7 @@ pub fn trait_item_to_ty_method(method: &TraitItem) -> TypeMethod {
         RequiredMethod(ref m) => (*m).clone(),
         ProvidedMethod(ref m) => trait_method_to_ty_method(&**m),
         TypeTraitItem(_) => {
-            fail!("trait_method_to_ty_method(): expected method but found \
+            panic!("trait_method_to_ty_method(): expected method but found \
                    typedef")
         }
     }
@@ -615,7 +615,7 @@ pub fn walk_pat(pat: &Pat, it: |&Pat| -> bool) -> bool {
             slice.iter().all(|p| walk_pat(&**p, |p| it(p))) &&
             after.iter().all(|p| walk_pat(&**p, |p| it(p)))
         }
-        PatMac(_) => fail!("attempted to analyze unexpanded pattern"),
+        PatMac(_) => panic!("attempted to analyze unexpanded pattern"),
         PatWild(_) | PatLit(_) | PatRange(_, _) | PatIdent(_, _, _) |
         PatEnum(_, _) => {
             true
@@ -725,7 +725,7 @@ macro_rules! mf_method{
         match self.node {
             $field_pat => $result,
             MethMac(_) => {
-                fail!("expected an AST without macro invocations");
+                panic!("expected an AST without macro invocations");
             }
         }
     }
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index 4df334a3f2c..7d303644020 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -425,7 +425,7 @@ impl CodeMap {
 
         // FIXME #8256: this used to be an assert but whatever precondition
         // it's testing isn't true for all spans in the AST, so to allow the
-        // caller to not have to fail (and it can't catch it since the CodeMap
+        // caller to not have to panic (and it can't catch it since the CodeMap
         // isn't sendable), return None
         if begin.fm.start_pos != end.fm.start_pos {
             None
@@ -441,7 +441,7 @@ impl CodeMap {
                 return fm.clone();
             }
         }
-        fail!("asking for {} which we don't know about", filename);
+        panic!("asking for {} which we don't know about", filename);
     }
 
     pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
@@ -503,13 +503,13 @@ impl CodeMap {
                 break;
             }
             if a == 0 {
-                fail!("position {} does not resolve to a source location",
+                panic!("position {} does not resolve to a source location",
                       pos.to_uint());
            }
            a -= 1;
        }
 
        if a >= len {
-            fail!("position {} does not resolve to a source location",
+            panic!("position {} does not resolve to a source location",
                  pos.to_uint())
        }
diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs
index 3da1b1f3175..e24aa0f0b95 100644
--- a/src/libsyntax/diagnostic.rs
+++ b/src/libsyntax/diagnostic.rs
@@ -63,7 +63,7 @@ pub trait Emitter {
             sp: RenderSpan, msg: &str, lvl: Level);
 }
 
-/// This structure is used to signify that a task has failed with a fatal error
+/// This structure is used to signify that a task has panicked with a fatal error
 /// from the diagnostics. You can use this with the `Any` trait to figure out
 /// how a rustc task died (if so desired).
 pub struct FatalError;
@@ -83,7 +83,7 @@ pub struct SpanHandler {
 impl SpanHandler {
     pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
         self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
-        fail!(FatalError);
+        panic!(FatalError);
     }
     pub fn span_err(&self, sp: Span, msg: &str) {
         self.handler.emit(Some((&self.cm, sp)), msg, Error);
@@ -113,7 +113,7 @@ impl SpanHandler {
     }
     pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
         self.handler.emit(Some((&self.cm, sp)), msg, Bug);
-        fail!(ExplicitBug);
+        panic!(ExplicitBug);
     }
     pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
         self.span_bug(sp, format!("unimplemented {}", msg).as_slice());
@@ -134,7 +134,7 @@ pub struct Handler {
 impl Handler {
     pub fn fatal(&self, msg: &str) -> ! {
         self.emit.borrow_mut().emit(None, msg, None, Fatal);
-        fail!(FatalError);
+        panic!(FatalError);
     }
     pub fn err(&self, msg: &str) {
         self.emit.borrow_mut().emit(None, msg, None, Error);
@@ -172,7 +172,7 @@ impl Handler {
     }
     pub fn bug(&self, msg: &str) -> ! {
         self.emit.borrow_mut().emit(None, msg, None, Bug);
-        fail!(ExplicitBug);
+        panic!(ExplicitBug);
     }
     pub fn unimpl(&self, msg: &str) -> ! {
         self.bug(format!("unimplemented {}", msg).as_slice());
@@ -367,7 +367,7 @@ impl Emitter for EmitterWriter {
 
         match error {
             Ok(()) => {}
-            Err(e) => fail!("failed to print diagnostics: {}", e),
+            Err(e) => panic!("failed to print diagnostics: {}", e),
         }
     }
 
@@ -375,7 +375,7 @@ impl Emitter for EmitterWriter {
               sp: RenderSpan, msg: &str, lvl: Level) {
         match emit(self, cm, sp, msg, None, lvl, true) {
             Ok(()) => {}
-            Err(e) => fail!("failed to print diagnostics: {}", e),
+            Err(e) => panic!("failed to print diagnostics: {}", e),
         }
     }
 }
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 39b710e0d57..87406081aae 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -741,7 +741,7 @@ fn expand_arm(arm: ast::Arm, fld: &mut MacroExpander) -> ast::Arm {
     // expand pats... they might contain macro uses:
     let expanded_pats = arm.pats.move_map(|pat| fld.fold_pat(pat));
     if expanded_pats.len() == 0 {
-        fail!("encountered match arm with 0 patterns");
+        panic!("encountered match arm with 0 patterns");
     }
     // all of the pats must have the same set of bindings, so use the
     // first one to extract them and generate new names:
@@ -1621,7 +1621,7 @@ mod test {
         // good lord, you can't make a path with 0 segments, can you?
         let final_varref_ident = match varref.segments.last() {
             Some(pathsegment) => pathsegment.identifier,
-            None => fail!("varref with 0 path segments?")
+            None => panic!("varref with 0 path segments?")
         };
         let varref_name = mtwt::resolve(final_varref_ident);
         let varref_idents : Vec<ast::Ident>
@@ -1688,7 +1688,7 @@ foo_module!()
         let cxbinds: &[&ast::Ident] = cxbinds.as_slice();
         let cxbind = match cxbinds {
             [b] => b,
-            _ => fail!("expected just one binding for ext_cx")
+            _ => panic!("expected just one binding for ext_cx")
         };
         let resolved_binding = mtwt::resolve(*cxbind);
         let varrefs = crate_varrefs(&cr);
diff --git a/src/libsyntax/ext/mtwt.rs b/src/libsyntax/ext/mtwt.rs
index b4f8b9f8228..840468176ab 100644
--- a/src/libsyntax/ext/mtwt.rs
+++ b/src/libsyntax/ext/mtwt.rs
@@ -211,7 +211,7 @@ fn resolve_internal(id: Ident,
                     resolvedthis
                 }
             }
-            IllegalCtxt => fail!("expected resolvable context, got IllegalCtxt")
+            IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
         }
     };
     resolve_table.insert(key, resolved);
@@ -250,7 +250,7 @@ fn marksof_internal(ctxt: SyntaxContext,
                     loopvar = tl;
                 }
             }
-            IllegalCtxt => fail!("expected resolvable context, got IllegalCtxt")
+            IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
         }
     }
 }
@@ -261,7 +261,7 @@ pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
     with_sctable(|sctable| {
         match (*sctable.table.borrow())[ctxt as uint] {
             Mark(mrk, _) => mrk,
-            _ => fail!("can't retrieve outer mark when outside is not a mark")
+            _ => panic!("can't retrieve outer mark when outside is not a mark")
         }
     })
 }
@@ -342,7 +342,7 @@ mod tests {
                     sc = tail;
                     continue;
                 }
-                IllegalCtxt => fail!("expected resolvable context, got IllegalCtxt")
+                IllegalCtxt => panic!("expected resolvable context, got IllegalCtxt")
             }
         }
     }
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index dc7a495523f..a95a737720a 100644
--- a/src/libsyntax/ext/quote.rs
+++ b/src/libsyntax/ext/quote.rs
@@ -366,7 +366,7 @@ pub mod rt {
                 Some(ast) => ast,
                 None => {
                     error!("parse error");
-                    fail!()
+                    panic!()
                 }
             }
         }
@@ -598,7 +598,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
                                 vec!(mk_name(cx, sp, ident.ident())));
         }
 
-        token::Interpolated(_) => fail!("quote! with interpolated token"),
+        token::Interpolated(_) => panic!("quote! with interpolated token"),
 
         _ => ()
     }
@@ -635,7 +635,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
         token::Dollar => "Dollar",
         token::Underscore => "Underscore",
         token::Eof => "Eof",
-        _ => fail!(),
+        _ => panic!(),
     };
     mk_token_path(cx, sp, name)
 }
@@ -662,7 +662,7 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
                 .chain(mk_tt(cx, sp, &close.to_tt()).into_iter())
                 .collect()
         },
-        ast::TtSequence(..) => fail!("TtSequence in quote!"),
+        ast::TtSequence(..) => panic!("TtSequence in quote!"),
         ast::TtNonterminal(sp, ident) => {
             // tt.extend($ident.to_tokens(ext_cx).into_iter())
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 073bebcb3f6..9260a45adb9 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -395,7 +395,7 @@ pub fn parse(sess: &ParseSess,
                             token::get_ident(name), token::get_ident(bind))).to_string()
                     }
-                    _ => fail!()
+                    _ => panic!()
                 }
             }).collect::<Vec<String>>().connect(" or ");
             return Error(sp, format!(
                 "local ambiguity: multiple parsing options: \
@@ -421,7 +421,7 @@ pub fn parse(sess: &ParseSess,
                                 parse_nt(&mut rust_parser, name_string.get()))));
                     ei.idx += 1u;
                 }
-                _ => fail!()
+                _ => panic!()
             }
             cur_eis.push(ei);
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 20428e50c7f..85bd5cde304 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -39,7 +39,7 @@ impl<'a> ParserAnyMacro<'a> {
     /// silently drop anything. `allow_semi` is so that "optional"
     /// semicolons at the end of normal expressions aren't complained
     /// about e.g. the semicolon in `macro_rules! kapow( () => {
-    /// fail!(); } )` doesn't get picked up by .parse_expr(), but it's
+    /// panic!(); } )` doesn't get picked up by .parse_expr(), but it's
     /// allowed to be there.
     fn ensure_complete_parse(&self, allow_semi: bool) {
         let mut parser = self.parser.borrow_mut();
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 967ad3a897c..47ca66b0b49 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -171,7 +171,7 @@ pub trait Folder {
     }
 
     fn fold_mac(&mut self, _macro: Mac) -> Mac {
-        fail!("fold_mac disabled by default");
+        panic!("fold_mac disabled by default");
        // NB: see note about macros above.
        // if you really want a folder that
        // works on macros, use this
@@ -1404,7 +1404,7 @@ mod test {
                 let a_val = $a;
                 let b_val = $b;
                 if !(pred_val(a_val.as_slice(),b_val.as_slice())) {
-                    fail!("expected args satisfying {}, got {} and {}",
+                    panic!("expected args satisfying {}, got {} and {}",
                           $predname, a_val, b_val);
                 }
             }
diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs
index 3814ecfbe5b..5a7679570bf 100644
--- a/src/libsyntax/parse/lexer/comments.rs
+++ b/src/libsyntax/parse/lexer/comments.rs
@@ -142,7 +142,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
         return lines.connect("\n");
     }
 
-    fail!("not a doc-comment: {}", comment);
+    panic!("not a doc-comment: {}", comment);
 }
 
 fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec<Comment>) {
@@ -322,7 +322,7 @@ fn consume_comment(rdr: &mut StringReader,
         read_block_comment(rdr, code_to_the_left, comments);
     } else if rdr.curr_is('#') && rdr.nextch_is('!') {
         read_shebang_comment(rdr, code_to_the_left, comments);
-    } else { fail!(); }
+    } else { panic!(); }
     debug!("<<< consume comment");
 }
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index b439353ad95..3a6cf610b4f 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -555,8 +555,8 @@ impl<'a> StringReader<'a> {
                         whence: &str) {
         match r.curr {
             Some(r_c) if r_c == c => r.bump(),
-            Some(r_c) => fail!("expected {}, hit {}, {}", described_c, r_c, whence),
-            None => fail!("expected {}, hit EOF, {}", described_c, whence),
+            Some(r_c) => panic!("expected {}, hit {}, {}", described_c, r_c, whence),
+            None => panic!("expected {}, hit EOF, {}", described_c, whence),
         }
     }
 
@@ -577,7 +577,7 @@ impl<'a> StringReader<'a> {
         self.scan_digits(base);
         let encoded_name : u32 = self.with_str_from(start_bpos, |s| {
             num::from_str_radix(s, 10).unwrap_or_else(|| {
-                fail!("expected digits representing a name, got `{}`, {}, range [{},{}]",
+                panic!("expected digits representing a name, got `{}`, {}, range [{},{}]",
                       s, whence, start_bpos, self.last_pos);
             })
         });
@@ -595,7 +595,7 @@ impl<'a> StringReader<'a> {
         self.scan_digits(base);
         let encoded_ctxt : ast::SyntaxContext = self.with_str_from(start_bpos, |s| {
             num::from_str_radix(s, 10).unwrap_or_else(|| {
-                fail!("expected digits representing a ctxt, got `{}`, {}", s, whence);
+                panic!("expected digits representing a ctxt, got `{}`, {}", s, whence);
             })
         });
 
@@ -1542,7 +1542,7 @@ mod test {
         let mut lexer = setup(&sh, "/* /* */ */'a'".to_string());
         match lexer.next_token().tok {
             token::Comment => { },
-            _ => fail!("expected a comment!")
+            _ => panic!("expected a comment!")
         }
         assert_eq!(lexer.next_token().tok, token::LitChar(token::intern("a")));
     }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index e60da0867f7..c731f3965a0 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -65,7 +65,7 @@ impl ParseSess {
 
         match v.checked_add(&count) {
             Some(next) => { self.node_id.set(next); }
-            None => fail!("Input too large, ran out of node ids!")
+            None => panic!("Input too large, ran out of node ids!")
         }
 
         v
@@ -381,7 +381,7 @@ pub fn char_lit(lit: &str) -> (char, int) {
             '0' => Some('\0'),
             _ => { None }
         },
-        _ => fail!("lexer accepted invalid char escape `{}`", lit)
+        _ => panic!("lexer accepted invalid char escape `{}`", lit)
     };
 
     match c {
@@ -434,7 +434,7 @@ pub fn str_lit(lit: &str) -> String {
         match c {
             '\\' => {
                 let ch = chars.peek().unwrap_or_else(|| {
-                    fail!("{}", error(i).as_slice())
+                    panic!("{}", error(i).as_slice())
                 }).val1();
 
                 if ch == '\n' {
@@ -442,11 +442,11 @@ pub fn str_lit(lit: &str) -> String {
                 } else if ch == '\r' {
                     chars.next();
                     let ch = chars.peek().unwrap_or_else(|| {
-                        fail!("{}", error(i).as_slice())
+                        panic!("{}", error(i).as_slice())
                     }).val1();
 
                     if ch != '\n' {
-                        fail!("lexer accepted bare CR");
+                        panic!("lexer accepted bare CR");
                     }
                     eat(&mut chars);
                 } else {
@@ -460,11 +460,11 @@ pub fn str_lit(lit: &str) -> String {
             },
             '\r' => {
                 let ch = chars.peek().unwrap_or_else(|| {
-                    fail!("{}", error(i).as_slice())
+                    panic!("{}", error(i).as_slice())
                 }).val1();
 
                 if ch != '\n' {
-                    fail!("lexer accepted bare CR");
+                    panic!("lexer accepted bare CR");
                 }
                 chars.next();
                 res.push('\n');
@@ -494,7 +494,7 @@ pub fn raw_str_lit(lit: &str) -> String {
             Some(c) => {
                 if c == '\r' {
                     if *chars.peek().unwrap() != '\n' {
-                        fail!("lexer accepted bare CR");
+                        panic!("lexer accepted bare CR");
                     }
                     chars.next();
                     res.push('\n');
@@ -553,11 +553,11 @@ pub fn byte_lit(lit: &str) -> (u8, uint) {
                 match ::std::num::from_str_radix::<u64>(lit.slice(2, 4), 16) {
                     Some(c) =>
                         if c > 0xFF {
-                            fail!(err(2))
+                            panic!(err(2))
                         } else {
                             return (c as u8, 4)
                         },
-                    None => fail!(err(3))
+                    None => panic!(err(3))
                 }
             }
         };
@@ -594,7 +594,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
             b'\r' => {
                 chars.next();
                 if chars.peek().expect(em.as_slice()).val1() != b'\n' {
-                    fail!("lexer accepted bare CR");
+                    panic!("lexer accepted bare CR");
                 }
                 eat(&mut chars);
             }
@@ -612,7 +612,7 @@ pub fn binary_lit(lit: &str) -> Rc<Vec<u8>> {
             Some((i, b'\r')) => {
                 let em = error(i);
                 if chars.peek().expect(em.as_slice()).val1() != b'\n' {
-                    fail!("lexer accepted bare CR");
+                    panic!("lexer accepted bare CR");
                 }
                 chars.next();
                 res.push(b'\n');
@@ -813,7 +813,7 @@ mod test {
                                    ast::TtToken(_, token::Ident(name, token::Plain))],
                          &ast::Delimiter { token: token::RParen, .. })
                         if name.as_str() == "a" => {},
-                        _ => fail!("value 3: {}", **first_delimed),
+                        _ => panic!("value 3: {}", **first_delimed),
                     }
                     let (ref second_open, ref second_tts, ref second_close) = **second_delimed;
                     match (second_open, second_tts.as_slice(), second_close) {
@@ -822,13 +822,13 @@ mod test {
                                    ast::TtToken(_, token::Ident(name, token::Plain))],
                          &ast::Delimiter { token: token::RParen, .. })
                         if name.as_str() == "a" => {},
-                        _ => fail!("value 4: {}", **second_delimed),
+                        _ => panic!("value 4: {}", **second_delimed),
                     }
                 },
-                _ => fail!("value 2: {}", **macro_delimed),
+                _ => panic!("value 2: {}", **macro_delimed),
             }
         },
-        _ => fail!("value: {}",tts),
+        _ => panic!("value: {}",tts),
     }
 }
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 654de709566..8ef3a559bf4 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -5746,7 +5746,7 @@ impl<'a> Parser<'a> {
                     break;
                 }
                 IoviForeignItem(_) => {
-                    fail!();
+                    panic!();
                 }
             }
             attrs = self.parse_outer_attributes();
@@ -5769,7 +5769,7 @@ impl<'a> Parser<'a> {
                     items.push(item)
                 }
                 IoviForeignItem(_) => {
-                    fail!();
+                    panic!();
                 }
             }
         }
diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs
index 65efd4f0042..57c72ca77c6 100644
--- a/src/libsyntax/print/pp.rs
+++ b/src/libsyntax/print/pp.rs
@@ -600,7 +600,7 @@ impl Printer {
             }
             Eof => {
                 // Eof should never get here.
-                fail!();
+                panic!();
             }
         }
     }
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index a52987f5bd1..d347d0199a7 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -1072,7 +1072,7 @@ impl<'a> State<'a> {
                                  Inconsistent, struct_def.fields.as_slice(),
                                  |s, field| {
                     match field.node.kind {
-                        ast::NamedField(..) => fail!("unexpected named field"),
+                        ast::NamedField(..) => panic!("unexpected named field"),
                         ast::UnnamedField(vis) => {
                             try!(s.print_visibility(vis));
                             try!(s.maybe_print_comment(field.span.lo));
@@ -1093,7 +1093,7 @@ impl<'a> State<'a> {
 
                 for field in struct_def.fields.iter() {
                     match field.node.kind {
-                        ast::UnnamedField(..) => fail!("unexpected unnamed field"),
+                        ast::UnnamedField(..) => panic!("unexpected unnamed field"),
                         ast::NamedField(ident, visibility) => {
                             try!(self.hardbreak_if_not_bol());
                             try!(self.maybe_print_comment(field.span.lo));
@@ -1440,7 +1440,7 @@ impl<'a> State<'a> {
                     }
                     // BLEAH, constraints would be great here
                     _ => {
-                        fail!("print_if saw if with weird alternative");
+                        panic!("print_if saw if with weird alternative");
                     }
                 }
             }
diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs
index 60ba5f6615b..422c2d5c75b 100644
--- a/src/libsyntax/util/small_vector.rs
+++ b/src/libsyntax/util/small_vector.rs
@@ -99,7 +99,7 @@ impl<T> SmallVector<T> {
         match self.repr {
             One(ref v) if idx == 0 => v,
             Many(ref vs) => &vs[idx],
-            _ => fail!("out of bounds access")
+            _ => panic!("out of bounds access")
         }
     }
 
@@ -110,10 +110,10 @@ impl<T> SmallVector<T> {
                 if v.len() == 1 {
                     v.into_iter().next().unwrap()
                 } else {
-                    fail!(err)
+                    panic!(err)
                 }
             }
-            _ => fail!(err)
+            _ => panic!(err)
         }
     }
 
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index f2d56f53d22..bec72e88f99 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -103,7 +103,7 @@ pub trait Visitor<'v> {
         walk_explicit_self(self, es)
     }
     fn visit_mac(&mut self, _macro: &'v Mac) {
-        fail!("visit_mac disabled by default");
+        panic!("visit_mac disabled by default");
         // NB: see note about macros above.
         // if you really want a visitor that
         // works on macros, use this
