diff options
| author | bors <bors@rust-lang.org> | 2014-03-17 02:11:56 -0700 |
|---|---|---|
| committer | bors <bors@rust-lang.org> | 2014-03-17 02:11:56 -0700 |
| commit | e4c91e6c7cfc03246a422576ab41ac74125fd3b8 (patch) | |
| tree | bf813a516c3fdfd864e1ae3422b93f22da28bdf6 /src/libsyntax/parse | |
| parent | b6d5b8f6ff7b9feaf8f11e2624c6eeeeb5b3a9d9 (diff) | |
| parent | e2ebc8f81138bcad019f43a3af0cddb0dc0dcfbc (diff) | |
| download | rust-e4c91e6c7cfc03246a422576ab41ac74125fd3b8.tar.gz rust-e4c91e6c7cfc03246a422576ab41ac74125fd3b8.zip | |
auto merge of #12735 : eddyb/rust/at-exodus-chapter-11, r=cmr
Diffstat (limited to 'src/libsyntax/parse')
| -rw-r--r-- | src/libsyntax/parse/attr.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/parse/comments.rs | 4 | ||||
| -rw-r--r-- | src/libsyntax/parse/lexer.rs | 208 | ||||
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 119 | ||||
| -rw-r--r-- | src/libsyntax/parse/obsolete.rs | 2 | ||||
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 12 |
6 files changed, 157 insertions(+), 190 deletions(-)
diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 0a74c7ca821..399648ef1d8 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -28,7 +28,7 @@ pub trait ParserAttr { fn parse_optional_meta(&mut self) -> Vec<@ast::MetaItem> ; } -impl ParserAttr for Parser { +impl<'a> ParserAttr for Parser<'a> { // Parse attributes that appear before an item fn parse_outer_attributes(&mut self) -> Vec<ast::Attribute> { let mut attrs: Vec<ast::Attribute> = Vec::new(); diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index c2a2097de24..ed74fd416d1 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -346,10 +346,10 @@ pub struct Literal { // it appears this function is called only from pprust... that's // probably not a good thing. pub fn gather_comments_and_literals(span_diagnostic: - @diagnostic::SpanHandler, + &diagnostic::SpanHandler, path: ~str, srdr: &mut io::Reader) - -> (Vec<Comment> , Vec<Literal> ) { + -> (Vec<Comment>, Vec<Literal>) { let src = srdr.read_to_end().unwrap(); let src = str::from_utf8_owned(src).unwrap(); let cm = CodeMap::new(); diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 884fc306f22..546aefc1297 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -18,6 +18,7 @@ use parse::token::{str_to_ident}; use std::cell::{Cell, RefCell}; use std::char; +use std::rc::Rc; use std::mem::replace; use std::num::from_str_radix; @@ -27,7 +28,7 @@ pub trait Reader { fn is_eof(&self) -> bool; fn next_token(&self) -> TokenAndSpan; fn fatal(&self, ~str) -> !; - fn span_diag(&self) -> @SpanHandler; + fn span_diag<'a>(&'a self) -> &'a SpanHandler; fn peek(&self) -> TokenAndSpan; fn dup(&self) -> ~Reader:; } @@ -38,8 +39,8 @@ pub struct TokenAndSpan { sp: Span, } -pub struct StringReader { - span_diagnostic: @SpanHandler, +pub struct StringReader<'a> { + span_diagnostic: &'a SpanHandler, // The absolute offset 
within the codemap of the next character to read pos: Cell<BytePos>, // The absolute offset within the codemap of the last character read(curr) @@ -48,36 +49,36 @@ pub struct StringReader { col: Cell<CharPos>, // The last character to be read curr: Cell<Option<char>>, - filemap: @codemap::FileMap, + filemap: Rc<codemap::FileMap>, /* cached: */ peek_tok: RefCell<token::Token>, peek_span: RefCell<Span>, } -impl StringReader { +impl<'a> StringReader<'a> { pub fn curr_is(&self, c: char) -> bool { self.curr.get() == Some(c) } } -pub fn new_string_reader(span_diagnostic: @SpanHandler, - filemap: @codemap::FileMap) - -> StringReader { +pub fn new_string_reader<'a>(span_diagnostic: &'a SpanHandler, + filemap: Rc<codemap::FileMap>) + -> StringReader<'a> { let r = new_low_level_string_reader(span_diagnostic, filemap); string_advance_token(&r); /* fill in peek_* */ r } /* For comments.rs, which hackily pokes into 'pos' and 'curr' */ -pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler, - filemap: @codemap::FileMap) - -> StringReader { +pub fn new_low_level_string_reader<'a>(span_diagnostic: &'a SpanHandler, + filemap: Rc<codemap::FileMap>) + -> StringReader<'a> { // Force the initial reader bump to start on a fresh line let initial_char = '\n'; let r = StringReader { span_diagnostic: span_diagnostic, - pos: Cell::new(filemap.start_pos), - last_pos: Cell::new(filemap.start_pos), + pos: Cell::new(filemap.deref().start_pos), + last_pos: Cell::new(filemap.deref().start_pos), col: Cell::new(CharPos(0)), curr: Cell::new(Some(initial_char)), filemap: filemap, @@ -92,20 +93,20 @@ pub fn new_low_level_string_reader(span_diagnostic: @SpanHandler, // duplicating the string reader is probably a bad idea, in // that using them will cause interleaved pushes of line // offsets to the underlying filemap... 
-fn dup_string_reader(r: &StringReader) -> StringReader { +fn dup_string_reader<'a>(r: &StringReader<'a>) -> StringReader<'a> { StringReader { span_diagnostic: r.span_diagnostic, pos: Cell::new(r.pos.get()), last_pos: Cell::new(r.last_pos.get()), col: Cell::new(r.col.get()), curr: Cell::new(r.curr.get()), - filemap: r.filemap, + filemap: r.filemap.clone(), peek_tok: r.peek_tok.clone(), peek_span: r.peek_span.clone(), } } -impl Reader for StringReader { +impl<'a> Reader for StringReader<'a> { fn is_eof(&self) -> bool { is_eof(self) } // return the next token. EFFECT: advances the string_reader. fn next_token(&self) -> TokenAndSpan { @@ -122,7 +123,7 @@ impl Reader for StringReader { fn fatal(&self, m: ~str) -> ! { self.span_diagnostic.span_fatal(self.peek_span.get(), m) } - fn span_diag(&self) -> @SpanHandler { self.span_diagnostic } + fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.span_diagnostic } fn peek(&self) -> TokenAndSpan { // FIXME(pcwalton): Bad copy! TokenAndSpan { @@ -133,7 +134,7 @@ impl Reader for StringReader { fn dup(&self) -> ~Reader: { ~dup_string_reader(self) as ~Reader: } } -impl Reader for TtReader { +impl<'a> Reader for TtReader<'a> { fn is_eof(&self) -> bool { let cur_tok = self.cur_tok.borrow(); *cur_tok.get() == token::EOF @@ -146,7 +147,7 @@ impl Reader for TtReader { fn fatal(&self, m: ~str) -> ! { self.sp_diag.span_fatal(self.cur_span.get(), m); } - fn span_diag(&self) -> @SpanHandler { self.sp_diag } + fn span_diag<'a>(&'a self) -> &'a SpanHandler { self.sp_diag } fn peek(&self) -> TokenAndSpan { TokenAndSpan { tok: self.cur_tok.get(), @@ -189,7 +190,7 @@ fn fatal_span_verbose(rdr: &StringReader, -> ! 
{ let mut m = m; m.push_str(": "); - let s = rdr.filemap.src.slice( + let s = rdr.filemap.deref().src.slice( byte_offset(rdr, from_pos).to_uint(), byte_offset(rdr, to_pos).to_uint()); m.push_str(s); @@ -218,7 +219,7 @@ fn string_advance_token(r: &StringReader) { } fn byte_offset(rdr: &StringReader, pos: BytePos) -> BytePos { - (pos - rdr.filemap.start_pos) + (pos - rdr.filemap.deref().start_pos) } /// Calls `f` with a string slice of the source text spanning from `start` @@ -240,7 +241,7 @@ fn with_str_from_to<T>( end: BytePos, f: |s: &str| -> T) -> T { - f(rdr.filemap.src.slice( + f(rdr.filemap.deref().src.slice( byte_offset(rdr, start).to_uint(), byte_offset(rdr, end).to_uint())) } @@ -250,21 +251,21 @@ fn with_str_from_to<T>( pub fn bump(rdr: &StringReader) { rdr.last_pos.set(rdr.pos.get()); let current_byte_offset = byte_offset(rdr, rdr.pos.get()).to_uint(); - if current_byte_offset < (rdr.filemap.src).len() { + if current_byte_offset < rdr.filemap.deref().src.len() { assert!(rdr.curr.get().is_some()); let last_char = rdr.curr.get().unwrap(); - let next = rdr.filemap.src.char_range_at(current_byte_offset); + let next = rdr.filemap.deref().src.char_range_at(current_byte_offset); let byte_offset_diff = next.next - current_byte_offset; rdr.pos.set(rdr.pos.get() + Pos::from_uint(byte_offset_diff)); rdr.curr.set(Some(next.ch)); rdr.col.set(rdr.col.get() + CharPos(1u)); if last_char == '\n' { - rdr.filemap.next_line(rdr.last_pos.get()); + rdr.filemap.deref().next_line(rdr.last_pos.get()); rdr.col.set(CharPos(0u)); } if byte_offset_diff > 1 { - rdr.filemap.record_multibyte_char(rdr.last_pos.get(), byte_offset_diff); + rdr.filemap.deref().record_multibyte_char(rdr.last_pos.get(), byte_offset_diff); } } else { rdr.curr.set(None); @@ -275,8 +276,8 @@ pub fn is_eof(rdr: &StringReader) -> bool { } pub fn nextch(rdr: &StringReader) -> Option<char> { let offset = byte_offset(rdr, rdr.pos.get()).to_uint(); - if offset < (rdr.filemap.src).len() { - 
Some(rdr.filemap.src.char_at(offset)) + if offset < rdr.filemap.deref().src.len() { + Some(rdr.filemap.deref().src.char_at(offset)) } else { None } @@ -334,56 +335,55 @@ fn consume_any_line_comment(rdr: &StringReader) -> Option<TokenAndSpan> { if rdr.curr_is('/') { match nextch(rdr) { - Some('/') => { - bump(rdr); - bump(rdr); - // line comments starting with "///" or "//!" are doc-comments - if rdr.curr_is('/') || rdr.curr_is('!') { - let start_bpos = rdr.pos.get() - BytePos(3); - while !rdr.curr_is('\n') && !is_eof(rdr) { - bump(rdr); - } - let ret = with_str_from(rdr, start_bpos, |string| { - // but comments with only more "/"s are not - if !is_line_non_doc_comment(string) { - Some(TokenAndSpan{ - tok: token::DOC_COMMENT(str_to_ident(string)), - sp: codemap::mk_sp(start_bpos, rdr.pos.get()) - }) - } else { - None + Some('/') => { + bump(rdr); + bump(rdr); + // line comments starting with "///" or "//!" are doc-comments + if rdr.curr_is('/') || rdr.curr_is('!') { + let start_bpos = rdr.pos.get() - BytePos(3); + while !rdr.curr_is('\n') && !is_eof(rdr) { + bump(rdr); } - }); + let ret = with_str_from(rdr, start_bpos, |string| { + // but comments with only more "/"s are not + if !is_line_non_doc_comment(string) { + Some(TokenAndSpan{ + tok: token::DOC_COMMENT(str_to_ident(string)), + sp: codemap::mk_sp(start_bpos, rdr.pos.get()) + }) + } else { + None + } + }); - if ret.is_some() { - return ret; + if ret.is_some() { + return ret; + } + } else { + while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } } - } else { - while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } + // Restart whitespace munch. + consume_whitespace_and_comments(rdr) } - // Restart whitespace munch. 
- return consume_whitespace_and_comments(rdr); - } - Some('*') => { bump(rdr); bump(rdr); return consume_block_comment(rdr); } - _ => () + Some('*') => { bump(rdr); bump(rdr); consume_block_comment(rdr) } + _ => None } } else if rdr.curr_is('#') { if nextch_is(rdr, '!') { // I guess this is the only way to figure out if // we're at the beginning of the file... - let cmap = @CodeMap::new(); - { - let mut files = cmap.files.borrow_mut(); - files.get().push(rdr.filemap); - } + let cmap = CodeMap::new(); + cmap.files.borrow_mut().get().push(rdr.filemap.clone()); let loc = cmap.lookup_char_pos_adj(rdr.last_pos.get()); if loc.line == 1u && loc.col == CharPos(0u) { while !rdr.curr_is('\n') && !is_eof(rdr) { bump(rdr); } return consume_whitespace_and_comments(rdr); } } + None + } else { + None } - return None; } pub fn is_block_non_doc_comment(s: &str) -> bool { @@ -1007,28 +1007,24 @@ mod test { use std::io::util; use std::vec_ng::Vec; - // represents a testing reader (incl. both reader and interner) - struct Env { - string_reader: StringReader + fn mk_sh() -> diagnostic::SpanHandler { + let emitter = diagnostic::EmitterWriter::new(~util::NullWriter); + let handler = diagnostic::mk_handler(~emitter); + diagnostic::mk_span_handler(handler, CodeMap::new()) } // open a string reader for the given string - fn setup(teststr: ~str) -> Env { - let cm = CodeMap::new(); - let fm = cm.new_filemap(~"zebra.rs", teststr); - let writer = ~util::NullWriter; - let emitter = diagnostic::EmitterWriter::new(writer); - let handler = diagnostic::mk_handler(~emitter); - let span_handler = diagnostic::mk_span_handler(handler, @cm); - Env { - string_reader: new_string_reader(span_handler,fm) - } + fn setup<'a>(span_handler: &'a diagnostic::SpanHandler, + teststr: ~str) -> StringReader<'a> { + let fm = span_handler.cm.new_filemap(~"zebra.rs", teststr); + new_string_reader(span_handler, fm) } #[test] fn t1 () { - let Env {string_reader} = - setup(~"/* my source file */ \ - fn main() { 
println!(\"zebra\"); }\n"); + let span_handler = mk_sh(); + let string_reader = setup(&span_handler, + ~"/* my source file */ \ + fn main() { println!(\"zebra\"); }\n"); let id = str_to_ident("fn"); let tok1 = string_reader.next_token(); let tok2 = TokenAndSpan{ @@ -1049,11 +1045,9 @@ mod test { // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) - fn check_tokenization (env: Env, expected: Vec<token::Token> ) { + fn check_tokenization (string_reader: StringReader, expected: Vec<token::Token> ) { for expected_tok in expected.iter() { - let TokenAndSpan {tok:actual_tok, sp: _} = - env.string_reader.next_token(); - assert_eq!(&actual_tok,expected_tok); + assert_eq!(&string_reader.next_token().tok, expected_tok); } } @@ -1063,71 +1057,55 @@ mod test { } #[test] fn doublecolonparsing () { - let env = setup (~"a b"); - check_tokenization (env, + check_tokenization(setup(&mk_sh(), ~"a b"), vec!(mk_ident("a",false), mk_ident("b",false))); } #[test] fn dcparsing_2 () { - let env = setup (~"a::b"); - check_tokenization (env, + check_tokenization(setup(&mk_sh(), ~"a::b"), vec!(mk_ident("a",true), token::MOD_SEP, mk_ident("b",false))); } #[test] fn dcparsing_3 () { - let env = setup (~"a ::b"); - check_tokenization (env, + check_tokenization(setup(&mk_sh(), ~"a ::b"), vec!(mk_ident("a",false), token::MOD_SEP, mk_ident("b",false))); } #[test] fn dcparsing_4 () { - let env = setup (~"a:: b"); - check_tokenization (env, + check_tokenization(setup(&mk_sh(), ~"a:: b"), vec!(mk_ident("a",true), token::MOD_SEP, mk_ident("b",false))); } #[test] fn character_a() { - let env = setup(~"'a'"); - let TokenAndSpan {tok, sp: _} = - env.string_reader.next_token(); - assert_eq!(tok,token::LIT_CHAR('a' as u32)); + assert_eq!(setup(&mk_sh(), ~"'a'").next_token().tok, + token::LIT_CHAR('a' as u32)); } #[test] fn character_space() { - let env = setup(~"' '"); - let TokenAndSpan {tok, sp: _} = - env.string_reader.next_token(); - 
assert_eq!(tok, token::LIT_CHAR(' ' as u32)); + assert_eq!(setup(&mk_sh(), ~"' '").next_token().tok, + token::LIT_CHAR(' ' as u32)); } #[test] fn character_escaped() { - let env = setup(~"'\\n'"); - let TokenAndSpan {tok, sp: _} = - env.string_reader.next_token(); - assert_eq!(tok, token::LIT_CHAR('\n' as u32)); + assert_eq!(setup(&mk_sh(), ~"'\\n'").next_token().tok, + token::LIT_CHAR('\n' as u32)); } #[test] fn lifetime_name() { - let env = setup(~"'abc"); - let TokenAndSpan {tok, sp: _} = - env.string_reader.next_token(); - let id = token::str_to_ident("abc"); - assert_eq!(tok, token::LIFETIME(id)); + assert_eq!(setup(&mk_sh(), ~"'abc").next_token().tok, + token::LIFETIME(token::str_to_ident("abc"))); } #[test] fn raw_string() { - let env = setup(~"r###\"\"#a\\b\x00c\"\"###"); - let TokenAndSpan {tok, sp: _} = - env.string_reader.next_token(); - let id = token::str_to_ident("\"#a\\b\x00c\""); - assert_eq!(tok, token::LIT_STR_RAW(id, 3)); + assert_eq!(setup(&mk_sh(), ~"r###\"\"#a\\b\x00c\"\"###").next_token().tok, + token::LIT_STR_RAW(token::str_to_ident("\"#a\\b\x00c\""), 3)); } #[test] fn line_doc_comments() { @@ -1137,10 +1115,8 @@ mod test { } #[test] fn nested_block_comments() { - let env = setup(~"/* /* */ */'a'"); - let TokenAndSpan {tok, sp: _} = - env.string_reader.next_token(); - assert_eq!(tok,token::LIT_CHAR('a' as u32)); + assert_eq!(setup(&mk_sh(), ~"/* /* */ */'a'").next_token().tok, + token::LIT_CHAR('a' as u32)); } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index cb49ad0905c..062bc100863 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -13,13 +13,13 @@ use ast; use codemap::{Span, CodeMap, FileMap}; -use codemap; use diagnostic::{SpanHandler, mk_span_handler, default_handler}; use parse::attr::ParserAttr; use parse::parser::Parser; use std::cell::RefCell; use std::io::File; +use std::rc::Rc; use std::str; use std::vec_ng::Vec; @@ -40,26 +40,20 @@ pub mod obsolete; // info about a parsing 
session. pub struct ParseSess { - cm: @codemap::CodeMap, // better be the same as the one in the reader! - span_diagnostic: @SpanHandler, // better be the same as the one in the reader! + span_diagnostic: SpanHandler, // better be the same as the one in the reader! /// Used to determine and report recursive mod inclusions - included_mod_stack: RefCell<Vec<Path> >, + included_mod_stack: RefCell<Vec<Path>>, } -pub fn new_parse_sess() -> @ParseSess { - let cm = @CodeMap::new(); - @ParseSess { - cm: cm, - span_diagnostic: mk_span_handler(default_handler(), cm), +pub fn new_parse_sess() -> ParseSess { + ParseSess { + span_diagnostic: mk_span_handler(default_handler(), CodeMap::new()), included_mod_stack: RefCell::new(Vec::new()), } } -pub fn new_parse_sess_special_handler(sh: @SpanHandler, - cm: @codemap::CodeMap) - -> @ParseSess { - @ParseSess { - cm: cm, +pub fn new_parse_sess_special_handler(sh: SpanHandler) -> ParseSess { + ParseSess { span_diagnostic: sh, included_mod_stack: RefCell::new(Vec::new()), } @@ -73,7 +67,7 @@ pub fn new_parse_sess_special_handler(sh: @SpanHandler, pub fn parse_crate_from_file( input: &Path, cfg: ast::CrateConfig, - sess: @ParseSess + sess: &ParseSess ) -> ast::Crate { new_parser_from_file(sess, cfg, input).parse_crate_mod() // why is there no p.abort_if_errors here? 
@@ -82,17 +76,17 @@ pub fn parse_crate_from_file( pub fn parse_crate_attrs_from_file( input: &Path, cfg: ast::CrateConfig, - sess: @ParseSess + sess: &ParseSess ) -> Vec<ast::Attribute> { let mut parser = new_parser_from_file(sess, cfg, input); let (inner, _) = parser.parse_inner_attrs_and_next(); - return inner; + inner } pub fn parse_crate_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, - sess: @ParseSess) + sess: &ParseSess) -> ast::Crate { let mut p = new_parser_from_source_str(sess, cfg, @@ -104,20 +98,20 @@ pub fn parse_crate_from_source_str(name: ~str, pub fn parse_crate_attrs_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, - sess: @ParseSess) + sess: &ParseSess) -> Vec<ast::Attribute> { let mut p = new_parser_from_source_str(sess, cfg, name, source); let (inner, _) = maybe_aborted(p.parse_inner_attrs_and_next(),p); - return inner; + inner } pub fn parse_expr_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, - sess: @ParseSess) + sess: &ParseSess) -> @ast::Expr { let mut p = new_parser_from_source_str(sess, cfg, name, source); maybe_aborted(p.parse_expr(), p) @@ -126,7 +120,7 @@ pub fn parse_expr_from_source_str(name: ~str, pub fn parse_item_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, - sess: @ParseSess) + sess: &ParseSess) -> Option<@ast::Item> { let mut p = new_parser_from_source_str(sess, cfg, name, source); let attrs = p.parse_outer_attributes(); @@ -136,7 +130,7 @@ pub fn parse_item_from_source_str(name: ~str, pub fn parse_meta_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, - sess: @ParseSess) + sess: &ParseSess) -> @ast::MetaItem { let mut p = new_parser_from_source_str(sess, cfg, name, source); maybe_aborted(p.parse_meta_item(),p) @@ -146,7 +140,7 @@ pub fn parse_stmt_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, attrs: Vec<ast::Attribute> , - sess: @ParseSess) + sess: &ParseSess) -> @ast::Stmt { let mut p = new_parser_from_source_str( sess, @@ 
-160,7 +154,7 @@ pub fn parse_stmt_from_source_str(name: ~str, pub fn parse_tts_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, - sess: @ParseSess) + sess: &ParseSess) -> Vec<ast::TokenTree> { let mut p = new_parser_from_source_str( sess, @@ -174,49 +168,45 @@ pub fn parse_tts_from_source_str(name: ~str, } // Create a new parser from a source string -pub fn new_parser_from_source_str(sess: @ParseSess, - cfg: ast::CrateConfig, - name: ~str, - source: ~str) - -> Parser { - filemap_to_parser(sess,string_to_filemap(sess,source,name),cfg) +pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, + cfg: ast::CrateConfig, + name: ~str, + source: ~str) + -> Parser<'a> { + filemap_to_parser(sess, string_to_filemap(sess, source, name), cfg) } /// Create a new parser, handling errors as appropriate /// if the file doesn't exist -pub fn new_parser_from_file( - sess: @ParseSess, - cfg: ast::CrateConfig, - path: &Path -) -> Parser { - filemap_to_parser(sess,file_to_filemap(sess,path,None),cfg) +pub fn new_parser_from_file<'a>(sess: &'a ParseSess, + cfg: ast::CrateConfig, + path: &Path) -> Parser<'a> { + filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg) } /// Given a session, a crate config, a path, and a span, add /// the file at the given path to the codemap, and return a parser. /// On an error, use the given span as the source of the problem. 
-pub fn new_sub_parser_from_file( - sess: @ParseSess, - cfg: ast::CrateConfig, - path: &Path, - sp: Span -) -> Parser { - filemap_to_parser(sess,file_to_filemap(sess,path,Some(sp)),cfg) +pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, + cfg: ast::CrateConfig, + path: &Path, + sp: Span) -> Parser<'a> { + filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg) } /// Given a filemap and config, return a parser -pub fn filemap_to_parser(sess: @ParseSess, - filemap: @FileMap, - cfg: ast::CrateConfig) -> Parser { - tts_to_parser(sess,filemap_to_tts(sess,filemap),cfg) +pub fn filemap_to_parser<'a>(sess: &'a ParseSess, + filemap: Rc<FileMap>, + cfg: ast::CrateConfig) -> Parser<'a> { + tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg) } // must preserve old name for now, because quote! from the *existing* // compiler expands into it -pub fn new_parser_from_tts(sess: @ParseSess, - cfg: ast::CrateConfig, - tts: Vec<ast::TokenTree> ) -> Parser { - tts_to_parser(sess,tts,cfg) +pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, + cfg: ast::CrateConfig, + tts: Vec<ast::TokenTree>) -> Parser<'a> { + tts_to_parser(sess, tts, cfg) } @@ -224,8 +214,8 @@ pub fn new_parser_from_tts(sess: @ParseSess, /// Given a session and a path and an optional span (for error reporting), /// add the path to the session's codemap and return the new filemap. 
-pub fn file_to_filemap(sess: @ParseSess, path: &Path, spanopt: Option<Span>) - -> @FileMap { +pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>) + -> Rc<FileMap> { let err = |msg: &str| { match spanopt { Some(sp) => sess.span_diagnostic.span_fatal(sp, msg), @@ -250,27 +240,27 @@ pub fn file_to_filemap(sess: @ParseSess, path: &Path, spanopt: Option<Span>) // given a session and a string, add the string to // the session's codemap and return the new filemap -pub fn string_to_filemap(sess: @ParseSess, source: ~str, path: ~str) - -> @FileMap { - sess.cm.new_filemap(path, source) +pub fn string_to_filemap(sess: &ParseSess, source: ~str, path: ~str) + -> Rc<FileMap> { + sess.span_diagnostic.cm.new_filemap(path, source) } // given a filemap, produce a sequence of token-trees -pub fn filemap_to_tts(sess: @ParseSess, filemap: @FileMap) +pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<ast::TokenTree> { // it appears to me that the cfg doesn't matter here... indeed, // parsing tt's probably shouldn't require a parser at all. 
let cfg = Vec::new(); - let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap); + let srdr = lexer::new_string_reader(&sess.span_diagnostic, filemap); let mut p1 = Parser(sess, cfg, ~srdr); p1.parse_all_token_trees() } // given tts and cfg, produce a parser -pub fn tts_to_parser(sess: @ParseSess, - tts: Vec<ast::TokenTree> , - cfg: ast::CrateConfig) -> Parser { - let trdr = lexer::new_tt_reader(sess.span_diagnostic, None, tts); +pub fn tts_to_parser<'a>(sess: &'a ParseSess, + tts: Vec<ast::TokenTree>, + cfg: ast::CrateConfig) -> Parser<'a> { + let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts); Parser(sess, cfg, ~trdr) } @@ -594,7 +584,8 @@ mod test { } #[test] fn parse_ident_pat () { - let mut parser = string_to_parser(~"b"); + let sess = new_parse_sess(); + let mut parser = string_to_parser(&sess, ~"b"); assert!(parser.parse_pat() == @ast::Pat{id: ast::DUMMY_NODE_ID, node: ast::PatIdent( diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 393282dd063..1d7bf2ef6da 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -59,7 +59,7 @@ pub trait ParserObsoleteMethods { fn eat_obsolete_ident(&mut self, ident: &str) -> bool; } -impl ParserObsoleteMethods for Parser { +impl<'a> ParserObsoleteMethods for Parser<'a> { /// Reports an obsolete syntax non-fatal error. 
fn obsolete(&mut self, sp: Span, kind: ObsoleteSyntax) { let (kind_str, desc) = match kind { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index f52effb8c81..27c86956499 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -284,8 +284,8 @@ struct ParsedItemsAndViewItems { /* ident is handled by common.rs */ -pub fn Parser(sess: @ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:) - -> Parser { +pub fn Parser<'a>(sess: &'a ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:) + -> Parser<'a> { let tok0 = rdr.next_token(); let span = tok0.sp; let placeholder = TokenAndSpan { @@ -320,8 +320,8 @@ pub fn Parser(sess: @ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:) } } -pub struct Parser { - sess: @ParseSess, +pub struct Parser<'a> { + sess: &'a ParseSess, cfg: CrateConfig, // the current token: token: token::Token, @@ -354,7 +354,7 @@ fn is_plain_ident_or_underscore(t: &token::Token) -> bool { is_plain_ident(t) || *t == token::UNDERSCORE } -impl Parser { +impl<'a> Parser<'a> { // convert a token to a string using self's reader pub fn token_to_str(token: &token::Token) -> ~str { token::to_str(token) @@ -4150,7 +4150,7 @@ impl Parser { outer_attrs: &[ast::Attribute], id_sp: Span) -> (ast::Item_, Vec<ast::Attribute> ) { - let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span)); + let mut prefix = Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span)); prefix.pop(); let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice()); let dir_path = prefix.join(&mod_path); |
