| field | value | date |
|---|---|---|
| author | John Clements <clements@racket-lang.org> | 2013-05-21 11:29:03 -0700 |
| committer | John Clements <clements@racket-lang.org> | 2013-06-05 12:01:39 -0700 |
| commit | 367eddf5b1777928ca0119932a877aaec757293a (patch) | |
| tree | 47d4bead5bb22748573ec88ebbcac4a2b5defe77 /src | |
| parent | 19cbd0d284026e785b24f5d619620d09a1b4d6f1 (diff) | |
| download | rust-367eddf5b1777928ca0119932a877aaec757293a.tar.gz, rust-367eddf5b1777928ca0119932a877aaec757293a.zip | |
remove interner field from string_reader
Diffstat (limited to 'src')
| mode | file | changes |
|---|---|---|
| -rw-r--r-- | src/libsyntax/parse/comments.rs | 4 |
| -rw-r--r-- | src/libsyntax/parse/lexer.rs | 36 |
| -rw-r--r-- | src/libsyntax/parse/mod.rs | 4 |

3 files changed, 16 insertions, 28 deletions
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index 00fe8cf545a..60aa32f22cf 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -327,9 +327,7 @@ pub fn gather_comments_and_literals(span_diagnostic:
     let itr = parse::token::mk_fake_ident_interner();
     let cm = CodeMap::new();
     let filemap = cm.new_filemap(path, src);
-    let rdr = lexer::new_low_level_string_reader(span_diagnostic,
-                                                 filemap,
-                                                 itr);
+    let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap);
 
     let mut comments: ~[cmnt] = ~[];
     let mut literals: ~[lit] = ~[];
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 7c6b2774d77..76149e7894d 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -50,25 +50,22 @@ pub struct StringReader {
     // The last character to be read
     curr: char,
     filemap: @codemap::FileMap,
-    interner: @token::ident_interner,
     /* cached: */
     peek_tok: token::Token,
     peek_span: span
 }
 
 pub fn new_string_reader(span_diagnostic: @span_handler,
-                         filemap: @codemap::FileMap,
-                         itr: @token::ident_interner)
+                         filemap: @codemap::FileMap)
                       -> @mut StringReader {
-    let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
+    let r = new_low_level_string_reader(span_diagnostic, filemap);
     string_advance_token(r); /* fill in peek_* */
     return r;
 }
 
 /* For comments.rs, which hackily pokes into 'pos' and 'curr' */
 pub fn new_low_level_string_reader(span_diagnostic: @span_handler,
-                                   filemap: @codemap::FileMap,
-                                   itr: @token::ident_interner)
+                                   filemap: @codemap::FileMap)
                                 -> @mut StringReader {
     // Force the initial reader bump to start on a fresh line
     let initial_char = '\n';
@@ -79,7 +76,6 @@ pub fn new_low_level_string_reader(span_diagnostic: @span_handler,
         col: CharPos(0),
         curr: initial_char,
         filemap: filemap,
-        interner: itr,
         /* dummy values; not read */
         peek_tok: token::EOF,
         peek_span: codemap::dummy_sp()
@@ -100,7 +96,6 @@ fn dup_string_reader(r: @mut StringReader) -> @mut StringReader {
         col: r.col,
         curr: r.curr,
         filemap: r.filemap,
-        interner: get_ident_interner(),
         peek_tok: copy r.peek_tok,
         peek_span: copy r.peek_span
     }
@@ -788,7 +783,6 @@ mod test {
 
     // represents a testing reader (incl. both reader and interner)
     struct Env {
-        interner: @token::ident_interner,
         string_reader: @mut StringReader
     }
 
@@ -796,17 +790,15 @@ mod test {
     fn setup(teststr: ~str) -> Env {
         let cm = CodeMap::new();
         let fm = cm.new_filemap(~"zebra.rs", @teststr);
-        let ident_interner = token::get_ident_interner();
         let span_handler =
             diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
         Env {
-            interner: ident_interner,
-            string_reader: new_string_reader(span_handler,fm,ident_interner)
+            string_reader: new_string_reader(span_handler,fm)
         }
     }
 
     #[test] fn t1 () {
-        let Env {interner: ident_interner, string_reader} =
+        let Env {string_reader} =
            setup(~"/* my source file */ \
                    fn main() { io::println(~\"zebra\"); }\n");
        let id = str_to_ident("fn");
@@ -838,39 +830,39 @@ mod test {
     }
 
     // make the identifier by looking up the string in the interner
-    fn mk_ident (env: Env, id: &str, is_mod_name: bool) -> token::Token {
+    fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
         token::IDENT (str_to_ident(id),is_mod_name)
     }
 
     #[test] fn doublecolonparsing () {
         let env = setup (~"a b");
         check_tokenization (env,
-                           ~[mk_ident (env,"a",false),
-                             mk_ident (env,"b",false)]);
+                           ~[mk_ident("a",false),
+                             mk_ident("b",false)]);
     }
 
     #[test] fn dcparsing_2 () {
         let env = setup (~"a::b");
         check_tokenization (env,
-                           ~[mk_ident (env,"a",true),
+                           ~[mk_ident("a",true),
                              token::MOD_SEP,
-                             mk_ident (env,"b",false)]);
+                             mk_ident("b",false)]);
     }
 
     #[test] fn dcparsing_3 () {
         let env = setup (~"a ::b");
         check_tokenization (env,
-                           ~[mk_ident (env,"a",false),
+                           ~[mk_ident("a",false),
                              token::MOD_SEP,
-                             mk_ident (env,"b",false)]);
+                             mk_ident("b",false)]);
     }
 
     #[test] fn dcparsing_4 () {
         let env = setup (~"a:: b");
         check_tokenization (env,
-                           ~[mk_ident (env,"a",true),
+                           ~[mk_ident("a",true),
                              token::MOD_SEP,
-                             mk_ident (env,"b",false)]);
+                             mk_ident("b",false)]);
     }
 
     #[test] fn character_a() {
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 995da6d6147..f5c0f309add 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -306,9 +306,7 @@ pub fn filemap_to_tts(sess: @mut ParseSess, filemap: @FileMap)
     // it appears to me that the cfg doesn't matter here... indeed,
     // parsing tt's probably shouldn't require a parser at all.
     let cfg = ~[];
-    let srdr = lexer::new_string_reader(copy sess.span_diagnostic,
-                                        filemap,
-                                        get_ident_interner());
+    let srdr = lexer::new_string_reader(copy sess.span_diagnostic, filemap);
     let p1 = Parser(sess, cfg, srdr as @reader);
     p1.parse_all_token_trees()
 }
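The change itself is mechanical: `StringReader` stops carrying an `@token::ident_interner` handle, the two constructors drop their `itr` parameter, and callers (including the lexer tests) reach the shared interner through `get_ident_interner()` and `str_to_ident` instead. The sketch below is a minimal modern-Rust illustration of that pattern, not the 2013 API: the `interner()` accessor, the `u32` symbol type, and the stripped-down `StringReader`/`new_string_reader` are assumptions chosen to keep the example self-contained and compilable.

```rust
use std::collections::HashMap;
use std::sync::{Mutex, OnceLock};

// Global ident-interner stand-in: maps strings to stable u32 symbols.
// (Hypothetical; the real 2013 interner sits behind token::get_ident_interner().)
fn interner() -> &'static Mutex<HashMap<String, u32>> {
    static INTERNER: OnceLock<Mutex<HashMap<String, u32>>> = OnceLock::new();
    INTERNER.get_or_init(|| Mutex::new(HashMap::new()))
}

// Analogue of str_to_ident: callers go straight to the shared interner
// instead of threading an `itr` handle through every constructor.
fn str_to_ident(s: &str) -> u32 {
    let mut map = interner().lock().unwrap();
    let next = map.len() as u32;
    *map.entry(s.to_string()).or_insert(next)
}

// After the change the reader keeps only lexing state; no interner field.
struct StringReader {
    src: String,
    pos: usize,
}

// The constructor signature shrinks accordingly: no interner parameter.
fn new_string_reader(src: &str) -> StringReader {
    StringReader { src: src.to_string(), pos: 0 }
}

fn main() {
    let rdr = new_string_reader("fn main() {}");
    // Tests can build expected tokens with the global helper, much as the
    // updated mk_ident does, without an Env.interner field.
    assert_eq!(str_to_ident("fn"), str_to_ident("fn"));
    println!("reader over {} bytes at pos {}", rdr.src.len(), rdr.pos);
}
```

Trading an explicit handle for a process-wide accessor shortens every constructor signature at the cost of a hidden dependency, which is exactly the trade this commit makes for the lexer and its tests.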
