| field | value | date |
|---|---|---|
| author | Patrick Walton <pcwalton@mimiga.net> | 2012-08-14 15:27:06 -0700 |
| committer | Patrick Walton <pcwalton@mimiga.net> | 2012-08-15 16:20:31 -0700 |
| commit | fe9d07dda6e884df9873376c85941cc766dbd1dc (patch) | |
| tree | 426406223615438c3ea72db0fe7130dfb9ded1d8 /src/libsyntax | |
| parent | fd0f616ceb17ac283717322674e35b7589a27232 (diff) | |
| download | rust-fe9d07dda6e884df9873376c85941cc766dbd1dc.tar.gz, rust-fe9d07dda6e884df9873376c85941cc766dbd1dc.zip | |
rustc: "as Trait" can now be written "as @Trait".
There is also code for ~Trait and &Trait, but these are currently (incorrectly) synonyms for "as @Trait" and "as &Trait".
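For orientation, a minimal sketch of the new surface syntax in the pre-1.0 Rust of this era. The trait, impl, and values below are hypothetical (not from this commit), and per the note above the `~`/`&` forms do not yet carry their intended meaning:

```rust
trait animal {
    fn speak() -> ~str;
}

// Hypothetical impl, written in roughly the trait-impl syntax of this period.
impl int: animal {
    fn speak() -> ~str { ~"moo" }
}

fn main() {
    let x = @17;
    // Previously the trait-object cast was written `x as animal`;
    // after this change the pointer sigil can be spelled out:
    let a = x as @animal;
    log(error, a.speak());
    // `x as ~animal` and `x as &animal` also parse now, but per the
    // commit message they currently behave like the forms noted above.
}
```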
Diffstat (limited to 'src/libsyntax')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | src/libsyntax/ext/tt/earley_parser.rs | 4 |
| -rw-r--r-- | src/libsyntax/ext/tt/transcribe.rs | 6 |
| -rw-r--r-- | src/libsyntax/parse.rs | 6 |
| -rw-r--r-- | src/libsyntax/parse/comments.rs | 4 |
| -rw-r--r-- | src/libsyntax/parse/common.rs | 2 |
| -rw-r--r-- | src/libsyntax/parse/lexer.rs | 26 |
| -rw-r--r-- | src/libsyntax/parse/parser.rs | 2 |
| -rw-r--r-- | src/libsyntax/print/pprust.rs | 14 |
8 files changed, 32 insertions, 32 deletions
```diff
diff --git a/src/libsyntax/ext/tt/earley_parser.rs b/src/libsyntax/ext/tt/earley_parser.rs
index b6dc1c05a2c..77ba941015b 100644
--- a/src/libsyntax/ext/tt/earley_parser.rs
+++ b/src/libsyntax/ext/tt/earley_parser.rs
@@ -284,7 +284,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
                          nts, next_eis.len()});
         } else if (bb_eis.len() == 0u && next_eis.len() == 0u) {
             return failure(sp, ~"No rules expected the token "
-                           + to_str(*rdr.interner(), tok));
+                           + to_str(rdr.interner(), tok));
         } else if (next_eis.len() > 0u) {
             /* Now process the next token */
             while(next_eis.len() > 0u) {
@@ -334,7 +334,7 @@ fn parse_nt(p: parser, name: ~str) -> nonterminal {
       ~"ident" => match copy p.token {
         token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) }
         _ => p.fatal(~"expected ident, found "
-                     + token::to_str(*p.reader.interner(), copy p.token))
+                     + token::to_str(p.reader.interner(), copy p.token))
       },
       ~"path" => token::nt_path(p.parse_path_with_tps(false)),
       ~"tt" => {
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 693b538ec6d..8acf0e8ec6d 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -25,7 +25,7 @@ type tt_frame = @{
 
 type tt_reader = @{
     sp_diag: span_handler,
-    interner: @interner<@~str>,
+    interner: interner<@~str>,
     mut cur: tt_frame,
     /* for MBE-style macro transcription */
     interpolations: std::map::hashmap<ident, @named_match>,
@@ -39,7 +39,7 @@ type tt_reader = @{
 /** This can do Macro-By-Example transcription. On the other hand, if
  * `src` contains no `tt_seq`s and `tt_nonterminal`s, `interp` can (and
  * should) be none. */
-fn new_tt_reader(sp_diag: span_handler, itr: @interner<@~str>,
+fn new_tt_reader(sp_diag: span_handler, itr: interner<@~str>,
                  interp: option<std::map::hashmap<ident,@named_match>>,
                  src: ~[ast::token_tree])
     -> tt_reader {
@@ -237,4 +237,4 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/libsyntax/parse.rs b/src/libsyntax/parse.rs
index 6936eeed8f9..deaaaf69b1c 100644
--- a/src/libsyntax/parse.rs
+++ b/src/libsyntax/parse.rs
@@ -24,7 +24,7 @@ type parse_sess = @{
     cm: codemap::codemap,
     mut next_id: node_id,
     span_diagnostic: span_handler,
-    interner: @interner::interner<@~str>,
+    interner: interner::interner<@~str>,
     // these two must be kept up to date
     mut chpos: uint,
     mut byte_pos: uint
@@ -35,7 +35,7 @@ fn new_parse_sess(demitter: option<emitter>) -> parse_sess {
     return @{cm: cm, mut next_id: 1,
              span_diagnostic: mk_span_handler(mk_handler(demitter), cm),
-             interner: @interner::mk::<@~str>(|x| str::hash(*x),
+             interner: interner::mk::<@~str>(|x| str::hash(*x),
                                               |x,y| str::eq(*x, *y)),
              mut chpos: 0u, mut byte_pos: 0u};
 }
@@ -45,7 +45,7 @@ fn new_parse_sess_special_handler(sh: span_handler, cm: codemap::codemap)
     return @{cm: cm, mut next_id: 1,
              span_diagnostic: sh,
-             interner: @interner::mk::<@~str>(|x| str::hash(*x),
+             interner: interner::mk::<@~str>(|x| str::hash(*x),
                                               |x,y| str::eq(*x, *y)),
              mut chpos: 0u, mut byte_pos: 0u};
 }
diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs
index 2da34539321..701dd9301ca 100644
--- a/src/libsyntax/parse/comments.rs
+++ b/src/libsyntax/parse/comments.rs
@@ -276,7 +276,7 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
                                 srdr: io::Reader)
     -> {cmnts: ~[cmnt], lits: ~[lit]} {
     let src = @str::from_bytes(srdr.read_whole_stream());
-    let itr = @interner::mk::<@~str>(
+    let itr = interner::mk::<@~str>(
         |x| str::hash(*x),
         |x,y| str::eq(*x, *y)
     );
@@ -311,7 +311,7 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
             vec::push(literals, {lit: s, pos: sp.lo});
             log(debug, ~"tok lit: " + s);
         } else {
-            log(debug, ~"tok: " + token::to_str(*rdr.interner, tok));
+            log(debug, ~"tok: " + token::to_str(rdr.interner, tok));
         }
         first_read = false;
     }
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index 4038578d3f8..59dad16dc44 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -19,7 +19,7 @@ fn seq_sep_none() -> seq_sep {
 }
 
 fn token_to_str(reader: reader, ++token: token::token) -> ~str {
-    token::to_str(*reader.interner(), token)
+    token::to_str(reader.interner(), token)
 }
 
 trait parser_common {
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 99768d558ab..aaafe958efb 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -14,7 +14,7 @@ trait reader {
     fn next_token() -> {tok: token::token, sp: span};
     fn fatal(~str) -> !;
     fn span_diag() -> span_handler;
-    pure fn interner() -> @interner<@~str>;
+    pure fn interner() -> interner<@~str>;
     fn peek() -> {tok: token::token, sp: span};
     fn dup() -> reader;
 }
@@ -27,7 +27,7 @@ type string_reader = @{
     mut curr: char,
     mut chpos: uint,
     filemap: codemap::filemap,
-    interner: @interner<@~str>,
+    interner: interner<@~str>,
     /* cached: */
     mut peek_tok: token::token,
     mut peek_span: span
@@ -35,7 +35,7 @@ fn new_string_reader(span_diagnostic: span_handler,
                      filemap: codemap::filemap,
-                     itr: @interner<@~str>) -> string_reader {
+                     itr: interner<@~str>) -> string_reader {
     let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
     string_advance_token(r); /* fill in peek_* */
     return r;
@@ -44,7 +44,7 @@
 /* For comments.rs, which hackily pokes into 'pos' and 'curr' */
 fn new_low_level_string_reader(span_diagnostic: span_handler,
                                filemap: codemap::filemap,
-                               itr: @interner<@~str>)
+                               itr: interner<@~str>)
     -> string_reader {
     let r = @{span_diagnostic: span_diagnostic, src: filemap.src,
               mut col: 0u, mut pos: 0u, mut curr: -1 as char,
@@ -79,7 +79,7 @@ impl string_reader: reader {
         self.span_diagnostic.span_fatal(copy self.peek_span, m)
     }
     fn span_diag() -> span_handler { self.span_diagnostic }
-    pure fn interner() -> @interner<@~str> { self.interner }
+    pure fn interner() -> interner<@~str> { self.interner }
     fn peek() -> {tok: token::token, sp: span} {
         {tok: self.peek_tok, sp: self.peek_span}
     }
@@ -101,7 +101,7 @@ impl tt_reader: reader {
         self.sp_diag.span_fatal(copy self.cur_span, m);
     }
     fn span_diag() -> span_handler { self.sp_diag }
-    pure fn interner() -> @interner<@~str> { self.interner }
+    pure fn interner() -> interner<@~str> { self.interner }
     fn peek() -> {tok: token::token, sp: span} {
         { tok: self.cur_tok, sp: self.cur_span }
     }
@@ -219,7 +219,7 @@ fn consume_any_line_comment(rdr: string_reader)
                 bump(rdr);
             }
             return some({
-                tok: token::DOC_COMMENT((*rdr.interner).intern(@acc)),
+                tok: token::DOC_COMMENT(rdr.interner.intern(@acc)),
                 sp: ast_util::mk_sp(start_chpos, rdr.chpos)
             });
         } else {
@@ -264,7 +264,7 @@ fn consume_block_comment(rdr: string_reader)
         bump(rdr);
         bump(rdr);
         return some({
-            tok: token::DOC_COMMENT((*rdr.interner).intern(@acc)),
+            tok: token::DOC_COMMENT(rdr.interner.intern(@acc)),
             sp: ast_util::mk_sp(start_chpos, rdr.chpos)
         });
     }
@@ -398,12 +398,12 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
         if c == '3' && n == '2' {
             bump(rdr);
             bump(rdr);
-            return token::LIT_FLOAT((*rdr.interner).intern(@num_str),
+            return token::LIT_FLOAT(rdr.interner.intern(@num_str),
                                     ast::ty_f32);
         } else if c == '6' && n == '4' {
             bump(rdr);
             bump(rdr);
-            return token::LIT_FLOAT((*rdr.interner).intern(@num_str),
+            return token::LIT_FLOAT(rdr.interner.intern(@num_str),
                                     ast::ty_f64);
             /* FIXME (#2252): if this is out of range for either a
             32-bit or 64-bit float, it won't be noticed till the
@@ -413,7 +413,7 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
         }
     }
     if is_float {
-        return token::LIT_FLOAT((*rdr.interner).intern(@num_str), ast::ty_f);
+        return token::LIT_FLOAT(rdr.interner.intern(@num_str), ast::ty_f);
     } else {
         if str::len(num_str) == 0u {
             rdr.fatal(~"no valid digits found for number");
@@ -461,7 +461,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
         let is_mod_name = c == ':' && nextch(rdr) == ':';
 
         // FIXME: perform NFKC normalization here. (Issue #2253)
-        return token::IDENT((*rdr.interner).intern(@accum_str), is_mod_name);
+        return token::IDENT(rdr.interner.intern(@accum_str), is_mod_name);
     }
     if is_dec_digit(c) {
         return scan_number(c, rdr);
     }
@@ -630,7 +630,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
             }
         }
         bump(rdr);
-        return token::LIT_STR((*rdr.interner).intern(@accum_str));
+        return token::LIT_STR(rdr.interner.intern(@accum_str));
       }
       '-' => {
         if nextch(rdr) == '>' {
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index f894f6fae82..642cdaa8fab 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -263,7 +263,7 @@ class parser {
         self.sess.span_diagnostic.span_warn(copy self.span, m)
     }
     pure fn get_str(i: token::str_num) -> @~str {
-        (*self.reader.interner()).get(i)
+        self.reader.interner().get(i)
     }
     fn get_id() -> node_id { next_node_id(self.sess) }
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index de1cd2c3df0..39bc18529c9 100644
--- a/src/libsyntax/print/pprust.rs
+++ b/src/libsyntax/print/pprust.rs
@@ -11,7 +11,7 @@ import dvec::{DVec, dvec};
 import parse::classify::*;
 import util::interner;
 
-type ident_interner = @interner::interner<@~str>;
+type ident_interner = interner::interner<@~str>;
 
 // The ps is stored here to prevent recursive type.
 enum ann_node {
@@ -30,7 +30,7 @@ fn no_ann() -> pp_ann {
 
 type ps = @{s: pp::printer,
             cm: option<codemap>,
-            intr: @interner::interner<@~str>,
+            intr: interner::interner<@~str>,
             comments: option<~[comments::cmnt]>,
            literals: option<~[comments::lit]>,
            mut cur_cmnt: uint,
@@ -51,8 +51,8 @@ fn end(s: ps) {
 fn rust_printer(writer: io::Writer) -> ps {
     return @{s: pp::mk_printer(writer, default_columns),
              cm: none::<codemap>,
-             intr: @interner::mk::<@~str>(|x| str::hash(*x),
-                                          |x,y| str::eq(*x, *y)),
+             intr: interner::mk::<@~str>(|x| str::hash(*x),
+                                         |x,y| str::eq(*x, *y)),
              comments: none::<~[comments::cmnt]>,
              literals: none::<~[comments::lit]>,
              mut cur_cmnt: 0u,
@@ -81,7 +81,7 @@ const default_columns: uint = 78u;
 // Requires you to pass an input filename and reader so that
 // it can scan the input text for comments and literals to
 // copy forward.
-fn print_crate(cm: codemap, intr: @interner::interner<@~str>,
+fn print_crate(cm: codemap, intr: interner::interner<@~str>,
                span_diagnostic: diagnostic::span_handler,
                crate: @ast::crate, filename: ~str, in: io::Reader,
                out: io::Writer, ann: pp_ann, is_expanded: bool) {
@@ -690,14 +690,14 @@ fn print_tt(s: ps, tt: ast::token_tree) {
         }
         _ => { s.s.token_tree_last_was_ident = false; }
       }
-      word(s.s, parse::token::to_str(*s.intr, tk));
+      word(s.s, parse::token::to_str(s.intr, tk));
     }
     ast::tt_seq(_, tts, sep, zerok) => {
       word(s.s, ~"$(");
      for tts.each() |tt_elt| { print_tt(s, tt_elt); }
      word(s.s, ~")");
      match sep {
-        some(tk) => word(s.s, parse::token::to_str(*s.intr, tk)),
+        some(tk) => word(s.s, parse::token::to_str(s.intr, tk)),
        none => ()
      }
      word(s.s, if zerok { ~"*" } else { ~"+" });
```
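The libsyntax side of the patch is mechanical: the token interner is no longer passed around behind an extra `@` box, so `@interner<@~str>` becomes `interner<@~str>` and call sites drop the explicit dereference. A hypothetical before/after sketch of that call-site pattern (the `_old`/`_new` helper names are made up for illustration; the real change is visible in `token_to_str` in `parse/common.rs` above):

```rust
// Before this patch: interner() returns @interner<@~str>, so callers
// dereference the managed box before handing it to to_str.
fn token_to_str_old(reader: reader, ++token: token::token) -> ~str {
    token::to_str(*reader.interner(), token)
}

// After this patch: interner<@~str> is passed directly, no deref needed.
fn token_to_str_new(reader: reader, ++token: token::token) -> ~str {
    token::to_str(reader.interner(), token)
}
```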
