| author | Paul Stansifer <paul.stansifer@gmail.com> | 2012-07-18 16:18:02 -0700 |
|---|---|---|
| committer | Paul Stansifer <paul.stansifer@gmail.com> | 2012-08-22 14:59:25 -0700 |
| commit | 1153b5dcc86c3567b0a86e441938f05d4f2e295b (patch) | |
| tree | fdcbcea39abecb4ad1ea5145e62e8c013b05e930 | |
| parent | 7317bf8792ebb3f27768109b7d574ee0806cc5e5 (diff) | |
intern identifiers
94 files changed, 1662 insertions, 1464 deletions
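The change replaces `type ident = @~str` with an index into a shared string table (`type ident = token::str_num`), so the AST carries small integers and every consumer that needs the text goes through the interner (`intr.get(id)` to read a name, `intr.intern(@s)` to add one, `gensym` for fresh names). Below is a minimal, self-contained sketch of that pattern in modern Rust; the `Interner` struct, its field layout, and the `main` driver are illustrative assumptions rather than the compiler's actual `ident_interner`, but the `intern`/`get`/`gensym` method names mirror the calls visible in the diff.

```rust
// Sketch of identifier interning: names become small integer indices into a
// table of strings, so comparing idents is an integer compare and each
// distinct string is stored once.
use std::collections::HashMap;

/// An interned identifier: an index into the interner's table.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct Ident(pub usize);

#[derive(Default)]
pub struct Interner {
    map: HashMap<String, Ident>, // string -> index, for deduplication
    strings: Vec<String>,        // index -> string, for lookup
}

impl Interner {
    /// Return the existing index for `s`, or add it to the table.
    pub fn intern(&mut self, s: &str) -> Ident {
        if let Some(&id) = self.map.get(s) {
            return id;
        }
        let id = Ident(self.strings.len());
        self.strings.push(s.to_string());
        self.map.insert(s.to_string(), id);
        id
    }

    /// Make a fresh identifier that never compares equal to an interned
    /// name (analogous to the `gensym(@~"arg")` call in ext/base.rs below).
    pub fn gensym(&mut self, s: &str) -> Ident {
        let id = Ident(self.strings.len());
        self.strings.push(s.to_string());
        id
    }

    /// Look an index back up to its string, as `interner.get(id)` does
    /// throughout the diff.
    pub fn get(&self, id: Ident) -> &str {
        &self.strings[id.0]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("name");
    let b = interner.intern("name");
    assert_eq!(a, b);                  // same string, same index
    assert_eq!(interner.get(a), "name");
    let g = interner.gensym("name");
    assert_ne!(a, g);                  // gensym'd idents are always fresh
}
```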
diff --git a/src/cargo/cargo.rs b/src/cargo/cargo.rs index 115ae566c24..30d5fef72ed 100644 --- a/src/cargo/cargo.rs +++ b/src/cargo/cargo.rs @@ -225,7 +225,7 @@ fn load_link(mis: ~[@ast::meta_item]) -> (option<~str>, for mis.each |a| { match a.node { ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => { - match *v { + match v { ~"name" => name = some(*s), ~"vers" => vers = some(*s), ~"uuid" => uuid = some(*s), @@ -252,15 +252,15 @@ fn load_crate(filename: ~str) -> option<crate> { for c.node.attrs.each |a| { match a.node.value.node { ast::meta_name_value(v, {node: ast::lit_str(s), span: _}) => { - match *v { - ~"desc" => desc = some(*v), - ~"sigs" => sigs = some(*v), - ~"crate_type" => crate_type = some(*v), + match v { + ~"desc" => desc = some(v), + ~"sigs" => sigs = some(v), + ~"crate_type" => crate_type = some(v), _ => { } } } ast::meta_list(v, mis) => { - if *v == ~"link" { + if v == ~"link" { let (n, v, u) = load_link(mis); name = n; vers = v; @@ -278,13 +278,15 @@ fn load_crate(filename: ~str) -> option<crate> { mut deps: ~[~str] }; - fn goto_view_item(e: env, i: @ast::view_item) { + fn goto_view_item(ps: syntax::parse::parse_sess, e: env, + i: @ast::view_item) { match i.node { ast::view_item_use(ident, metas, id) => { let name_items = attr::find_meta_items_by_name(metas, ~"name"); let m = if name_items.is_empty() { - metas + ~[attr::mk_name_value_item_str(@~"name", *ident)] + metas + ~[attr::mk_name_value_item_str( + ~"name", *ps.interner.get(ident))] } else { metas }; @@ -297,9 +299,9 @@ fn load_crate(filename: ~str) -> option<crate> { some(value) => { let name = attr::get_meta_item_name(item); - match *name { - ~"vers" => attr_vers = *value, - ~"from" => attr_from = *value, + match name { + ~"vers" => attr_vers = value, + ~"from" => attr_from = value, _ => () } } @@ -311,11 +313,11 @@ fn load_crate(filename: ~str) -> option<crate> { attr_from } else { if !str::is_empty(attr_vers) { - *attr_name + ~"@" + attr_vers - } else { *attr_name } + ps.interner.get(attr_name) + ~"@" + attr_vers + } else { *ps.interner.get(attr_name) } }; - match *attr_name { + match *ps.interner.get(attr_name) { ~"std" | ~"core" => (), _ => vec::push(e.deps, query) } @@ -330,7 +332,7 @@ fn load_crate(filename: ~str) -> option<crate> { mut deps: ~[] }; let v = visit::mk_simple_visitor(@{ - visit_view_item: |a| goto_view_item(e, a), + visit_view_item: |a| goto_view_item(sess, e, a), visit_item: |a| goto_item(e, a), with *visit::default_simple_visitor() }); diff --git a/src/fuzzer/fuzzer.rs b/src/fuzzer/fuzzer.rs index b741b43b99b..9c42b29304d 100644 --- a/src/fuzzer/fuzzer.rs +++ b/src/fuzzer/fuzzer.rs @@ -238,20 +238,23 @@ fn check_variants_T<T: copy>( filename: ~str, thing_label: ~str, things: ~[T], - stringifier: fn@(@T) -> ~str, + stringifier: fn@(@T, syntax::parse::token::ident_interner) -> ~str, replacer: fn@(ast::crate, uint, T, test_mode) -> ast::crate, cx: context ) { error!{"%s contains %u %s objects", filename, vec::len(things), thing_label}; + // Assuming we're not generating any token_trees + let intr = syntax::parse::token::mk_fake_ident_interner(); + let L = vec::len(things); if L < 100u { do under(uint::min(L, 20u)) |i| { log(error, ~"Replacing... #" + uint::str(i)); do under(uint::min(L, 30u)) |j| { - log(error, ~"With... " + stringifier(@things[j])); + log(error, ~"With... " + stringifier(@things[j], intr)); let crate2 = @replacer(crate, i, things[j], cx.mode); // It would be best to test the *crate* for stability, but // testing the string for stability is easier and ok for now. 
@@ -259,8 +262,7 @@ fn check_variants_T<T: copy>( let str3 = @as_str(|a|pprust::print_crate( codemap, - // Assuming we're not generating any token_trees - syntax::parse::token::mk_ident_interner(), + intr, diagnostic::mk_span_handler(handler, codemap), crate2, filename, @@ -422,7 +424,7 @@ fn parse_and_print(code: @~str) -> ~str { pprust::print_crate( sess.cm, // Assuming there are no token_trees - syntax::parse::token::mk_ident_interner(), + syntax::parse::token::mk_fake_ident_interner(), sess.span_diagnostic, crate, filename, @@ -570,7 +572,7 @@ fn check_variants(files: ~[~str], cx: context) { as_str(|a| pprust::print_crate( sess.cm, // Assuming no token_trees - syntax::parse::token::mk_ident_interner(), + syntax::parse::token::mk_fake_ident_interner(), sess.span_diagnostic, crate, file, diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index c7ff6671e0f..7d9803b4cc3 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -30,8 +30,18 @@ fn deserialize_span<D>(_d: D) -> span { #[auto_serialize] type spanned<T> = {node: T, span: span}; -#[auto_serialize] -type ident = @~str; +fn serialize_ident<S: serializer>(s: S, i: ident) { + let intr = unsafe{ task::local_data_get(parse::token::interner_key) }; + + s.emit_str(*(*intr.get()).get(i)); +} +fn deserialize_ident<D: deserializer>(d: D) -> ident { + let intr = unsafe{ task::local_data_get(parse::token::interner_key) }; + + (*intr.get()).intern(@d.read_str()) +} + +type ident = token::str_num; // Functions may or may not have names. #[auto_serialize] @@ -127,9 +137,9 @@ type meta_item = spanned<meta_item_>; #[auto_serialize] enum meta_item_ { - meta_word(ident), - meta_list(ident, ~[@meta_item]), - meta_name_value(ident, lit), + meta_word(~str), + meta_list(~str, ~[@meta_item]), + meta_name_value(~str, lit), } #[auto_serialize] @@ -815,36 +825,6 @@ enum inlined_item { ii_dtor(class_dtor, ident, ~[ty_param], def_id /* parent id */) } -// Convenience functions - -pure fn simple_path(id: ident, span: span) -> @path { - @{span: span, - global: false, - idents: ~[id], - rp: none, - types: ~[]} -} - -pure fn empty_span() -> span { - {lo: 0, hi: 0, expn_info: none} -} - -// Convenience implementations - -impl ident: ops::add<ident,@path> { - pure fn add(&&id: ident) -> @path { - simple_path(self, empty_span()) + id - } -} - -impl @path: ops::add<ident,@path> { - pure fn add(&&id: ident) -> @path { - @{ - idents: vec::append_one(self.idents, id) - with *self - } - } -} // // Local Variables: diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 70aaa5e8be6..2cc8cb23f2e 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -4,31 +4,33 @@ import ast::*; import print::pprust; import ast_util::{path_to_ident, stmt_id}; import diagnostic::span_handler; +import parse::token::ident_interner; enum path_elt { path_mod(ident), path_name(ident) } type path = ~[path_elt]; /* FIXMEs that say "bad" are as per #2543 */ -fn path_to_str_with_sep(p: path, sep: ~str) -> ~str { +fn path_to_str_with_sep(p: path, sep: ~str, itr: ident_interner) -> ~str { let strs = do vec::map(p) |e| { match e { - path_mod(s) => /* FIXME (#2543) */ copy *s, - path_name(s) => /* FIXME (#2543) */ copy *s + path_mod(s) => *itr.get(s), + path_name(s) => *itr.get(s) } }; str::connect(strs, sep) } -fn path_ident_to_str(p: path, i: ident) -> ~str { +fn path_ident_to_str(p: path, i: ident, itr: ident_interner) -> ~str { if vec::is_empty(p) { - /* FIXME (#2543) */ copy *i + //FIXME /* FIXME (#2543) */ copy *i + *itr.get(i) } else { - fmt!{"%s::%s", 
path_to_str(p), *i} + fmt!{"%s::%s", path_to_str(p, itr), *itr.get(i)} } } -fn path_to_str(p: path) -> ~str { - path_to_str_with_sep(p, ~"::") +fn path_to_str(p: path, itr: ident_interner) -> ~str { + path_to_str_with_sep(p, ~"::", itr) } enum ast_node { @@ -291,43 +293,42 @@ fn map_stmt(stmt: @stmt, cx: ctx, v: vt) { visit::visit_stmt(stmt, cx, v); } -fn node_id_to_str(map: map, id: node_id) -> ~str { +fn node_id_to_str(map: map, id: node_id, itr: ident_interner) -> ~str { match map.find(id) { none => { fmt!{"unknown node (id=%d)", id} } some(node_item(item, path)) => { - fmt!{"item %s (id=%?)", path_ident_to_str(*path, item.ident), id} + fmt!{"item %s (id=%?)", path_ident_to_str(*path, item.ident, itr), id} } some(node_foreign_item(item, abi, path)) => { fmt!{"foreign item %s with abi %? (id=%?)", - path_ident_to_str(*path, item.ident), abi, id} + path_ident_to_str(*path, item.ident, itr), abi, id} } some(node_method(m, impl_did, path)) => { fmt!{"method %s in %s (id=%?)", - *m.ident, path_to_str(*path), id} + *itr.get(m.ident), path_to_str(*path, itr), id} } some(node_trait_method(tm, impl_did, path)) => { let m = ast_util::trait_method_to_ty_method(*tm); fmt!{"method %s in %s (id=%?)", - *m.ident, path_to_str(*path), id} + *itr.get(m.ident), path_to_str(*path, itr), id} } some(node_variant(variant, def_id, path)) => { fmt!{"variant %s in %s (id=%?)", - *variant.node.name, path_to_str(*path), id} + *itr.get(variant.node.name), path_to_str(*path, itr), id} } some(node_expr(expr)) => { - fmt!{"expr %s (id=%?)", - pprust::expr_to_str(expr), id} + fmt!{"expr %s (id=%?)", pprust::expr_to_str(expr, itr), id} } some(node_stmt(stmt)) => { fmt!{"stmt %s (id=%?)", - pprust::stmt_to_str(*stmt), id} + pprust::stmt_to_str(*stmt, itr), id} } // FIXMEs are as per #2410 some(node_export(_, path)) => { fmt!{"export %s (id=%?)", // add more info here - path_to_str(*path), id} + path_to_str(*path, itr), id} } some(node_arg(_, _)) => { // add more info here fmt!{"arg (id=%?)", id} diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index d949c546155..fe1924d1dbe 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -21,13 +21,14 @@ pure fn mk_sp(lo: uint, hi: uint) -> span { // make this a const, once the compiler supports it pure fn dummy_sp() -> span { return mk_sp(0u, 0u); } -pure fn path_name(p: @path) -> ~str { path_name_i(p.idents) } -pure fn path_name_i(idents: ~[ident]) -> ~str { + +pure fn path_name_i(idents: ~[ident], intr: token::ident_interner) -> ~str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") - str::connect(idents.map(|i|*i), ~"::") + str::connect(idents.map(|i| *intr.get(i)), ~"::") } + pure fn path_to_ident(p: @path) -> ident { vec::last(p.idents) } pure fn local_def(id: node_id) -> def_id { {crate: local_crate, node: id} } @@ -408,7 +409,8 @@ fn dtor_dec() -> fn_decl { let nil_t = @{id: 0, node: ty_nil, span: dummy_sp()}; // dtor has one argument, of type () {inputs: ~[{mode: ast::expl(ast::by_ref), - ty: nil_t, ident: @~"_", id: 0}], + ty: nil_t, ident: parse::token::special_idents::underscore, + id: 0}], output: nil_t, purity: impure_fn, cf: return_val} } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 7c04d6e4570..71fc7f94e36 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -49,23 +49,23 @@ export require_unique_names; /* Constructors */ -fn mk_name_value_item_str(+name: ast::ident, +value: ~str) -> +fn mk_name_value_item_str(name: ~str, +value: ~str) -> @ast::meta_item { let value_lit = 
dummy_spanned(ast::lit_str(@value)); return mk_name_value_item(name, value_lit); } -fn mk_name_value_item(+name: ast::ident, +value: ast::lit) +fn mk_name_value_item(name: ~str, +value: ast::lit) -> @ast::meta_item { return @dummy_spanned(ast::meta_name_value(name, value)); } -fn mk_list_item(+name: ast::ident, +items: ~[@ast::meta_item]) -> +fn mk_list_item(name: ~str, +items: ~[@ast::meta_item]) -> @ast::meta_item { return @dummy_spanned(ast::meta_list(name, items)); } -fn mk_word_item(+name: ast::ident) -> @ast::meta_item { +fn mk_word_item(name: ~str) -> @ast::meta_item { return @dummy_spanned(ast::meta_word(name)); } @@ -78,7 +78,7 @@ fn mk_sugared_doc_attr(text: ~str, lo: uint, hi: uint) -> ast::attribute { let lit = spanned(lo, hi, ast::lit_str(@text)); let attr = { style: doc_comment_style(text), - value: spanned(lo, hi, ast::meta_name_value(@~"doc", lit)), + value: spanned(lo, hi, ast::meta_name_value(~"doc", lit)), is_sugared_doc: true }; return spanned(lo, hi, attr); @@ -98,8 +98,8 @@ fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] { fn desugar_doc_attr(attr: ast::attribute) -> ast::attribute { if attr.node.is_sugared_doc { let comment = get_meta_item_value_str(@attr.node.value).get(); - let meta = mk_name_value_item_str(@~"doc", - strip_doc_comment_decoration(*comment)); + let meta = mk_name_value_item_str(~"doc", + strip_doc_comment_decoration(comment)); return mk_attr(meta); } else { attr @@ -108,16 +108,15 @@ fn desugar_doc_attr(attr: ast::attribute) -> ast::attribute { /* Accessors */ -fn get_attr_name(attr: ast::attribute) -> ast::ident { +fn get_attr_name(attr: ast::attribute) -> ~str { get_meta_item_name(@attr.node.value) } -// All "bad" FIXME copies are as per #2543 -fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident { +fn get_meta_item_name(meta: @ast::meta_item) -> ~str { match meta.node { - ast::meta_word(n) => /* FIXME (#2543) */ copy n, - ast::meta_name_value(n, _) => /* FIXME (#2543) */ copy n, - ast::meta_list(n, _) => /* FIXME (#2543) */ copy n + ast::meta_word(n) => n, + ast::meta_name_value(n, _) => n, + ast::meta_list(n, _) => n } } @@ -125,13 +124,13 @@ fn get_meta_item_name(meta: @ast::meta_item) -> ast::ident { * Gets the string value if the meta_item is a meta_name_value variant * containing a string, otherwise none */ -fn get_meta_item_value_str(meta: @ast::meta_item) -> option<@~str> { +fn get_meta_item_value_str(meta: @ast::meta_item) -> option<~str> { match meta.node { - ast::meta_name_value(_, v) => match v.node { - ast::lit_str(s) => option::some(s), + ast::meta_name_value(_, v) => match v.node { + ast::lit_str(s) => option::some(*s), + _ => option::none + }, _ => option::none - }, - _ => option::none } } @@ -147,9 +146,7 @@ fn get_meta_item_list(meta: @ast::meta_item) -> option<~[@ast::meta_item]> { * If the meta item is a nam-value type with a string value then returns * a tuple containing the name and string value, otherwise `none` */ -fn get_name_value_str_pair( - item: @ast::meta_item -) -> option<(ast::ident, @~str)> { +fn get_name_value_str_pair(item: @ast::meta_item) -> option<(~str, ~str)> { match attr::get_meta_item_value_str(item) { some(value) => { let name = attr::get_meta_item_name(item); @@ -163,11 +160,11 @@ fn get_name_value_str_pair( /* Searching */ /// Search a list of attributes and return only those with a specific name -fn find_attrs_by_name(attrs: ~[ast::attribute], +name: ~str) -> +fn find_attrs_by_name(attrs: ~[ast::attribute], name: ~str) -> ~[ast::attribute] { let filter = ( fn@(a: ast::attribute) -> 
option<ast::attribute> { - if *get_attr_name(a) == name { + if get_attr_name(a) == name { option::some(a) } else { option::none } } @@ -176,10 +173,10 @@ fn find_attrs_by_name(attrs: ~[ast::attribute], +name: ~str) -> } /// Searcha list of meta items and return only those with a specific name -fn find_meta_items_by_name(metas: ~[@ast::meta_item], +name: ~str) -> +fn find_meta_items_by_name(metas: ~[@ast::meta_item], name: ~str) -> ~[@ast::meta_item] { let filter = fn@(&&m: @ast::meta_item) -> option<@ast::meta_item> { - if *get_meta_item_name(m) == name { + if get_meta_item_name(m) == name { option::some(m) } else { option::none } }; @@ -191,14 +188,9 @@ fn find_meta_items_by_name(metas: ~[@ast::meta_item], +name: ~str) -> * comparison is performed structurally. */ fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item) -> bool { - debug!{"looking for %s", - print::pprust::meta_item_to_str(*needle)}; for haystack.each |item| { - debug!{"looking in %s", - print::pprust::meta_item_to_str(*item)}; - if eq(item, needle) { debug!{"found it!"}; return true; } + if eq(item, needle) { return true; } } - #debug("found it not :("); return false; } @@ -223,17 +215,18 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool { } } -fn contains_name(metas: ~[@ast::meta_item], +name: ~str) -> bool { +fn contains_name(metas: ~[@ast::meta_item], name: ~str) -> bool { let matches = find_meta_items_by_name(metas, name); return vec::len(matches) > 0u; } -fn attrs_contains_name(attrs: ~[ast::attribute], +name: ~str) -> bool { +fn attrs_contains_name(attrs: ~[ast::attribute], name: ~str) -> bool { vec::is_not_empty(find_attrs_by_name(attrs, name)) } -fn first_attr_value_str_by_name(attrs: ~[ast::attribute], +name: ~str) - -> option<@~str> { +fn first_attr_value_str_by_name(attrs: ~[ast::attribute], name: ~str) + -> option<~str> { + let mattrs = find_attrs_by_name(attrs, name); if vec::len(mattrs) > 0u { return get_meta_item_value_str(attr_meta(mattrs[0])); @@ -241,18 +234,16 @@ fn first_attr_value_str_by_name(attrs: ~[ast::attribute], +name: ~str) return option::none; } -fn last_meta_item_by_name( - items: ~[@ast::meta_item], - +name: ~str -) -> option<@ast::meta_item> { +fn last_meta_item_by_name(items: ~[@ast::meta_item], name: ~str) + -> option<@ast::meta_item> { + let items = attr::find_meta_items_by_name(items, name); vec::last_opt(items) } -fn last_meta_item_value_str_by_name( - items: ~[@ast::meta_item], - +name: ~str -) -> option<@~str> { +fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: ~str) + -> option<~str> { + match last_meta_item_by_name(items, name) { some(item) => match attr::get_meta_item_value_str(item) { some(value) => some(value), @@ -262,10 +253,9 @@ fn last_meta_item_value_str_by_name( } } -fn last_meta_item_list_by_name( - items: ~[@ast::meta_item], - +name: ~str -) -> option<~[@ast::meta_item]> { +fn last_meta_item_list_by_name(items: ~[@ast::meta_item], name: ~str) + -> option<~[@ast::meta_item]> { + match last_meta_item_by_name(items, name) { some(item) => attr::get_meta_item_list(item), none => none @@ -279,11 +269,11 @@ fn last_meta_item_list_by_name( // the item name (See [Fixme-sorting]) fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] { pure fn lteq(ma: &@ast::meta_item, mb: &@ast::meta_item) -> bool { - pure fn key(m: &ast::meta_item) -> ast::ident { + pure fn key(m: &ast::meta_item) -> ~str { match m.node { - ast::meta_word(name) => /* FIXME (#2543) */ copy name, - ast::meta_name_value(name, _) => /* FIXME (#2543) */ copy name, 
- ast::meta_list(name, _) => /* FIXME (#2543) */ copy name + ast::meta_word(name) => name, + ast::meta_name_value(name, _) => name, + ast::meta_list(name, _) => name } } key(*ma) <= key(*mb) @@ -295,7 +285,7 @@ fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] { return vec::from_mut(v); } -fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ast::ident) -> +fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ~str) -> ~[@ast::meta_item] { return vec::filter_map(items, |item| { @@ -335,17 +325,17 @@ fn foreign_abi(attrs: ~[ast::attribute]) -> Either<~str, ast::foreign_abi> { option::none => { either::Right(ast::foreign_abi_cdecl) } - option::some(@~"rust-intrinsic") => { + option::some(~"rust-intrinsic") => { either::Right(ast::foreign_abi_rust_intrinsic) } - option::some(@~"cdecl") => { + option::some(~"cdecl") => { either::Right(ast::foreign_abi_cdecl) } - option::some(@~"stdcall") => { + option::some(~"stdcall") => { either::Right(ast::foreign_abi_stdcall) } option::some(t) => { - either::Left(~"unsupported abi: " + *t) + either::Left(~"unsupported abi: " + t) } }; } @@ -362,8 +352,8 @@ fn find_inline_attr(attrs: ~[ast::attribute]) -> inline_attr { // FIXME (#2809)---validate the usage of #[inline] and #[inline(always)] do vec::foldl(ia_none, attrs) |ia,attr| { match attr.node.value.node { - ast::meta_word(@~"inline") => ia_hint, - ast::meta_list(@~"inline", items) => { + ast::meta_word(~"inline") => ia_hint, + ast::meta_list(~"inline", items) => { if !vec::is_empty(find_meta_items_by_name(items, ~"always")) { ia_always } else if !vec::is_empty( @@ -386,11 +376,11 @@ fn require_unique_names(diagnostic: span_handler, let name = get_meta_item_name(meta); // FIXME: How do I silence the warnings? --pcw (#2619) - if map.contains_key(*name) { + if map.contains_key(name) { diagnostic.span_fatal(meta.span, - fmt!{"duplicate meta item `%s`", *name}); + fmt!{"duplicate meta item `%s`", name}); } - map.insert(*name, ()); + map.insert(name, ()); } } diff --git a/src/libsyntax/ext/auto_serialize.rs b/src/libsyntax/ext/auto_serialize.rs index 1fc77bae8bf..e5b65b41fc8 100644 --- a/src/libsyntax/ext/auto_serialize.rs +++ b/src/libsyntax/ext/auto_serialize.rs @@ -84,15 +84,15 @@ mod syntax { export parse; } -type ser_tps_map = map::hashmap<~str, fn@(@ast::expr) -> ~[@ast::stmt]>; -type deser_tps_map = map::hashmap<~str, fn@() -> @ast::expr>; +type ser_tps_map = map::hashmap<ast::ident, fn@(@ast::expr) -> ~[@ast::stmt]>; +type deser_tps_map = map::hashmap<ast::ident, fn@() -> @ast::expr>; fn expand(cx: ext_ctxt, span: span, _mitem: ast::meta_item, in_items: ~[@ast::item]) -> ~[@ast::item] { fn not_auto_serialize(a: ast::attribute) -> bool { - attr::get_attr_name(a) != @~"auto_serialize" + attr::get_attr_name(a) != ~"auto_serialize" } fn filter_attrs(item: @ast::item) -> @ast::item { @@ -156,7 +156,11 @@ impl ext_ctxt: ext_ctxt_helpers { let head = vec::init(base_path.idents); let tail = vec::last(base_path.idents); self.path(base_path.span, - vec::append(head, ~[@(helper_name + ~"_" + *tail)])) + vec::append(head, + ~[self.parse_sess().interner. 
+ intern(@(helper_name + ~"_" + + *self.parse_sess().interner.get( + tail)))])) } fn path(span: span, strs: ~[ast::ident]) -> @ast::path { @@ -181,7 +185,7 @@ impl ext_ctxt: ext_ctxt_helpers { let args = do vec::map(input_tys) |ty| { {mode: ast::expl(ast::by_ref), ty: ty, - ident: @~"", + ident: parse::token::special_idents::invalid, id: self.next_id()} }; @@ -352,7 +356,7 @@ fn ser_variant(cx: ext_ctxt, argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr) -> ast::arm { let vnames = do vec::from_fn(vec::len(tys)) |i| { - @fmt!{"__v%u", i} + cx.parse_sess().interner.intern(@fmt!{"__v%u", i}) }; let pats = do vec::from_fn(vec::len(tys)) |i| { cx.binder_pat(tys[i].span, vnames[i]) @@ -384,7 +388,7 @@ fn is_vec_or_str(ty: @ast::ty) -> bool { // This may be wrong if the user has shadowed (!) str ast::ty_path(@{span: _, global: _, idents: ids, rp: none, types: _}, _) - if ids == ~[@~"str"] => true, + if ids == ~[parse::token::special_idents::str] => true, _ => false } } @@ -434,7 +438,8 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, fld.node.ident, ~[])); let s = cx.clone(s); - let f = cx.lit_str(fld.span, fld.node.ident); + let f = cx.lit_str(fld.span, cx.parse_sess().interner.get( + fld.node.ident)); let i = cx.lit_uint(fld.span, fidx); let l = ser_lambda(cx, tps, fld.node.mt.ty, cx.clone(s), vf); #ast[stmt]{$(s).emit_rec_field($(f), $(i), $(l));} @@ -487,7 +492,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map, vec::is_empty(path.types) { let ident = path.idents[0]; - match tps.find(*ident) { + match tps.find(ident) { some(f) => f(v), none => ser_path(cx, tps, path, s, v) } @@ -545,7 +550,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, ty: cx.ty_fn(span, ~[cx.ty_path(span, ~[tp.ident], ~[])], cx.ty_nil(span)), - ident: @(~"__s" + *tp.ident), + ident: cx.ident_of(~"__s" + cx.str_of(tp.ident)), id: cx.next_id()}); debug!{"tp_inputs = %?", tp_inputs}; @@ -553,35 +558,36 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, let ser_inputs: ~[ast::arg] = vec::append(~[{mode: ast::expl(ast::by_ref), - ty: cx.ty_path(span, ~[@~"__S"], ~[]), - ident: @~"__s", + ty: cx.ty_path(span, ~[cx.ident_of(~"__S")], ~[]), + ident: cx.ident_of(~"__s"), id: cx.next_id()}, {mode: ast::expl(ast::by_ref), ty: v_ty, - ident: @~"__v", + ident: cx.ident_of(~"__v"), id: cx.next_id()}], tp_inputs); - let tps_map = map::str_hash(); + let tps_map = map::uint_hash(); do vec::iter2(tps, tp_inputs) |tp, arg| { let arg_ident = arg.ident; tps_map.insert( - *tp.ident, + tp.ident, fn@(v: @ast::expr) -> ~[@ast::stmt] { let f = cx.var_ref(span, arg_ident); - debug!{"serializing type arg %s", *arg_ident}; + debug!{"serializing type arg %s", cx.str_of(arg_ident)}; ~[#ast[stmt]{$(f)($(v));}] }); } let ser_bnds = @~[ ast::bound_trait(cx.ty_path(span, - ~[@~"std", @~"serialization", - @~"serializer"], + ~[cx.ident_of(~"std"), + cx.ident_of(~"serialization"), + cx.ident_of(~"serializer")], ~[]))]; let ser_tps: ~[ast::ty_param] = - vec::append(~[{ident: @~"__S", + vec::append(~[{ident: cx.ident_of(~"__S"), id: cx.next_id(), bounds: ser_bnds}], vec::map(tps, |tp| cx.clone_ty_param(tp))); @@ -593,7 +599,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident, let ser_blk = cx.blk(span, f(cx, tps_map, #ast{ __s }, #ast{ __v })); - @{ident: @(~"serialize_" + *name), + @{ident: cx.ident_of(~"serialize_" + cx.str_of(name)), attrs: ~[], id: cx.next_id(), node: ast::item_fn({inputs: ser_inputs, @@ -670,7 +676,7 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map, let fields = do vec::from_fn(vec::len(flds)) |fidx| { let fld = 
flds[fidx]; let d = cx.clone(d); - let f = cx.lit_str(fld.span, fld.node.ident); + let f = cx.lit_str(fld.span, @cx.str_of(fld.node.ident)); let i = cx.lit_uint(fld.span, fidx); let l = deser_lambda(cx, tps, fld.node.mt.ty, cx.clone(d)); {node: {mutbl: fld.node.mt.mutbl, @@ -713,7 +719,7 @@ fn deser_ty(cx: ext_ctxt, tps: deser_tps_map, vec::is_empty(path.types) { let ident = path.idents[0]; - match tps.find(*ident) { + match tps.find(ident) { some(f) => f(), none => deser_path(cx, tps, path, d) } @@ -756,23 +762,23 @@ fn mk_deser_fn(cx: ext_ctxt, span: span, ty: cx.ty_fn(span, ~[], cx.ty_path(span, ~[tp.ident], ~[])), - ident: @(~"__d" + *tp.ident), + ident: cx.ident_of(~"__d" + cx.str_of(tp.ident)), id: cx.next_id()}); debug!{"tp_inputs = %?", tp_inputs}; let deser_inputs: ~[ast::arg] = vec::append(~[{mode: ast::expl(ast::by_ref), - ty: cx.ty_path(span, ~[@~"__D"], ~[]), - ident: @~"__d", + ty: cx.ty_path(span, ~[cx.ident_of(~"__D")], ~[]), + ident: cx.ident_of(~"__d"), id: cx.next_id()}], tp_inputs); - let tps_map = map::str_hash(); + let tps_map = map::uint_hash(); do vec::iter2(tps, tp_inputs) |tp, arg| { let arg_ident = arg.ident; tps_map.insert( - *tp.ident, + tp.ident, fn@() -> @ast::expr { let f = cx.var_ref(span, arg_ident); #ast{ $(f)() } @@ -782,11 +788,12 @@ fn mk_deser_fn(cx: ext_ctxt, span: span, let deser_bnds = @~[ ast::bound_trait(cx.ty_path( span, - ~[@~"std", @~"serialization", @~"deserializer"], + ~[cx.ident_of(~"std"), cx.ident_of(~"serialization"), + cx.ident_of(~"deserializer")], ~[]))]; let deser_tps: ~[ast::ty_param] = - vec::append(~[{ident: @~"__D", + vec::append(~[{ident: cx.ident_of(~"__D"), id: cx.next_id(), bounds: deser_bnds}], vec::map(tps, |tp| { @@ -798,7 +805,7 @@ fn mk_deser_fn(cx: ext_ctxt, span: span, let deser_blk = cx.expr_blk(f(cx, tps_map, #ast[expr]{__d})); - @{ident: @(~"deserialize_" + *name), + @{ident: cx.ident_of(~"deserialize_" + cx.str_of(name)), attrs: ~[], id: cx.next_id(), node: ast::item_fn({inputs: deser_inputs, @@ -853,7 +860,7 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident, // Generate body s.emit_enum_variant("foo", 0u, // 3u, {|| blk }) |-s, blk| { - let v_name = cx.lit_str(v_span, v_name); + let v_name = cx.lit_str(v_span, @cx.str_of(v_name)); let v_id = cx.lit_uint(v_span, vidx); let sz = cx.lit_uint(v_span, vec::len(variant_tys)); let body = cx.lambda(blk); @@ -877,7 +884,7 @@ fn ser_enum(cx: ext_ctxt, tps: ser_tps_map, e_name: ast::ident, } }; let lam = cx.lambda(cx.blk(e_span, ~[cx.alt_stmt(arms, e_span, v)])); - let e_name = cx.lit_str(e_span, e_name); + let e_name = cx.lit_str(e_span, @cx.str_of(e_name)); ~[#ast[stmt]{ $(s).emit_enum($(e_name), $(lam)) }] } @@ -935,7 +942,7 @@ fn deser_enum(cx: ext_ctxt, tps: deser_tps_map, e_name: ast::ident, arms += ~[impossible_case]; // Generate code like: - let e_name = cx.lit_str(e_span, e_name); + let e_name = cx.lit_str(e_span, @cx.str_of(e_name)); let alt_expr = cx.expr(e_span, ast::expr_match(#ast{__i}, arms, ast::alt_exhaustive)); let var_lambda = #ast{ |__i| $(alt_expr) }; diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index b8c036f5724..bb8a845d731 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -23,7 +23,7 @@ type syntax_expander_ = // second argument is the origin of the macro, if user-defined type syntax_expander = {expander: syntax_expander_, span: option<span>}; -type macro_def = {ident: ast::ident, ext: syntax_extension}; +type macro_def = {name: ~str, ext: syntax_extension}; // macro_definer is obsolete, 
remove when #old_macros go away. type macro_definer = @@ -141,6 +141,9 @@ trait ext_ctxt { fn next_id() -> ast::node_id; pure fn trace_macros() -> bool; fn set_trace_macros(x: bool); + /* for unhygienic identifier transformation */ + fn str_of(id: ast::ident) -> ~str; + fn ident_of(st: ~str) -> ast::ident; } fn mk_ctxt(parse_sess: parse::parse_sess, @@ -211,6 +214,13 @@ fn mk_ctxt(parse_sess: parse::parse_sess, fn set_trace_macros(x: bool) { self.trace_mac = x } + + fn str_of(id: ast::ident) -> ~str { + *self.parse_sess.interner.get(id) + } + fn ident_of(st: ~str) -> ast::ident { + self.parse_sess.interner.intern(@st) + } } let imp : ctxt_repr = { parse_sess: parse_sess, @@ -264,12 +274,12 @@ fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg, cx.span_fatal(sp, fmt!{"#%s needs at least %u arguments.", name, min}); } - _ => return elts /* we're good */ + _ => return elts /* we are good */ } } _ => { cx.span_fatal(sp, fmt!{"#%s: malformed invocation", name}) - } + } }, none => cx.span_fatal(sp, fmt!{"#%s: missing arguments", name}) } @@ -298,22 +308,24 @@ fn tt_args_to_original_flavor(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree]) fn ms(m: matcher_) -> matcher { {node: m, span: {lo: 0u, hi: 0u, expn_info: none}} } + let arg_nm = cx.parse_sess().interner.gensym(@~"arg"); let argument_gram = ~[ms(match_seq(~[ - ms(match_nonterminal(@~"arg",@~"expr", 0u)) + ms(match_nonterminal(arg_nm, parse::token::special_idents::expr, 0u)) ], some(parse::token::COMMA), true, 0u, 1u))]; let arg_reader = new_tt_reader(cx.parse_sess().span_diagnostic, cx.parse_sess().interner, none, arg); let args = match parse_or_else(cx.parse_sess(), cx.cfg(), arg_reader as reader, - argument_gram).get(@~"arg") { - @matched_seq(s, _) => do s.map() |lf| { - match lf { - @matched_nonterminal(parse::token::nt_expr(arg)) => { - arg /* whew! list of exprs, here we come! */ - } - _ => fail ~"badly-structured parse result" + argument_gram).get(arg_nm) { + @matched_seq(s, _) => { + do s.map() |lf| { + match lf { + @matched_nonterminal(parse::token::nt_expr(arg)) => + arg, /* whew! list of exprs, here we come! 
*/ + _ => fail ~"badly-structured parse result" + } } }, _ => fail ~"badly-structured parse result" diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index f5d22e6754c..b53a0439e59 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -3,14 +3,16 @@ import base::*; fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { let args = get_mac_args_no_max(cx,sp,arg,1u,~"concat_idents"); - let mut res = ~""; + let mut res_str = ~""; for args.each |e| { - res += *expr_to_ident(cx, e, ~"expected an ident"); + res_str += *cx.parse_sess().interner.get( + expr_to_ident(cx, e, ~"expected an ident")); } + let res = cx.parse_sess().interner.intern(@res_str); return @{id: cx.next_id(), callee_id: cx.next_id(), - node: ast::expr_path(@{span: sp, global: false, idents: ~[@res], + node: ast::expr_path(@{span: sp, global: false, idents: ~[res], rp: none, types: ~[]}), span: sp}; } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 9b50101683a..fb083744321 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -25,7 +25,9 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt, match mac.node { mac_invoc(pth, args, body) => { assert (vec::len(pth.idents) > 0u); - let extname = pth.idents[0]; + /* using idents and token::special_idents would make the + the macro names be hygienic */ + let extname = cx.parse_sess().interner.get(pth.idents[0]); match exts.find(*extname) { none => { cx.span_fatal(pth.span, @@ -49,7 +51,7 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt, } some(macro_defining(ext)) => { let named_extension = ext(cx, mac.span, args, body); - exts.insert(*named_extension.ident, named_extension.ext); + exts.insert(named_extension.name, named_extension.ext); (ast::expr_rec(~[], none), s) } some(expr_tt(_)) => { @@ -68,7 +70,9 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt, // finished transitioning. 
mac_invoc_tt(pth, tts) => { assert (vec::len(pth.idents) == 1u); - let extname = pth.idents[0]; + /* using idents and token::special_idents would make the + the macro names be hygienic */ + let extname = cx.parse_sess().interner.get(pth.idents[0]); match exts.find(*extname) { none => { cx.span_fatal(pth.span, @@ -146,7 +150,7 @@ fn expand_mod_items(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt, ast::meta_name_value(n, _) => n, ast::meta_list(n, _) => n }; - match exts.find(*mname) { + match exts.find(mname) { none | some(normal(_)) | some(macro_defining(_)) | some(expr_tt(_)) | some(item_tt(*)) => items, some(item_decorator(dec_fn)) => { @@ -194,7 +198,7 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>, fld: ast_fold) -> option<@ast::item> { match it.node { item_mac({node: mac_invoc_tt(pth, tts), span}) => { - let extname = pth.idents[0]; + let extname = cx.parse_sess().interner.get(pth.idents[0]); match exts.find(*extname) { none => { cx.span_fatal(pth.span, @@ -211,7 +215,7 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>, ~"expr macro in item position: " + *extname), mr_def(mdef) => { - exts.insert(*mdef.ident, mdef.ext); + exts.insert(mdef.name, mdef.ext); none } }; diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index f8966196749..9392ea5ad3c 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -37,12 +37,13 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg, fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: ~[piece], args: ~[@ast::expr]) -> @ast::expr { - fn make_path_vec(_cx: ext_ctxt, ident: ast::ident) -> ~[ast::ident] { - return ~[@~"extfmt", @~"rt", ident]; + fn make_path_vec(_cx: ext_ctxt, ident: @~str) -> ~[ast::ident] { + let intr = _cx.parse_sess().interner; + return ~[intr.intern(@~"extfmt"), intr.intern(@~"rt"), + intr.intern(ident)]; } - fn make_rt_path_expr(cx: ext_ctxt, sp: span, - ident: ast::ident) -> @ast::expr { - let path = make_path_vec(cx, ident); + fn make_rt_path_expr(cx: ext_ctxt, sp: span, nm: @~str) -> @ast::expr { + let path = make_path_vec(cx, nm); return mk_path(cx, sp, path); } // Produces an AST expression that represents a RT::conv record, @@ -94,11 +95,13 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, fn make_conv_rec(cx: ext_ctxt, sp: span, flags_expr: @ast::expr, width_expr: @ast::expr, precision_expr: @ast::expr, ty_expr: @ast::expr) -> @ast::expr { + let intr = cx.parse_sess().interner; return mk_rec_e(cx, sp, - ~[{ident: @~"flags", ex: flags_expr}, - {ident: @~"width", ex: width_expr}, - {ident: @~"precision", ex: precision_expr}, - {ident: @~"ty", ex: ty_expr}]); + ~[{ident: intr.intern(@~"flags"), ex: flags_expr}, + {ident: intr.intern(@~"width"), ex: width_expr}, + {ident: intr.intern(@~"precision"), + ex: precision_expr}, + {ident: intr.intern(@~"ty"), ex: ty_expr}]); } let rt_conv_flags = make_flags(cx, sp, cnv.flags); let rt_conv_width = make_count(cx, sp, cnv.width); @@ -268,7 +271,10 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, } let arg_vec = mk_fixed_vec_e(cx, fmt_sp, piece_exprs); - return mk_call(cx, fmt_sp, ~[@~"str", @~"concat"], ~[arg_vec]); + return mk_call(cx, fmt_sp, + ~[cx.parse_sess().interner.intern(@~"str"), + cx.parse_sess().interner.intern(@~"concat")], + ~[arg_vec]); } // // Local Variables: diff --git a/src/libsyntax/ext/ident_to_str.rs b/src/libsyntax/ext/ident_to_str.rs index 06faff9ee1b..9daaf164562 100644 --- a/src/libsyntax/ext/ident_to_str.rs +++ b/src/libsyntax/ext/ident_to_str.rs @@ -6,6 +6,6 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: 
codemap::span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { let args = get_mac_args(cx,sp,arg,1u,option::some(1u),~"ident_to_str"); - return mk_uniq_str(cx, sp, *expr_to_ident(cx, args[0u], - ~"expected an ident")); + return mk_uniq_str(cx, sp, *cx.parse_sess().interner.get( + expr_to_ident(cx, args[0u], ~"expected an ident"))); } diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index 4ab3bb29045..61bb00fd6de 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -6,8 +6,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, tt: ~[ast::token_tree]) cx.print_backtrace(); io::stdout().write_line( - print::pprust::unexpanded_tt_to_str(ast::tt_delim(tt), - cx.parse_sess().interner)); + print::pprust::tt_to_str(ast::tt_delim(tt),cx.parse_sess().interner)); //trivial expression return mr_expr(@{id: cx.next_id(), callee_id: cx.next_id(), diff --git a/src/libsyntax/ext/pipes.rs b/src/libsyntax/ext/pipes.rs index b9ff13c9578..beda46a2c3b 100644 --- a/src/libsyntax/ext/pipes.rs +++ b/src/libsyntax/ext/pipes.rs @@ -54,7 +54,7 @@ fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident, let rdr = tt_rdr as reader; let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); - let proto = rust_parser.parse_proto(id); + let proto = rust_parser.parse_proto(cx.str_of(id)); // check for errors visit(proto, cx); diff --git a/src/libsyntax/ext/pipes/ast_builder.rs b/src/libsyntax/ext/pipes/ast_builder.rs index 39132fd0d59..45873f81dea 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -15,14 +15,10 @@ mod syntax { export parse; } -fn ident(s: &str) -> ast::ident { - @(s.to_unique()) -} - -fn path(id: ident, span: span) -> @ast::path { +fn path(ids: ~[ident], span: span) -> @ast::path { @{span: span, global: false, - idents: ~[id], + idents: ids, rp: none, types: ~[]} } @@ -94,7 +90,8 @@ trait ext_ctxt_ast_builder { impl ext_ctxt: ext_ctxt_ast_builder { fn ty_option(ty: @ast::ty) -> @ast::ty { - self.ty_path_ast_builder(path(@~"option", self.empty_span()) + self.ty_path_ast_builder(path(~[self.ident_of(~"option")], + self.empty_span()) .add_ty(ty)) } @@ -126,7 +123,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { ty: self.ty_infer(), pat: @{id: self.next_id(), node: ast::pat_ident(ast::bind_by_implicit_ref, - path(ident, + path(~[ident], self.empty_span()), none), span: self.empty_span()}, @@ -301,6 +298,6 @@ impl ext_ctxt: ext_ctxt_ast_builder { fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty] { ty_params.map(|p| self.ty_path_ast_builder( - path(p.ident, self.empty_span()))) + path(~[p.ident], self.empty_span()))) } } diff --git a/src/libsyntax/ext/pipes/check.rs b/src/libsyntax/ext/pipes/check.rs index e286b4f76be..b7ad5c21bd9 100644 --- a/src/libsyntax/ext/pipes/check.rs +++ b/src/libsyntax/ext/pipes/check.rs @@ -21,8 +21,6 @@ that. import ext::base::ext_ctxt; -import ast::{ident}; - import proto::{state, protocol, next_state}; import ast_builder::empty_span; @@ -36,11 +34,11 @@ impl ext_ctxt: proto::visitor<(), (), ()> { state.span, // use a real span! 
fmt!{"state %s contains no messages, \ consider stepping to a terminal state instead", - *state.name}) + state.name}) } } - fn visit_message(name: ident, _span: span, _tys: &[@ast::ty], + fn visit_message(name: ~str, _span: span, _tys: &[@ast::ty], this: state, next: next_state) { match next { some({state: next, tys: next_tys}) => { @@ -51,7 +49,7 @@ impl ext_ctxt: proto::visitor<(), (), ()> { self.span_err( proto.get_state(next).span, fmt!{"message %s steps to undefined state, %s", - *name, *next}); + name, next}); } else { let next = proto.get_state(next); @@ -61,7 +59,7 @@ impl ext_ctxt: proto::visitor<(), (), ()> { next.span, // use a real span fmt!{"message %s target (%s) \ needs %u type parameters, but got %u", - *name, *next.name, + name, next.name, next.ty_params.len(), next_tys.len()}); } diff --git a/src/libsyntax/ext/pipes/liveness.rs b/src/libsyntax/ext/pipes/liveness.rs index 17e569552a7..2325e4ed27d 100644 --- a/src/libsyntax/ext/pipes/liveness.rs +++ b/src/libsyntax/ext/pipes/liveness.rs @@ -70,10 +70,10 @@ fn analyze(proto: protocol, _cx: ext_ctxt) { } if self_live.len() > 0 { - let states = str::connect(self_live.map(|s| *s.name), ~" "); + let states = str::connect(self_live.map(|s| s.name), ~" "); debug!{"protocol %s is unbounded due to loops involving: %s", - *proto.name, states}; + proto.name, states}; // Someday this will be configurable with a warning //cx.span_warn(empty_span(), @@ -85,7 +85,7 @@ fn analyze(proto: protocol, _cx: ext_ctxt) { proto.bounded = some(false); } else { - debug!{"protocol %s is bounded. yay!", *proto.name}; + debug!{"protocol %s is bounded. yay!", proto.name}; proto.bounded = some(true); } } \ No newline at end of file diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index 84180ff3797..4dc61e54aa4 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -1,18 +1,17 @@ // Parsing pipes protocols from token trees. 
import parse::parser; -import ast::ident; import parse::token; import pipec::*; trait proto_parser { - fn parse_proto(id: ident) -> protocol; + fn parse_proto(id: ~str) -> protocol; fn parse_state(proto: protocol); } impl parser: proto_parser { - fn parse_proto(id: ident) -> protocol { + fn parse_proto(id: ~str) -> protocol { let proto = protocol(id, self.span); self.parse_seq_to_before_end(token::EOF, @@ -24,9 +23,11 @@ impl parser: proto_parser { fn parse_state(proto: protocol) { let id = self.parse_ident(); + let name = *self.interner.get(id); + self.expect(token::COLON); let dir = match copy self.token { - token::IDENT(n, _) => self.get_str(n), + token::IDENT(n, _) => self.interner.get(n), _ => fail }; self.bump(); @@ -41,7 +42,7 @@ impl parser: proto_parser { } else { ~[] }; - let state = proto.add_state_poly(id, dir, typarms); + let state = proto.add_state_poly(name, id, dir, typarms); // parse the messages self.parse_unspanned_seq( @@ -51,7 +52,7 @@ impl parser: proto_parser { } fn parse_message(state: state) { - let mname = self.parse_ident(); + let mname = *self.interner.get(self.parse_ident()); let args = if self.token == token::LPAREN { self.parse_unspanned_seq(token::LPAREN, @@ -66,7 +67,7 @@ impl parser: proto_parser { let next = match copy self.token { token::IDENT(_, _) => { - let name = self.parse_ident(); + let name = *self.interner.get(self.parse_ident()); let ntys = if self.token == token::LT { self.parse_unspanned_seq(token::LT, token::GT, diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index f7f2981f066..b5a1ae588a7 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -13,8 +13,7 @@ import parse; import parse::*; import proto::*; -import ast_builder::append_types; -import ast_builder::path; +import ast_builder::{append_types, path, empty_span}; // Transitional reexports so qquote can find the paths it is looking for mod syntax { @@ -47,17 +46,17 @@ impl message: gen_send { debug!("pipec: next state exists"); let next = this.proto.get_state(next); assert next_tys.len() == next.ty_params.len(); - let arg_names = tys.mapi(|i, _ty| @(~"x_" + i.to_str())); + let arg_names = tys.mapi(|i, _ty| cx.ident_of(~"x_"+i.to_str())); let args_ast = (arg_names, tys).map( |n, t| cx.arg_mode(n, t, ast::by_copy) ); let pipe_ty = cx.ty_path_ast_builder( - path(this.data_name(), span) + path(~[this.data_name()], span) .add_tys(cx.ty_vars(this.ty_params))); let args_ast = vec::append( - ~[cx.arg_mode(@~"pipe", + ~[cx.arg_mode(cx.ident_of(~"pipe"), pipe_ty, ast::by_copy)], args_ast); @@ -75,10 +74,10 @@ impl message: gen_send { body += ~"let b = pipe.reuse_buffer();\n"; body += fmt!("let %s = pipes::send_packet_buffered(\ ptr::addr_of(b.buffer.data.%s));\n", - sp, *next.name); - body += fmt!("let %s = pipes::recv_packet_buffered(\ + sp, next.name); + body += fmt!{"let %s = pipes::recv_packet_buffered(\ ptr::addr_of(b.buffer.data.%s));\n", - rp, *next.name); + rp, next.name}; } else { let pat = match (this.dir, next.dir) { @@ -91,10 +90,10 @@ impl message: gen_send { body += fmt!("let %s = pipes::entangle();\n", pat); } body += fmt!("let message = %s::%s(%s);\n", - *this.proto.name, - *self.name(), - str::connect(vec::append_one(arg_names, @~"s") - .map(|x| *x), + this.proto.name, + self.name(), + str::connect(vec::append_one( + arg_names.map(|x| cx.str_of(x)), ~"s"), ~", ")); if !try { @@ -110,17 +109,15 @@ impl message: gen_send { let body = cx.parse_expr(body); - let mut rty = cx.ty_path_ast_builder(path(next.data_name(), + 
let mut rty = cx.ty_path_ast_builder(path(~[next.data_name()], span) .add_tys(next_tys)); if try { rty = cx.ty_option(rty); } - let name = if try { - @(~"try_" + *self.name()) - } - else { self.name() }; + let name = cx.ident_of(if try { ~"try_" + self.name() + } else { self.name() } ); cx.item_fn_poly(name, args_ast, @@ -131,16 +128,16 @@ impl message: gen_send { message(id, span, tys, this, none) => { debug!{"pipec: no next state"}; - let arg_names = tys.mapi(|i, _ty| @(~"x_" + i.to_str())); + let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str())); let args_ast = (arg_names, tys).map( - |n, t| cx.arg_mode(n, t, ast::by_copy) + |n, t| cx.arg_mode(cx.ident_of(n), t, ast::by_copy) ); let args_ast = vec::append( - ~[cx.arg_mode(@~"pipe", + ~[cx.arg_mode(cx.ident_of(~"pipe"), cx.ty_path_ast_builder( - path(this.data_name(), span) + path(~[this.data_name()], span) .add_tys(cx.ty_vars(this.ty_params))), ast::by_copy)], args_ast); @@ -149,13 +146,13 @@ impl message: gen_send { ~"" } else { - ~"(" + str::connect(arg_names.map(|x| *x), ~", ") + ~")" + ~"(" + str::connect(arg_names, ~", ") + ~")" }; let mut body = ~"{ "; body += fmt!{"let message = %s::%s%s;\n", - *this.proto.name, - *self.name(), + this.proto.name, + self.name(), message_args}; if !try { @@ -170,11 +167,11 @@ impl message: gen_send { let body = cx.parse_expr(body); let name = if try { - @(~"try_" + *self.name()) + ~"try_" + self.name() } else { self.name() }; - cx.item_fn_poly(name, + cx.item_fn_poly(cx.ident_of(name), args_ast, if try { cx.ty_option(cx.ty_nil_ast_builder()) @@ -188,7 +185,7 @@ impl message: gen_send { } fn to_ty(cx: ext_ctxt) -> @ast::ty { - cx.ty_path_ast_builder(path(self.name(), self.span()) + cx.ty_path_ast_builder(path(~[cx.ident_of(self.name())], self.span()) .add_tys(cx.ty_vars(self.get_params()))) } } @@ -212,21 +209,23 @@ impl state: to_type_decls { let tys = match next { some({state: next, tys: next_tys}) => { let next = this.proto.get_state(next); - let next_name = next.data_name(); + let next_name = cx.str_of(next.data_name()); let dir = match this.dir { - send => @~"server", - recv => @~"client" + send => ~"server", + recv => ~"client" }; vec::append_one(tys, - cx.ty_path_ast_builder((dir + next_name) - .add_tys(next_tys))) + cx.ty_path_ast_builder( + path(~[cx.ident_of(dir), + cx.ident_of(next_name)], span) + .add_tys(next_tys))) } none => tys }; - let v = cx.variant(name, span, tys); + let v = cx.variant(cx.ident_of(name), span, tys); vec::push(items_msg, v); } @@ -258,9 +257,13 @@ impl state: to_type_decls { self.data_name(), self.span, cx.ty_path_ast_builder( - (@~"pipes" + @(dir.to_str() + ~"_packet")) + path(~[cx.ident_of(~"pipes"), + cx.ident_of(dir.to_str() + ~"_packet")], + empty_span()) .add_ty(cx.ty_path_ast_builder( - (self.proto.name + self.data_name()) + path(~[cx.ident_of(self.proto.name), + self.data_name()], + empty_span()) .add_tys(cx.ty_vars(self.ty_params))))), self.ty_params)); } @@ -270,10 +273,14 @@ impl state: to_type_decls { self.data_name(), self.span, cx.ty_path_ast_builder( - (@~"pipes" + @(dir.to_str() - + ~"_packet_buffered")) + path(~[cx.ident_of(~"pipes"), + cx.ident_of(dir.to_str() + + ~"_packet_buffered")], + empty_span()) .add_tys(~[cx.ty_path_ast_builder( - (self.proto.name + self.data_name()) + path(~[cx.ident_of(self.proto.name), + self.data_name()], + empty_span()) .add_tys(cx.ty_vars(self.ty_params))), self.proto.buffer_ty_path(cx)])), self.ty_params)); @@ -315,16 +322,17 @@ impl protocol: gen_init { cx.parse_item(fmt!{"fn init%s() -> (client::%s, server::%s)\ 
{ import pipes::has_buffer; %s }", - start_state.ty_params.to_source(), - start_state.to_ty(cx).to_source(), - start_state.to_ty(cx).to_source(), - body.to_source()}) + start_state.ty_params.to_source(cx), + start_state.to_ty(cx).to_source(cx), + start_state.to_ty(cx).to_source(cx), + body.to_source(cx)}) } fn gen_buffer_init(ext_cx: ext_ctxt) -> @ast::expr { ext_cx.rec(self.states.map_to_vec(|s| { let fty = s.to_ty(ext_cx); - ext_cx.field_imm(s.name, #ast { pipes::mk_packet::<$(fty)>() }) + ext_cx.field_imm(ext_cx.ident_of(s.name), + #ast { pipes::mk_packet::<$(fty)>() }) })) } @@ -341,9 +349,11 @@ impl protocol: gen_init { ext_cx.block( self.states.map_to_vec( |s| ext_cx.parse_stmt( - fmt!{"data.%s.set_buffer(buffer)", *s.name})), + fmt!{"data.%s.set_buffer(buffer)", + s.name})), ext_cx.parse_expr( - fmt!{"ptr::addr_of(data.%s)", *self.states[0].name}))); + fmt!{"ptr::addr_of(data.%s)", + self.states[0].name}))); #ast {{ let buffer = $(buffer); @@ -357,14 +367,14 @@ impl protocol: gen_init { let mut params: ~[ast::ty_param] = ~[]; for (copy self.states).each |s| { for s.ty_params.each |tp| { - match params.find(|tpp| *tp.ident == *tpp.ident) { + match params.find(|tpp| tp.ident == tpp.ident) { none => vec::push(params, tp), _ => () } } } - cx.ty_path_ast_builder(path(@~"__Buffer", self.span) + cx.ty_path_ast_builder(path(~[cx.ident_of(~"__Buffer")], self.span) .add_tys(cx.ty_vars(params))) } @@ -373,7 +383,7 @@ impl protocol: gen_init { let mut params: ~[ast::ty_param] = ~[]; let fields = do (copy self.states).map_to_vec |s| { for s.ty_params.each |tp| { - match params.find(|tpp| *tp.ident == *tpp.ident) { + match params.find(|tpp| tp.ident == tpp.ident) { none => vec::push(params, tp), _ => () } @@ -382,11 +392,11 @@ impl protocol: gen_init { let fty = #ast[ty] { pipes::packet<$(ty)> }; - cx.ty_field_imm(s.name, fty) + cx.ty_field_imm(cx.ident_of(s.name), fty) }; cx.item_ty_poly( - @~"__Buffer", + cx.ident_of(~"__Buffer"), cx.empty_span(), cx.ty_rec(fields), params) @@ -410,56 +420,56 @@ impl protocol: gen_init { } vec::push(items, - cx.item_mod(@~"client", + cx.item_mod(cx.ident_of(~"client"), self.span, client_states)); vec::push(items, - cx.item_mod(@~"server", + cx.item_mod(cx.ident_of(~"server"), self.span, server_states)); - cx.item_mod(self.name, self.span, items) + cx.item_mod(cx.ident_of(self.name), self.span, items) } } trait to_source { // Takes a thing and generates a string containing rust code for it. 
- fn to_source() -> ~str; + fn to_source(cx: ext_ctxt) -> ~str; } impl @ast::item: to_source { - fn to_source() -> ~str { - item_to_str(self) + fn to_source(cx: ext_ctxt) -> ~str { + item_to_str(self, cx.parse_sess().interner) } } impl ~[@ast::item]: to_source { - fn to_source() -> ~str { - str::connect(self.map(|i| i.to_source()), ~"\n\n") + fn to_source(cx: ext_ctxt) -> ~str { + str::connect(self.map(|i| i.to_source(cx)), ~"\n\n") } } impl @ast::ty: to_source { - fn to_source() -> ~str { - ty_to_str(self) + fn to_source(cx: ext_ctxt) -> ~str { + ty_to_str(self, cx.parse_sess().interner) } } impl ~[@ast::ty]: to_source { - fn to_source() -> ~str { - str::connect(self.map(|i| i.to_source()), ~", ") + fn to_source(cx: ext_ctxt) -> ~str { + str::connect(self.map(|i| i.to_source(cx)), ~", ") } } impl ~[ast::ty_param]: to_source { - fn to_source() -> ~str { - pprust::typarams_to_str(self) + fn to_source(cx: ext_ctxt) -> ~str { + pprust::typarams_to_str(self, cx.parse_sess().interner) } } impl @ast::expr: to_source { - fn to_source() -> ~str { - pprust::expr_to_str(self) + fn to_source(cx: ext_ctxt) -> ~str { + pprust::expr_to_str(self, cx.parse_sess().interner) } } diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index 9e44ce7acda..62c5329525f 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -1,8 +1,6 @@ import to_str::ToStr; import dvec::{DVec, dvec}; -import ast::{ident}; - import ast_builder::{path, append_types}; enum direction { @@ -27,15 +25,15 @@ impl direction { } } -type next_state = option<{state: ident, tys: ~[@ast::ty]}>; +type next_state = option<{state: ~str, tys: ~[@ast::ty]}>; enum message { // name, span, data, current state, next state - message(ident, span, ~[@ast::ty], state, next_state) + message(~str, span, ~[@ast::ty], state, next_state) } impl message { - fn name() -> ident { + fn name() -> ~str { match self { message(id, _, _, _, _) => id } @@ -58,7 +56,8 @@ impl message { enum state { state_(@{ id: uint, - name: ident, + name: ~str, + ident: ast::ident, span: span, dir: direction, ty_params: ~[ast::ty_param], @@ -68,7 +67,7 @@ enum state { } impl state { - fn add_message(name: ident, span: span, + fn add_message(name: ~str, span: span, +data: ~[@ast::ty], next: next_state) { self.messages.push(message(name, span, data, self, next)); @@ -78,14 +77,15 @@ impl state { (*self).proto.filename() } - fn data_name() -> ident { - self.name + fn data_name() -> ast::ident { + self.ident } /// Returns the type that is used for the messages. fn to_ty(cx: ext_ctxt) -> @ast::ty { cx.ty_path_ast_builder - (path(self.name, self.span).add_tys(cx.ty_vars(self.ty_params))) + (path(~[cx.ident_of(self.name)],self.span).add_tys( + cx.ty_vars(self.ty_params))) } /// Iterate over the states that can be reached in one message @@ -105,18 +105,18 @@ impl state { type protocol = @protocol_; -fn protocol(name: ident, +span: span) -> protocol { +fn protocol(name: ~str, +span: span) -> protocol { @protocol_(name, span) } struct protocol_ { - let name: ident; + let name: ~str; let span: span; let states: DVec<state>; let mut bounded: option<bool>; - new(name: ident, span: span) { + new(name: ~str, span: span) { self.name = name; self.span = span; self.states = dvec(); @@ -124,18 +124,18 @@ struct protocol_ { } /// Get a state. 
- fn get_state(name: ident) -> state { + fn get_state(name: ~str) -> state { self.states.find(|i| i.name == name).get() } fn get_state_by_id(id: uint) -> state { self.states[id] } - fn has_state(name: ident) -> bool { + fn has_state(name: ~str) -> bool { self.states.find(|i| i.name == name) != none } fn filename() -> ~str { - ~"proto://" + *self.name + ~"proto://" + self.name } fn num_states() -> uint { self.states.len() } @@ -162,17 +162,14 @@ struct protocol_ { } impl protocol { - fn add_state(name: ident, dir: direction) -> state { - self.add_state_poly(name, dir, ~[]) - } - - fn add_state_poly(name: ident, dir: direction, + fn add_state_poly(name: ~str, ident: ast::ident, dir: direction, +ty_params: ~[ast::ty_param]) -> state { let messages = dvec(); let state = state_(@{ id: self.states.len(), name: name, + ident: ident, span: self.span, dir: dir, ty_params: ty_params, @@ -188,7 +185,7 @@ impl protocol { trait visitor<Tproto, Tstate, Tmessage> { fn visit_proto(proto: protocol, st: &[Tstate]) -> Tproto; fn visit_state(state: state, m: &[Tmessage]) -> Tstate; - fn visit_message(name: ident, spane: span, tys: &[@ast::ty], + fn visit_message(name: ~str, spane: span, tys: &[@ast::ty], this: state, next: next_state) -> Tmessage; } diff --git a/src/libsyntax/ext/qquote.rs b/src/libsyntax/ext/qquote.rs index 6b5dce312d6..8024f5f9542 100644 --- a/src/libsyntax/ext/qquote.rs +++ b/src/libsyntax/ext/qquote.rs @@ -3,6 +3,7 @@ import ast::{crate, expr_, mac_invoc, import parse::parser; import parse::parser::parse_from_source_str; import dvec::{DVec, dvec}; +import parse::token::ident_interner; import fold::*; import visit::*; @@ -26,6 +27,14 @@ enum fragment { from_ty(@ast::ty) } +fn ids_ext(cx: ext_ctxt, strs: ~[~str]) -> ~[ast::ident] { + strs.map(|str| cx.parse_sess().interner.intern(@str)) +} +fn id_ext(cx: ext_ctxt, str: ~str) -> ast::ident { + cx.parse_sess().interner.intern(@str) +} + + trait qq_helper { fn span() -> span; fn visit(aq_ctxt, vt<aq_ctxt>); @@ -40,7 +49,7 @@ impl @ast::crate: qq_helper { fn extract_mac() -> option<ast::mac_> {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { mk_path(cx, sp, - ~[@~"syntax", @~"ext", @~"qquote", @~"parse_crate"]) + ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_crate"])) } fn get_fold_fn() -> ~str {~"fold_crate"} } @@ -55,7 +64,7 @@ impl @ast::expr: qq_helper { } fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { mk_path(cx, sp, - ~[@~"syntax", @~"ext", @~"qquote", @~"parse_expr"]) + ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_expr"])) } fn get_fold_fn() -> ~str {~"fold_expr"} } @@ -70,7 +79,7 @@ impl @ast::ty: qq_helper { } fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { mk_path(cx, sp, - ~[@~"syntax", @~"ext", @~"qquote", @~"parse_ty"]) + ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_ty"])) } fn get_fold_fn() -> ~str {~"fold_ty"} } @@ -80,7 +89,7 @@ impl @ast::item: qq_helper { fn extract_mac() -> option<ast::mac_> {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { mk_path(cx, sp, - ~[@~"syntax", @~"ext", @~"qquote", @~"parse_item"]) + ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_item"])) } fn get_fold_fn() -> ~str {~"fold_item"} } @@ -90,7 +99,7 @@ impl @ast::stmt: qq_helper { fn extract_mac() -> option<ast::mac_> {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { mk_path(cx, sp, - ~[@~"syntax", @~"ext", @~"qquote", @~"parse_stmt"]) + ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", ~"parse_stmt"])) } fn get_fold_fn() -> ~str {~"fold_stmt"} } @@ -99,7 +108,8 @@ impl @ast::pat: 
qq_helper { fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);} fn extract_mac() -> option<ast::mac_> {fail} fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr { - mk_path(cx, sp, ~[@~"syntax", @~"ext", @~"qquote", @~"parse_pat"]) + mk_path(cx, sp, ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", + ~"parse_pat"])) } fn get_fold_fn() -> ~str {~"fold_pat"} } @@ -159,7 +169,7 @@ fn expand_ast(ecx: ext_ctxt, _sp: span, } match (args[0].node) { ast::expr_path(@{idents: id, _}) if vec::len(id) == 1u - => what = *id[0], + => what = *ecx.parse_sess().interner.get(id[0]), _ => ecx.span_fatal(args[0].span, ~"expected an identifier") } } @@ -243,19 +253,21 @@ fn finish<T: qq_helper> let cx = ecx; let cfg_call = || mk_call_( - cx, sp, mk_access(cx, sp, ~[@~"ext_cx"], @~"cfg"), ~[]); + cx, sp, mk_access(cx, sp, ids_ext(cx, ~[~"ext_cx"]), + id_ext(cx, ~"cfg")), ~[]); let parse_sess_call = || mk_call_( - cx, sp, mk_access(cx, sp, ~[@~"ext_cx"], @~"parse_sess"), ~[]); + cx, sp, mk_access(cx, sp, ids_ext(cx, ~[~"ext_cx"]), + id_ext(cx, ~"parse_sess")), ~[]); let pcall = mk_call(cx,sp, - ~[@~"syntax", @~"parse", @~"parser", - @~"parse_from_source_str"], + ids_ext(cx, ~[~"syntax", ~"parse", ~"parser", + ~"parse_from_source_str"]), ~[node.mk_parse_fn(cx,sp), mk_uniq_str(cx,sp, fname), mk_call(cx,sp, - ~[@~"syntax",@~"ext", - @~"qquote", @~"mk_file_substr"], + ids_ext(cx, ~[~"syntax",~"ext", + ~"qquote", ~"mk_file_substr"]), ~[mk_uniq_str(cx,sp, loc.file.name), mk_uint(cx,sp, loc.line), mk_uint(cx,sp, loc.col)]), @@ -267,16 +279,17 @@ fn finish<T: qq_helper> let mut rcall = pcall; if (g_len > 0u) { rcall = mk_call(cx,sp, - ~[@~"syntax", @~"ext", @~"qquote", @~"replace"], + ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", + ~"replace"]), ~[pcall, mk_uniq_vec_e(cx,sp, qcx.gather.map_to_vec(|g| { mk_call(cx,sp, - ~[@~"syntax", @~"ext", - @~"qquote", @g.constr], + ids_ext(cx, ~[~"syntax", ~"ext", + ~"qquote", g.constr]), ~[g.e])})), mk_path(cx,sp, - ~[@~"syntax", @~"ext", @~"qquote", - @node.get_fold_fn()])]); + ids_ext(cx, ~[~"syntax", ~"ext", ~"qquote", + node.get_fold_fn()]))]); } return rcall; } @@ -331,14 +344,6 @@ fn replace_ty(repls: ~[fragment], } } -fn print_expr(expr: @ast::expr) { - let stdout = io::stdout(); - let pp = pprust::rust_printer(stdout); - pprust::print_expr(pp, expr); - pp::eof(pp.s); - stdout.write_str(~"\n"); -} - fn mk_file_substr(fname: ~str, line: uint, col: uint) -> codemap::file_substr { codemap::fss_external({filename: fname, line: line, col: col}) diff --git a/src/libsyntax/ext/simplext.rs b/src/libsyntax/ext/simplext.rs index e8899a2e541..5b9bac1948f 100644 --- a/src/libsyntax/ext/simplext.rs +++ b/src/libsyntax/ext/simplext.rs @@ -1,5 +1,5 @@ import codemap::span; -import std::map::{hashmap, str_hash, box_str_hash}; +import std::map::{hashmap, str_hash, uint_hash}; import dvec::{DVec, dvec}; import base::*; @@ -135,7 +135,7 @@ fn acumm_bindings(_cx: ext_ctxt, _b_dest: bindings, _b_src: bindings) { } fn pattern_to_selectors(cx: ext_ctxt, e: @expr) -> binders { let res: binders = - {real_binders: box_str_hash::<selector>(), + {real_binders: uint_hash::<selector>(), literal_ast_matchers: dvec()}; //this oughta return binders instead, but macro args are a sequence of //expressions, rather than a single expression @@ -153,7 +153,7 @@ bindings. Most of the work is done in p_t_s, which generates the selectors. 
*/ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> { - let res = box_str_hash::<arb_depth<matchable>>(); + let res = uint_hash::<arb_depth<matchable>>(); //need to do this first, to check vec lengths. for b.literal_ast_matchers.each |sel| { match sel(match_expr(e)) { none => return none, _ => () } @@ -237,7 +237,7 @@ fn follow_for_trans(cx: ext_ctxt, mmaybe: option<arb_depth<matchable>>, /* helper for transcribe_exprs: what vars from `b` occur in `e`? */ fn free_vars(b: bindings, e: @expr, it: fn(ident)) { - let idents: hashmap<ident, ()> = box_str_hash::<()>(); + let idents: hashmap<ident, ()> = uint_hash::<()>(); fn mark_ident(&&i: ident, _fld: ast_fold, b: bindings, idents: hashmap<ident, ()>) -> ident { if b.contains_key(i) { idents.insert(i, ()); } @@ -253,6 +253,12 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) { for idents.each_key |x| { it(x); }; } +fn wrong_occurs(cx: ext_ctxt, l: ident, l_c: uint, r: ident, r_c: uint) + -> ~str { + fmt!{"'%s' occurs %u times, but '%s' occurs %u times", + *cx.parse_sess().interner.get(l), l_c, + *cx.parse_sess().interner.get(r), r_c} +} /* handle sequences (anywhere in the AST) of exprs, either real or ...ed */ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], @@ -279,10 +285,8 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint], some({rep_count: old_len, name: old_name}) => { let len = vec::len(*ms); if old_len != len { - let msg = - fmt!{"'%s' occurs %u times, but ", *fv, len} + - fmt!{"'%s' occurs %u times", *old_name, - old_len}; + let msg = wrong_occurs(cx, fv, len, + old_name, old_len); cx.span_fatal(repeat_me.span, msg); } } @@ -626,7 +630,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> base::macro_def { let args = get_mac_args_no_max(cx, sp, arg, 0u, ~"macro"); - let mut macro_name: option<@~str> = none; + let mut macro_name: option<~str> = none; let mut clauses: ~[@clause] = ~[]; for args.each |arg| { match arg.node { @@ -643,12 +647,15 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, match mac.node { mac_invoc(pth, invoc_arg, body) => { match path_to_ident(pth) { - some(id) => match macro_name { - none => macro_name = some(id), - some(other_id) => if id != other_id { + some(id) => { + let id_str = cx.str_of(id); + match macro_name { + none => macro_name = some(id_str), + some(other_id) => if id_str != other_id { cx.span_fatal(pth.span, ~"macro name must be " + ~"consistent"); + } } }, none => cx.span_fatal(pth.span, @@ -688,7 +695,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg, let ext = |a,b,c,d, move clauses| generic_extension(a,b,c,d,clauses); - return {ident: + return {name: match macro_name { some(id) => id, none => cx.span_fatal(sp, ~"macro definition must have " + diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index e05a713165b..c323f201b83 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -43,14 +43,15 @@ fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg, fn expand_stringify(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { let args = get_mac_args(cx, sp, arg, 1u, option::some(1u), ~"stringify"); - return mk_uniq_str(cx, sp, pprust::expr_to_str(args[0])); + let s = pprust::expr_to_str(args[0], cx.parse_sess().interner); + return mk_uniq_str(cx, sp, s); } fn expand_mod(cx: ext_ctxt, sp: span, arg: ast::mac_arg, _body: ast::mac_body) -> @ast::expr { get_mac_args(cx, sp, 
arg, 0u, option::some(0u), ~"file"); return mk_uniq_str(cx, sp, - str::connect(cx.mod_path().map(|x|*x), ~"::")); + str::connect(cx.mod_path().map(|x| cx.str_of(x)), ~"::")); } fn expand_include(cx: ext_ctxt, sp: span, arg: ast::mac_arg, diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index d4f1493169f..22f0aeaa2c0 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -15,10 +15,10 @@ fn expand_trace_macros(cx: ext_ctxt, sp: span, let rdr = tt_rdr as reader; let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); - let arg = rust_parser.parse_ident(); + let arg = cx.str_of(rust_parser.parse_ident()); match arg { - @~"true" => cx.set_trace_macros(true), - @~"false" => cx.set_trace_macros(false), + ~"true" => cx.set_trace_macros(true), + ~"false" => cx.set_trace_macros(false), _ => cx.span_fatal(sp, ~"trace_macros! only accepts `true` or `false`") } let rust_parser = parser(sess, cfg, rdr.dup(), SOURCE_FILE); diff --git a/src/libsyntax/ext/tt/earley_parser.rs b/src/libsyntax/ext/tt/earley_parser.rs index 77ba941015b..896b6f6e54f 100644 --- a/src/libsyntax/ext/tt/earley_parser.rs +++ b/src/libsyntax/ext/tt/earley_parser.rs @@ -10,7 +10,7 @@ import parse::parse_sess; import dvec::{DVec, dvec}; import ast::{matcher, match_tok, match_seq, match_nonterminal, ident}; import ast_util::mk_sp; -import std::map::{hashmap, box_str_hash}; +import std::map::{hashmap, uint_hash}; /* This is an Earley-like parser, without support for in-grammar nonterminals, onlyl calling out to the main rust parser for named nonterminals (which it @@ -120,14 +120,14 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match]) } {node: match_nonterminal(bind_name, _, idx), span: sp} => { if ret_val.contains_key(bind_name) { - p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: " - + *bind_name) + p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "+ + *p_s.interner.get(bind_name)) } ret_val.insert(bind_name, res[idx]); } } } - let ret_val = box_str_hash::<@named_match>(); + let ret_val = uint_hash::<@named_match>(); for ms.each() |m| { n_rec(p_s, m, res, ret_val) } return ret_val; } @@ -274,7 +274,8 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) let nts = str::connect(vec::map(bb_eis, |ei| { match ei.elts[ei.idx].node { match_nonterminal(bind,name,_) => { - fmt!{"%s ('%s')", *name, *bind} + fmt!{"%s ('%s')", *sess.interner.get(name), + *sess.interner.get(bind)} } _ => fail } }), ~" or "); @@ -298,7 +299,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher]) match ei.elts[ei.idx].node { match_nonterminal(_, name, idx) => { ei.matches[idx].push(@matched_nonterminal( - parse_nt(rust_parser, *name))); + parse_nt(rust_parser, *sess.interner.get(name)))); ei.idx += 1u; } _ => fail diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 05db498a022..be7594ff843 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,6 +8,7 @@ import parse::parser::{parser, SOURCE_FILE}; import earley_parser::{parse, parse_or_else, success, failure, named_match, matched_seq, matched_nonterminal, error}; import std::map::hashmap; +import parse::token::special_idents; fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, arg: ~[ast::token_tree]) -> base::mac_result { @@ -16,14 +17,17 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, {node: m, span: {lo: 0u, hi: 0u, expn_info: none}} } + let lhs_nm = 
cx.parse_sess().interner.gensym(@~"lhs"); + let rhs_nm = cx.parse_sess().interner.gensym(@~"rhs"); + // The grammar for macro_rules! is: // $( $lhs:mtcs => $rhs:tt );+ // ...quasiquoting this would be nice. let argument_gram = ~[ ms(match_seq(~[ - ms(match_nonterminal(@~"lhs",@~"matchers", 0u)), + ms(match_nonterminal(lhs_nm, special_idents::matchers, 0u)), ms(match_tok(FAT_ARROW)), - ms(match_nonterminal(@~"rhs",@~"tt", 1u)), + ms(match_nonterminal(rhs_nm, special_idents::tt, 1u)), ], some(SEMI), false, 0u, 2u)), //to phase into semicolon-termination instead of //semicolon-separation @@ -37,11 +41,11 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, arg_reader as reader, argument_gram); // Extract the arguments: - let lhses:~[@named_match] = match argument_map.get(@~"lhs") { + let lhses:~[@named_match] = match argument_map.get(lhs_nm) { @matched_seq(s, sp) => s, _ => cx.span_bug(sp, ~"wrong-structured lhs") }; - let rhses:~[@named_match] = match argument_map.get(@~"rhs") { + let rhses:~[@named_match] = match argument_map.get(rhs_nm) { @matched_seq(s, sp) => s, _ => cx.span_bug(sp, ~"wrong-structured rhs") }; @@ -53,8 +57,9 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, -> mac_result { if cx.trace_macros() { - io::println(fmt!("%s! { %s }", *name, - print::pprust::unexpanded_tt_to_str( + io::println(fmt!("%s! { %s }", + cx.str_of(name), + print::pprust::tt_to_str( ast::tt_delim(arg), cx.parse_sess().interner))); } @@ -103,7 +108,7 @@ fn add_new_extension(cx: ext_ctxt, sp: span, name: ident, arg, lhses, rhses); return mr_def({ - ident: name, + name: *cx.parse_sess().interner.get(name), ext: expr_tt({expander: exp, span: some(sp)}) }); } \ No newline at end of file diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index a166957231e..1653c57bf2c 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -47,7 +47,7 @@ fn new_tt_reader(sp_diag: span_handler, itr: ident_interner, mut cur: @{readme: src, mut idx: 0u, dotdotdoted: false, sep: none, up: tt_frame_up(option::none)}, interpolations: match interp { /* just a convienience */ - none => std::map::box_str_hash::<@named_match>(), + none => std::map::uint_hash::<@named_match>(), some(x) => x }, mut repeat_idx: ~[mut], mut repeat_len: ~[], @@ -100,8 +100,8 @@ enum lis { lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str) } -fn lockstep_iter_size(&&t: token_tree, &&r: tt_reader) -> lis { - fn lis_merge(lhs: lis, rhs: lis) -> lis { +fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis { + fn lis_merge(lhs: lis, rhs: lis, r: tt_reader) -> lis { match lhs { lis_unconstrained => rhs, lis_contradiction(_) => lhs, @@ -110,9 +110,11 @@ fn lockstep_iter_size(&&t: token_tree, &&r: tt_reader) -> lis { lis_contradiction(_) => rhs, lis_constraint(r_len, _) if l_len == r_len => lhs, lis_constraint(r_len, r_id) => { + let l_n = *r.interner.get(l_id); + let r_n = *r.interner.get(r_id); lis_contradiction(fmt!{"Inconsistent lockstep iteration: \ '%s' has %u items, but '%s' has %u", - *l_id, l_len, *r_id, r_len}) + l_n, l_len, r_n, r_len}) } } } @@ -120,7 +122,7 @@ fn lockstep_iter_size(&&t: token_tree, &&r: tt_reader) -> lis { match t { tt_delim(tts) | tt_seq(_, tts, _, _) => { vec::foldl(lis_unconstrained, tts, {|lis, tt| - lis_merge(lis, lockstep_iter_size(tt, r)) }) + lis_merge(lis, lockstep_iter_size(tt, r), r) }) } tt_tok(*) => lis_unconstrained, tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) { @@ -230,7 +232,7 @@ fn 
tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} { r.sp_diag.span_fatal( copy r.cur_span, /* blame the macro writer */ fmt!{"variable '%s' is still repeating at this depth", - *ident}); + *r.interner.get(ident)}); } } } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 705f21895b6..05191c677a0 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -81,15 +81,14 @@ type ast_fold_precursor = @{ fn fold_meta_item_(&&mi: @meta_item, fld: ast_fold) -> @meta_item { return @{node: match mi.node { - meta_word(id) => meta_word(fld.fold_ident(id)), + meta_word(id) => meta_word(id), meta_list(id, mis) => { let fold_meta_item = |x|fold_meta_item_(x, fld); meta_list(/* FIXME: (#2543) */ copy id, vec::map(mis, fold_meta_item)) } meta_name_value(id, s) => { - meta_name_value(fld.fold_ident(id), - /* FIXME (#2543) */ copy s) + meta_name_value(id, /* FIXME (#2543) */ copy s) } }, span: fld.new_span(mi.span)}; diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index cbb6709d9c8..54b0e3388f7 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -66,7 +66,7 @@ impl parser: parser_attr { } token::DOC_COMMENT(s) => { let attr = ::attr::mk_sugared_doc_attr( - *self.get_str(s), self.span.lo, self.span.hi); + *self.id_to_str(s), self.span.lo, self.span.hi); if attr.node.style != ast::attr_outer { self.fatal(~"expected outer comment"); } @@ -128,7 +128,7 @@ impl parser: parser_attr { } token::DOC_COMMENT(s) => { let attr = ::attr::mk_sugared_doc_attr( - *self.get_str(s), self.span.lo, self.span.hi); + *self.id_to_str(s), self.span.lo, self.span.hi); self.bump(); if attr.node.style == ast::attr_inner { inner_attrs += ~[attr]; @@ -145,22 +145,22 @@ impl parser: parser_attr { fn parse_meta_item() -> @ast::meta_item { let lo = self.span.lo; - let ident = self.parse_ident(); + let name = *self.id_to_str(self.parse_ident()); match self.token { token::EQ => { self.bump(); let lit = self.parse_lit(); let mut hi = self.span.hi; - return @spanned(lo, hi, ast::meta_name_value(ident, lit)); + return @spanned(lo, hi, ast::meta_name_value(name, lit)); } token::LPAREN => { let inner_items = self.parse_meta_seq(); let mut hi = self.span.hi; - return @spanned(lo, hi, ast::meta_list(ident, inner_items)); + return @spanned(lo, hi, ast::meta_list(name, inner_items)); } _ => { let mut hi = self.span.hi; - return @spanned(lo, hi, ast::meta_word(ident)); + return @spanned(lo, hi, ast::meta_word(name)); } } } diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index b10da4482e4..09edbda4335 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -276,7 +276,7 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler, srdr: io::Reader) -> {cmnts: ~[cmnt], lits: ~[lit]} { let src = @str::from_bytes(srdr.read_whole_stream()); - let itr = parse::token::mk_ident_interner(); + let itr = parse::token::mk_fake_ident_interner(); let rdr = lexer::new_low_level_string_reader (span_diagnostic, codemap::new_filemap(path, src, 0u, 0u), itr); diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 59dad16dc44..1d260268d3f 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -85,7 +85,7 @@ impl parser: parser_common { fn parse_ident() -> ast::ident { match copy self.token { - token::IDENT(i, _) => { self.bump(); return self.get_str(i); } + token::IDENT(i, _) => { self.bump(); return i; } token::INTERPOLATED(token::nt_ident(*)) => { self.bug( ~"ident 
interpolation not converted to real token"); } _ => { self.fatal(~"expected ident, found `" @@ -110,6 +110,8 @@ impl parser: parser_common { return if self.token == tok { self.bump(); true } else { false }; } + // Storing keywords as interned idents instead of strings would be nifty. + // A sanity check that the word we are asking for is a known keyword fn require_keyword(word: ~str) { if !self.keywords.contains_key_ref(&word) { @@ -119,7 +121,7 @@ impl parser: parser_common { fn token_is_word(word: ~str, ++tok: token::token) -> bool { match tok { - token::IDENT(sid, false) => { word == *self.get_str(sid) } + token::IDENT(sid, false) => { *self.id_to_str(sid) == word } _ => { false } } } @@ -136,7 +138,7 @@ impl parser: parser_common { fn is_any_keyword(tok: token::token) -> bool { match tok { token::IDENT(sid, false) => { - self.keywords.contains_key_ref(self.get_str(sid)) + self.keywords.contains_key_ref(self.id_to_str(sid)) } _ => false } @@ -148,7 +150,7 @@ impl parser: parser_common { let mut bump = false; let val = match self.token { token::IDENT(sid, false) => { - if word == *self.get_str(sid) { + if word == *self.id_to_str(sid) { bump = true; true } else { false } diff --git a/src/libsyntax/parse/eval.rs b/src/libsyntax/parse/eval.rs index 6b0112922a5..60d2fadb04c 100644 --- a/src/libsyntax/parse/eval.rs +++ b/src/libsyntax/parse/eval.rs @@ -78,10 +78,10 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>) } } -fn cdir_path_opt(id: ast::ident, attrs: ~[ast::attribute]) -> @~str { +fn cdir_path_opt(default: ~str, attrs: ~[ast::attribute]) -> ~str { match ::attr::first_attr_value_str_by_name(attrs, ~"path") { - some(d) => return d, - none => return id + some(d) => d, + none => default } } @@ -90,11 +90,12 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str, &items: ~[@ast::item]) { match cdir.node { ast::cdir_src_mod(id, attrs) => { - let file_path = cdir_path_opt(@(*id + ~".rs"), attrs); + let file_path = cdir_path_opt((cx.sess.interner.get(id) + ~".rs"), + attrs); let full_path = - if path::path_is_absolute(*file_path) { - *file_path - } else { prefix + path::path_sep() + *file_path }; + if path::path_is_absolute(file_path) { + file_path + } else { prefix + path::path_sep() + file_path }; let (p0, r0) = new_parser_etc_from_file(cx.sess, cx.cfg, full_path, SOURCE_FILE); let inner_attrs = p0.parse_inner_attrs_and_next(); @@ -111,11 +112,11 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: ~str, vec::push(items, i); } ast::cdir_dir_mod(id, cdirs, attrs) => { - let path = cdir_path_opt(id, attrs); + let path = cdir_path_opt(*cx.sess.interner.get(id), attrs); let full_path = - if path::path_is_absolute(*path) { - *path - } else { prefix + path::path_sep() + *path }; + if path::path_is_absolute(path) { + path + } else { prefix + path::path_sep() + path }; let (m0, a0) = eval_crate_directives_to_mod( cx, cdirs, full_path, none); let i = diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 648ec3e60db..fea79309c21 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -6,7 +6,7 @@ import std::map::{hashmap, str_hash}; import token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident, INTERPOLATED}; import codemap::{span,fss_none}; -import util::interner; +import util::interner::interner; import ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec}; import lexer::reader; import prec::{as_prec, token_to_binop}; @@ -193,12 +193,14 @@ struct parser { let 
mut restriction: restriction; let mut quote_depth: uint; // not (yet) related to the quasiquoter let reader: reader; + let interner: interner<@~str>; let keywords: hashmap<~str, ()>; let restricted_keywords: hashmap<~str, ()>; new(sess: parse_sess, cfg: ast::crate_cfg, +rdr: reader, ftype: file_type) { self.reader <- rdr; + self.interner = self.reader.interner(); let tok0 = self.reader.next_token(); let span0 = tok0.sp; self.sess = sess; @@ -268,11 +270,10 @@ struct parser { fn warn(m: ~str) { self.sess.span_diagnostic.span_warn(copy self.span, m) } - pure fn get_str(i: token::str_num) -> @~str { - self.reader.interner().get(i) - } fn get_id() -> node_id { next_node_id(self.sess) } + pure fn id_to_str(id: ident) -> @~str { self.sess.interner.get(id) } + fn parse_ty_fn(purity: ast::purity) -> ty_ { let proto, bounds; if self.eat_keyword(~"extern") { @@ -398,9 +399,9 @@ struct parser { } } - fn region_from_name(s: option<@~str>) -> @region { + fn region_from_name(s: option<ident>) -> @region { let r = match s { - some (string) => re_named(string), + some (id) => re_named(id), none => re_anon }; @@ -414,8 +415,7 @@ struct parser { match copy self.token { token::IDENT(sid, _) => { self.bump(); - let n = self.get_str(sid); - self.region_from_name(some(n)) + self.region_from_name(some(sid)) } _ => { self.region_from_name(none) @@ -430,7 +430,7 @@ struct parser { token::IDENT(sid, _) => { if self.look_ahead(1u) == token::BINOP(token::SLASH) { self.bump(); self.bump(); - some(self.get_str(sid)) + some(sid) } else { none } @@ -583,7 +583,7 @@ struct parser { let name = self.parse_value_ident(); self.bump(); name - } else { @~"" } + } else { token::special_idents::invalid } }; let t = self.parse_ty(false); @@ -678,10 +678,10 @@ struct parser { token::LIT_INT(i, it) => lit_int(i, it), token::LIT_UINT(u, ut) => lit_uint(u, ut), token::LIT_INT_UNSUFFIXED(i) => lit_int_unsuffixed(i), - token::LIT_FLOAT(s, ft) => lit_float(self.get_str(s), ft), - token::LIT_STR(s) => lit_str(self.get_str(s)), - token::LPAREN => { self.expect(token::RPAREN); lit_nil } - _ => self.unexpected_last(tok) + token::LIT_FLOAT(s, ft) => lit_float(self.id_to_str(s), ft), + token::LIT_STR(s) => lit_str(self.id_to_str(s)), + token::LPAREN => { self.expect(token::RPAREN); lit_nil }, + _ => { self.unexpected_last(tok); } } } @@ -1140,8 +1140,7 @@ struct parser { self.parse_seq_to_gt(some(token::COMMA), |p| p.parse_ty(false)) } else { ~[] }; - e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e), - self.get_str(i), + e = self.mk_pexpr(lo, hi, expr_field(self.to_expr(e), i, tys)); } _ => self.unexpected() @@ -2123,9 +2122,6 @@ struct parser { } fn expr_is_complete(e: pexpr) -> bool { - log(debug, (~"expr_is_complete", self.restriction, - print::pprust::expr_to_str(*e), - classify::expr_requires_semi_to_be_stmt(*e))); return self.restriction == RESTRICT_STMT_EXPR && !classify::expr_requires_semi_to_be_stmt(*e); } @@ -2306,8 +2302,9 @@ struct parser { fn is_self_ident() -> bool { match self.token { - token::IDENT(sid, false) if ~"self" == *self.get_str(sid) => true, - _ => false + token::IDENT(id, false) if id == token::special_idents::self_ + => true, + _ => false } } @@ -2522,11 +2519,13 @@ struct parser { } // This is a new-style impl declaration. - let ident = @~"__extensions__"; // XXX: clownshoes + // XXX: clownshoes + let ident = token::special_idents::clownshoes_extensions; // Parse the type. let ty = self.parse_ty(false); + // Parse traits, if necessary. 
let traits = if self.token == token::COLON { self.bump(); @@ -2595,7 +2594,8 @@ struct parser { match the_ctor { some((_, _, _, s_first)) => { self.span_note(s, #fmt("Duplicate constructor \ - declaration for class %s", *class_name)); + declaration for class %s", + *self.interner.get(class_name))); self.span_fatal(copy s_first, ~"First constructor \ declared here"); } @@ -2608,7 +2608,8 @@ struct parser { match the_dtor { some((_, _, s_first)) => { self.span_note(s, #fmt("Duplicate destructor \ - declaration for class %s", *class_name)); + declaration for class %s", + *self.interner.get(class_name))); self.span_fatal(copy s_first, ~"First destructor \ declared here"); } @@ -3081,7 +3082,7 @@ struct parser { let ty_params = self.parse_ty_params(); // Newtype syntax if self.token == token::EQ { - self.check_restricted_keywords_(*id); + self.check_restricted_keywords_(*self.id_to_str(id)); self.bump(); let ty = self.parse_ty(false); self.expect(token::SEMI); @@ -3297,7 +3298,7 @@ struct parser { let lo = self.span.lo; let first_ident = self.parse_ident(); let mut path = ~[first_ident]; - debug!{"parsed view_path: %s", *first_ident}; + debug!{"parsed view_path: %s", *self.id_to_str(first_ident)}; match self.token { token::EQ => { // x = foo::bar @@ -3323,7 +3324,7 @@ struct parser { token::IDENT(i, _) => { self.bump(); - vec::push(path, self.get_str(i)); + vec::push(path, i); } // foo::bar::{a,b,c} @@ -3458,8 +3459,8 @@ struct parser { fn parse_str() -> @~str { match copy self.token { - token::LIT_STR(s) => { self.bump(); self.get_str(s) } - _ => self.fatal(~"expected string literal") + token::LIT_STR(s) => { self.bump(); self.id_to_str(s) } + _ => self.fatal(~"expected string literal") } } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 10fac8d0e23..8eb9270efe2 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -277,23 +277,83 @@ pure fn is_bar(t: token) -> bool { match t { BINOP(OR) | OROR => true, _ => false } } -type ident_interner = util::interner::interner<@~str>; mod special_idents { - const underscore : uint = 0u; - const anon : uint = 1u; - const destr : uint = 2u; // 'drop', but that's reserved + import ast::ident; + const underscore : ident = 0u; + const anon : ident = 1u; + const dtor : ident = 2u; // 'drop', but that's reserved + const invalid : ident = 3u; // '' + const unary : ident = 4u; + const not_fn : ident = 5u; + const idx_fn : ident = 6u; + const unary_minus_fn : ident = 7u; + const clownshoes_extensions : ident = 8u; + + const self_ : ident = 9u; // 'self' + + /* for matcher NTs */ + const item : ident = 10u; + const block : ident = 11u; + const stmt : ident = 12u; + const pat : ident = 13u; + const expr : ident = 14u; + const ty : ident = 15u; + const ident : ident = 16u; + const path : ident = 17u; + const tt : ident = 18u; + const matchers : ident = 19u; + + const str : ident = 20u; // for the type + + /* outside of libsyntax */ + const ty_visitor : ident = 21u; + const arg : ident = 22u; + const descrim : ident = 23u; + const clownshoe_abi : ident = 24u; + const clownshoe_stack_shim : ident = 25u; + const tydesc : ident = 26u; + const literally_dtor : ident = 27u; + const main : ident = 28u; + const opaque : ident = 29u; + const blk : ident = 30u; + const static : ident = 31u; + const intrinsic : ident = 32u; + } +type ident_interner = util::interner::interner<@~str>; + +/** Key for thread-local data for sneaking interner information to the + * serializer/deserializer. 
It sounds like a hack because it is one. */ +fn interner_key(+_x: @@ident_interner) { } + fn mk_ident_interner() -> ident_interner { /* the indices here must correspond to the numbers in special_idents */ - let init_vec = ~[@~"_", @~"anon", @~"drop"]; - - let rv = @interner::mk_prefill::<@~str>(|x| str::hash(*x), - |x,y| str::eq(*x, *y), init_vec); + let init_vec = ~[@~"_", @~"anon", @~"drop", @~"", @~"unary", @~"!", + @~"[]", @~"unary-", @~"__extensions__", @~"self", + @~"item", @~"block", @~"stmt", @~"pat", @~"expr", + @~"ty", @~"ident", @~"path", @~"tt", @~"matchers", + @~"str", @~"ty_visitor", @~"arg", @~"descrim", + @~"__rust_abi", @~"__rust_stack_shim", @~"tydesc", + @~"dtor", @~"main", @~"<opaque>", @~"blk", @~"static", + @~"intrinsic"]; + + let rv = interner::mk_prefill::<@~str>(|x| str::hash(*x), + |x,y| str::eq(*x, *y), init_vec); + + /* having multiple interners will just confuse the serializer */ + unsafe{ assert task::local_data_get(interner_key) == none }; + unsafe{ task::local_data_set(interner_key, @rv) }; rv } +/* for when we don't care about the contents; doesn't interact with TLD or + serialization */ +fn mk_fake_ident_interner() -> ident_interner { + interner::mk::<@~str>(|x| str::hash(*x), |x,y| str::eq(*x, *y)) +} + /** * All the valid words that have meaning in the Rust language. * diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index b8872f76d8d..573eeffd968 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -8,9 +8,7 @@ import ast::{required, provided}; import ast_util::{operator_prec}; import dvec::{DVec, dvec}; import parse::classify::*; -import util::interner; - -type ident_interner = interner::interner<@~str>; +import parse::token::ident_interner; // The ps is stored here to prevent recursive type. enum ann_node { @@ -47,19 +45,7 @@ fn end(s: ps) { pp::end(s.s); } -fn rust_printer(writer: io::Writer) -> ps { - return @{s: pp::mk_printer(writer, default_columns), - cm: none::<codemap>, - intr: token::mk_ident_interner(), - comments: none::<~[comments::cmnt]>, - literals: none::<~[comments::lit]>, - mut cur_cmnt: 0u, - mut cur_lit: 0u, - boxes: dvec(), - ann: no_ann()}; -} - -fn unexpanded_rust_printer(writer: io::Writer, intr: ident_interner) -> ps { +fn rust_printer(writer: io::Writer, intr: ident_interner) -> ps { return @{s: pp::mk_printer(writer, default_columns), cm: none::<codemap>, intr: intr, @@ -79,7 +65,7 @@ const default_columns: uint = 78u; // Requires you to pass an input filename and reader so that // it can scan the input text for comments and literals to // copy forward. 
-fn print_crate(cm: codemap, intr: interner::interner<@~str>, +fn print_crate(cm: codemap, intr: ident_interner, span_diagnostic: diagnostic::span_handler, crate: @ast::crate, filename: ~str, in: io::Reader, out: io::Writer, ann: pp_ann, is_expanded: bool) { @@ -107,41 +93,46 @@ fn print_crate_(s: ps, &&crate: @ast::crate) { eof(s.s); } -fn ty_to_str(ty: @ast::ty) -> ~str { return to_str(ty, print_type); } +fn ty_to_str(ty: @ast::ty, intr: ident_interner) -> ~str { + to_str(ty, print_type, intr) +} -fn pat_to_str(pat: @ast::pat) -> ~str { return to_str(pat, print_pat); } +fn pat_to_str(pat: @ast::pat, intr: ident_interner) -> ~str { + to_str(pat, print_pat, intr) +} -fn expr_to_str(e: @ast::expr) -> ~str { return to_str(e, print_expr); } +fn expr_to_str(e: @ast::expr, intr: ident_interner) -> ~str { + to_str(e, print_expr, intr) +} -fn unexpanded_tt_to_str(tt: ast::token_tree, intr: ident_interner) - -> ~str { - let buffer = io::mem_buffer(); - let s = unexpanded_rust_printer(io::mem_buffer_writer(buffer), intr); - print_tt(s, tt); - eof(s.s); - io::mem_buffer_str(buffer) +fn tt_to_str(tt: ast::token_tree, intr: ident_interner) -> ~str { + to_str(tt, print_tt, intr) } -fn stmt_to_str(s: ast::stmt) -> ~str { return to_str(s, print_stmt); } +fn stmt_to_str(s: ast::stmt, intr: ident_interner) -> ~str { + to_str(s, print_stmt, intr) +} -fn item_to_str(i: @ast::item) -> ~str { return to_str(i, print_item); } +fn item_to_str(i: @ast::item, intr: ident_interner) -> ~str { + to_str(i, print_item, intr) +} -fn attr_to_str(i: ast::attribute) -> ~str { - return to_str(i, print_attribute); +fn attr_to_str(i: ast::attribute, intr: ident_interner) -> ~str { + to_str(i, print_attribute, intr) } -fn typarams_to_str(tps: ~[ast::ty_param]) -> ~str { - return to_str(tps, print_type_params) +fn typarams_to_str(tps: ~[ast::ty_param], intr: ident_interner) -> ~str { + to_str(tps, print_type_params, intr) } -fn path_to_str(&&p: @ast::path) -> ~str { - return to_str(p, |a,b| print_path(a, b, false)); +fn path_to_str(&&p: @ast::path, intr: ident_interner) -> ~str { + to_str(p, |a,b| print_path(a, b, false), intr) } fn fun_to_str(decl: ast::fn_decl, name: ast::ident, - params: ~[ast::ty_param]) -> ~str { + params: ~[ast::ty_param], intr: ident_interner) -> ~str { let buffer = io::mem_buffer(); - let s = rust_printer(io::mem_buffer_writer(buffer)); + let s = rust_printer(io::mem_buffer_writer(buffer), intr); print_fn(s, decl, name, params, none); end(s); // Close the head box end(s); // Close the outer box @@ -162,9 +153,9 @@ fn test_fun_to_str() { assert fun_to_str(decl, "a", ~[]) == "fn a()"; } -fn block_to_str(blk: ast::blk) -> ~str { +fn block_to_str(blk: ast::blk, intr: ident_interner) -> ~str { let buffer = io::mem_buffer(); - let s = rust_printer(io::mem_buffer_writer(buffer)); + let s = rust_printer(io::mem_buffer_writer(buffer), intr); // containing cbox, will be closed by print-block at } cbox(s, indent_unit); // head-ibox, will be closed by print-block after { @@ -174,16 +165,16 @@ fn block_to_str(blk: ast::blk) -> ~str { io::mem_buffer_str(buffer) } -fn meta_item_to_str(mi: ast::meta_item) -> ~str { - return to_str(@mi, print_meta_item); +fn meta_item_to_str(mi: ast::meta_item, intr: ident_interner) -> ~str { + to_str(@mi, print_meta_item, intr) } -fn attribute_to_str(attr: ast::attribute) -> ~str { - return to_str(attr, print_attribute); +fn attribute_to_str(attr: ast::attribute, intr: ident_interner) -> ~str { + to_str(attr, print_attribute, intr) } -fn variant_to_str(var: ast::variant) -> ~str { - 
return to_str(var, print_variant); +fn variant_to_str(var: ast::variant, intr: ident_interner) -> ~str { + to_str(var, print_variant, intr) } #[test] @@ -349,7 +340,7 @@ fn print_region(s: ps, region: @ast::region) { ast::re_anon => word_space(s, ~"&"), ast::re_named(name) => { word(s.s, ~"&"); - word(s.s, *name); + print_ident(s, name); } } } @@ -389,7 +380,7 @@ fn print_type_ex(s: ps, &&ty: @ast::ty, print_colons: bool) { fn print_field(s: ps, f: ast::ty_field) { cbox(s, indent_unit); print_mutability(s, f.node.mt.mutbl); - word(s.s, *f.node.ident); + print_ident(s, f.node.ident); word_space(s, ~":"); print_type(s, f.node.mt.ty); end(s); @@ -446,7 +437,8 @@ fn print_item(s: ps, &&item: @ast::item) { match item.node { ast::item_const(ty, expr) => { head(s, ~"const"); - word_space(s, *item.ident + ~":"); + print_ident(s, item.ident); + word_space(s, ~":"); print_type(s, ty); space(s.s); end(s); // end the head-ibox @@ -464,7 +456,8 @@ fn print_item(s: ps, &&item: @ast::item) { } ast::item_mod(_mod) => { head(s, ~"mod"); - word_nbsp(s, *item.ident); + print_ident(s, item.ident); + nbsp(s); bopen(s); print_mod(s, _mod, item.attrs); bclose(s, item.span); @@ -472,7 +465,8 @@ fn print_item(s: ps, &&item: @ast::item) { ast::item_foreign_mod(nmod) => { head(s, ~"extern"); word_nbsp(s, ~"mod"); - word_nbsp(s, *item.ident); + print_ident(s, item.ident); + nbsp(s); bopen(s); print_foreign_mod(s, nmod, item.attrs); bclose(s, item.span); @@ -481,7 +475,7 @@ fn print_item(s: ps, &&item: @ast::item) { ibox(s, indent_unit); ibox(s, 0u); word_nbsp(s, ~"type"); - word(s.s, *item.ident); + print_ident(s, item.ident); print_type_params(s, params); end(s); // end the inner ibox @@ -498,6 +492,7 @@ fn print_item(s: ps, &&item: @ast::item) { head(s, ~"struct"); print_struct(s, struct_def, tps, item.ident, item.span); } + ast::item_impl(tps, traits, ty, methods) => { head(s, ~"impl"); if tps.is_not_empty() { @@ -522,7 +517,7 @@ fn print_item(s: ps, &&item: @ast::item) { } ast::item_trait(tps, traits, methods) => { head(s, ~"trait"); - word(s.s, *item.ident); + print_ident(s, item.ident); print_type_params(s, tps); if vec::len(traits) != 0u { word_space(s, ~":"); @@ -535,7 +530,9 @@ fn print_item(s: ps, &&item: @ast::item) { bclose(s, item.span); } ast::item_mac({node: ast::mac_invoc_tt(pth, tts), _}) => { - head(s, path_to_str(pth) + ~"! " + *item.ident); + print_path(s, pth, false); + head(s, ~"! 
"); + print_ident(s, item.ident); bopen(s); for tts.each |tt| { print_tt(s, tt); } bclose(s, item.span); @@ -552,7 +549,7 @@ fn print_enum_def(s: ps, enum_definition: ast::enum_def, span: ast::span) { let mut newtype = vec::len(enum_definition.variants) == 1u && - str::eq(ident, enum_definition.variants[0].node.name); + ident == enum_definition.variants[0].node.name; if newtype { match enum_definition.variants[0].node.kind { ast::tuple_variant_kind(args) if args.len() == 1 => {} @@ -566,7 +563,7 @@ fn print_enum_def(s: ps, enum_definition: ast::enum_def, head(s, ~"enum"); } - word(s.s, *ident); + print_ident(s, ident); print_type_params(s, params); space(s.s); if newtype { @@ -599,7 +596,8 @@ fn print_variants(s: ps, variants: ~[ast::variant], span: ast::span) { fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param], ident: ast::ident, span: ast::span) { - word_nbsp(s, *ident); + print_ident(s, ident); + nbsp(s); print_type_params(s, tps); if vec::len(struct_def.traits) != 0u { word_space(s, ~":"); @@ -639,7 +637,7 @@ fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param], if mutability == ast::class_mutable { word_nbsp(s, ~"mut"); } - word(s.s, *ident); + print_ident(s, ident); word_nbsp(s, ~":"); print_type(s, field.node.ty); word(s.s, ~";"); @@ -684,7 +682,8 @@ fn print_tt(s: ps, tt: ast::token_tree) { s.s.token_tree_last_was_ident = false; } ast::tt_nonterminal(_, name) => { - word(s.s, ~"$" + *name); + word(s.s, ~"$"); + print_ident(s, name); s.s.token_tree_last_was_ident = true; } } @@ -693,7 +692,7 @@ fn print_tt(s: ps, tt: ast::token_tree) { fn print_variant(s: ps, v: ast::variant) { match v.node.kind { ast::tuple_variant_kind(args) => { - word(s.s, *v.node.name); + print_ident(s, v.node.name); if vec::len(args) > 0u { popen(s); fn print_variant_arg(s: ps, arg: ast::variant_arg) { @@ -780,7 +779,7 @@ fn print_attribute(s: ps, attr: ast::attribute) { if attr.node.is_sugared_doc { let meta = attr::attr_meta(attr); let comment = attr::get_meta_item_value_str(meta).get(); - word(s.s, *comment); + word(s.s, comment); } else { word(s.s, ~"#["); print_meta_item(s, @attr.node.value); @@ -935,7 +934,8 @@ fn print_mac(s: ps, m: ast::mac) { // FIXME: extension 'body' (#2339) } ast::mac_invoc_tt(pth, tts) => { - head(s, path_to_str(pth) + ~"!"); + print_path(s, pth, false); + head(s, ~"!"); bopen(s); for tts.each() |tt| { print_tt(s, tt); } bclose(s, m.span); @@ -956,7 +956,7 @@ fn print_vstore(s: ps, t: ast::vstore) { ast::re_anon => word(s.s, ~"&"), ast::re_named(name) => { word(s.s, ~"&"); - word(s.s, *name); + print_ident(s, name); word(s.s, ~"."); } } @@ -967,7 +967,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) { fn print_field(s: ps, field: ast::field) { ibox(s, indent_unit); if field.node.mutbl == ast::m_mutbl { word_nbsp(s, ~"mut"); } - word(s.s, *field.node.ident); + print_ident(s, field.node.ident); word_space(s, ~":"); print_expr(s, field.node.expr); end(s); @@ -1125,7 +1125,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) { ast::expr_loop(blk, opt_ident) => { head(s, ~"loop"); space(s.s); - option::iter(opt_ident, |ident| word_space(s, *ident)); + option::iter(opt_ident, |ident| {print_ident(s, ident); space(s.s)}); print_block(s, blk); } ast::expr_match(expr, arms, mode) => { @@ -1270,7 +1270,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) { print_expr_parens_if_not_bot(s, expr); } word(s.s, ~"."); - word(s.s, *id); + print_ident(s, id); if vec::len(tys) > 0u { word(s.s, ~"::<"); commasep(s, inconsistent, tys, print_type); @@ -1294,12 +1294,12 @@ 
fn print_expr(s: ps, &&expr: @ast::expr) { ast::expr_break(opt_ident) => { word(s.s, ~"break"); space(s.s); - option::iter(opt_ident, |ident| word_space(s, *ident)); + option::iter(opt_ident, |ident| {print_ident(s, ident); space(s.s)}); } ast::expr_again(opt_ident) => { word(s.s, ~"again"); space(s.s); - option::iter(opt_ident, |ident| word_space(s, *ident)); + option::iter(opt_ident, |ident| {print_ident(s, ident); space(s.s)}); } ast::expr_ret(result) => { word(s.s, ~"return"); @@ -1395,7 +1395,7 @@ fn print_decl(s: ps, decl: @ast::decl) { } } -fn print_ident(s: ps, ident: ast::ident) { word(s.s, *ident); } +fn print_ident(s: ps, ident: ast::ident) { word(s.s, *s.intr.get(ident)); } fn print_for_decl(s: ps, loc: @ast::local, coll: @ast::expr) { print_local_decl(s, loc); @@ -1410,7 +1410,7 @@ fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) { let mut first = true; for path.idents.each |id| { if first { first = false; } else { word(s.s, ~"::"); } - word(s.s, *id); + print_ident(s, id); } if path.rp.is_some() || !path.types.is_empty() { if colons_before_params { word(s.s, ~"::"); } @@ -1471,7 +1471,7 @@ fn print_pat(s: ps, &&pat: @ast::pat) { word(s.s, ~"{"); fn print_field(s: ps, f: ast::field_pat) { cbox(s, indent_unit); - word(s.s, *f.ident); + print_ident(s, f.ident); word_space(s, ~":"); print_pat(s, f.pat); end(s); @@ -1489,7 +1489,7 @@ fn print_pat(s: ps, &&pat: @ast::pat) { word(s.s, ~"{"); fn print_field(s: ps, f: ast::field_pat) { cbox(s, indent_unit); - word(s.s, *f.ident); + print_ident(s, f.ident); word_space(s, ~":"); print_pat(s, f.pat); end(s); @@ -1542,7 +1542,7 @@ fn print_fn(s: ps, decl: ast::fn_decl, name: ast::ident, typarams: ~[ast::ty_param], opt_self_ty: option<ast::self_ty_>) { head(s, fn_header_info_to_str(opt_self_ty, decl.purity, none)); - word(s.s, *name); + print_ident(s, name); print_type_params(s, typarams); print_fn_args_and_ret(s, decl, ~[], opt_self_ty); } @@ -1568,7 +1568,7 @@ fn print_fn_args(s: ps, decl: ast::fn_decl, if first { first = false; } else { word_space(s, ~","); } if cap_item.is_move { word_nbsp(s, ~"move") } else { word_nbsp(s, ~"copy") } - word(s.s, *cap_item.name); + print_ident(s, cap_item.name); } end(s); @@ -1638,7 +1638,7 @@ fn print_type_params(s: ps, &¶ms: ~[ast::ty_param]) { if vec::len(params) > 0u { word(s.s, ~"<"); fn printParam(s: ps, param: ast::ty_param) { - word(s.s, *param.ident); + print_ident(s, param.ident); print_bounds(s, param.bounds); } commasep(s, inconsistent, params, printParam); @@ -1649,14 +1649,14 @@ fn print_type_params(s: ps, &¶ms: ~[ast::ty_param]) { fn print_meta_item(s: ps, &&item: @ast::meta_item) { ibox(s, indent_unit); match item.node { - ast::meta_word(name) => word(s.s, *name), + ast::meta_word(name) => word(s.s, name), ast::meta_name_value(name, value) => { - word_space(s, *name); + word_space(s, name); word_space(s, ~"="); print_literal(s, @value); } ast::meta_list(name, items) => { - word(s.s, *name); + word(s.s, name); popen(s); commasep(s, consistent, items, print_meta_item); pclose(s); @@ -1669,7 +1669,8 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) { match vp.node { ast::view_path_simple(ident, path, _) => { if path.idents[vec::len(path.idents)-1u] != ident { - word_space(s, *ident); + print_ident(s, ident); + space(s.s); word_space(s, ~"="); } print_path(s, path, false); @@ -1684,7 +1685,7 @@ fn print_view_path(s: ps, &&vp: @ast::view_path) { print_path(s, path, false); word(s.s, ~"::{"); do commasep(s, inconsistent, idents) |s, w| { - word(s.s, *w.node.name) + 
print_ident(s, w.node.name); } word(s.s, ~"}"); } @@ -1702,7 +1703,7 @@ fn print_view_item(s: ps, item: @ast::view_item) { match item.node { ast::view_item_use(id, mta, _) => { head(s, ~"use"); - word(s.s, *id); + print_ident(s, id); if vec::len(mta) > 0u { popen(s); commasep(s, consistent, mta, print_meta_item); @@ -1749,10 +1750,12 @@ fn print_arg(s: ps, input: ast::arg) { ibox(s, indent_unit); print_arg_mode(s, input.mode); match input.ty.node { - ast::ty_infer => word(s.s, *input.ident), + ast::ty_infer => print_ident(s, input.ident), _ => { - if str::len(*input.ident) > 0u { - word_space(s, *input.ident + ~":"); + if input.ident != parse::token::special_idents::invalid { + print_ident(s, input.ident); + word(s.s, ~":"); + space(s.s); } print_type(s, input.ty); } @@ -1768,7 +1771,7 @@ fn print_ty_fn(s: ps, opt_proto: option<ast::proto>, ibox(s, indent_unit); word(s.s, fn_header_info_to_str(opt_self_ty, decl.purity, opt_proto)); print_bounds(s, bounds); - match id { some(id) => { word(s.s, ~" "); word(s.s, *id); } _ => () } + match id { some(id) => { word(s.s, ~" "); print_ident(s, id); } _ => () } match tps { some(tps) => print_type_params(s, tps), _ => () } zerobreak(s.s); @@ -1880,7 +1883,9 @@ fn print_literal(s: ps, &&lit: @ast::lit) { } } -fn lit_to_str(l: @ast::lit) -> ~str { return to_str(l, print_literal); } +fn lit_to_str(l: @ast::lit) -> ~str { + return to_str(l, print_literal, parse::token::mk_fake_ident_interner()); +} fn next_lit(s: ps, pos: uint) -> option<comments::lit> { match s.literals { @@ -1961,9 +1966,9 @@ fn print_string(s: ps, st: ~str) { word(s.s, ~"\""); } -fn to_str<T>(t: T, f: fn@(ps, T)) -> ~str { +fn to_str<T>(t: T, f: fn@(ps, T), intr: ident_interner) -> ~str { let buffer = io::mem_buffer(); - let s = rust_printer(io::mem_buffer_writer(buffer)); + let s = rust_printer(io::mem_buffer_writer(buffer), intr); f(s, t); eof(s.s); io::mem_buffer_str(buffer) diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 19b27be83e2..5cecd315ac4 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -21,7 +21,7 @@ fn mk<T: const copy>(+hasher: hashfn<T>, +eqer: eqfn<T>) -> interner<T> { fn mk_prefill<T: const copy>(hasher: hashfn<T>, eqer: eqfn<T>, init: ~[T]) -> interner<T> { - let rv = mk(hasher, eqer); + let rv = mk(copy hasher, copy eqer); for init.each() |v| { rv.intern(v); } return rv; } @@ -30,6 +30,7 @@ fn mk_prefill<T: const copy>(hasher: hashfn<T>, eqer: eqfn<T>, /* when traits can extend traits, we should extend index<uint,T> to get [] */ trait interner<T: const copy> { fn intern(T) -> uint; + fn gensym(T) -> uint; pure fn get(uint) -> T; fn len() -> uint; } @@ -46,6 +47,12 @@ impl <T: const copy> hash_interner<T>: interner<T> { } } } + fn gensym(val: T) -> uint { + let new_idx = self.vect.len(); + // leave out of .map to avoid colliding + self.vect.push(val); + return new_idx; + } // this isn't "pure" in the traditional sense, because it can go from // failing to returning a value as items are interned. 
But for typestate, diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index f9d861f15e1..51104e97119 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -27,9 +27,9 @@ enum fn_kind { fn name_of_fn(fk: fn_kind) -> ident { match fk { fk_item_fn(name, _) | fk_method(name, _, _) - | fk_ctor(name, _, _, _, _) => /* FIXME (#2543) */ copy name, - fk_anon(*) | fk_fn_block(*) => @~"anon", - fk_dtor(*) => @~"drop" + | fk_ctor(name, _, _, _, _) => /* FIXME (#2543) */ copy name, + fk_anon(*) | fk_fn_block(*) => parse::token::special_idents::anon, + fk_dtor(*) => parse::token::special_idents::dtor } } diff --git a/src/rustc/back/link.rs b/src/rustc/back/link.rs index dd0648666cc..58e884340e6 100644 --- a/src/rustc/back/link.rs +++ b/src/rustc/back/link.rs @@ -310,24 +310,24 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str, symbol_hasher: &hash::State) -> link_meta { type provided_metas = - {name: option<@~str>, - vers: option<@~str>, + {name: option<~str>, + vers: option<~str>, cmh_items: ~[@ast::meta_item]}; fn provided_link_metas(sess: session, c: ast::crate) -> provided_metas { - let mut name: option<@~str> = none; - let mut vers: option<@~str> = none; + let mut name: option<~str> = none; + let mut vers: option<~str> = none; let mut cmh_items: ~[@ast::meta_item] = ~[]; let linkage_metas = attr::find_linkage_metas(c.node.attrs); attr::require_unique_names(sess.diagnostic(), linkage_metas); for linkage_metas.each |meta| { - if *attr::get_meta_item_name(meta) == ~"name" { + if attr::get_meta_item_name(meta) == ~"name" { match attr::get_meta_item_value_str(meta) { some(v) => { name = some(v); } none => vec::push(cmh_items, meta) } - } else if *attr::get_meta_item_name(meta) == ~"vers" { + } else if attr::get_meta_item_name(meta) == ~"vers" { match attr::get_meta_item_value_str(meta) { some(v) => { vers = some(v); } none => vec::push(cmh_items, meta) @@ -341,7 +341,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str, fn crate_meta_extras_hash(symbol_hasher: &hash::State, _crate: ast::crate, metas: provided_metas, - dep_hashes: ~[@~str]) -> ~str { + dep_hashes: ~[~str]) -> ~str { fn len_and_str(s: ~str) -> ~str { return fmt!{"%u_%s", str::len(s), s}; } @@ -357,11 +357,11 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str, let m = m_; match m.node { ast::meta_name_value(key, value) => { - symbol_hasher.write_str(len_and_str(*key)); + symbol_hasher.write_str(len_and_str(key)); symbol_hasher.write_str(len_and_str_lit(value)); } ast::meta_word(name) => { - symbol_hasher.write_str(len_and_str(*name)); + symbol_hasher.write_str(len_and_str(name)); } ast::meta_list(_, _) => { // FIXME (#607): Implement this @@ -371,7 +371,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str, } for dep_hashes.each |dh| { - symbol_hasher.write_str(len_and_str(*dh)); + symbol_hasher.write_str(len_and_str(dh)); } return truncated_hash_result(symbol_hasher); @@ -384,7 +384,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str, } fn crate_meta_name(sess: session, _crate: ast::crate, - output: ~str, metas: provided_metas) -> @~str { + output: ~str, metas: provided_metas) -> ~str { return match metas.name { some(v) => v, none => { @@ -400,19 +400,19 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str, str::connect(os, ~".") }; warn_missing(sess, ~"name", name); - @name + name } }; } fn crate_meta_vers(sess: session, _crate: ast::crate, - metas: provided_metas) -> @~str { + metas: provided_metas) -> ~str { return match 
metas.vers { some(v) => v, none => { let vers = ~"0.0"; warn_missing(sess, ~"vers", vers); - @vers + vers } }; } @@ -439,7 +439,7 @@ fn symbol_hash(tcx: ty::ctxt, symbol_hasher: &hash::State, t: ty::t, // to be independent of one another in the crate. symbol_hasher.reset(); - symbol_hasher.write_str(*link_meta.name); + symbol_hasher.write_str(link_meta.name); symbol_hasher.write_str(~"-"); symbol_hasher.write_str(link_meta.extras_hash); symbol_hasher.write_str(~"-"); @@ -497,14 +497,14 @@ fn sanitize(s: ~str) -> ~str { return result; } -fn mangle(ss: path) -> ~str { +fn mangle(sess: session, ss: path) -> ~str { // Follow C++ namespace-mangling style let mut n = ~"_ZN"; // Begin name-sequence. for ss.each |s| { match s { path_name(s) | path_mod(s) => { - let sani = sanitize(*s); + let sani = sanitize(sess.str_of(s)); n += fmt!{"%u%s", str::len(sani), sani}; } } } @@ -512,36 +512,41 @@ fn mangle(ss: path) -> ~str { n } -fn exported_name(path: path, hash: @~str, vers: @~str) -> ~str { - return mangle( - vec::append_one(vec::append_one(path, path_name(hash)), - path_name(vers))); +fn exported_name(sess: session, path: path, hash: ~str, vers: ~str) -> ~str { + return mangle(sess, + vec::append_one( + vec::append_one(path, path_name(sess.ident_of(hash))), + path_name(sess.ident_of(vers)))); } fn mangle_exported_name(ccx: @crate_ctxt, path: path, t: ty::t) -> ~str { let hash = get_symbol_hash(ccx, t); - return exported_name(path, @hash, ccx.link_meta.vers); + return exported_name(ccx.sess, path, hash, ccx.link_meta.vers); } fn mangle_internal_name_by_type_only(ccx: @crate_ctxt, - t: ty::t, name: @~str) -> + t: ty::t, name: ~str) -> ~str { - let s = @util::ppaux::ty_to_short_str(ccx.tcx, t); + let s = util::ppaux::ty_to_short_str(ccx.tcx, t); let hash = get_symbol_hash(ccx, t); - return mangle(~[path_name(name), path_name(s), path_name(@hash)]); + return mangle(ccx.sess, + ~[path_name(ccx.sess.ident_of(name)), + path_name(ccx.sess.ident_of(s)), + path_name(ccx.sess.ident_of(hash))]); } fn mangle_internal_name_by_path_and_seq(ccx: @crate_ctxt, path: path, - flav: @~str) -> ~str { - return mangle(vec::append_one(path, path_name(@ccx.names(*flav)))); + flav: ~str) -> ~str { + return mangle(ccx.sess, + vec::append_one(path, path_name(ccx.names(flav)))); } -fn mangle_internal_name_by_path(_ccx: @crate_ctxt, path: path) -> ~str { - return mangle(path); +fn mangle_internal_name_by_path(ccx: @crate_ctxt, path: path) -> ~str { + return mangle(ccx.sess, path); } -fn mangle_internal_name_by_seq(ccx: @crate_ctxt, flav: @~str) -> ~str { - return ccx.names(*flav); +fn mangle_internal_name_by_seq(ccx: @crate_ctxt, flav: ~str) -> ~str { + return fmt!("%s_%u", flav, ccx.names(flav)); } // If the user wants an exe generated we need to invoke @@ -577,8 +582,8 @@ fn link_binary(sess: session, let output = if sess.building_library { let long_libname = os::dll_filename(fmt!{"%s-%s-%s", - *lm.name, lm.extras_hash, *lm.vers}); - debug!{"link_meta.name: %s", *lm.name}; + lm.name, lm.extras_hash, lm.vers}); + debug!{"link_meta.name: %s", lm.name}; debug!{"long_libname: %s", long_libname}; debug!{"out_filename: %s", out_filename}; debug!{"dirname(out_filename): %s", path::dirname(out_filename)}; diff --git a/src/rustc/driver/driver.rs b/src/rustc/driver/driver.rs index 106cce91ccb..65875af6b7f 100644 --- a/src/rustc/driver/driver.rs +++ b/src/rustc/driver/driver.rs @@ -51,15 +51,15 @@ fn default_configuration(sess: session, argv0: ~str, input: input) -> }; return ~[ // Target bindings. 
- attr::mk_word_item(@os::family()), - mk(@~"target_os", os::sysname()), - mk(@~"target_family", os::family()), - mk(@~"target_arch", arch), - mk(@~"target_word_size", wordsz), - mk(@~"target_libc", libc), + attr::mk_word_item(os::family()), + mk(~"target_os", os::sysname()), + mk(~"target_family", os::family()), + mk(~"target_arch", arch), + mk(~"target_word_size", wordsz), + mk(~"target_libc", libc), // Build bindings. - mk(@~"build_compiler", argv0), - mk(@~"build_input", source_name(input))]; + mk(~"build_compiler", argv0), + mk(~"build_input", source_name(input))]; } fn build_configuration(sess: session, argv0: ~str, input: input) -> @@ -72,9 +72,9 @@ fn build_configuration(sess: session, argv0: ~str, input: input) -> let gen_cfg = { if sess.opts.test && !attr::contains_name(user_cfg, ~"test") { - ~[attr::mk_word_item(@~"test")] + ~[attr::mk_word_item(~"test")] } else { - ~[attr::mk_word_item(@~"notest")] + ~[attr::mk_word_item(~"notest")] } }; return vec::append(vec::append(user_cfg, gen_cfg), default_cfg); @@ -86,7 +86,7 @@ fn parse_cfgspecs(cfgspecs: ~[~str]) -> ast::crate_cfg { // varieties of meta_item here. At the moment we just support the // meta_word variant. let mut words = ~[]; - for cfgspecs.each |s| { vec::push(words, attr::mk_word_item(@s)); } + for cfgspecs.each |s| { vec::push(words, attr::mk_word_item(s)); } return words; } @@ -169,7 +169,8 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg, creader::read_crates(sess.diagnostic(), *crate, sess.cstore, sess.filesearch, session::sess_os_to_meta_os(sess.targ_cfg.os), - sess.opts.static)); + sess.opts.static, + sess.parse_sess.interner)); let lang_items = time(time_passes, ~"language item collection", || middle::lang_items::collect_language_items(crate, sess)); @@ -552,7 +553,9 @@ fn build_session_(sopts: @session::options, -> session { let target_cfg = build_target_config(sopts, demitter); - let cstore = cstore::mk_cstore(); + let p_s = parse::new_parse_sess_special_handler(span_diagnostic_handler, + cm); + let cstore = cstore::mk_cstore(p_s.interner); let filesearch = filesearch::mk_filesearch( sopts.maybe_sysroot, sopts.target_triple, @@ -561,8 +564,7 @@ fn build_session_(sopts: @session::options, session_(@{targ_cfg: target_cfg, opts: sopts, cstore: cstore, - parse_sess: - parse::new_parse_sess_special_handler(span_diagnostic_handler, cm), + parse_sess: p_s, codemap: cm, // For a library crate, this is always none mut main_fn: none, @@ -701,6 +703,7 @@ fn early_error(emitter: diagnostic::emitter, msg: ~str) -> ! 
{ fn list_metadata(sess: session, path: ~str, out: io::Writer) { metadata::loader::list_file_metadata( + sess.parse_sess.interner, session::sess_os_to_meta_os(sess.targ_cfg.os), path, out); } diff --git a/src/rustc/driver/session.rs b/src/rustc/driver/session.rs index 5115fb4234a..039a5b4d14a 100644 --- a/src/rustc/driver/session.rs +++ b/src/rustc/driver/session.rs @@ -204,6 +204,16 @@ impl session { fn borrowck_stats() -> bool { self.debugging_opt(borrowck_stats) } fn borrowck_note_pure() -> bool { self.debugging_opt(borrowck_note_pure) } fn borrowck_note_loan() -> bool { self.debugging_opt(borrowck_note_loan) } + + fn str_of(id: ast::ident) -> ~str { + *self.parse_sess.interner.get(id) + } + fn ident_of(st: ~str) -> ast::ident { + self.parse_sess.interner.intern(@st) + } + fn intr() -> syntax::parse::token::ident_interner { + self.parse_sess.interner + } } /// Some reasonable defaults @@ -245,7 +255,7 @@ fn building_library(req_crate_type: crate_type, crate: @ast::crate, match syntax::attr::first_attr_value_str_by_name( crate.node.attrs, ~"crate_type") { - option::some(@~"lib") => true, + option::some(~"lib") => true, _ => false } } @@ -273,7 +283,7 @@ mod test { style: ast::attr_outer, value: ast_util::respan(ast_util::dummy_sp(), ast::meta_name_value( - @~"crate_type", + ~"crate_type", ast_util::respan(ast_util::dummy_sp(), ast::lit_str(@t)))), is_sugared_doc: false diff --git a/src/rustc/front/core_inject.rs b/src/rustc/front/core_inject.rs index 7103c736206..bcfdca40ac2 100644 --- a/src/rustc/front/core_inject.rs +++ b/src/rustc/front/core_inject.rs @@ -30,12 +30,13 @@ fn inject_libcore_ref(sess: session, let n1 = sess.next_node_id(); let n2 = sess.next_node_id(); - let vi1 = @{node: ast::view_item_use(@~"core", ~[], n1), + let vi1 = @{node: ast::view_item_use(sess.ident_of(~"core"), ~[], n1), attrs: ~[], vis: ast::public, span: dummy_sp()}; - let vp = spanned(ast::view_path_glob(ident_to_path(dummy_sp(), @~"core"), - n2)); + let vp = spanned(ast::view_path_glob( + ident_to_path(dummy_sp(), sess.ident_of(~"core")), + n2)); let vi2 = @{node: ast::view_item_import(~[vp]), attrs: ~[], vis: ast::public, diff --git a/src/rustc/front/test.rs b/src/rustc/front/test.rs index 90de6a4335f..33908f004f5 100644 --- a/src/rustc/front/test.rs +++ b/src/rustc/front/test.rs @@ -62,17 +62,17 @@ fn strip_test_functions(crate: @ast::crate) -> @ast::crate { } } -fn fold_mod(_cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod { +fn fold_mod(cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod { // Remove any defined main function from the AST so it doesn't clash with // the one we're going to add. // FIXME (#2403): This is sloppy. Instead we should have some mechanism to // indicate to the translation pass which function we want to be main. 
- fn nomain(&&item: @ast::item) -> option<@ast::item> { + fn nomain(cx: test_ctxt, item: @ast::item) -> option<@ast::item> { match item.node { ast::item_fn(_, _, _) => { - if *item.ident == ~"main" { + if item.ident == cx.sess.ident_of(~"main") { option::none } else { option::some(item) } } @@ -81,7 +81,8 @@ fn fold_mod(_cx: test_ctxt, m: ast::_mod, fld: fold::ast_fold) -> ast::_mod { } let mod_nomain = - {view_items: m.view_items, items: vec::filter_map(m.items, nomain)}; + {view_items: m.view_items, items: vec::filter_map(m.items, + |i| nomain(cx, i))}; return fold::noop_fold_mod(mod_nomain, fld); } @@ -99,7 +100,8 @@ fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) -> option<@ast::item> { vec::push(cx.path, i.ident); - debug!{"current path: %s", ast_util::path_name_i(cx.path)}; + debug!{"current path: %s", + ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner)}; if is_test_fn(i) { match i.node { @@ -192,16 +194,17 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item { let item_ = ast::item_mod(testmod); // This attribute tells resolve to let us call unexported functions let resolve_unexported_attr = - attr::mk_attr(attr::mk_word_item(@~"!resolve_unexported")); + attr::mk_attr(attr::mk_word_item(~"!resolve_unexported")); let item: ast::item = - {ident: @~"__test", + {ident: cx.sess.ident_of(~"__test"), attrs: ~[resolve_unexported_attr], id: cx.sess.next_node_id(), node: item_, vis: ast::public, span: dummy_sp()}; - debug!{"Synthetic test module:\n%s\n", pprust::item_to_str(@item)}; + debug!{"Synthetic test module:\n%s\n", + pprust::item_to_str(@item, cx.sess.intr())}; return @item; } @@ -232,7 +235,7 @@ fn mk_tests(cx: test_ctxt) -> @ast::item { let item_ = ast::item_fn(decl, ~[], body); let item: ast::item = - {ident: @~"tests", + {ident: cx.sess.ident_of(~"tests"), attrs: ~[], id: cx.sess.next_node_id(), node: item_, @@ -247,18 +250,19 @@ fn mk_path(cx: test_ctxt, path: ~[ast::ident]) -> ~[ast::ident] { let is_std = { let items = attr::find_linkage_metas(cx.crate.node.attrs); match attr::last_meta_item_value_str_by_name(items, ~"name") { - some(@~"std") => true, + some(~"std") => true, _ => false } }; if is_std { path } - else { vec::append(~[@~"std"], path) } + else { vec::append(~[cx.sess.ident_of(~"std")], path) } } // The ast::ty of ~[std::test::test_desc] fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty { let test_desc_ty_path = - path_node(mk_path(cx, ~[@~"test", @~"test_desc"])); + path_node(mk_path(cx, ~[cx.sess.ident_of(~"test"), + cx.sess.ident_of(~"test_desc")])); let test_desc_ty: ast::ty = {id: cx.sess.next_node_id(), @@ -296,10 +300,12 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr { let span = test.span; let path = test.path; - debug!{"encoding %s", ast_util::path_name_i(path)}; + debug!{"encoding %s", ast_util::path_name_i(path, + cx.sess.parse_sess.interner)}; let name_lit: ast::lit = - nospan(ast::lit_str(@ast_util::path_name_i(path))); + nospan(ast::lit_str(@ast_util::path_name_i(path, cx.sess.parse_sess + .interner))); let name_expr_inner: @ast::expr = @{id: cx.sess.next_node_id(), callee_id: cx.sess.next_node_id(), @@ -313,7 +319,8 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr { let name_field: ast::field = - nospan({mutbl: ast::m_imm, ident: @~"name", expr: @name_expr}); + nospan({mutbl: ast::m_imm, ident: cx.sess.ident_of(~"name"), + expr: @name_expr}); let fn_path = path_node(path); @@ -326,7 +333,8 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr { let fn_wrapper_expr = mk_test_wrapper(cx, 
fn_expr, span); let fn_field: ast::field = - nospan({mutbl: ast::m_imm, ident: @~"fn", expr: fn_wrapper_expr}); + nospan({mutbl: ast::m_imm, ident: cx.sess.ident_of(~"fn"), + expr: fn_wrapper_expr}); let ignore_lit: ast::lit = nospan(ast::lit_bool(test.ignore)); @@ -337,7 +345,8 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr { span: span}; let ignore_field: ast::field = - nospan({mutbl: ast::m_imm, ident: @~"ignore", expr: @ignore_expr}); + nospan({mutbl: ast::m_imm, ident: cx.sess.ident_of(~"ignore"), + expr: @ignore_expr}); let fail_lit: ast::lit = nospan(ast::lit_bool(test.should_fail)); @@ -349,7 +358,7 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr { let fail_field: ast::field = nospan({mutbl: ast::m_imm, - ident: @~"should_fail", + ident: cx.sess.ident_of(~"should_fail"), expr: @fail_expr}); let desc_rec_: ast::expr_ = @@ -404,7 +413,7 @@ fn mk_test_wrapper(cx: test_ctxt, } fn mk_main(cx: test_ctxt) -> @ast::item { - let str_pt = path_node(~[@~"str"]); + let str_pt = path_node(~[cx.sess.ident_of(~"str")]); let str_ty_inner = @{id: cx.sess.next_node_id(), node: ast::ty_path(str_pt, cx.sess.next_node_id()), span: dummy_sp()}; @@ -423,7 +432,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item { let args_arg: ast::arg = {mode: ast::expl(ast::by_val), ty: @args_ty, - ident: @~"args", + ident: cx.sess.ident_of(~"args"), id: cx.sess.next_node_id()}; let ret_ty = {id: cx.sess.next_node_id(), @@ -445,7 +454,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item { let item_ = ast::item_fn(decl, ~[], body); let item: ast::item = - {ident: @~"main", + {ident: cx.sess.ident_of(~"main"), attrs: ~[], id: cx.sess.next_node_id(), node: item_, @@ -457,7 +466,7 @@ fn mk_main(cx: test_ctxt) -> @ast::item { fn mk_test_main_call(cx: test_ctxt) -> @ast::expr { // Get the args passed to main so we can pass the to test_main - let args_path = path_node(~[@~"args"]); + let args_path = path_node(~[cx.sess.ident_of(~"args")]); let args_path_expr_: ast::expr_ = ast::expr_path(args_path); @@ -466,7 +475,7 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr { node: args_path_expr_, span: dummy_sp()}; // Call __test::test to generate the vector of test_descs - let test_path = path_node(~[@~"tests"]); + let test_path = path_node(~[cx.sess.ident_of(~"tests")]); let test_path_expr_: ast::expr_ = ast::expr_path(test_path); @@ -481,7 +490,9 @@ fn mk_test_main_call(cx: test_ctxt) -> @ast::expr { node: test_call_expr_, span: dummy_sp()}; // Call std::test::test_main - let test_main_path = path_node(mk_path(cx, ~[@~"test", @~"test_main"])); + let test_main_path = path_node( + mk_path(cx, ~[cx.sess.ident_of(~"test"), + cx.sess.ident_of(~"test_main")])); let test_main_path_expr_: ast::expr_ = ast::expr_path(test_main_path); diff --git a/src/rustc/metadata/common.rs b/src/rustc/metadata/common.rs index 7746f18e167..15e15a8a54c 100644 --- a/src/rustc/metadata/common.rs +++ b/src/rustc/metadata/common.rs @@ -134,5 +134,5 @@ fn hash_path(&&s: ~str) -> uint { return h; } -type link_meta = {name: @~str, vers: @~str, extras_hash: ~str}; +type link_meta = {name: ~str, vers: ~str, extras_hash: ~str}; diff --git a/src/rustc/metadata/creader.rs b/src/rustc/metadata/creader.rs index 744ee50da8b..2a3b59bf349 100644 --- a/src/rustc/metadata/creader.rs +++ b/src/rustc/metadata/creader.rs @@ -10,6 +10,7 @@ import syntax::print::pprust; import filesearch::filesearch; import common::*; import dvec::{DVec, dvec}; +import syntax::parse::token::ident_interner; export read_crates; @@ -17,28 +18,29 @@ export read_crates; // 
libraries necessary for later resolving, typechecking, linking, etc. fn read_crates(diag: span_handler, crate: ast::crate, cstore: cstore::cstore, filesearch: filesearch, - os: loader::os, static: bool) { + os: loader::os, static: bool, intr: ident_interner) { let e = @{diag: diag, filesearch: filesearch, cstore: cstore, os: os, static: static, crate_cache: dvec(), - mut next_crate_num: 1}; + mut next_crate_num: 1, + intr: intr}; let v = visit::mk_simple_visitor(@{visit_view_item: |a| visit_view_item(e, a), visit_item: |a| visit_item(e, a) - with *visit::default_simple_visitor()}); + with *visit::default_simple_visitor()}); visit::visit_crate(crate, (), v); dump_crates(e.crate_cache); - warn_if_multiple_versions(diag, e.crate_cache.get()); + warn_if_multiple_versions(e, diag, e.crate_cache.get()); } type cache_entry = { cnum: int, span: span, - hash: @~str, + hash: ~str, metas: @~[@ast::meta_item] }; @@ -48,16 +50,10 @@ fn dump_crates(crate_cache: DVec<cache_entry>) { debug!{"cnum: %?", entry.cnum}; debug!{"span: %?", entry.span}; debug!{"hash: %?", entry.hash}; - let attrs = ~[ - attr::mk_attr(attr::mk_list_item(@~"link", *entry.metas)) - ]; - for attr::find_linkage_attrs(attrs).each |attr| { - debug!{"meta: %s", pprust::attr_to_str(attr)}; - } } } -fn warn_if_multiple_versions(diag: span_handler, +fn warn_if_multiple_versions(e: env, diag: span_handler, crate_cache: ~[cache_entry]) { import either::*; @@ -77,17 +73,17 @@ fn warn_if_multiple_versions(diag: span_handler, if matches.len() != 1u { diag.handler().warn( - fmt!{"using multiple versions of crate `%s`", *name}); + fmt!{"using multiple versions of crate `%s`", name}); for matches.each |match_| { diag.span_note(match_.span, ~"used here"); let attrs = ~[ - attr::mk_attr(attr::mk_list_item(@~"link", *match_.metas)) + attr::mk_attr(attr::mk_list_item(~"link", *match_.metas)) ]; - loader::note_linkage_attrs(diag, attrs); + loader::note_linkage_attrs(e.intr, diag, attrs); } } - warn_if_multiple_versions(diag, non_matches); + warn_if_multiple_versions(e, diag, non_matches); } } @@ -97,7 +93,8 @@ type env = @{diag: span_handler, os: loader::os, static: bool, crate_cache: DVec<cache_entry>, - mut next_crate_num: ast::crate_num}; + mut next_crate_num: ast::crate_num, + intr: ident_interner}; fn visit_view_item(e: env, i: @ast::view_item) { match i.node { @@ -125,28 +122,28 @@ fn visit_item(e: env, i: @ast::item) { let foreign_name = match attr::first_attr_value_str_by_name(i.attrs, ~"link_name") { some(nn) => { - if *nn == ~"" { + if nn == ~"" { e.diag.span_fatal( i.span, ~"empty #[link_name] not allowed; use #[nolink]."); } nn } - none => i.ident + none => *e.intr.get(i.ident) }; let mut already_added = false; if vec::len(attr::find_attrs_by_name(i.attrs, ~"nolink")) == 0u { - already_added = !cstore::add_used_library(cstore, *foreign_name); + already_added = !cstore::add_used_library(cstore, foreign_name); } let link_args = attr::find_attrs_by_name(i.attrs, ~"link_args"); if vec::len(link_args) > 0u && already_added { - e.diag.span_fatal(i.span, ~"library '" + *foreign_name + + e.diag.span_fatal(i.span, ~"library '" + foreign_name + ~"' already added: can't specify link_args."); } for link_args.each |a| { match attr::get_meta_item_value_str(attr::attr_meta(a)) { some(linkarg) => { - cstore::add_used_link_args(cstore, *linkarg); + cstore::add_used_link_args(cstore, linkarg); } none => {/* fallthrough */ } } @@ -156,19 +153,19 @@ fn visit_item(e: env, i: @ast::item) { } } -fn metas_with(ident: ast::ident, key: ast::ident, - metas: 
~[@ast::meta_item]) -> ~[@ast::meta_item] { - let name_items = attr::find_meta_items_by_name(metas, *key); +fn metas_with(ident: ~str, key: ~str, metas: ~[@ast::meta_item]) + -> ~[@ast::meta_item] { + let name_items = attr::find_meta_items_by_name(metas, key); if name_items.is_empty() { - vec::append_one(metas, attr::mk_name_value_item_str(key, *ident)) + vec::append_one(metas, attr::mk_name_value_item_str(key, ident)) } else { metas } } -fn metas_with_ident(ident: ast::ident, - metas: ~[@ast::meta_item]) -> ~[@ast::meta_item] { - metas_with(ident, @~"name", metas) +fn metas_with_ident(ident: ~str, metas: ~[@ast::meta_item]) + -> ~[@ast::meta_item] { + metas_with(ident, ~"name", metas) } fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) -> @@ -176,7 +173,7 @@ fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) -> for e.crate_cache.each |c| { if loader::metadata_matches(*c.metas, metas) - && (hash.is_empty() || *c.hash == hash) { + && (hash.is_empty() || c.hash == hash) { return some(c.cnum); } } @@ -185,7 +182,7 @@ fn existing_match(e: env, metas: ~[@ast::meta_item], hash: ~str) -> fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item], hash: ~str, span: span) -> ast::crate_num { - let metas = metas_with_ident(ident, metas); + let metas = metas_with_ident(*e.intr.get(ident), metas); match existing_match(e, metas, hash) { none => { @@ -197,7 +194,8 @@ fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item], metas: metas, hash: hash, os: e.os, - static: e.static + static: e.static, + intr: e.intr }; let cinfo = loader::load_library_crate(load_ctxt); @@ -220,9 +218,9 @@ fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item], let cname = match attr::last_meta_item_value_str_by_name(metas, ~"name") { option::some(v) => v, - option::none => ident + option::none => *e.intr.get(ident) }; - let cmeta = @{name: *cname, data: cdata, + let cmeta = @{name: cname, data: cdata, cnum_map: cnum_map, cnum: cnum}; let cstore = e.cstore; @@ -242,13 +240,14 @@ fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map { // The map from crate numbers in the crate we're resolving to local crate // numbers let cnum_map = int_hash::<ast::crate_num>(); - for decoder::get_crate_deps(cdata).each |dep| { + for decoder::get_crate_deps(e.intr, cdata).each |dep| { let extrn_cnum = dep.cnum; let cname = dep.name; - let cmetas = metas_with(dep.vers, @~"vers", ~[]); + let cmetas = metas_with(dep.vers, ~"vers", ~[]); debug!{"resolving dep crate %s ver: %s hash: %s", - *dep.name, *dep.vers, *dep.hash}; - match existing_match(e, metas_with_ident(cname, cmetas), *dep.hash) { + *e.intr.get(dep.name), dep.vers, dep.hash}; + match existing_match(e, metas_with_ident(*e.intr.get(cname), cmetas), + dep.hash) { some(local_cnum) => { debug!{"already have it"}; // We've already seen this crate @@ -260,8 +259,8 @@ fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map { // FIXME (#2404): Need better error reporting than just a bogus // span. 
let fake_span = ast_util::dummy_sp(); - let local_cnum = - resolve_crate(e, cname, cmetas, *dep.hash, fake_span); + let local_cnum = resolve_crate(e, cname, cmetas, dep.hash, + fake_span); cnum_map.insert(extrn_cnum, local_cnum); } } diff --git a/src/rustc/metadata/csearch.rs b/src/rustc/metadata/csearch.rs index 0dd3aaa82a5..388e746cb03 100644 --- a/src/rustc/metadata/csearch.rs +++ b/src/rustc/metadata/csearch.rs @@ -54,17 +54,17 @@ fn lookup_method_purity(cstore: cstore::cstore, did: ast::def_id) fn each_path(cstore: cstore::cstore, cnum: ast::crate_num, f: fn(decoder::path_entry) -> bool) { let crate_data = cstore::get_crate_data(cstore, cnum); - decoder::each_path(crate_data, f); + decoder::each_path(cstore.intr, crate_data, f); } fn get_item_path(tcx: ty::ctxt, def: ast::def_id) -> ast_map::path { let cstore = tcx.cstore; let cdata = cstore::get_crate_data(cstore, def.crate); - let path = decoder::get_item_path(cdata, def.node); + let path = decoder::get_item_path(cstore.intr, cdata, def.node); // FIXME #1920: This path is not always correct if the crate is not linked // into the root namespace. - vec::append(~[ast_map::path_mod(@cdata.name)], path) + vec::append(~[ast_map::path_mod(tcx.sess.ident_of(cdata.name))], path) } enum found_ast { @@ -81,7 +81,7 @@ fn maybe_get_item_ast(tcx: ty::ctxt, def: ast::def_id, -> found_ast { let cstore = tcx.cstore; let cdata = cstore::get_crate_data(cstore, def.crate); - decoder::maybe_get_item_ast(cdata, tcx, def.node, + decoder::maybe_get_item_ast(cstore.intr, cdata, tcx, def.node, decode_inlined_item) } @@ -89,14 +89,14 @@ fn get_enum_variants(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::variant_info] { let cstore = tcx.cstore; let cdata = cstore::get_crate_data(cstore, def.crate); - return decoder::get_enum_variants(cdata, def.node, tcx) + return decoder::get_enum_variants(cstore.intr, cdata, def.node, tcx) } fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id, name: option<ast::ident>) -> @~[@decoder::_impl] { let cdata = cstore::get_crate_data(cstore, def.crate); - do decoder::get_impls_for_mod(cdata, def.node, name) |cnum| { + do decoder::get_impls_for_mod(cstore.intr, cdata, def.node, name) |cnum| { cstore::get_crate_data(cstore, cnum) } } @@ -104,14 +104,14 @@ fn get_impls_for_mod(cstore: cstore::cstore, def: ast::def_id, fn get_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> @~[ty::method] { let cstore = tcx.cstore; let cdata = cstore::get_crate_data(cstore, def.crate); - decoder::get_trait_methods(cdata, def.node, tcx) + decoder::get_trait_methods(cstore.intr, cdata, def.node, tcx) } fn get_method_names_if_trait(cstore: cstore::cstore, def: ast::def_id) - -> option<@DVec<(@~str, ast::self_ty_)>> { + -> option<@DVec<(ast::ident, ast::self_ty_)>> { let cdata = cstore::get_crate_data(cstore, def.crate); - return decoder::get_method_names_if_trait(cdata, def.node); + return decoder::get_method_names_if_trait(cstore.intr, cdata, def.node); } fn get_item_attrs(cstore: cstore::cstore, @@ -125,7 +125,7 @@ fn get_item_attrs(cstore: cstore::cstore, fn get_class_fields(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::field_ty] { let cstore = tcx.cstore; let cdata = cstore::get_crate_data(cstore, def.crate); - decoder::get_class_fields(cdata, def.node) + decoder::get_class_fields(cstore.intr, cdata, def.node) } fn get_type(tcx: ty::ctxt, def: ast::def_id) -> ty::ty_param_bounds_and_ty { @@ -173,7 +173,7 @@ fn get_impl_method(cstore: cstore::cstore, def: ast::def_id, mname: ast::ident) -> ast::def_id { let cdata = cstore::get_crate_data(cstore, 
def.crate); - decoder::get_impl_method(cdata, def.node, mname) + decoder::get_impl_method(cstore.intr, cdata, def.node, mname) } /* Because classes use the trait format rather than the impl format @@ -184,7 +184,7 @@ fn get_class_method(cstore: cstore::cstore, def: ast::def_id, mname: ast::ident) -> ast::def_id { let cdata = cstore::get_crate_data(cstore, def.crate); - decoder::get_class_method(cdata, def.node, mname) + decoder::get_class_method(cstore.intr, cdata, def.node, mname) } /* If def names a class with a dtor, return it. Otherwise, return none. */ diff --git a/src/rustc/metadata/cstore.rs b/src/rustc/metadata/cstore.rs index 0041093cee3..56ea0f028f6 100644 --- a/src/rustc/metadata/cstore.rs +++ b/src/rustc/metadata/cstore.rs @@ -5,6 +5,7 @@ import std::map; import std::map::hashmap; import syntax::{ast, attr}; import syntax::ast_util::new_def_hash; +import syntax::parse::token::ident_interner; export cstore; export cnum_map; @@ -57,7 +58,8 @@ type cstore_private = mod_path_map: mod_path_map, mut used_crate_files: ~[~str], mut used_libraries: ~[~str], - mut used_link_args: ~[~str]}; + mut used_link_args: ~[~str], + intr: ident_interner}; // Map from node_id's of local use statements to crate numbers type use_crate_map = map::hashmap<ast::node_id, ast::crate_num>; @@ -67,28 +69,29 @@ pure fn p(cstore: cstore) -> cstore_private { match cstore { private(p) => p } } -fn mk_cstore() -> cstore { +fn mk_cstore(intr: ident_interner) -> cstore { let meta_cache = map::int_hash::<crate_metadata>(); let crate_map = map::int_hash::<ast::crate_num>(); let mod_path_map = new_def_hash(); return private(@{metas: meta_cache, - use_crate_map: crate_map, - mod_path_map: mod_path_map, - mut used_crate_files: ~[], - mut used_libraries: ~[], - mut used_link_args: ~[]}); + use_crate_map: crate_map, + mod_path_map: mod_path_map, + mut used_crate_files: ~[], + mut used_libraries: ~[], + mut used_link_args: ~[], + intr: intr}); } fn get_crate_data(cstore: cstore, cnum: ast::crate_num) -> crate_metadata { return p(cstore).metas.get(cnum); } -fn get_crate_hash(cstore: cstore, cnum: ast::crate_num) -> @~str { +fn get_crate_hash(cstore: cstore, cnum: ast::crate_num) -> ~str { let cdata = get_crate_data(cstore, cnum); return decoder::get_crate_hash(cdata.data); } -fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> @~str { +fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> ~str { let cdata = get_crate_data(cstore, cnum); return decoder::get_crate_vers(cdata.data); } @@ -96,7 +99,7 @@ fn get_crate_vers(cstore: cstore, cnum: ast::crate_num) -> @~str { fn set_crate_data(cstore: cstore, cnum: ast::crate_num, data: crate_metadata) { p(cstore).metas.insert(cnum, data); - do vec::iter(decoder::get_crate_module_paths(data)) |dp| { + do vec::iter(decoder::get_crate_module_paths(cstore.intr, data)) |dp| { let (did, path) = dp; let d = {crate: cnum, node: did.node}; p(cstore).mod_path_map.insert(d, @path); @@ -153,32 +156,29 @@ fn find_use_stmt_cnum(cstore: cstore, // returns hashes of crates directly used by this crate. Hashes are // sorted by crate name. 
-fn get_dep_hashes(cstore: cstore) -> ~[@~str] { - type crate_hash = {name: @~str, hash: @~str}; +fn get_dep_hashes(cstore: cstore) -> ~[~str] { + type crate_hash = {name: ~str, hash: ~str}; let mut result = ~[]; for p(cstore).use_crate_map.each_value |cnum| { let cdata = cstore::get_crate_data(cstore, cnum); let hash = decoder::get_crate_hash(cdata.data); - debug!{"Add hash[%s]: %s", cdata.name, *hash}; - vec::push(result, {name: @cdata.name, hash: hash}); + debug!{"Add hash[%s]: %s", cdata.name, hash}; + vec::push(result, {name: cdata.name, hash: hash}); }; - pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool { - *a.name <= *b.name - } + pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {a.name <= b.name} let sorted = std::sort::merge_sort(lteq, result); debug!{"sorted:"}; for sorted.each |x| { - debug!{" hash[%s]: %s", *x.name, *x.hash}; + debug!{" hash[%s]: %s", x.name, x.hash}; } - fn mapper(ch: crate_hash) -> @~str { return ch.hash; } + fn mapper(ch: crate_hash) -> ~str { return ch.hash; } return vec::map(sorted, mapper); } -fn get_path(cstore: cstore, d: ast::def_id) -> ~[ast::ident] { - // let f = bind str::split_str(_, "::"); +fn get_path(cstore: cstore, d: ast::def_id) -> ~[~str] { option::map_default(p(cstore).mod_path_map.find(d), ~[], - |ds| str::split_str(*ds, ~"::").map(|x| @x ) ) + |ds| str::split_str(*ds, ~"::")) } // Local Variables: // mode: rust diff --git a/src/rustc/metadata/decoder.rs b/src/rustc/metadata/decoder.rs index ea5405cc510..d9bec9fc5b2 100644 --- a/src/rustc/metadata/decoder.rs +++ b/src/rustc/metadata/decoder.rs @@ -15,6 +15,8 @@ import cmd=cstore::crate_metadata; import util::ppaux::ty_to_str; import syntax::diagnostic::span_handler; import common::*; +import syntax::parse::token::ident_interner; + export class_dtor; export get_class_fields; @@ -212,7 +214,7 @@ fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> ~[ast::def_id] { return ids; } -fn item_path(item_doc: ebml::doc) -> ast_map::path { +fn item_path(intr: ident_interner, item_doc: ebml::doc) -> ast_map::path { let path_doc = ebml::get_doc(item_doc, tag_path); let len_doc = ebml::get_doc(path_doc, tag_path_len); @@ -224,10 +226,10 @@ fn item_path(item_doc: ebml::doc) -> ast_map::path { for ebml::docs(path_doc) |tag, elt_doc| { if tag == tag_path_elt_mod { let str = ebml::doc_as_str(elt_doc); - vec::push(result, ast_map::path_mod(@str)); + vec::push(result, ast_map::path_mod(intr.intern(@str))); } else if tag == tag_path_elt_name { let str = ebml::doc_as_str(elt_doc); - vec::push(result, ast_map::path_name(@str)); + vec::push(result, ast_map::path_name(intr.intern(@str))); } else { // ignore tag_path_len element } @@ -236,9 +238,9 @@ fn item_path(item_doc: ebml::doc) -> ast_map::path { return result; } -fn item_name(item: ebml::doc) -> ast::ident { +fn item_name(intr: ident_interner, item: ebml::doc) -> ast::ident { let name = ebml::get_doc(item, tag_paths_data_name); - @str::from_bytes(ebml::doc_data(name)) + intr.intern(@str::from_bytes(ebml::doc_data(name))) } fn item_to_def_like(item: ebml::doc, did: ast::def_id, cnum: ast::crate_num) @@ -304,37 +306,38 @@ fn get_impl_traits(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) -> ~[ty::t] { item_impl_traits(lookup_item(id, cdata.data), tcx, cdata) } -fn get_impl_method(cdata: cmd, id: ast::node_id, +fn get_impl_method(intr: ident_interner, cdata: cmd, id: ast::node_id, name: ast::ident) -> ast::def_id { let items = ebml::get_doc(ebml::doc(cdata.data), tag_items); let mut found = none; for ebml::tagged_docs(find_item(id, items), 
tag_item_impl_method) |mid| { let m_did = ebml::with_doc_data(mid, |d| parse_def_id(d)); - if item_name(find_item(m_did.node, items)) == name { + if item_name(intr, find_item(m_did.node, items)) == name { found = some(translate_def_id(cdata, m_did)); } } option::get(found) } -fn get_class_method(cdata: cmd, id: ast::node_id, +fn get_class_method(intr: ident_interner, cdata: cmd, id: ast::node_id, name: ast::ident) -> ast::def_id { let items = ebml::get_doc(ebml::doc(cdata.data), tag_items); let mut found = none; let cls_items = match maybe_find_item(id, items) { some(it) => it, none => fail (fmt!{"get_class_method: class id not found \ - when looking up method %s", *name}) + when looking up method %s", *intr.get(name)}) }; for ebml::tagged_docs(cls_items, tag_item_trait_method) |mid| { let m_did = item_def_id(mid, cdata); - if item_name(mid) == name { + if item_name(intr, mid) == name { found = some(m_did); } } match found { some(found) => found, - none => fail (fmt!{"get_class_method: no method named %s", *name}) + none => fail (fmt!{"get_class_method: no method named %s", + *intr.get(name)}) } } @@ -387,7 +390,7 @@ struct path_entry { } /// Iterates over all the paths in the given crate. -fn each_path(cdata: cmd, f: fn(path_entry) -> bool) { +fn each_path(intr: ident_interner, cdata: cmd, f: fn(path_entry) -> bool) { let root = ebml::doc(cdata.data); let items = ebml::get_doc(root, tag_items); let items_data = ebml::get_doc(items, tag_items_data); @@ -397,8 +400,8 @@ fn each_path(cdata: cmd, f: fn(path_entry) -> bool) { // First, go through all the explicit items. for ebml::tagged_docs(items_data, tag_items_data_item) |item_doc| { if !broken { - let path = ast_map::path_to_str_with_sep(item_path(item_doc), - ~"::"); + let path = ast_map::path_to_str_with_sep( + item_path(intr, item_doc), ~"::", intr); if path != ~"" { // Extract the def ID. 
let def_id = item_def_id(item_doc, cdata); @@ -467,8 +470,9 @@ fn each_path(cdata: cmd, f: fn(path_entry) -> bool) { } } -fn get_item_path(cdata: cmd, id: ast::node_id) -> ast_map::path { - item_path(lookup_item(id, cdata.data)) +fn get_item_path(intr: ident_interner, cdata: cmd, id: ast::node_id) + -> ast_map::path { + item_path(intr, lookup_item(id, cdata.data)) } type decode_inlined_item = fn( @@ -477,13 +481,13 @@ type decode_inlined_item = fn( path: ast_map::path, par_doc: ebml::doc) -> option<ast::inlined_item>; -fn maybe_get_item_ast(cdata: cmd, tcx: ty::ctxt, +fn maybe_get_item_ast(intr: ident_interner, cdata: cmd, tcx: ty::ctxt, id: ast::node_id, decode_inlined_item: decode_inlined_item ) -> csearch::found_ast { debug!{"Looking up item: %d", id}; let item_doc = lookup_item(id, cdata.data); - let path = vec::init(item_path(item_doc)); + let path = vec::init(item_path(intr, item_doc)); match decode_inlined_item(cdata, tcx, path, item_doc) { some(ii) => csearch::found(ii), none => { @@ -503,8 +507,8 @@ fn maybe_get_item_ast(cdata: cmd, tcx: ty::ctxt, } } -fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) - -> ~[ty::variant_info] { +fn get_enum_variants(intr: ident_interner, cdata: cmd, id: ast::node_id, + tcx: ty::ctxt) -> ~[ty::variant_info] { let data = cdata.data; let items = ebml::get_doc(ebml::doc(data), tag_items); let item = find_item(id, items); @@ -515,7 +519,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) let item = find_item(did.node, items); let ctor_ty = item_type({crate: cdata.cnum, node: id}, item, tcx, cdata); - let name = item_name(item); + let name = item_name(intr, item); let mut arg_tys: ~[ty::t] = ~[]; match ty::get(ctor_ty).struct { ty::ty_fn(f) => { @@ -573,8 +577,8 @@ fn get_self_ty(item: ebml::doc) -> ast::self_ty_ { } } -fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint) - -> ~[@method_info] { +fn item_impl_methods(intr: ident_interner, cdata: cmd, item: ebml::doc, + base_tps: uint) -> ~[@method_info] { let mut rslt = ~[]; for ebml::tagged_docs(item, tag_item_impl_method) |doc| { let m_did = ebml::with_doc_data(doc, |d| parse_def_id(d)); @@ -583,15 +587,14 @@ fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint) vec::push(rslt, @{did: translate_def_id(cdata, m_did), /* FIXME (maybe #2323) tjc: take a look at this. 
*/ n_tps: item_ty_param_count(mth_item) - base_tps, - ident: item_name(mth_item), + ident: item_name(intr, mth_item), self_type: self_ty}); } rslt } -fn get_impls_for_mod(cdata: cmd, - m_id: ast::node_id, - name: option<ast::ident>, +fn get_impls_for_mod(intr: ident_interner, cdata: cmd, + m_id: ast::node_id, name: option<ast::ident>, get_cdata: fn(ast::crate_num) -> cmd) -> @~[@_impl] { @@ -608,12 +611,12 @@ fn get_impls_for_mod(cdata: cmd, let impl_cdata = get_cdata(local_did.crate); let impl_data = impl_cdata.data; let item = lookup_item(local_did.node, impl_data); - let nm = item_name(item); + let nm = item_name(intr, item); if match name { some(n) => { n == nm } none => { true } } { let base_tps = item_ty_param_count(item); vec::push(result, @{ did: local_did, ident: nm, - methods: item_impl_methods(impl_cdata, item, base_tps) + methods: item_impl_methods(intr, impl_cdata, item, base_tps) }); }; } @@ -621,14 +624,14 @@ fn get_impls_for_mod(cdata: cmd, } /* Works for both classes and traits */ -fn get_trait_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) - -> @~[ty::method] { +fn get_trait_methods(intr: ident_interner, cdata: cmd, id: ast::node_id, + tcx: ty::ctxt) -> @~[ty::method] { let data = cdata.data; let item = lookup_item(id, data); let mut result = ~[]; for ebml::tagged_docs(item, tag_item_trait_method) |mth| { let bounds = item_ty_param_bounds(mth, tcx, cdata); - let name = item_name(mth); + let name = item_name(intr, mth); let ty = doc_type(mth, tcx, cdata); let fty = match ty::get(ty).struct { ty::ty_fn(f) => f, @@ -651,8 +654,9 @@ fn get_trait_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) // If the item in question is a trait, returns its set of methods and // their self types. Otherwise, returns none. This overlaps in an // annoying way with get_trait_methods. 
-fn get_method_names_if_trait(cdata: cmd, node_id: ast::node_id) - -> option<@DVec<(@~str, ast::self_ty_)>> { +fn get_method_names_if_trait(intr: ident_interner, cdata: cmd, + node_id: ast::node_id) + -> option<@DVec<(ast::ident, ast::self_ty_)>> { let item = lookup_item(node_id, cdata.data); if item_family(item) != 'I' { @@ -662,7 +666,7 @@ fn get_method_names_if_trait(cdata: cmd, node_id: ast::node_id) let resulting_methods = @dvec(); for ebml::tagged_docs(item, tag_item_trait_method) |method| { resulting_methods.push( - (item_name(method), get_self_ty(method))); + (item_name(intr, method), get_self_ty(method))); } return some(resulting_methods); } @@ -680,7 +684,7 @@ fn get_item_attrs(cdata: cmd, } // Helper function that gets either fields or methods -fn get_class_members(cdata: cmd, id: ast::node_id, +fn get_class_members(intr: ident_interner, cdata: cmd, id: ast::node_id, p: fn(char) -> bool) -> ~[ty::field_ty] { let data = cdata.data; let item = lookup_item(id, data); @@ -688,7 +692,7 @@ fn get_class_members(cdata: cmd, id: ast::node_id, for ebml::tagged_docs(item, tag_item_field) |an_item| { let f = item_family(an_item); if p(f) { - let name = item_name(an_item); + let name = item_name(intr, an_item); let did = item_def_id(an_item, cdata); let mt = field_mutability(an_item); vec::push(result, {ident: name, id: did, vis: @@ -708,8 +712,9 @@ pure fn family_to_visibility(family: char) -> ast::visibility { } /* 'g' for public field, 'j' for private field, 'N' for inherited field */ -fn get_class_fields(cdata: cmd, id: ast::node_id) -> ~[ty::field_ty] { - get_class_members(cdata, id, |f| f == 'g' || f == 'j' || f == 'N') +fn get_class_fields(intr: ident_interner, cdata: cmd, id: ast::node_id) + -> ~[ty::field_ty] { + get_class_members(intr, cdata, id, |f| f == 'g' || f == 'j' || f == 'N') } fn family_has_type_params(fam_ch: char) -> bool { @@ -774,7 +779,7 @@ fn get_meta_items(md: ebml::doc) -> ~[@ast::meta_item] { for ebml::tagged_docs(md, tag_meta_item_word) |meta_item_doc| { let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name); let n = str::from_bytes(ebml::doc_data(nd)); - vec::push(items, attr::mk_word_item(@n)); + vec::push(items, attr::mk_word_item(n)); }; for ebml::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| { let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name); @@ -783,13 +788,13 @@ fn get_meta_items(md: ebml::doc) -> ~[@ast::meta_item] { let v = str::from_bytes(ebml::doc_data(vd)); // FIXME (#623): Should be able to decode meta_name_value variants, // but currently the encoder just drops them - vec::push(items, attr::mk_name_value_item_str(@n, v)); + vec::push(items, attr::mk_name_value_item_str(n, v)); }; for ebml::tagged_docs(md, tag_meta_item_list) |meta_item_doc| { let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name); let n = str::from_bytes(ebml::doc_data(nd)); let subitems = get_meta_items(meta_item_doc); - vec::push(items, attr::mk_list_item(@n, subitems)); + vec::push(items, attr::mk_list_item(n, subitems)); }; return items; } @@ -815,17 +820,19 @@ fn get_attributes(md: ebml::doc) -> ~[ast::attribute] { return attrs; } -fn list_meta_items(meta_items: ebml::doc, out: io::Writer) { +fn list_meta_items(intr: ident_interner, + meta_items: ebml::doc, out: io::Writer) { for get_meta_items(meta_items).each |mi| { - out.write_str(fmt!{"%s\n", pprust::meta_item_to_str(*mi)}); + out.write_str(fmt!{"%s\n", pprust::meta_item_to_str(*mi, intr)}); } } -fn list_crate_attributes(md: ebml::doc, hash: @~str, out: io::Writer) { - out.write_str(fmt!{"=Crate 
Attributes (%s)=\n", *hash}); +fn list_crate_attributes(intr: ident_interner, md: ebml::doc, hash: ~str, + out: io::Writer) { + out.write_str(fmt!{"=Crate Attributes (%s)=\n", hash}); for get_attributes(md).each |attr| { - out.write_str(fmt!{"%s\n", pprust::attribute_to_str(attr)}); + out.write_str(fmt!{"%s\n", pprust::attribute_to_str(attr, intr)}); } out.write_str(~"\n\n"); @@ -836,9 +843,9 @@ fn get_crate_attributes(data: @~[u8]) -> ~[ast::attribute] { } type crate_dep = {cnum: ast::crate_num, name: ast::ident, - vers: @~str, hash: @~str}; + vers: ~str, hash: ~str}; -fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] { +fn get_crate_deps(intr: ident_interner, data: @~[u8]) -> ~[crate_dep] { let mut deps: ~[crate_dep] = ~[]; let cratedoc = ebml::doc(data); let depsdoc = ebml::get_doc(cratedoc, tag_crate_deps); @@ -848,42 +855,44 @@ fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] { } for ebml::tagged_docs(depsdoc, tag_crate_dep) |depdoc| { vec::push(deps, {cnum: crate_num, - name: @docstr(depdoc, tag_crate_dep_name), - vers: @docstr(depdoc, tag_crate_dep_vers), - hash: @docstr(depdoc, tag_crate_dep_hash)}); + name: intr.intern(@docstr(depdoc, tag_crate_dep_name)), + vers: docstr(depdoc, tag_crate_dep_vers), + hash: docstr(depdoc, tag_crate_dep_hash)}); crate_num += 1; }; return deps; } -fn list_crate_deps(data: @~[u8], out: io::Writer) { +fn list_crate_deps(intr: ident_interner, data: @~[u8], out: io::Writer) { out.write_str(~"=External Dependencies=\n"); - for get_crate_deps(data).each |dep| { - out.write_str(fmt!{"%d %s-%s-%s\n", - dep.cnum, *dep.name, *dep.hash, *dep.vers}); + for get_crate_deps(intr, data).each |dep| { + out.write_str( + fmt!{"%d %s-%s-%s\n", + dep.cnum, *intr.get(dep.name), dep.hash, dep.vers}); } out.write_str(~"\n"); } -fn get_crate_hash(data: @~[u8]) -> @~str { +fn get_crate_hash(data: @~[u8]) -> ~str { let cratedoc = ebml::doc(data); let hashdoc = ebml::get_doc(cratedoc, tag_crate_hash); - return @str::from_bytes(ebml::doc_data(hashdoc)); + return str::from_bytes(ebml::doc_data(hashdoc)); } -fn get_crate_vers(data: @~[u8]) -> @~str { +fn get_crate_vers(data: @~[u8]) -> ~str { let attrs = decoder::get_crate_attributes(data); return match attr::last_meta_item_value_str_by_name( attr::find_linkage_metas(attrs), ~"vers") { some(ver) => ver, - none => @~"0.0" + none => ~"0.0" }; } -fn iter_crate_items(cdata: cmd, proc: fn(~str, ast::def_id)) { - for each_path(cdata) |path_entry| { +fn iter_crate_items(intr: ident_interner, + cdata: cmd, proc: fn(~str, ast::def_id)) { + for each_path(intr, cdata) |path_entry| { match path_entry.def_like { dl_impl(*) | dl_field => {} dl_def(def) => { @@ -893,7 +902,8 @@ fn iter_crate_items(cdata: cmd, proc: fn(~str, ast::def_id)) { } } -fn get_crate_module_paths(cdata: cmd) -> ~[(ast::def_id, ~str)] { +fn get_crate_module_paths(intr: ident_interner, cdata: cmd) + -> ~[(ast::def_id, ~str)] { fn mod_of_path(p: ~str) -> ~str { str::connect(vec::init(str::split_str(p, ~"::")), ~"::") } @@ -902,7 +912,7 @@ fn get_crate_module_paths(cdata: cmd) -> ~[(ast::def_id, ~str)] { // fowarded path due to renamed import or reexport let mut res = ~[]; let mods = map::str_hash(); - do iter_crate_items(cdata) |path, did| { + do iter_crate_items(intr, cdata) |path, did| { let m = mod_of_path(path); if str::is_not_empty(m) { // if m has a sub-item, it must be a module @@ -919,11 +929,12 @@ fn get_crate_module_paths(cdata: cmd) -> ~[(ast::def_id, ~str)] { } } -fn list_crate_metadata(bytes: @~[u8], out: io::Writer) { +fn list_crate_metadata(intr: ident_interner, 
bytes: @~[u8], + out: io::Writer) { let hash = get_crate_hash(bytes); let md = ebml::doc(bytes); - list_crate_attributes(md, hash, out); - list_crate_deps(bytes, out); + list_crate_attributes(intr, md, hash, out); + list_crate_deps(intr, bytes, out); } // Translates a def_id from an external crate to a def_id for the current diff --git a/src/rustc/metadata/encoder.rs b/src/rustc/metadata/encoder.rs index 2389f43b5d5..e14e64ddd0c 100644 --- a/src/rustc/metadata/encoder.rs +++ b/src/rustc/metadata/encoder.rs @@ -71,8 +71,8 @@ fn reachable(ecx: @encode_ctxt, id: node_id) -> bool { ecx.reachable.contains_key(id) } -fn encode_name(ebml_w: ebml::writer, name: ident) { - ebml_w.wr_tagged_str(tag_paths_data_name, *name); +fn encode_name(ecx: @encode_ctxt, ebml_w: ebml::writer, name: ident) { + ebml_w.wr_tagged_str(tag_paths_data_name, ecx.tcx.sess.str_of(name)); } fn encode_def_id(ebml_w: ebml::writer, id: def_id) { @@ -97,13 +97,15 @@ fn encode_mutability(ebml_w: ebml::writer, mt: class_mutability) { type entry<T> = {val: T, pos: uint}; -fn add_to_index(ebml_w: ebml::writer, path: &[ident], &index: ~[entry<~str>], - name: ident) { +fn add_to_index(ecx: @encode_ctxt, ebml_w: ebml::writer, path: &[ident], + &index: ~[entry<~str>], name: ident) { let mut full_path = ~[]; vec::push_all(full_path, path); vec::push(full_path, name); - vec::push(index, {val: ast_util::path_name_i(full_path), - pos: ebml_w.writer.tell()}); + vec::push(index, + {val: ast_util::path_name_i(full_path, + ecx.tcx.sess.parse_sess.interner), + pos: ebml_w.writer.tell()}); } fn encode_trait_ref(ebml_w: ebml::writer, ecx: @encode_ctxt, t: @trait_ref) { @@ -209,7 +211,7 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer, ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(variant.node.id)); encode_family(ebml_w, 'v'); - encode_name(ebml_w, variant.node.name); + encode_name(ecx, ebml_w, variant.node.name); encode_parent_item(ebml_w, local_def(id)); encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, variant.node.id)); @@ -227,29 +229,29 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer, disr_val = vi[i].disr_val; } encode_type_param_bounds(ebml_w, ecx, ty_params); - encode_path(ebml_w, path, ast_map::path_name(variant.node.name)); + encode_path(ecx, ebml_w, path, ast_map::path_name(variant.node.name)); ebml_w.end_tag(); disr_val += 1; i += 1; } } -fn encode_path(ebml_w: ebml::writer, - path: ast_map::path, +fn encode_path(ecx: @encode_ctxt, ebml_w: ebml::writer, path: ast_map::path, name: ast_map::path_elt) { - fn encode_path_elt(ebml_w: ebml::writer, elt: ast_map::path_elt) { + fn encode_path_elt(ecx: @encode_ctxt, ebml_w: ebml::writer, + elt: ast_map::path_elt) { let (tag, name) = match elt { ast_map::path_mod(name) => (tag_path_elt_mod, name), ast_map::path_name(name) => (tag_path_elt_name, name) }; - ebml_w.wr_tagged_str(tag, *name); + ebml_w.wr_tagged_str(tag, ecx.tcx.sess.str_of(name)); } do ebml_w.wr_tag(tag_path) { ebml_w.wr_tagged_u32(tag_path_len, (vec::len(path) + 1u) as u32); - do vec::iter(path) |pe| { encode_path_elt(ebml_w, pe); } - encode_path_elt(ebml_w, name); + do vec::iter(path) |pe| { encode_path_elt(ecx, ebml_w, pe); } + encode_path_elt(ecx, ebml_w, name); } } @@ -258,7 +260,7 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod, ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(id)); encode_family(ebml_w, 'm'); - encode_name(ebml_w, name); + encode_name(ecx, ebml_w, name); debug!{"(encoding info for module) 
encoding info for module ID %d", id}; // Encode info about all the module children. @@ -268,10 +270,11 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod, let (ident, did) = (item.ident, item.id); debug!{"(encoding info for module) ... encoding impl %s \ (%?/%?), exported? %?", - *ident, - did, - ast_map::node_id_to_str(ecx.tcx.items, did), - ast_util::is_exported(ident, md)}; + ecx.tcx.sess.str_of(ident), + did, + ast_map::node_id_to_str(ecx.tcx.items, did, ecx.tcx + .sess.parse_sess.interner), + ast_util::is_exported(ident, md)}; ebml_w.start_tag(tag_mod_impl); ebml_w.wr_str(def_to_str(local_def(did))); @@ -281,7 +284,7 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod, } } - encode_path(ebml_w, path, ast_map::path_mod(name)); + encode_path(ecx, ebml_w, path, ast_map::path_mod(name)); // Encode the reexports of this module. debug!("(encoding info for module) encoding reexports for %d", id); @@ -371,10 +374,11 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer, vec::push(*global_index, {val: id, pos: ebml_w.writer.tell()}); ebml_w.start_tag(tag_items_data_item); - debug!{"encode_info_for_class: doing %s %d", *nm, id}; + debug!{"encode_info_for_class: doing %s %d", + tcx.sess.str_of(nm), id}; encode_visibility(ebml_w, vis); - encode_name(ebml_w, nm); - encode_path(ebml_w, path, ast_map::path_name(nm)); + encode_name(ecx, ebml_w, nm); + encode_path(ecx, ebml_w, path, ast_map::path_name(nm)); encode_type(ecx, ebml_w, node_id_to_type(tcx, id)); encode_mutability(ebml_w, mt); encode_def_id(ebml_w, local_def(id)); @@ -392,7 +396,8 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer, {val: m.id, pos: ebml_w.writer.tell()}); let impl_path = vec::append_one(path, ast_map::path_name(m.ident)); - debug!{"encode_info_for_class: doing %s %d", *m.ident, m.id}; + debug!{"encode_info_for_class: doing %s %d", + ecx.tcx.sess.str_of(m.ident), m.id}; encode_info_for_method(ecx, ebml_w, impl_path, should_inline(m.attrs), id, m, vec::append(class_tps, m.tps)); @@ -409,15 +414,16 @@ fn encode_info_for_fn(ecx: @encode_ctxt, ebml_w: ebml::writer, item: option<inlined_item>, tps: ~[ty_param], decl: fn_decl) { ebml_w.start_tag(tag_items_data_item); - encode_name(ebml_w, ident); + encode_name(ecx, ebml_w, ident); encode_def_id(ebml_w, local_def(id)); encode_family(ebml_w, purity_fn_family(decl.purity)); encode_type_param_bounds(ebml_w, ecx, tps); let its_ty = node_id_to_type(ecx.tcx, id); - debug!{"fn name = %s ty = %s its node id = %d", *ident, + debug!{"fn name = %s ty = %s its node id = %d", + ecx.tcx.sess.str_of(ident), util::ppaux::ty_to_str(ecx.tcx, its_ty), id}; encode_type(ecx, ebml_w, its_ty); - encode_path(ebml_w, path, ast_map::path_name(ident)); + encode_path(ecx, ebml_w, path, ast_map::path_name(ident)); match item { some(it) => { ecx.encode_inlined_item(ecx, ebml_w, path, it); @@ -433,14 +439,15 @@ fn encode_info_for_method(ecx: @encode_ctxt, ebml_w: ebml::writer, impl_path: ast_map::path, should_inline: bool, parent_id: node_id, m: @method, all_tps: ~[ty_param]) { - debug!{"encode_info_for_method: %d %s %u", m.id, *m.ident, all_tps.len()}; + debug!{"encode_info_for_method: %d %s %u", m.id, + ecx.tcx.sess.str_of(m.ident), all_tps.len()}; ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(m.id)); encode_family(ebml_w, purity_fn_family(m.decl.purity)); encode_type_param_bounds(ebml_w, ecx, all_tps); encode_type(ecx, ebml_w, node_id_to_type(ecx.tcx, m.id)); - encode_name(ebml_w, m.ident); - 
encode_path(ebml_w, impl_path, ast_map::path_name(m.ident)); + encode_name(ecx, ebml_w, m.ident); + encode_path(ecx, ebml_w, impl_path, ast_map::path_name(m.ident)); encode_self_type(ebml_w, m.self_ty.node); if all_tps.len() > 0u || should_inline { ecx.encode_inlined_item( @@ -504,7 +511,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, encode_family(ebml_w, 'c'); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); encode_symbol(ecx, ebml_w, item.id); - encode_path(ebml_w, path, ast_map::path_name(item.ident)); + encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); ebml_w.end_tag(); } item_fn(decl, tps, _) => { @@ -514,7 +521,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, encode_family(ebml_w, purity_fn_family(decl.purity)); encode_type_param_bounds(ebml_w, ecx, tps); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); - encode_path(ebml_w, path, ast_map::path_name(item.ident)); + encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); if tps.len() > 0u || should_inline(item.attrs) { ecx.encode_inlined_item(ecx, ebml_w, path, ii_item(item)); } else { @@ -531,8 +538,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(item.id)); encode_family(ebml_w, 'n'); - encode_name(ebml_w, item.ident); - encode_path(ebml_w, path, ast_map::path_name(item.ident)); + encode_name(ecx, ebml_w, item.ident); + encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); ebml_w.end_tag(); } item_ty(_, tps) => { @@ -542,8 +549,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, encode_family(ebml_w, 'y'); encode_type_param_bounds(ebml_w, ecx, tps); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); - encode_name(ebml_w, item.ident); - encode_path(ebml_w, path, ast_map::path_name(item.ident)); + encode_name(ecx, ebml_w, item.ident); + encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); encode_region_param(ecx, ebml_w, item); ebml_w.end_tag(); } @@ -554,12 +561,12 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, encode_family(ebml_w, 't'); encode_type_param_bounds(ebml_w, ecx, tps); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); - encode_name(ebml_w, item.ident); + encode_name(ecx, ebml_w, item.ident); for enum_definition.variants.each |v| { encode_variant_id(ebml_w, local_def(v.node.id)); } ecx.encode_inlined_item(ecx, ebml_w, path, ii_item(item)); - encode_path(ebml_w, path, ast_map::path_name(item.ident)); + encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); encode_region_param(ecx, ebml_w, item); } encode_enum_variant_info(ecx, ebml_w, item.id, @@ -576,10 +583,12 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, /* Encode the dtor */ do option::iter(struct_def.dtor) |dtor| { vec::push(*index, {val: dtor.node.id, pos: ebml_w.writer.tell()}); - encode_info_for_fn(ecx, ebml_w, dtor.node.id, @(*item.ident - + ~"_dtor"), path, if tps.len() > 0u { - some(ii_dtor(dtor, item.ident, tps, - local_def(item.id))) } + encode_info_for_fn(ecx, ebml_w, dtor.node.id, + ecx.tcx.sess.ident_of( + ecx.tcx.sess.str_of(item.ident) + ~"_dtor"), + path, if tps.len() > 0u { + some(ii_dtor(dtor, item.ident, tps, + local_def(item.id))) } else { none }, tps, ast_util::dtor_dec()); } @@ -596,8 +605,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, 
encode_type_param_bounds(ebml_w, ecx, tps); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); - encode_name(ebml_w, item.ident); - encode_path(ebml_w, path, ast_map::path_name(item.ident)); + encode_name(ecx, ebml_w, item.ident); + encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); encode_region_param(ecx, ebml_w, item); for struct_def.traits.each |t| { encode_trait_ref(ebml_w, ecx, t); @@ -618,7 +627,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, named_field(ident, mutability, vis) => { ebml_w.start_tag(tag_item_field); encode_visibility(ebml_w, vis); - encode_name(ebml_w, ident); + encode_name(ecx, ebml_w, ident); encode_def_id(ebml_w, local_def(f.node.id)); ebml_w.end_tag(); } @@ -634,7 +643,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, as a trait */ ebml_w.start_tag(tag_item_trait_method); encode_family(ebml_w, purity_fn_family(m.decl.purity)); - encode_name(ebml_w, m.ident); + encode_name(ecx, ebml_w, m.ident); encode_type_param_bounds(ebml_w, ecx, m.tps); encode_type(ecx, ebml_w, node_id_to_type(tcx, m.id)); encode_def_id(ebml_w, local_def(m.id)); @@ -655,8 +664,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, /* Encode the constructor */ for struct_def.ctor.each |ctor| { - debug!{"encoding info for ctor %s %d", *item.ident, - ctor.node.id}; + debug!{"encoding info for ctor %s %d", + ecx.tcx.sess.str_of(item.ident), ctor.node.id}; vec::push(*index, { val: ctor.node.id, pos: ebml_w.writer.tell() @@ -676,7 +685,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, encode_region_param(ecx, ebml_w, item); encode_type_param_bounds(ebml_w, ecx, tps); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); - encode_name(ebml_w, item.ident); + encode_name(ecx, ebml_w, item.ident); encode_attributes(ebml_w, item.attrs); for methods.each |m| { ebml_w.start_tag(tag_item_impl_method); @@ -689,7 +698,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, for traits.each |associated_trait| { encode_trait_ref(ebml_w, ecx, associated_trait) } - encode_path(ebml_w, path, ast_map::path_name(item.ident)); + encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); ebml_w.end_tag(); let impl_path = vec::append_one(path, @@ -709,7 +718,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, encode_region_param(ecx, ebml_w, item); encode_type_param_bounds(ebml_w, ecx, tps); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); - encode_name(ebml_w, item.ident); + encode_name(ecx, ebml_w, item.ident); encode_attributes(ebml_w, item.attrs); let mut i = 0u; for vec::each(*ty::trait_methods(tcx, local_def(item.id))) |mty| { @@ -717,7 +726,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, required(ty_m) => { ebml_w.start_tag(tag_item_trait_method); encode_def_id(ebml_w, local_def(ty_m.id)); - encode_name(ebml_w, mty.ident); + encode_name(ecx, ebml_w, mty.ident); encode_type_param_bounds(ebml_w, ecx, ty_m.tps); encode_type(ecx, ebml_w, ty::mk_fn(tcx, mty.fty)); encode_family(ebml_w, purity_fn_family(mty.purity)); @@ -732,7 +741,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, } i += 1u; } - encode_path(ebml_w, path, ast_map::path_name(item.ident)); + encode_path(ecx, ebml_w, path, ast_map::path_name(item.ident)); for traits.each |associated_trait| { encode_trait_ref(ebml_w, ecx, associated_trait) } @@ -750,13 +759,13 @@ fn 
encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item, ebml_w.start_tag(tag_items_data_item); encode_def_id(ebml_w, local_def(ty_m.id)); - encode_name(ebml_w, ty_m.ident); + encode_name(ecx, ebml_w, ty_m.ident); encode_family(ebml_w, purity_static_method_family(ty_m.decl.purity)); let polyty = ecx.tcx.tcache.get(local_def(ty_m.id)); encode_ty_type_param_bounds(ebml_w, ecx, polyty.bounds); encode_type(ecx, ebml_w, polyty.ty); - encode_path(ebml_w, path, ast_map::path_name(ty_m.ident)); + encode_path(ecx, ebml_w, path, ast_map::path_name(ty_m.ident)); ebml_w.end_tag(); } @@ -786,7 +795,7 @@ fn encode_info_for_foreign_item(ecx: @encode_ctxt, ebml_w: ebml::writer, } else { encode_symbol(ecx, ebml_w, nitem.id); } - encode_path(ebml_w, path, ast_map::path_name(nitem.ident)); + encode_path(ecx, ebml_w, path, ast_map::path_name(nitem.ident)); } } ebml_w.end_tag(); @@ -798,7 +807,8 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer, ebml_w.start_tag(tag_items_data); vec::push(*index, {val: crate_node_id, pos: ebml_w.writer.tell()}); encode_info_for_mod(ecx, ebml_w, crate.node.module, - crate_node_id, ~[], @~""); + crate_node_id, ~[], + syntax::parse::token::special_idents::invalid); visit::visit_crate(*crate, (), visit::mk_vt(@{ visit_expr: |_e, _cx, _v| { }, visit_item: |i, cx, v, copy ebml_w| { @@ -883,7 +893,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) { meta_word(name) => { ebml_w.start_tag(tag_meta_item_word); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(str::bytes(*name)); + ebml_w.writer.write(str::bytes(name)); ebml_w.end_tag(); ebml_w.end_tag(); } @@ -892,7 +902,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) { lit_str(value) => { ebml_w.start_tag(tag_meta_item_name_value); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(str::bytes(*name)); + ebml_w.writer.write(str::bytes(name)); ebml_w.end_tag(); ebml_w.start_tag(tag_meta_item_value); ebml_w.writer.write(str::bytes(*value)); @@ -905,7 +915,7 @@ fn encode_meta_item(ebml_w: ebml::writer, mi: meta_item) { meta_list(name, items) => { ebml_w.start_tag(tag_meta_item_list); ebml_w.start_tag(tag_meta_item_name); - ebml_w.writer.write(str::bytes(*name)); + ebml_w.writer.write(str::bytes(name)); ebml_w.end_tag(); for items.each |inner_item| { encode_meta_item(ebml_w, *inner_item); @@ -934,22 +944,22 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] { fn synthesize_link_attr(ecx: @encode_ctxt, items: ~[@meta_item]) -> attribute { - assert (*ecx.link_meta.name != ~""); - assert (*ecx.link_meta.vers != ~""); + assert (ecx.link_meta.name != ~""); + assert (ecx.link_meta.vers != ~""); let name_item = - attr::mk_name_value_item_str(@~"name", *ecx.link_meta.name); + attr::mk_name_value_item_str(~"name", ecx.link_meta.name); let vers_item = - attr::mk_name_value_item_str(@~"vers", *ecx.link_meta.vers); + attr::mk_name_value_item_str(~"vers", ecx.link_meta.vers); let other_items = { - let tmp = attr::remove_meta_items_by_name(items, @~"name"); - attr::remove_meta_items_by_name(tmp, @~"vers") + let tmp = attr::remove_meta_items_by_name(items, ~"name"); + attr::remove_meta_items_by_name(tmp, ~"vers") }; let meta_items = vec::append(~[name_item, vers_item], other_items); - let link_item = attr::mk_list_item(@~"link", meta_items); + let link_item = attr::mk_list_item(~"link", meta_items); return attr::mk_attr(link_item); } @@ -959,7 +969,7 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] { for 
crate.node.attrs.each |attr| { vec::push( attrs, - if *attr::get_attr_name(attr) != ~"link" { + if attr::get_attr_name(attr) != ~"link" { attr } else { match attr.node.value.node { @@ -977,16 +987,19 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] { return attrs; } -fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) { +fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::writer, + cstore: cstore::cstore) { + + fn get_ordered_deps(ecx: @encode_ctxt, cstore: cstore::cstore) + -> ~[decoder::crate_dep] { - fn get_ordered_deps(cstore: cstore::cstore) -> ~[decoder::crate_dep] { type hashkv = @{key: crate_num, val: cstore::crate_metadata}; type numdep = decoder::crate_dep; // Pull the cnums and name,vers,hash out of cstore let mut deps: ~[mut numdep] = ~[mut]; do cstore::iter_crate_data(cstore) |key, val| { - let dep = {cnum: key, name: @val.name, + let dep = {cnum: key, name: ecx.tcx.sess.ident_of(val.name), vers: decoder::get_crate_vers(val.data), hash: decoder::get_crate_hash(val.data)}; vec::push(deps, dep); @@ -1014,22 +1027,23 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) { // FIXME (#2166): This is not nearly enough to support correct versioning // but is enough to get transitive crate dependencies working. ebml_w.start_tag(tag_crate_deps); - for get_ordered_deps(cstore).each |dep| { - encode_crate_dep(ebml_w, dep); + for get_ordered_deps(ecx, cstore).each |dep| { + encode_crate_dep(ecx, ebml_w, dep); } ebml_w.end_tag(); } -fn encode_crate_dep(ebml_w: ebml::writer, dep: decoder::crate_dep) { +fn encode_crate_dep(ecx: @encode_ctxt, ebml_w: ebml::writer, + dep: decoder::crate_dep) { ebml_w.start_tag(tag_crate_dep); ebml_w.start_tag(tag_crate_dep_name); - ebml_w.writer.write(str::bytes(*dep.name)); + ebml_w.writer.write(str::bytes(ecx.tcx.sess.str_of(dep.name))); ebml_w.end_tag(); ebml_w.start_tag(tag_crate_dep_vers); - ebml_w.writer.write(str::bytes(*dep.vers)); + ebml_w.writer.write(str::bytes(dep.vers)); ebml_w.end_tag(); ebml_w.start_tag(tag_crate_dep_hash); - ebml_w.writer.write(str::bytes(*dep.hash)); + ebml_w.writer.write(str::bytes(dep.hash)); ebml_w.end_tag(); ebml_w.end_tag(); } @@ -1064,7 +1078,7 @@ fn encode_metadata(parms: encode_parms, crate: @crate) -> ~[u8] { let crate_attrs = synthesize_crate_attrs(ecx, crate); encode_attributes(ebml_w, crate_attrs); - encode_crate_deps(ebml_w, ecx.cstore); + encode_crate_deps(ecx, ebml_w, ecx.cstore); // Encode and index the items. 
ebml_w.start_tag(tag_items); diff --git a/src/rustc/metadata/loader.rs b/src/rustc/metadata/loader.rs index d1e24642927..d4b66a7e4ec 100644 --- a/src/rustc/metadata/loader.rs +++ b/src/rustc/metadata/loader.rs @@ -7,6 +7,7 @@ import syntax::codemap::span; import lib::llvm::{False, llvm, mk_object_file, mk_section_iter}; import filesearch::filesearch; import io::WriterUtil; +import syntax::parse::token::ident_interner; export os; export os_macos, os_win32, os_linux, os_freebsd; @@ -33,7 +34,8 @@ type ctxt = { metas: ~[@ast::meta_item], hash: ~str, os: os, - static: bool + static: bool, + intr: ident_interner }; fn load_library_crate(cx: ctxt) -> {ident: ~str, data: @~[u8]} { @@ -41,7 +43,8 @@ fn load_library_crate(cx: ctxt) -> {ident: ~str, data: @~[u8]} { some(t) => return t, none => { cx.diag.span_fatal( - cx.span, fmt!{"can't find crate for `%s`", *cx.ident}); + cx.span, fmt!{"can't find crate for `%s`", + *cx.intr.get(cx.ident)}); } } } @@ -66,7 +69,7 @@ fn find_library_crate_aux(cx: ctxt, filesearch: filesearch::filesearch) -> option<{ident: ~str, data: @~[u8]}> { let crate_name = crate_name_from_metas(cx.metas); - let prefix: ~str = nn.prefix + *crate_name + ~"-"; + let prefix: ~str = nn.prefix + crate_name + ~"-"; let suffix: ~str = nn.suffix; let mut matches = ~[]; @@ -104,19 +107,19 @@ fn find_library_crate_aux(cx: ctxt, some(matches[0]) } else { cx.diag.span_err( - cx.span, fmt!{"multiple matching crates for `%s`", *crate_name}); + cx.span, fmt!{"multiple matching crates for `%s`", crate_name}); cx.diag.handler().note(~"candidates:"); for matches.each |match_| { cx.diag.handler().note(fmt!{"path: %s", match_.ident}); let attrs = decoder::get_crate_attributes(match_.data); - note_linkage_attrs(cx.diag, attrs); + note_linkage_attrs(cx.intr, cx.diag, attrs); } cx.diag.handler().abort_if_errors(); none } } -fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> @~str { +fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> ~str { let name_items = attr::find_meta_items_by_name(metas, ~"name"); match vec::last_opt(name_items) { some(i) => { @@ -131,9 +134,10 @@ fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> @~str { } } -fn note_linkage_attrs(diag: span_handler, attrs: ~[ast::attribute]) { +fn note_linkage_attrs(intr: ident_interner, diag: span_handler, + attrs: ~[ast::attribute]) { for attr::find_linkage_attrs(attrs).each |attr| { - diag.handler().note(fmt!{"meta: %s", pprust::attr_to_str(attr)}); + diag.handler().note(fmt!{"meta: %s", pprust::attr_to_str(attr,intr)}); } } @@ -143,7 +147,7 @@ fn crate_matches(crate_data: @~[u8], metas: ~[@ast::meta_item], let linkage_metas = attr::find_linkage_metas(attrs); if hash.is_not_empty() { let chash = decoder::get_crate_hash(crate_data); - if *chash != hash { return false; } + if chash != hash { return false; } } metadata_matches(linkage_metas, metas) } @@ -154,15 +158,8 @@ fn metadata_matches(extern_metas: ~[@ast::meta_item], debug!{"matching %u metadata requirements against %u items", vec::len(local_metas), vec::len(extern_metas)}; - debug!{"crate metadata:"}; - for extern_metas.each |have| { - debug!{" %s", pprust::meta_item_to_str(*have)}; - } - for local_metas.each |needed| { - debug!{"looking for %s", pprust::meta_item_to_str(*needed)}; if !attr::contains(extern_metas, needed) { - debug!{"missing %s", pprust::meta_item_to_str(*needed)}; return false; } } @@ -206,9 +203,10 @@ fn meta_section_name(os: os) -> ~str { } // A diagnostic function for dumping crate metadata to an output stream -fn list_file_metadata(os: os, path: ~str, 
out: io::Writer) { +fn list_file_metadata(intr: ident_interner, os: os, path: ~str, + out: io::Writer) { match get_metadata_section(os, path) { - option::some(bytes) => decoder::list_crate_metadata(bytes, out), + option::some(bytes) => decoder::list_crate_metadata(intr, bytes, out), option::none => { out.write_str(~"could not find metadata in " + path + ~".\n"); } diff --git a/src/rustc/metadata/tydecode.rs b/src/rustc/metadata/tydecode.rs index a61e111cca9..5b76e0da7f5 100644 --- a/src/rustc/metadata/tydecode.rs +++ b/src/rustc/metadata/tydecode.rs @@ -46,7 +46,7 @@ fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) -> while !is_last(peek(st)) { rslt += str::from_byte(next_byte(st)); } - return @rslt; + return st.tcx.sess.ident_of(rslt); } @@ -133,7 +133,7 @@ fn parse_bound_region(st: @pstate) -> ty::bound_region { assert next(st) == '|'; ty::br_anon(id) } - '[' => ty::br_named(@parse_str(st, ']')), + '[' => ty::br_named(st.tcx.sess.ident_of(parse_str(st, ']'))), 'c' => { let id = parse_int(st); assert next(st) == '|'; @@ -249,7 +249,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t { assert (next(st) == '['); let mut fields: ~[ty::field] = ~[]; while peek(st) != ']' { - let name = @parse_str(st, '='); + let name = st.tcx.sess.ident_of(parse_str(st, '=')); vec::push(fields, {ident: name, mt: parse_mt(st, conv)}); } st.pos = st.pos + 1u; diff --git a/src/rustc/metadata/tyencode.rs b/src/rustc/metadata/tyencode.rs index 68fe7fc4d65..4158656aa54 100644 --- a/src/rustc/metadata/tyencode.rs +++ b/src/rustc/metadata/tyencode.rs @@ -126,14 +126,14 @@ fn enc_region(w: io::Writer, cx: @ctxt, r: ty::region) { match r { ty::re_bound(br) => { w.write_char('b'); - enc_bound_region(w, br); + enc_bound_region(w, cx, br); } ty::re_free(id, br) => { w.write_char('f'); w.write_char('['); w.write_int(id); w.write_char('|'); - enc_bound_region(w, br); + enc_bound_region(w, cx, br); w.write_char(']'); } ty::re_scope(nid) => { @@ -151,7 +151,7 @@ fn enc_region(w: io::Writer, cx: @ctxt, r: ty::region) { } } -fn enc_bound_region(w: io::Writer, br: ty::bound_region) { +fn enc_bound_region(w: io::Writer, cx: @ctxt, br: ty::bound_region) { match br { ty::br_self => w.write_char('s'), ty::br_anon(idx) => { @@ -161,14 +161,14 @@ fn enc_bound_region(w: io::Writer, br: ty::bound_region) { } ty::br_named(s) => { w.write_char('['); - w.write_str(*s); + w.write_str(cx.tcx.sess.str_of(s)); w.write_char(']') } ty::br_cap_avoid(id, br) => { w.write_char('c'); w.write_int(id); w.write_char('|'); - enc_bound_region(w, *br); + enc_bound_region(w, cx, *br); } } } @@ -265,7 +265,7 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) { ty::ty_rec(fields) => { w.write_str(&"R["); for fields.each |field| { - w.write_str(*field.ident); + w.write_str(cx.tcx.sess.str_of(field.ident)); w.write_char('='); enc_mt(w, cx, field.mt); } diff --git a/src/rustc/middle/astencode.rs b/src/rustc/middle/astencode.rs index d8c5db4cd75..34ebeb2fe19 100644 --- a/src/rustc/middle/astencode.rs +++ b/src/rustc/middle/astencode.rs @@ -83,7 +83,8 @@ fn encode_inlined_item(ecx: @e::encode_ctxt, ii: ast::inlined_item, maps: maps) { debug!{"> Encoding inlined item: %s::%s (%u)", - ast_map::path_to_str(path), *ii.ident(), + ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner), + ecx.tcx.sess.str_of(ii.ident()), ebml_w.writer.tell()}; let id_range = ast_util::compute_id_range_for_inlined_item(ii); @@ -94,7 +95,8 @@ fn encode_inlined_item(ecx: @e::encode_ctxt, } debug!{"< Encoded inlined fn: %s::%s (%u)", - ast_map::path_to_str(path), 
*ii.ident(), + ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner), + ecx.tcx.sess.str_of(ii.ident()), ebml_w.writer.tell()}; } @@ -107,7 +109,8 @@ fn decode_inlined_item(cdata: cstore::crate_metadata, match par_doc.opt_child(c::tag_ast) { none => none, some(ast_doc) => { - debug!{"> Decoding inlined fn: %s::?", ast_map::path_to_str(path)}; + debug!{"> Decoding inlined fn: %s::?", + ast_map::path_to_str(path, tcx.sess.parse_sess.interner)}; let ast_dsr = ebml::ebml_deserializer(ast_doc); let from_id_range = ast_util::deserialize_id_range(ast_dsr); let to_id_range = reserve_id_range(dcx.tcx.sess, from_id_range); @@ -118,14 +121,15 @@ fn decode_inlined_item(cdata: cstore::crate_metadata, let ii = renumber_ast(xcx, raw_ii); ast_map::map_decoded_item(tcx.sess.diagnostic(), dcx.tcx.items, path, ii); - debug!{"Fn named: %s", *ii.ident()}; + debug!{"Fn named: %s", tcx.sess.str_of(ii.ident())}; decode_side_tables(xcx, ast_doc); debug!{"< Decoded inlined fn: %s::%s", - ast_map::path_to_str(path), *ii.ident()}; + ast_map::path_to_str(path, tcx.sess.parse_sess.interner), + tcx.sess.str_of(ii.ident())}; match ii { ast::ii_item(i) => { debug!{">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<", - syntax::print::pprust::item_to_str(i)}; + syntax::print::pprust::item_to_str(i, tcx.sess.intr())}; } _ => { } } @@ -915,28 +919,26 @@ trait fake_ext_ctxt { } #[cfg(test)] -type fake_session = (); +type fake_session = parse::parse_sess; #[cfg(test)] impl fake_session: fake_ext_ctxt { fn cfg() -> ast::crate_cfg { ~[] } - fn parse_sess() -> parse::parse_sess { parse::new_parse_sess(none) } + fn parse_sess() -> parse::parse_sess { self } } #[cfg(test)] fn mk_ctxt() -> fake_ext_ctxt { - () as fake_ext_ctxt + parse::new_parse_sess(none) as fake_ext_ctxt } #[cfg(test)] fn roundtrip(in_item: @ast::item) { - debug!{"in_item = %s", pprust::item_to_str(in_item)}; let mbuf = io::mem_buffer(); let ebml_w = ebml::writer(io::mem_buffer_writer(mbuf)); encode_item_ast(ebml_w, in_item); let ebml_doc = ebml::doc(@io::mem_buffer_buf(mbuf)); let out_item = decode_item_ast(ebml_doc); - debug!{"out_item = %s", pprust::item_to_str(out_item)}; let exp_str = io::with_str_writer(|w| ast::serialize_item(w, *in_item) ); @@ -993,7 +995,8 @@ fn test_simplification() { }); match (item_out, item_exp) { (ast::ii_item(item_out), ast::ii_item(item_exp)) => { - assert pprust::item_to_str(item_out) == pprust::item_to_str(item_exp); + assert pprust::item_to_str(item_out, ext_cx.parse_sess().interner) + == pprust::item_to_str(item_exp, ext_cx.parse_sess().interner); } _ => fail } diff --git a/src/rustc/middle/borrowck/check_loans.rs b/src/rustc/middle/borrowck/check_loans.rs index 4c3ee9eb4dd..de6d0621b35 100644 --- a/src/rustc/middle/borrowck/check_loans.rs +++ b/src/rustc/middle/borrowck/check_loans.rs @@ -159,7 +159,7 @@ impl check_loan_ctxt { debug!{"check_pure_callee_or_arg(pc=%?, expr=%?, \ callee_id=%d, ty=%s)", pc, - opt_expr.map(|e| pprust::expr_to_str(e) ), + opt_expr.map(|e| pprust::expr_to_str(e, tcx.sess.intr()) ), callee_id, ty_to_str(self.tcx(), ty::node_id_to_type(tcx, callee_id))}; diff --git a/src/rustc/middle/borrowck/gather_loans.rs b/src/rustc/middle/borrowck/gather_loans.rs index 83dc54e5f86..c0d7521e9f2 100644 --- a/src/rustc/middle/borrowck/gather_loans.rs +++ b/src/rustc/middle/borrowck/gather_loans.rs @@ -90,7 +90,8 @@ fn req_loans_in_expr(ex: @ast::expr, let tcx = bccx.tcx; let old_root_ub = self.root_ub; - debug!{"req_loans_in_expr(ex=%s)", pprust::expr_to_str(ex)}; + debug!{"req_loans_in_expr(ex=%s)", + 
pprust::expr_to_str(ex, tcx.sess.intr())}; // If this expression is borrowed, have to ensure it remains valid: for tcx.borrowings.find(ex.id).each |borrow| { diff --git a/src/rustc/middle/capture.rs b/src/rustc/middle/capture.rs index b45064db88d..73c2a46f1fd 100644 --- a/src/rustc/middle/capture.rs +++ b/src/rustc/middle/capture.rs @@ -44,7 +44,7 @@ fn check_capture_clause(tcx: ty::ctxt, tcx.sess.span_warn( cap_item.span, fmt!{"captured variable `%s` not used in closure", - *cap_item.name}); + tcx.sess.str_of(cap_item.name)}); } let cap_def_id = ast_util::def_id_of_def(cap_def).node; @@ -52,7 +52,7 @@ fn check_capture_clause(tcx: ty::ctxt, tcx.sess.span_err( cap_item.span, fmt!{"variable `%s` captured more than once", - *cap_item.name}); + tcx.sess.str_of(cap_item.name)}); } } } @@ -68,7 +68,7 @@ fn compute_capture_vars(tcx: ty::ctxt, for (*cap_clause).each |cap_item| { debug!{"Doing capture var: %s (%?)", - *cap_item.name, cap_item.id}; + tcx.sess.str_of(cap_item.name), cap_item.id}; let cap_def = tcx.def_map.get(cap_item.id); let cap_def_id = ast_util::def_id_of_def(cap_def).node; diff --git a/src/rustc/middle/check_alt.rs b/src/rustc/middle/check_alt.rs index 07351f49bea..9d1846708cc 100644 --- a/src/rustc/middle/check_alt.rs +++ b/src/rustc/middle/check_alt.rs @@ -91,15 +91,15 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: ~[@pat]) { match ty::get(ty).struct { ty::ty_bool => { match check ctor { - val(const_int(1i64)) => some(@~"true"), - val(const_int(0i64)) => some(@~"false") + val(const_int(1i64)) => some(~"true"), + val(const_int(0i64)) => some(~"false") } } ty::ty_enum(id, _) => { let vid = match check ctor { variant(id) => id }; match check vec::find(*ty::enum_variants(tcx, id), |v| v.id == vid) { - some(v) => some(v.name) + some(v) => some(tcx.sess.str_of(v.name)) } } _ => none @@ -107,7 +107,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: ~[@pat]) { } }; let msg = ~"non-exhaustive patterns" + match ext { - some(s) => ~": " + *s + ~" not covered", + some(s) => ~": " + s + ~" not covered", none => ~"" }; tcx.sess.span_err(sp, msg); diff --git a/src/rustc/middle/freevars.rs b/src/rustc/middle/freevars.rs index e1760a7dde1..cc63dfc12fc 100644 --- a/src/rustc/middle/freevars.rs +++ b/src/rustc/middle/freevars.rs @@ -50,7 +50,7 @@ fn collect_freevars(def_map: resolve3::DefMap, blk: ast::blk) ast::expr_path(path) => { let mut i = 0; match def_map.find(expr.id) { - none => fail (~"Not found: " + path_to_str(path)), + none => fail ~"path not found", some(df) => { let mut def = df; while i < depth { diff --git a/src/rustc/middle/kind.rs b/src/rustc/middle/kind.rs index e73cf84d947..8d2f87734bf 100644 --- a/src/rustc/middle/kind.rs +++ b/src/rustc/middle/kind.rs @@ -214,7 +214,7 @@ fn check_block(b: blk, cx: ctx, v: visit::vt<ctx>) { } fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) { - debug!{"kind::check_expr(%s)", expr_to_str(e)}; + debug!{"kind::check_expr(%s)", expr_to_str(e, cx.tcx.sess.intr())}; // Handle any kind bounds on type parameters do option::iter(cx.tcx.node_type_substs.find(e.id)) |ts| { diff --git a/src/rustc/middle/lang_items.rs b/src/rustc/middle/lang_items.rs index 557ee9661a2..26360b5b82c 100644 --- a/src/rustc/middle/lang_items.rs +++ b/src/rustc/middle/lang_items.rs @@ -102,18 +102,12 @@ struct LanguageItemCollector { meta_name_value(key, literal) => { match literal.node { lit_str(value) => { - self.match_and_collect_item(item_def_id, - *key, - *value); - } - _ => { - // Skip. 
+ self.match_and_collect_item(item_def_id, key, *value); } + _ => {} // Skip. } } - meta_word(*) | meta_list(*) => { - // Skip. - } + meta_word(*) | meta_list(*) => {} // Skip. } } diff --git a/src/rustc/middle/lint.rs b/src/rustc/middle/lint.rs index 9105d811872..ff97f759268 100644 --- a/src/rustc/middle/lint.rs +++ b/src/rustc/middle/lint.rs @@ -246,13 +246,13 @@ impl ctxt { for triples.each |pair| { let (meta, level, lintname) = pair; - match self.dict.find(*lintname) { + match self.dict.find(lintname) { none => { self.span_lint( new_ctxt.get_level(unrecognized_lint), meta.span, fmt!{"unknown `%s` attribute: `%s`", - level_to_str(level), *lintname}); + level_to_str(level), lintname}); } some(lint) => { @@ -263,7 +263,7 @@ impl ctxt { meta.span, fmt!{"%s(%s) overruled by outer forbid(%s)", level_to_str(level), - *lintname, *lintname}); + lintname, lintname}); } // we do multiple unneeded copies of the @@ -433,9 +433,10 @@ fn check_item_path_statement(cx: ty::ctxt, it: @ast::item) { } fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) { - fn is_camel_case(ident: ast::ident) -> bool { + fn is_camel_case(cx: ty::ctxt, ident: ast::ident) -> bool { + let ident = cx.sess.str_of(ident); assert ident.is_not_empty(); - let ident = ident_without_trailing_underscores(*ident); + let ident = ident_without_trailing_underscores(ident); let ident = ident_without_leading_underscores(ident); char::is_uppercase(str::char_at(ident, 0)) && !ident.contains_char('_') @@ -443,11 +444,8 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) { fn ident_without_trailing_underscores(ident: ~str) -> ~str { match str::rfind(ident, |c| c != '_') { - some(idx) => ident.slice(0, idx + 1), - none => { - // all underscores - ident - } + some(idx) => (ident).slice(0, idx + 1), + none => { ident } // all underscores } } @@ -464,7 +462,7 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) { fn check_case(cx: ty::ctxt, ident: ast::ident, expr_id: ast::node_id, item_id: ast::node_id, span: span) { - if !is_camel_case(ident) { + if !is_camel_case(cx, ident) { cx.sess.span_lint( non_camel_case_types, expr_id, item_id, span, ~"type, variant, or trait must be camel case"); @@ -488,7 +486,7 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) { } fn check_pat(tcx: ty::ctxt, pat: @ast::pat) { - debug!{"lint check_pat pat=%s", pat_to_str(pat)}; + debug!{"lint check_pat pat=%s", pat_to_str(pat, tcx.sess.intr())}; do pat_bindings(tcx.def_map, pat) |binding_mode, id, span, path| { match binding_mode { @@ -501,7 +499,7 @@ fn check_pat(tcx: ty::ctxt, pat: @ast::pat) { deprecated_pattern, id, id, span, fmt!{"binding `%s` should use ref or copy mode", - *path_to_ident(path)}); + tcx.sess.str_of(path_to_ident(path))}); } } } diff --git a/src/rustc/middle/liveness.rs b/src/rustc/middle/liveness.rs index 321e3db318e..67e2d4a3cef 100644 --- a/src/rustc/middle/liveness.rs +++ b/src/rustc/middle/liveness.rs @@ -101,7 +101,7 @@ */ import dvec::{DVec, dvec}; -import std::map::{hashmap, int_hash, str_hash, box_str_hash}; +import std::map::{hashmap, int_hash, str_hash, uint_hash}; import syntax::{visit, ast_util}; import syntax::print::pprust::{expr_to_str}; import visit::vt; @@ -233,7 +233,7 @@ struct ir_maps { self.live_node_map = int_hash(); self.variable_map = int_hash(); self.capture_map = int_hash(); - self.field_map = box_str_hash(); + self.field_map = uint_hash(); self.var_kinds = ~[]; self.lnks = ~[]; } @@ -286,12 +286,12 @@ struct ir_maps { } } - fn variable_name(var: variable) -> 
ident { - match self.var_kinds[*var] { - vk_local(_, name) | vk_arg(_, name, _) => name, - vk_field(name) => @(~"self." + *name), - vk_self => @~"self", - vk_implicit_return => @~"<implicit-ret>" + fn variable_name(var: variable) -> ~str { + match copy self.var_kinds[*var] { + vk_local(_, nm) | vk_arg(_, nm, _) => self.tcx.sess.str_of(nm), + vk_field(nm) => ~"self." + self.tcx.sess.str_of(nm), + vk_self => ~"self", + vk_implicit_return => ~"<implicit-ret>" } } @@ -1492,7 +1492,8 @@ impl @liveness { none => { /* ok */ } some(lnk_exit) => { self.tcx.sess.span_err( - sp, fmt!{"field `self.%s` is never initialized", *nm}); + sp, fmt!{"field `self.%s` is never initialized", + self.tcx.sess.str_of(nm)}); } some(lnk) => { self.report_illegal_read( @@ -1548,7 +1549,7 @@ impl @liveness { fn check_move_from_expr(expr: @expr, vt: vt<@liveness>) { debug!{"check_move_from_expr(node %d: %s)", - expr.id, expr_to_str(expr)}; + expr.id, expr_to_str(expr, self.tcx.sess.intr())}; if self.ir.method_map.contains_key(expr.id) { // actually an rvalue, since this calls a method @@ -1664,13 +1665,14 @@ impl @liveness { self.tcx.sess.span_err( move_span, fmt!{"illegal move from argument `%s`, which is not \ - copy or move mode", *name}); + copy or move mode", self.tcx.sess.str_of(name)}); return; } vk_field(name) => { self.tcx.sess.span_err( move_span, - fmt!{"illegal move from field `%s`", *name}); + fmt!{"illegal move from field `%s`", + self.tcx.sess.str_of(name)}); return; } vk_self => { @@ -1711,12 +1713,12 @@ impl @liveness { lnk_freevar(span) => { self.tcx.sess.span_err( span, - fmt!{"capture of %s: `%s`", msg, *name}); + fmt!{"capture of %s: `%s`", msg, name}); } lnk_expr(span) => { self.tcx.sess.span_err( span, - fmt!{"use of %s: `%s`", msg, *name}); + fmt!{"use of %s: `%s`", msg, name}); } lnk_exit | lnk_vdef(_) => { @@ -1727,9 +1729,9 @@ impl @liveness { } } - fn should_warn(var: variable) -> option<ident> { + fn should_warn(var: variable) -> option<~str> { let name = (*self.ir).variable_name(var); - if (*name)[0] == ('_' as u8) {none} else {some(name)} + if name[0] == ('_' as u8) {none} else {some(name)} } fn warn_about_unused_args(sp: span, decl: fn_decl, entry_ln: live_node) { @@ -1780,10 +1782,10 @@ impl @liveness { if is_assigned { self.tcx.sess.span_warn( sp, fmt!{"variable `%s` is assigned to, \ - but never used", *name}); + but never used", name}); } else { self.tcx.sess.span_warn( - sp, fmt!{"unused variable: `%s`", *name}); + sp, fmt!{"unused variable: `%s`", name}); } } return true; @@ -1796,7 +1798,7 @@ impl @liveness { for self.should_warn(var).each |name| { self.tcx.sess.span_warn( sp, - fmt!{"value assigned to `%s` is never read", *name}); + fmt!{"value assigned to `%s` is never read", name}); } } } diff --git a/src/rustc/middle/mem_categorization.rs b/src/rustc/middle/mem_categorization.rs index c98e65f38cf..8d5be4aa063 100644 --- a/src/rustc/middle/mem_categorization.rs +++ b/src/rustc/middle/mem_categorization.rs @@ -263,7 +263,7 @@ impl &mem_categorization_ctxt { fn cat_expr(expr: @ast::expr) -> cmt { debug!{"cat_expr: id=%d expr=%s", - expr.id, pprust::expr_to_str(expr)}; + expr.id, pprust::expr_to_str(expr, self.tcx.sess.intr())}; let tcx = self.tcx; let expr_ty = tcx.ty(expr); @@ -468,7 +468,8 @@ impl &mem_categorization_ctxt { self.tcx.sess.span_bug( node.span(), fmt!{"Cannot find field `%s` in type `%s`", - *f_name, ty_to_str(self.tcx, base_cmt.ty)}); + self.tcx.sess.str_of(f_name), + ty_to_str(self.tcx, base_cmt.ty)}); } }; let m = self.inherited_mutability(base_cmt.mutbl, 
f_mutbl); @@ -650,12 +651,13 @@ impl &mem_categorization_ctxt { // in the alt, the id of `local(x)->@` is the `@y` pattern, // and the id of `local(x)->@->@` is the id of the `y` pattern. + + let _i = indenter(); + let tcx = self.tcx; debug!{"cat_pattern: id=%d pat=%s cmt=%s", - pat.id, pprust::pat_to_str(pat), + pat.id, pprust::pat_to_str(pat, tcx.sess.intr()), self.cmt_to_repr(cmt)}; - let _i = indenter(); - let tcx = self.tcx; match pat.node { ast::pat_wild => { // _ @@ -767,7 +769,7 @@ impl &mem_categorization_ctxt { fn comp_to_repr(comp: comp_kind) -> ~str { match comp { - comp_field(fld, _) => *fld, + comp_field(fld, _) => self.tcx.sess.str_of(fld), comp_index(*) => ~"[]", comp_tuple => ~"()", comp_variant(_) => ~"<enum>" diff --git a/src/rustc/middle/pat_util.rs b/src/rustc/middle/pat_util.rs index 0b625a63d5a..d3fbf598e13 100644 --- a/src/rustc/middle/pat_util.rs +++ b/src/rustc/middle/pat_util.rs @@ -14,7 +14,7 @@ type pat_id_map = std::map::hashmap<ident, node_id>; // This is used because same-named variables in alternative patterns need to // use the node_id of their namesake in the first pattern. fn pat_id_map(dm: resolve3::DefMap, pat: @pat) -> pat_id_map { - let map = std::map::box_str_hash(); + let map = std::map::uint_hash(); do pat_bindings(dm, pat) |_bm, p_id, _s, n| { map.insert(path_to_ident(n), p_id); }; diff --git a/src/rustc/middle/region.rs b/src/rustc/middle/region.rs index 4071800d4cf..d3c34ceea4d 100644 --- a/src/rustc/middle/region.rs +++ b/src/rustc/middle/region.rs @@ -254,11 +254,13 @@ fn resolve_expr(expr: @ast::expr, cx: ctxt, visitor: visit::vt<ctxt>) { let mut new_cx = cx; match expr.node { ast::expr_call(*) => { - debug!{"node %d: %s", expr.id, pprust::expr_to_str(expr)}; + debug!{"node %d: %s", expr.id, pprust::expr_to_str(expr, + cx.sess.intr())}; new_cx.parent = some(expr.id); } ast::expr_match(subexpr, _, _) => { - debug!{"node %d: %s", expr.id, pprust::expr_to_str(expr)}; + debug!{"node %d: %s", expr.id, pprust::expr_to_str(expr, + cx.sess.intr())}; new_cx.parent = some(expr.id); } ast::expr_fn(_, _, _, cap_clause) | @@ -390,8 +392,9 @@ impl determine_rp_ctxt { fn add_rp(id: ast::node_id) { assert id != 0; if self.region_paramd_items.insert(id, ()) { - debug!{"add region-parameterized item: %d (%s)", - id, ast_map::node_id_to_str(self.ast_map, id)}; + debug!{"add region-parameterized item: %d (%s)", id, + ast_map::node_id_to_str(self.ast_map, id, + self.sess.parse_sess.interner)}; self.worklist.push(id); } else { debug!{"item %d already region-parameterized", id}; @@ -401,8 +404,10 @@ impl determine_rp_ctxt { fn add_dep(from: ast::node_id, to: ast::node_id) { debug!{"add dependency from %d -> %d (%s -> %s)", from, to, - ast_map::node_id_to_str(self.ast_map, from), - ast_map::node_id_to_str(self.ast_map, to)}; + ast_map::node_id_to_str(self.ast_map, from, + self.sess.parse_sess.interner), + ast_map::node_id_to_str(self.ast_map, to, + self.sess.parse_sess.interner)}; let vec = match self.dep_map.find(from) { some(vec) => {vec} none => { @@ -448,9 +453,10 @@ impl determine_rp_ctxt { // that flag to false when we enter a method. 
fn region_is_relevant(r: @ast::region) -> bool { match r.node { - ast::re_anon => self.anon_implies_rp, - ast::re_named(@~"self") => true, - ast::re_named(_) => false + ast::re_anon => self.anon_implies_rp, + ast::re_named(id) => { + id == syntax::parse::token::special_idents::self_ + } } } @@ -511,7 +517,8 @@ fn determine_rp_in_ty(ty: @ast::ty, match ty.node { ast::ty_rptr(r, _) | ast::ty_path(@{rp: some(r), _}, _) => { - debug!{"referenced type with regions %s", pprust::ty_to_str(ty)}; + debug!{"referenced type with regions %s", + pprust::ty_to_str(ty, cx.sess.intr())}; if cx.region_is_relevant(r) { cx.add_rp(cx.item_id); } @@ -520,7 +527,7 @@ fn determine_rp_in_ty(ty: @ast::ty, ast::ty_fn(ast::proto_bare, _, _) | ast::ty_fn(ast::proto_block, _, _) if cx.anon_implies_rp => { debug!("referenced bare fn type with regions %s", - pprust::ty_to_str(ty)); + pprust::ty_to_str(ty, cx.sess.intr())); cx.add_rp(cx.item_id); } @@ -541,7 +548,7 @@ fn determine_rp_in_ty(ty: @ast::ty, let cstore = cx.sess.cstore; if csearch::get_region_param(cstore, did) { debug!{"reference to external, rp'd type %s", - pprust::ty_to_str(ty)}; + pprust::ty_to_str(ty, cx.sess.intr())}; cx.add_rp(cx.item_id); } } diff --git a/src/rustc/middle/resolve3.rs b/src/rustc/middle/resolve3.rs index ad63be3b142..d6b057deed5 100644 --- a/src/rustc/middle/resolve3.rs +++ b/src/rustc/middle/resolve3.rs @@ -57,9 +57,10 @@ import dvec::{DVec, dvec}; import option::{get, is_some}; import str::{connect, split_str}; import vec::pop; +import syntax::parse::token::ident_interner; import std::list::{cons, list, nil}; -import std::map::{hashmap, int_hash, box_str_hash}; +import std::map::{hashmap, int_hash, uint_hash}; import str_eq = str::eq; // Definition mapping @@ -250,63 +251,6 @@ fn Atom(n: uint) -> Atom { return n; } -struct AtomTable { - let atoms: hashmap<@~str,Atom>; - let strings: DVec<@~str>; - let mut atom_count: uint; - - new() { - self.atoms = hashmap::<@~str,Atom>(|x| str::hash(*x), - |x, y| str::eq(*x, *y)); - self.strings = dvec(); - self.atom_count = 0u; - } - - fn intern(string: @~str) -> Atom { - match self.atoms.find(string) { - none => { /* fall through */ } - some(atom) => return atom - } - - let atom = Atom(self.atom_count); - self.atom_count += 1u; - self.atoms.insert(string, atom); - self.strings.push(string); - - return atom; - } - - fn atom_to_str(atom: Atom) -> @~str { - return self.strings.get_elt(atom); - } - - fn atoms_to_strs(atoms: ~[Atom], f: fn(@~str) -> bool) { - for atoms.each |atom| { - if !f(self.atom_to_str(atom)) { - return; - } - } - } - - fn atoms_to_str(atoms: ~[Atom]) -> @~str { - // XXX: str::connect should do this. - let mut result = ~""; - let mut first = true; - for self.atoms_to_strs(atoms) |string| { - if first { - first = false; - } else { - result += ~"::"; - } - - result += *string; - } - - // XXX: Shouldn't copy here. We need string builder functionality. - return @result; - } -} - /// Creates a hash table of atoms. 
fn atom_hashmap<V:copy>() -> hashmap<Atom,V> { hashmap::<Atom,V>(uint::hash, uint::eq) @@ -601,30 +545,30 @@ struct NameBindings { struct PrimitiveTypeTable { let primitive_types: hashmap<Atom,prim_ty>; - new(atom_table: @AtomTable) { + new(intr: ident_interner) { self.primitive_types = atom_hashmap(); - self.intern(atom_table, @~"bool", ty_bool); - self.intern(atom_table, @~"char", ty_int(ty_char)); - self.intern(atom_table, @~"float", ty_float(ty_f)); - self.intern(atom_table, @~"f32", ty_float(ty_f32)); - self.intern(atom_table, @~"f64", ty_float(ty_f64)); - self.intern(atom_table, @~"int", ty_int(ty_i)); - self.intern(atom_table, @~"i8", ty_int(ty_i8)); - self.intern(atom_table, @~"i16", ty_int(ty_i16)); - self.intern(atom_table, @~"i32", ty_int(ty_i32)); - self.intern(atom_table, @~"i64", ty_int(ty_i64)); - self.intern(atom_table, @~"str", ty_str); - self.intern(atom_table, @~"uint", ty_uint(ty_u)); - self.intern(atom_table, @~"u8", ty_uint(ty_u8)); - self.intern(atom_table, @~"u16", ty_uint(ty_u16)); - self.intern(atom_table, @~"u32", ty_uint(ty_u32)); - self.intern(atom_table, @~"u64", ty_uint(ty_u64)); + self.intern(intr, @~"bool", ty_bool); + self.intern(intr, @~"char", ty_int(ty_char)); + self.intern(intr, @~"float", ty_float(ty_f)); + self.intern(intr, @~"f32", ty_float(ty_f32)); + self.intern(intr, @~"f64", ty_float(ty_f64)); + self.intern(intr, @~"int", ty_int(ty_i)); + self.intern(intr, @~"i8", ty_int(ty_i8)); + self.intern(intr, @~"i16", ty_int(ty_i16)); + self.intern(intr, @~"i32", ty_int(ty_i32)); + self.intern(intr, @~"i64", ty_int(ty_i64)); + self.intern(intr, @~"str", ty_str); + self.intern(intr, @~"uint", ty_uint(ty_u)); + self.intern(intr, @~"u8", ty_uint(ty_u8)); + self.intern(intr, @~"u16", ty_uint(ty_u16)); + self.intern(intr, @~"u32", ty_uint(ty_u32)); + self.intern(intr, @~"u64", ty_uint(ty_u64)); } - fn intern(atom_table: @AtomTable, string: @~str, + fn intern(intr: ident_interner, string: @~str, primitive_type: prim_ty) { - let atom = (*atom_table).intern(string); + let atom = intr.intern(string); self.primitive_types.insert(atom, primitive_type); } } @@ -643,7 +587,7 @@ struct Resolver { let lang_items: LanguageItems; let crate: @crate; - let atom_table: @AtomTable; + let intr: ident_interner; let graph_root: @NameBindings; @@ -694,8 +638,6 @@ struct Resolver { self.lang_items = copy lang_items; self.crate = crate; - self.atom_table = @AtomTable(); - // The outermost module has def ID 0; this is not reflected in the // AST. @@ -719,8 +661,9 @@ struct Resolver { self.xray_context = NoXray; self.current_trait_refs = none; - self.self_atom = (*self.atom_table).intern(@~"self"); - self.primitive_type_table = @PrimitiveTypeTable(self.atom_table); + self.self_atom = syntax::parse::token::special_idents::self_; + self.primitive_type_table = @PrimitiveTypeTable(self.session. + parse_sess.interner); self.namespaces = ~[ ModuleNS, TypeNS, ValueNS ]; @@ -728,6 +671,8 @@ struct Resolver { self.export_map = int_hash(); self.export_map2 = int_hash(); self.trait_map = @int_hash(); + + self.intr = session.intr(); } /// The main name resolution procedure. 
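(The AtomTable deleted a few hunks above was resolve3's private string-to-integer table; after this commit every ast::ident is already a small handle into one crate-wide ident_interner, so the resolver can drop its own copy. Below is a minimal sketch of the interning idea, written in present-day Rust with illustrative names; it is not the 2012 rustc API.)

use std::collections::HashMap;

// Interned identifier: a small, copyable handle in place of a boxed string.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Ident(u32);

#[derive(Default)]
struct Interner {
    indices: HashMap<String, u32>, // string -> handle
    strings: Vec<String>,          // handle -> string
}

impl Interner {
    // Return the existing handle for `s`, or mint a new one.
    fn intern(&mut self, s: &str) -> Ident {
        if let Some(&i) = self.indices.get(s) {
            return Ident(i);
        }
        let i = self.strings.len() as u32;
        self.strings.push(s.to_owned());
        self.indices.insert(s.to_owned(), i);
        Ident(i)
    }

    // Inverse mapping, needed whenever a name must be printed or encoded.
    fn get(&self, id: Ident) -> &str {
        &self.strings[id.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("self");
    let b = interner.intern("self");
    assert_eq!(a, b);                    // equality is an integer compare
    assert_eq!(interner.get(a), "self"); // the text is recovered on demand
}

(Because the handle is a plain integer, identifier equality and hashing no longer touch strings, which is also why maps keyed by identifiers switch from box_str_hash to uint_hash elsewhere in this patch.)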
@@ -844,12 +789,12 @@ struct Resolver { self.session.span_err(sp, #fmt("Duplicate definition of %s %s", namespace_to_str(ns), - *(*self.atom_table).atom_to_str(name))); + self.session.str_of(name))); do child.span_for_namespace(ns).iter() |sp| { self.session.span_note(sp, #fmt("First definition of %s %s here:", - namespace_to_str(ns), - *(*self.atom_table).atom_to_str(name))); + namespace_to_str(ns), + self.session.str_of(name))); } } _ => {} @@ -903,7 +848,7 @@ struct Resolver { parent: ReducedGraphParent, &&visitor: vt<ReducedGraphParent>) { - let atom = (*self.atom_table).intern(item.ident); + let atom = item.ident; let sp = item.span; match item.node { @@ -1037,7 +982,7 @@ struct Resolver { for methods.each |method| { let ty_m = trait_method_to_ty_method(method); - let atom = (*self.atom_table).intern(ty_m.ident); + let atom = ty_m.ident; // Add it to the trait info if not static, // add it as a name in the enclosing module otherwise. match ty_m.self_ty.node { @@ -1080,7 +1025,7 @@ struct Resolver { parent: ReducedGraphParent, &&visitor: vt<ReducedGraphParent>) { - let atom = (*self.atom_table).intern(variant.node.name); + let atom = variant.node.name; let (child, _) = self.add_child(atom, parent, ~[ValueNS], variant.span); @@ -1132,9 +1077,7 @@ struct Resolver { for full_path.idents.eachi |i, ident| { if i != path_len - 1u { - let atom = - (*self.atom_table).intern(ident); - (*module_path).push(atom); + (*module_path).push(ident); } } } @@ -1142,8 +1085,7 @@ struct Resolver { view_path_glob(module_ident_path, _) | view_path_list(module_ident_path, _, _) => { for module_ident_path.idents.each |ident| { - let atom = (*self.atom_table).intern(ident); - (*module_path).push(atom); + (*module_path).push(ident); } } } @@ -1152,13 +1094,9 @@ struct Resolver { let module_ = self.get_module_from_parent(parent); match view_path.node { view_path_simple(binding, full_path, _) => { - let target_atom = - (*self.atom_table).intern(binding); let source_ident = full_path.idents.last(); - let source_atom = - (*self.atom_table).intern(source_ident); - let subclass = @SingleImport(target_atom, - source_atom); + let subclass = @SingleImport(binding, + source_ident); self.build_import_directive(module_, module_path, subclass, @@ -1167,8 +1105,7 @@ struct Resolver { view_path_list(_, source_idents, _) => { for source_idents.each |source_ident| { let name = source_ident.node.name; - let atom = (*self.atom_table).intern(name); - let subclass = @SingleImport(atom, atom); + let subclass = @SingleImport(name, name); self.build_import_directive(module_, module_path, subclass, @@ -1204,8 +1141,7 @@ struct Resolver { module"); } - let atom = (*self.atom_table).intern(ident); - module_.exported_names.insert(atom, ident_id); + module_.exported_names.insert(ident, ident_id); } view_path_glob(*) => { @@ -1234,8 +1170,7 @@ struct Resolver { } for path_list_idents.each |path_list_ident| { - let atom = (*self.atom_table).intern - (path_list_ident.node.name); + let atom = path_list_ident.node.name; let id = path_list_ident.node.id; module_.exported_names.insert(atom, id); } @@ -1248,15 +1183,14 @@ struct Resolver { view_item_use(name, _, node_id) => { match find_use_stmt_cnum(self.session.cstore, node_id) { some(crate_id) => { - let atom = (*self.atom_table).intern(name); let (child_name_bindings, new_parent) = // should this be in ModuleNS? 
--tjc - self.add_child(atom, parent, ~[ModuleNS], + self.add_child(name, parent, ~[ModuleNS], view_item.span); let def_id = { crate: crate_id, node: 0 }; let parent_link = ModuleParentLink - (self.get_module_from_parent(new_parent), atom); + (self.get_module_from_parent(new_parent), name); (*child_name_bindings).define_module(parent_link, some(def_id), @@ -1278,7 +1212,7 @@ struct Resolver { &&visitor: vt<ReducedGraphParent>) { - let name = (*self.atom_table).intern(foreign_item.ident); + let name = foreign_item.ident; match foreign_item.node { foreign_item_fn(fn_decl, type_parameters) => { @@ -1398,7 +1332,7 @@ struct Resolver { // to the trait info. match get_method_names_if_trait(self.session.cstore, - def_id) { + def_id) { none => { // Nothing to do. } @@ -1408,13 +1342,12 @@ struct Resolver { let (method_name, self_ty) = method_data; debug!("(building reduced graph for \ external crate) ... adding \ - trait method '%?'", method_name); - - let m_atom = self.atom_table.intern(method_name); + trait method '%s'", + self.session.str_of(method_name)); // Add it to the trait info if not static. if self_ty != sty_static { - interned_method_names.insert(m_atom, ()); + interned_method_names.insert(method_name, ()); } } self.trait_info.insert(def_id, interned_method_names); @@ -1456,23 +1389,24 @@ struct Resolver { for each_path(self.session.cstore, get(root.def_id).crate) |path_entry| { - debug!{"(building reduced graph for external crate) found path \ - entry: %s (%?)", - path_entry.path_string, - path_entry.def_like}; + debug!("(building reduced graph for external crate) found path \ + entry: %s (%?)", + path_entry.path_string, + path_entry.def_like); let mut pieces = split_str(path_entry.path_string, ~"::"); - let final_ident = pop(pieces); + let final_ident_str = pop(pieces); + let final_ident = self.session.ident_of(final_ident_str); // Find the module we need, creating modules along the way if we // need to. let mut current_module = root; - for pieces.each |ident| { + for pieces.each |ident_str| { + let ident = self.session.ident_of(ident_str); // Create or reuse a graph node for the child. - let atom = (*self.atom_table).intern(@copy ident); let (child_name_bindings, new_parent) = - self.add_child(atom, + self.add_child(ident, ModuleReducedGraphParent(current_module), // May want a better span ~[], dummy_sp()); @@ -1481,9 +1415,9 @@ struct Resolver { match child_name_bindings.module_def { NoModuleDef => { debug!{"(building reduced graph for external crate) \ - autovivifying %s", ident}; + autovivifying %s", ident_str}; let parent_link = self.get_parent_link(new_parent, - atom); + ident); (*child_name_bindings).define_module(parent_link, none, dummy_sp()); } @@ -1494,9 +1428,8 @@ struct Resolver { } // Add the new child item. - let atom = (*self.atom_table).intern(@copy final_ident); let (child_name_bindings, new_parent) = - self.add_child(atom, + self.add_child(final_ident, ModuleReducedGraphParent(current_module), ~[], dummy_sp()); @@ -1504,7 +1437,8 @@ struct Resolver { dl_def(def) => { self.handle_external_def(def, modules, child_name_bindings, - final_ident, atom, new_parent); + self.session.str_of(final_ident), + final_ident, new_parent); } dl_impl(_) => { // Because of the infelicitous way the metadata is @@ -1512,11 +1446,11 @@ struct Resolver { // later. 
debug!{"(building reduced graph for external crate) \ - ignoring impl %s", final_ident}; + ignoring impl %s", final_ident_str}; } dl_field => { debug!{"(building reduced graph for external crate) \ - ignoring field %s", final_ident}; + ignoring field %s", final_ident_str}; } } } @@ -1653,6 +1587,21 @@ struct Resolver { } } + fn atoms_to_str(atoms: ~[Atom]) -> ~str { + // XXX: str::connect should do this. + let mut result = ~""; + let mut first = true; + for atoms.each() |atom| { + if first { + first = false; + } else { + result += ~"::"; + } + result += self.session.str_of(atom); + } + // XXX: Shouldn't copy here. We need string builder functionality. + return result; + } /** * Attempts to resolve the given import. The return value indicates * failure if we're certain the name does not exist, indeterminate if we @@ -1669,7 +1618,7 @@ struct Resolver { debug!{"(resolving import for module) resolving import `%s::...` in \ `%s`", - *(*self.atom_table).atoms_to_str((*module_path).get()), + self.atoms_to_str((*module_path).get()), self.module_to_str(module_)}; // One-level renaming imports of the form `import foo = bar;` are @@ -1753,14 +1702,14 @@ struct Resolver { debug!{"(resolving single import) resolving `%s` = `%s::%s` from \ `%s`", - *(*self.atom_table).atom_to_str(target), + self.session.str_of(target), self.module_to_str(containing_module), - *(*self.atom_table).atom_to_str(source), + self.session.str_of(source), self.module_to_str(module_)}; if !self.name_is_exported(containing_module, source) { debug!{"(resolving single import) name `%s` is unexported", - *(*self.atom_table).atom_to_str(source)}; + self.session.str_of(source)}; return Failed; } @@ -1966,7 +1915,7 @@ struct Resolver { if !self.name_is_exported(containing_module, atom) { debug!{"(resolving glob import) name `%s` is unexported", - *(*self.atom_table).atom_to_str(atom)}; + self.session.str_of(atom)}; again; } @@ -2030,7 +1979,7 @@ struct Resolver { for containing_module.children.each |atom, name_bindings| { if !self.name_is_exported(containing_module, atom) { debug!{"(resolving glob import) name `%s` is unexported", - *(*self.atom_table).atom_to_str(atom)}; + self.session.str_of(atom)}; again; } @@ -2050,7 +1999,7 @@ struct Resolver { debug!{"(resolving glob import) writing resolution `%s` in `%s` \ to `%s`", - *(*self.atom_table).atom_to_str(atom), + self.session.str_of(atom), self.module_to_str(containing_module), self.module_to_str(module_)}; @@ -2103,7 +2052,7 @@ struct Resolver { Indeterminate => { debug!{"(resolving module path for import) module \ resolution is indeterminate: %s", - *(*self.atom_table).atom_to_str(name)}; + self.session.str_of(name)}; return Indeterminate; } Success(target) => { @@ -2112,8 +2061,8 @@ struct Resolver { // Not a module. self.session.span_err(span, fmt!{"not a module: %s", - *(*self.atom_table). - atom_to_str(name)}); + self.session. + str_of(name)}); return Failed; } ModuleDef(module_) => { @@ -2144,7 +2093,7 @@ struct Resolver { debug!{"(resolving module path for import) processing `%s` rooted at \ `%s`", - *(*self.atom_table).atoms_to_str((*module_path).get()), + self.atoms_to_str((*module_path).get()), self.module_to_str(module_)}; // The first element of the module path must be in the current scope @@ -2181,7 +2130,7 @@ struct Resolver { debug!{"(resolving item in lexical scope) resolving `%s` in \ namespace %? 
in `%s`", - *(*self.atom_table).atom_to_str(name), + self.session.str_of(name), namespace, self.module_to_str(module_)}; @@ -2307,12 +2256,12 @@ struct Resolver { -> ResolveResult<Target> { debug!{"(resolving name in module) resolving `%s` in `%s`", - *(*self.atom_table).atom_to_str(name), + self.session.str_of(name), self.module_to_str(module_)}; if xray == NoXray && !self.name_is_exported(module_, name) { debug!{"(resolving name in module) name `%s` is unexported", - *(*self.atom_table).atom_to_str(name)}; + self.session.str_of(name)}; return Failed; } @@ -2367,7 +2316,7 @@ struct Resolver { // We're out of luck. debug!{"(resolving name in module) failed to resolve %s", - *(*self.atom_table).atom_to_str(name)}; + self.session.str_of(name)}; return Failed; } @@ -2394,8 +2343,8 @@ struct Resolver { debug!{"(resolving one-level naming result) resolving import `%s` = \ `%s` in `%s`", - *(*self.atom_table).atom_to_str(target_name), - *(*self.atom_table).atom_to_str(source_name), + self.session.str_of(target_name), + self.session.str_of(source_name), self.module_to_str(module_)}; // Find the matching items in the lexical scope chain for every @@ -2509,7 +2458,7 @@ struct Resolver { debug!{"(resolving one-level renaming import) writing module \ result %? for `%s` into `%s`", is_none(module_result), - *(*self.atom_table).atom_to_str(target_name), + self.session.str_of(target_name), self.module_to_str(module_)}; import_resolution.module_target = module_result; @@ -2617,7 +2566,7 @@ struct Resolver { ChildNameDefinition(target_def) => { debug!("(computing exports) found child export '%s' \ for %?", - *self.atom_table.atom_to_str(name), + self.session.str_of(name), module_.def_id); vec::push(exports, { reexp: false, @@ -2625,14 +2574,14 @@ struct Resolver { }); vec::push(exports2, Export2 { reexport: false, - name: copy *self.atom_table.atom_to_str(name), + name: self.session.str_of(name), def_id: def_id_of_def(target_def) }); } ImportNameDefinition(target_def) => { debug!("(computing exports) found reexport '%s' for \ %?", - *self.atom_table.atom_to_str(name), + self.session.str_of(name), module_.def_id); vec::push(exports, { reexp: true, @@ -2640,7 +2589,7 @@ struct Resolver { }); vec::push(exports2, Export2 { reexport: true, - name: copy *self.atom_table.atom_to_str(name), + name: self.session.str_of(name), def_id: def_id_of_def(target_def) }); } @@ -2690,7 +2639,7 @@ struct Resolver { match orig_module.children.find(name) { none => { debug!{"!!! (with scope) didn't find `%s` in `%s`", - *(*self.atom_table).atom_to_str(name), + self.session.str_of(name), self.module_to_str(orig_module)}; } some(name_bindings) => { @@ -2698,7 +2647,7 @@ struct Resolver { none => { debug!{"!!! (with scope) didn't find module \ for `%s` in `%s`", - *(*self.atom_table).atom_to_str(name), + self.session.str_of(name), self.module_to_str(orig_module)}; } some(module_) => { @@ -2867,7 +2816,8 @@ struct Resolver { } fn resolve_item(item: @item, visitor: ResolveVisitor) { - debug!{"(resolving item) resolving %s", *item.ident}; + debug!{"(resolving item) resolving %s", + self.session.str_of(item.ident)}; // Items with the !resolve_unexported attribute are X-ray contexts. // This is used to allow the test runner to run unexported tests. 
@@ -2984,16 +2934,14 @@ struct Resolver { } item_mod(module_) => { - let atom = (*self.atom_table).intern(item.ident); - do self.with_scope(some(atom)) { + do self.with_scope(some(item.ident)) { self.resolve_module(module_, item.span, item.ident, item.id, visitor); } } item_foreign_mod(foreign_module) => { - let atom = (*self.atom_table).intern(item.ident); - do self.with_scope(some(atom)) { + do self.with_scope(some(item.ident)) { for foreign_module.items.each |foreign_item| { match foreign_item.node { foreign_item_fn(_, type_parameters) => { @@ -3021,8 +2969,8 @@ struct Resolver { // of conditionals. if !self.session.building_library && - is_none(self.session.main_fn) && - *item.ident == ~"main" { + is_none(self.session.main_fn) && + item.ident == syntax::parse::token::special_idents::main { self.session.main_fn = some((item.id, item.span)); } @@ -3061,8 +3009,7 @@ struct Resolver { (*self.type_ribs).push(function_type_rib); for (*type_parameters).eachi |index, type_parameter| { - let name = - (*self.atom_table).intern(type_parameter.ident); + let name = type_parameter.ident; debug!{"with_type_parameter_rib: %d %d", node_id, type_parameter.id}; let def_like = dl_def(def_ty_param @@ -3172,7 +3119,7 @@ struct Resolver { } some(declaration) => { for declaration.inputs.each |argument| { - let name = (*self.atom_table).intern(argument.ident); + let name = argument.ident; let def_like = dl_def(def_arg(argument.id, argument.mode)); (*function_value_rib).bindings.insert(name, def_like); @@ -3180,7 +3127,7 @@ struct Resolver { self.resolve_type(argument.ty, visitor); debug!{"(resolving function) recorded argument `%s`", - *(*self.atom_table).atom_to_str(name)}; + self.session.str_of(name)}; } self.resolve_type(declaration.output, visitor); @@ -3443,7 +3390,7 @@ struct Resolver { } fn binding_mode_map(pat: @pat) -> BindingMap { - let result = box_str_hash(); + let result = uint_hash(); do pat_bindings(self.def_map, pat) |binding_mode, _id, sp, path| { let ident = path_to_ident(path); result.insert(ident, @@ -3466,7 +3413,7 @@ struct Resolver { p.span, fmt!{"variable `%s` from pattern #1 is \ not bound in pattern #%u", - *key, i + 1}); + self.session.str_of(key), i + 1}); } some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { @@ -3474,7 +3421,7 @@ struct Resolver { binding_i.span, fmt!{"variable `%s` is bound with different \ mode in pattern #%u than in pattern #1", - *key, i + 1}); + self.session.str_of(key), i + 1}); } } } @@ -3486,7 +3433,7 @@ struct Resolver { binding.span, fmt!{"variable `%s` from pattern #%u is \ not bound in pattern #1", - *key, i + 1}); + self.session.str_of(key), i + 1}); } } } @@ -3549,7 +3496,7 @@ struct Resolver { match self.resolve_path(path, TypeNS, true, visitor) { some(def) => { debug!{"(resolving type) resolved `%s` to type", - *path.idents.last()}; + self.session.str_of(path.idents.last())}; result_def = some(def); } none => { @@ -3564,8 +3511,7 @@ struct Resolver { none => { // Check to see whether the name is a primitive type. if path.idents.len() == 1u { - let name = - (*self.atom_table).intern(path.idents.last()); + let name = path.idents.last(); match self.primitive_type_table .primitive_types @@ -3588,14 +3534,16 @@ struct Resolver { // Write the result into the def map. 
debug!{"(resolving type) writing resolution for `%s` \ (id %d)", - connect(path.idents.map(|x| *x), ~"::"), + connect(path.idents.map( + |x| self.session.str_of(x)), ~"::"), path_id}; self.record_def(path_id, def); } none => { self.session.span_err (ty.span, fmt!{"use of undeclared type name `%s`", - connect(path.idents.map(|x| *x), + connect(path.idents.map( + |x| self.session.str_of(x)), ~"::")}); } } @@ -3630,13 +3578,13 @@ struct Resolver { // matching such a variant is simply disallowed (since // it's rarely what you want). - let atom = (*self.atom_table).intern(path.idents[0]); + let atom = path.idents[0]; match self.resolve_enum_variant_or_const(atom) { FoundEnumVariant(def) if mode == RefutableMode => { debug!{"(resolving pattern) resolving `%s` to \ enum variant", - *path.idents[0]}; + self.session.str_of(atom)}; self.record_def(pattern.id, def); } @@ -3645,9 +3593,8 @@ struct Resolver { fmt!{"declaration of `%s` \ shadows an enum \ that's in scope", - *(*self.atom_table). - atom_to_str - (atom)}); + self.session + .str_of(atom)}); } FoundConst => { self.session.span_err(pattern.span, @@ -3657,7 +3604,7 @@ struct Resolver { } EnumVariantOrConstNotFound => { debug!{"(resolving pattern) binding `%s`", - *path.idents[0]}; + self.session.str_of(atom)}; let is_mutable = mutability == Mutable; @@ -3702,7 +3649,8 @@ struct Resolver { self.session.span_err(pattern.span, fmt!{"Identifier %s is bound more \ than once in the same pattern", - path_to_str(path)}); + path_to_str(path, self.session + .intr())}); } // Not bound in the same pattern: do nothing } @@ -3728,10 +3676,11 @@ struct Resolver { self.record_def(pattern.id, def); } some(_) => { - self.session.span_err(path.span, - fmt!{"not an enum \ - variant: %s", - *path.idents.last()}); + self.session.span_err( + path.span, + fmt!{"not an enum variant: %s", + self.session.str_of( + path.idents.last())}); } none => { self.session.span_err(path.span, @@ -3768,12 +3717,12 @@ struct Resolver { self.record_def(pattern.id, definition); } _ => { - self.session.span_err(path.span, - fmt!("`%s` does not name a \ - structure", - connect(path.idents.map - (|x| *x), - ~"::"))); + self.session.span_err( + path.span, + fmt!("`%s` does not name a structure", + connect(path.idents.map( + |x| self.session.str_of(x)), + ~"::"))); } } } @@ -3888,7 +3837,7 @@ struct Resolver { if xray == NoXray && !self.name_is_exported(containing_module, name) { debug!{"(resolving definition of name in module) name `%s` is \ unexported", - *(*self.atom_table).atom_to_str(name)}; + self.session.str_of(name)}; return NoNameDefinition; } @@ -3948,7 +3897,7 @@ struct Resolver { break; } - (*module_path_atoms).push((*self.atom_table).intern(ident)); + (*module_path_atoms).push(ident); } return module_path_atoms; @@ -3970,8 +3919,8 @@ struct Resolver { Failed => { self.session.span_err(path.span, fmt!{"use of undeclared module `%s`", - *(*self.atom_table).atoms_to_str - ((*module_path_atoms).get())}); + self.atoms_to_str( + (*module_path_atoms).get())}); return none; } @@ -3984,19 +3933,18 @@ struct Resolver { } } - let name = (*self.atom_table).intern(path.idents.last()); + let name = path.idents.last(); match self.resolve_definition_of_name_in_module(containing_module, - name, - namespace, - xray) { + name, + namespace, + xray) { NoNameDefinition => { // We failed to resolve the name. Report an error. 
- self.session.span_err(path.span, - fmt!{"unresolved name: %s::%s", - *(*self.atom_table).atoms_to_str - ((*module_path_atoms).get()), - *(*self.atom_table).atom_to_str - (name)}); + self.session.span_err( + path.span, + fmt!{"unresolved name: %s::%s", + self.atoms_to_str((*module_path_atoms).get()), + self.session.str_of(name)}); return none; } ChildNameDefinition(def) | ImportNameDefinition(def) => { @@ -4024,7 +3972,7 @@ struct Resolver { Failed => { self.session.span_err(path.span, fmt!{"use of undeclared module `::%s`", - *(*self.atom_table).atoms_to_str + self.atoms_to_str ((*module_path_atoms).get())}); return none; } @@ -4038,19 +3986,18 @@ struct Resolver { } } - let name = (*self.atom_table).intern(path.idents.last()); + let name = path.idents.last(); match self.resolve_definition_of_name_in_module(containing_module, name, namespace, xray) { NoNameDefinition => { // We failed to resolve the name. Report an error. - self.session.span_err(path.span, - fmt!{"unresolved name: %s::%s", - *(*self.atom_table).atoms_to_str - ((*module_path_atoms).get()), - *(*self.atom_table).atom_to_str - (name)}); + self.session.span_err( + path.span, + fmt!{"unresolved name: %s::%s", self.atoms_to_str( + (*module_path_atoms).get()), + self.session.str_of(name)}); return none; } ChildNameDefinition(def) | ImportNameDefinition(def) => { @@ -4059,22 +4006,19 @@ struct Resolver { } } - fn resolve_identifier_in_local_ribs(identifier: ident, + fn resolve_identifier_in_local_ribs(ident: ident, namespace: Namespace, span: span) -> option<def> { - - let name = (*self.atom_table).intern(identifier); - // Check the local set of ribs. let mut search_result; match namespace { ValueNS => { - search_result = self.search_ribs(self.value_ribs, name, span, + search_result = self.search_ribs(self.value_ribs, ident, span, DontAllowCapturingSelf); } TypeNS => { - search_result = self.search_ribs(self.type_ribs, name, span, + search_result = self.search_ribs(self.type_ribs, ident, span, AllowCapturingSelf); } ModuleNS => { @@ -4086,7 +4030,7 @@ struct Resolver { some(dl_def(def)) => { debug!{"(resolving path in local ribs) resolved `%s` to \ local: %?", - *(*self.atom_table).atom_to_str(name), + self.session.str_of(ident), def}; return some(def); } @@ -4100,11 +4044,9 @@ struct Resolver { namespace: Namespace) -> option<def> { - let name = (*self.atom_table).intern(ident); - // Check the items. match self.resolve_item_in_lexical_scope(self.current_module, - name, + ident, namespace) { Success(target) => { @@ -4116,7 +4058,7 @@ struct Resolver { some(def) => { debug!{"(resolving item path in lexical scope) \ resolved `%s` to item", - *(*self.atom_table).atom_to_str(name)}; + self.session.str_of(ident)}; return some(def.def); } } @@ -4149,14 +4091,17 @@ struct Resolver { some(def) => { // Write the result into the def map. 
debug!{"(resolving expr) resolved `%s`", - connect(path.idents.map(|x| *x), ~"::")}; + connect(path.idents.map( + |x| self.session.str_of(x)), ~"::")}; self.record_def(expr.id, def); } none => { - self.session.span_err(expr.span, - fmt!{"unresolved name: %s", - connect(path.idents.map(|x| *x), - ~"::")}); + self.session.span_err( + expr.span, + fmt!{"unresolved name: %s", + connect(path.idents.map( + |x| self.session.str_of(x)), + ~"::")}); } } @@ -4202,12 +4147,12 @@ struct Resolver { self.record_def(expr.id, definition); } _ => { - self.session.span_err(path.span, - fmt!{"`%s` does not name a \ - structure", - connect(path.idents.map - (|x| *x), - ~"::")}); + self.session.span_err( + path.span, + fmt!{"`%s` does not name a structure", + connect(path.idents.map( + |x| self.session.str_of(x)), + ~"::")}); } } @@ -4216,22 +4161,21 @@ struct Resolver { expr_loop(_, some(label)) => { do self.with_label_rib { - let atom = self.atom_table.intern(label); let def_like = dl_def(def_label(expr.id)); - self.label_ribs.last().bindings.insert(atom, def_like); + self.label_ribs.last().bindings.insert(label, def_like); visit_expr(expr, (), visitor); } } expr_break(some(label)) | expr_again(some(label)) => { - let atom = self.atom_table.intern(label); - match self.search_ribs(self.label_ribs, atom, expr.span, + match self.search_ribs(self.label_ribs, label, expr.span, DontAllowCapturingSelf) { none => self.session.span_err(expr.span, fmt!("use of undeclared label \ - `%s`", *label)), + `%s`", self.session.str_of( + label))), some(dl_def(def @ def_label(id))) => self.record_def(expr.id, def), some(_) => @@ -4250,8 +4194,7 @@ struct Resolver { fn record_candidate_traits_for_expr_if_necessary(expr: @expr) { match expr.node { expr_field(_, ident, _) => { - let atom = (*self.atom_table).intern(ident); - let traits = self.search_for_traits_containing_method(atom); + let traits = self.search_for_traits_containing_method(ident); self.trait_map.insert(expr.id, traits); } expr_binary(add, _, _) | expr_assign_op(add, _, _) => { @@ -4401,7 +4344,7 @@ struct Resolver { %d:%d for method '%s'", trait_def_id.crate, trait_def_id.node, - *(*self.atom_table).atom_to_str(name)}; + self.session.str_of(name)}; (*found_traits).push(trait_def_id); } some(_) | none => { @@ -4498,6 +4441,7 @@ struct Resolver { } } + // // Diagnostics // @@ -4519,7 +4463,7 @@ struct Resolver { current_module = module_; } BlockParentLink(module_, node_id) => { - atoms.push((*self.atom_table).intern(@~"<opaque>")); + atoms.push(syntax::parse::token::special_idents::opaque); current_module = module_; } } @@ -4535,7 +4479,7 @@ struct Resolver { if i < atoms.len() - 1u { string += ~"::"; } - string += *(*self.atom_table).atom_to_str(atoms.get_elt(i)); + string += self.session.str_of(atoms.get_elt(i)); if i == 0u { break; @@ -4551,7 +4495,7 @@ struct Resolver { debug!{"Children:"}; for module_.children.each |name, _child| { - debug!{"* %s", *(*self.atom_table).atom_to_str(name)}; + debug!{"* %s", self.session.str_of(name)}; } debug!{"Import resolutions:"}; @@ -4584,7 +4528,7 @@ struct Resolver { } debug!{"* %s:%s%s%s", - *(*self.atom_table).atom_to_str(name), + self.session.str_of(name), module_repr, value_repr, type_repr}; } } diff --git a/src/rustc/middle/trans/alt.rs b/src/rustc/middle/trans/alt.rs index e12070e265d..40623e9af93 100644 --- a/src/rustc/middle/trans/alt.rs +++ b/src/rustc/middle/trans/alt.rs @@ -95,7 +95,7 @@ type bind_map = ~[{ fn assoc(key: ast::ident, list: bind_map) -> option<binding> { for vec::each(list) |elt| { - if 
str::eq(elt.ident, key) { + if elt.ident == key { return some(elt.binding); } } @@ -232,7 +232,7 @@ fn enter_rec_or_struct(bcx: block, dm: DefMap, m: match_, col: uint, for vec::each(fields) |fname| { let mut pat = dummy; for vec::each(fpats) |fpat| { - if str::eq(fpat.ident, fname) { pat = fpat.pat; break; } + if fpat.ident == fname { pat = fpat.pat; break; } } vec::push(pats, pat); } @@ -334,7 +334,7 @@ fn collect_record_fields(m: match_, col: uint) -> ~[ast::ident] { match br.pats[col].node { ast::pat_rec(fs, _) => { for vec::each(fs) |f| { - if !vec::any(fields, |x| str::eq(f.ident, x)) { + if !vec::any(fields, |x| f.ident == x) { vec::push(fields, f.ident); } } @@ -351,7 +351,7 @@ fn collect_struct_fields(m: match_, col: uint) -> ~[ast::ident] { match br.pats[col].node { ast::pat_struct(_, fs, _) => { for vec::each(fs) |f| { - if !vec::any(fields, |x| str::eq(f.ident, x)) { + if !vec::any(fields, |x| f.ident == x) { vec::push(fields, f.ident); } } @@ -550,7 +550,7 @@ fn compile_submatch(bcx: block, m: match_, vals: ~[ValueRef], } // Index the class fields. - let field_map = std::map::box_str_hash(); + let field_map = std::map::uint_hash(); for class_fields.eachi |i, class_field| { field_map.insert(class_field.ident, i); } @@ -951,7 +951,7 @@ fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef, } // Index the class fields. - let field_map = std::map::box_str_hash(); + let field_map = std::map::uint_hash(); for class_fields.eachi |i, class_field| { field_map.insert(class_field.ident, i); } diff --git a/src/rustc/middle/trans/base.rs b/src/rustc/middle/trans/base.rs index 1def54d5ebf..2f838ad8d40 100644 --- a/src/rustc/middle/trans/base.rs +++ b/src/rustc/middle/trans/base.rs @@ -49,6 +49,7 @@ import type_of::*; import common::*; import common::result; import syntax::ast_map::{path, path_mod, path_name}; +import syntax::parse::token::special_idents; import std::smallintmap; import option::{is_none, is_some}; @@ -500,8 +501,8 @@ fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info { let llalign = llalign_of(ccx, llty); //XXX this triggers duplicate LLVM symbols let name = if false /*ccx.sess.opts.debuginfo*/ { - mangle_internal_name_by_type_only(ccx, t, @~"tydesc") - } else { mangle_internal_name_by_seq(ccx, @~"tydesc") }; + mangle_internal_name_by_type_only(ccx, t, ~"tydesc") + } else { mangle_internal_name_by_seq(ccx, ~"tydesc") }; note_unique_llvm_symbol(ccx, name); log(debug, fmt!{"+++ declare_tydesc %s %s", ty_to_str(ccx.tcx, t), name}); let gvar = str::as_c_str(name, |buf| { @@ -529,9 +530,9 @@ fn declare_generic_glue(ccx: @crate_ctxt, t: ty::t, llfnty: TypeRef, let mut fn_nm; //XXX this triggers duplicate LLVM symbols if false /*ccx.sess.opts.debuginfo*/ { - fn_nm = mangle_internal_name_by_type_only(ccx, t, @(~"glue_" + name)); + fn_nm = mangle_internal_name_by_type_only(ccx, t, (~"glue_" + name)); } else { - fn_nm = mangle_internal_name_by_seq(ccx, @(~"glue_" + name)); + fn_nm = mangle_internal_name_by_seq(ccx, (~"glue_" + name)); } note_unique_llvm_symbol(ccx, fn_nm); let llfn = decl_cdecl_fn(ccx.llmod, fn_nm, llfnty); @@ -698,8 +699,9 @@ fn incr_refcnt_of_boxed(cx: block, box_ptr: ValueRef) { fn make_visit_glue(bcx: block, v: ValueRef, t: ty::t) { let _icx = bcx.insn_ctxt("make_visit_glue"); let mut bcx = bcx; - assert bcx.ccx().tcx.intrinsic_defs.contains_key(@~"ty_visitor"); - let (trait_id, ty) = bcx.ccx().tcx.intrinsic_defs.get(@~"ty_visitor"); + let ty_visitor_name = special_idents::ty_visitor; + assert 
bcx.ccx().tcx.intrinsic_defs.contains_key(ty_visitor_name); + let (trait_id, ty) = bcx.ccx().tcx.intrinsic_defs.get(ty_visitor_name); let v = PointerCast(bcx, v, T_ptr(type_of::type_of(bcx.ccx(), ty))); bcx = reflect::emit_calls_to_trait_visit_ty(bcx, t, v, trait_id); build_return(bcx); @@ -1720,7 +1722,7 @@ fn trans_eager_binop(cx: block, span: span, op: ast::binop, lhs: ValueRef, fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop, dst: @ast::expr, src: @ast::expr) -> block { - debug!{"%s", expr_to_str(ex)}; + debug!{"%s", expr_to_str(ex, bcx.tcx().sess.parse_sess.interner)}; let _icx = bcx.insn_ctxt("trans_assign_op"); let t = expr_ty(bcx, src); let lhs_res = trans_lval(bcx, dst); @@ -1731,7 +1733,8 @@ fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop, some(origin) => { let bcx = lhs_res.bcx; debug!{"user-defined method callee_id: %s", - ast_map::node_id_to_str(bcx.tcx().items, ex.callee_id)}; + ast_map::node_id_to_str(bcx.tcx().items, ex.callee_id, + bcx.sess().parse_sess.interner)}; let fty = node_id_type(bcx, ex.callee_id); let dty = expr_ty(bcx, dst); @@ -2164,7 +2167,8 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, must_cast: true}; } ast_map::node_ctor(nm, _, ct, _, pt) => (pt, nm, ct.span), - ast_map::node_dtor(_, dtor, _, pt) => (pt, @~"drop", dtor.span), + ast_map::node_dtor(_, dtor, _, pt) => + (pt, special_idents::dtor, dtor.span), ast_map::node_trait_method(*) => { ccx.tcx.sess.bug(~"Can't monomorphize a trait method") } @@ -2198,7 +2202,8 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, } ccx.monomorphizing.insert(fn_id, depth + 1u); - let pt = vec::append(*pt, ~[path_name(@ccx.names(*name))]); + let pt = vec::append(*pt, + ~[path_name(ccx.names(ccx.sess.str_of(name)))]); let s = mangle_exported_name(ccx, pt, mono_ty); let mk_lldecl = || { @@ -2897,9 +2902,9 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr, &temp_cleanups: ~[ValueRef], ret_flag: option<ValueRef>, derefs: uint) -> result { - debug!{"+++ trans_arg_expr on %s", expr_to_str(e)}; let _icx = cx.insn_ctxt("trans_arg_expr"); let ccx = cx.ccx(); + debug!{"+++ trans_arg_expr on %s", expr_to_str(e, ccx.sess.intr())}; let e_ty = expr_ty(cx, e); let is_bot = ty::type_is_bot(e_ty); @@ -3436,9 +3441,8 @@ fn trans_rec(bcx: block, fields: ~[ast::field], let mut temp_cleanups = ~[]; for fields.each |fld| { - let ix = option::get(vec::position(ty_fields, |ft| { - str::eq(fld.node.ident, ft.ident) - })); + let ix = option::get(vec::position(ty_fields, + |ft| ft.ident == fld.node.ident)); let dst = GEPi(bcx, addr, ~[0u, ix]); bcx = trans_expr_save_in(bcx, fld.node.expr, dst); add_clean_temp_mem(bcx, dst, ty_fields[ix].mt.ty); @@ -3450,7 +3454,7 @@ fn trans_rec(bcx: block, fields: ~[ast::field], bcx = cx; // Copy over inherited fields for ty_fields.eachi |i, tf| { - if !vec::any(fields, |f| str::eq(f.node.ident, tf.ident)) { + if !vec::any(fields, |f| f.node.ident == tf.ident) { let dst = GEPi(bcx, addr, ~[0u, i]); let base = GEPi(bcx, base_val, ~[0u, i]); let val = load_if_immediate(bcx, base, tf.mt.ty); @@ -3533,7 +3537,7 @@ fn trans_struct(block_context: block, span: span, fields: ~[ast::field], for fields.each |field| { let mut found = none; for class_fields.eachi |i, class_field| { - if str::eq(class_field.ident, field.node.ident) { + if class_field.ident == field.node.ident { found = some((i, class_field.id)); break; } @@ -3572,7 +3576,7 @@ fn trans_struct(block_context: block, span: span, fields: ~[ast::field], // Copy over inherited fields. 
for class_fields.eachi |i, class_field| { let exists = do vec::any(fields) |provided_field| { - str::eq(provided_field.node.ident, class_field.ident) + provided_field.node.ident == class_field.ident }; if exists { again; @@ -3809,7 +3813,7 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block { match check ty::get(expr_ty(bcx, e)).struct { ty::ty_fn({proto, _}) => { debug!{"translating fn_block %s with type %s", - expr_to_str(e), + expr_to_str(e, tcx.sess.intr()), ppaux::ty_to_str(tcx, expr_ty(bcx, e))}; return closure::trans_expr_fn(bcx, proto, decl, body, e.id, cap_clause, none, dest); @@ -3941,8 +3945,8 @@ fn lval_to_dps(bcx: block, e: @ast::expr, dest: dest) -> block { let ty = expr_ty(bcx, e); let lv = trans_lval(bcx, e); let last_use = (lv.kind == lv_owned && last_use_map.contains_key(e.id)); - debug!{"is last use (%s) = %b, %d", expr_to_str(e), last_use, - lv.kind as int}; + debug!{"is last use (%s) = %b, %d", expr_to_str(e, bcx.ccx().sess.intr()), + last_use, lv.kind as int}; lval_result_to_dps(lv, ty, last_use, dest) } @@ -4016,17 +4020,17 @@ fn trans_log(log_ex: @ast::expr, lvl: @ast::expr, } let modpath = vec::append( - ~[path_mod(ccx.link_meta.name)], + ~[path_mod(ccx.sess.ident_of(ccx.link_meta.name))], vec::filter(bcx.fcx.path, |e| match e { path_mod(_) => true, _ => false } )); - let modname = path_str(modpath); + let modname = path_str(ccx.sess, modpath); let global = if ccx.module_data.contains_key(modname) { ccx.module_data.get(modname) } else { let s = link::mangle_internal_name_by_path_and_seq( - ccx, modpath, @~"loglevel"); + ccx, modpath, ~"loglevel"); let global = str::as_c_str(s, |buf| { llvm::LLVMAddGlobal(ccx.llmod, T_i32(), buf) }); @@ -4061,7 +4065,8 @@ fn trans_log(log_ex: @ast::expr, lvl: @ast::expr, fn trans_check_expr(bcx: block, chk_expr: @ast::expr, pred_expr: @ast::expr, s: ~str) -> block { let _icx = bcx.insn_ctxt("trans_check_expr"); - let expr_str = s + ~" " + expr_to_str(pred_expr) + ~" failed"; + let expr_str = s + ~" " + expr_to_str(pred_expr, bcx.ccx().sess.intr()) + + ~" failed"; let {bcx, val} = { do with_scope_result(bcx, chk_expr.info(), ~"check") |bcx| { trans_temp_expr(bcx, pred_expr) @@ -4292,10 +4297,10 @@ fn init_local(bcx: block, local: @ast::local) -> block { fn trans_stmt(cx: block, s: ast::stmt) -> block { let _icx = cx.insn_ctxt("trans_stmt"); - debug!{"trans_stmt(%s)", stmt_to_str(s)}; + debug!{"trans_stmt(%s)", stmt_to_str(s, cx.tcx().sess.intr())}; if !cx.sess().no_asm_comments() { - add_span_comment(cx, s.span, stmt_to_str(s)); + add_span_comment(cx, s.span, stmt_to_str(s, cx.ccx().sess.intr())); } let mut bcx = cx; @@ -4331,8 +4336,8 @@ fn new_block(cx: fn_ctxt, parent: option<block>, +kind: block_kind, let s = if cx.ccx.sess.opts.save_temps || cx.ccx.sess.opts.debuginfo { cx.ccx.names(name) - } else { ~"" }; - let llbb: BasicBlockRef = str::as_c_str(s, |buf| { + } else { special_idents::invalid }; + let llbb: BasicBlockRef = str::as_c_str(cx.ccx.sess.str_of(s), |buf| { llvm::LLVMAppendBasicBlock(cx.llfn, buf) }); let bcx = mk_block(llbb, parent, kind, is_lpad, opt_node_info, cx); @@ -4543,7 +4548,7 @@ fn alloc_local(cx: block, local: @ast::local) -> block { let val = alloc_ty(cx, t); if cx.sess().opts.debuginfo { do option::iter(simple_name) |name| { - str::as_c_str(*name, |buf| { + str::as_c_str(cx.ccx().sess.str_of(name), |buf| { llvm::LLVMSetValueName(val, buf) }); } @@ -4808,7 +4813,7 @@ fn trans_fn(ccx: @crate_ctxt, |_bcx| { }); if do_time { let end = time::get_time(); - log_fn_time(ccx, path_str(path), start, 
end); + log_fn_time(ccx, path_str(ccx.sess, path), start, end); } } @@ -4824,7 +4829,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, let fn_args = vec::map(args, |varg| {mode: ast::expl(ast::by_copy), ty: varg.ty, - ident: @~"arg", + ident: special_idents::arg, id: varg.id}); let fcx = new_fn_ctxt_w_id(ccx, ~[], llfndecl, variant.node.id, param_substs, none); @@ -5129,7 +5134,8 @@ fn register_fn_fuller(ccx: @crate_ctxt, sp: span, path: path, ccx.item_symbols.insert(node_id, ps); debug!{"register_fn_fuller created fn %s for item %d with path %s", - val_str(ccx.tn, llfn), node_id, ast_map::path_to_str(path)}; + val_str(ccx.tn, llfn), node_id, + ast_map::path_to_str(path, ccx.sess.parse_sess.interner)}; let is_main = is_main_name(path) && !ccx.sess.building_library; if is_main { create_main_wrapper(ccx, sp, llfn, node_type); } @@ -5252,7 +5258,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id, none if is_none(substs) => { let s = mangle_exported_name( ccx, - vec::append(path, ~[path_name(@ccx.names(~"dtor"))]), + vec::append(path, ~[path_name(ccx.names(~"dtor"))]), t); ccx.item_symbols.insert(id, s); s @@ -5266,7 +5272,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id, mangle_exported_name( ccx, vec::append(path, - ~[path_name(@ccx.names(~"dtor"))]), + ~[path_name(ccx.names(~"dtor"))]), mono_ty) } none => { @@ -5397,7 +5403,7 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef { fn register_method(ccx: @crate_ctxt, id: ast::node_id, pth: @ast_map::path, m: @ast::method) -> ValueRef { let mty = ty::node_id_to_type(ccx.tcx, id); - let pth = vec::append(*pth, ~[path_name(@ccx.names(~"meth")), + let pth = vec::append(*pth, ~[path_name(ccx.names(~"meth")), path_name(m.ident)]); let llfn = register_fn_full(ccx, m.span, pth, id, mty); set_inline_hint_if_appr(m.attrs, llfn); @@ -5415,7 +5421,7 @@ fn trans_constant(ccx: @crate_ctxt, it: @ast::item) { let path = item_path(ccx, it); for vec::each(enum_definition.variants) |variant| { let p = vec::append(path, ~[path_name(variant.node.name), - path_name(@~"discrim")]); + path_name(special_idents::descrim)]); let s = mangle_exported_name(ccx, p, ty::mk_int(ccx.tcx)); let disr_val = vi[i].disr_val; note_unique_llvm_symbol(ccx, s); @@ -5535,7 +5541,7 @@ fn gather_local_rtcalls(ccx: @crate_ctxt, crate: @ast::crate) { do vec::iter(attr_metas) |attr_meta| { match attr::get_meta_item_list(attr_meta) { some(list) => { - let name = *attr::get_meta_item_name(vec::head(list)); + let name = attr::get_meta_item_name(vec::head(list)); push_rtcall(ccx, name, {crate: ast::local_crate, node: item.id}); } @@ -5551,7 +5557,7 @@ fn gather_local_rtcalls(ccx: @crate_ctxt, crate: @ast::crate) { fn gather_external_rtcalls(ccx: @crate_ctxt) { do cstore::iter_crate_data(ccx.sess.cstore) |_cnum, cmeta| { - do decoder::each_path(cmeta) |path| { + do decoder::each_path(ccx.sess.intr(), cmeta) |path| { let pathname = path.path_string; match path.def_like { decoder::dl_def(d) => { @@ -5624,7 +5630,7 @@ fn decl_crate_map(sess: session::session, mapmeta: link_meta, let cstore = sess.cstore; while cstore::have_crate_data(cstore, n_subcrates) { n_subcrates += 1; } let mapname = if sess.building_library { - *mapmeta.name + ~"_" + *mapmeta.vers + ~"_" + mapmeta.extras_hash + mapmeta.name + ~"_" + mapmeta.vers + ~"_" + mapmeta.extras_hash } else { ~"toplevel" }; let sym_name = ~"_rust_crate_map_" + mapname; let arrtype = T_array(int_type, n_subcrates as uint); @@ -5643,8 +5649,8 @@ fn fill_crate_map(ccx: @crate_ctxt, map: ValueRef) { 
while cstore::have_crate_data(cstore, i) { let cdata = cstore::get_crate_data(cstore, i); let nm = ~"_rust_crate_map_" + cdata.name + - ~"_" + *cstore::get_crate_vers(cstore, i) + - ~"_" + *cstore::get_crate_hash(cstore, i); + ~"_" + cstore::get_crate_vers(cstore, i) + + ~"_" + cstore::get_crate_hash(cstore, i); let cr = str::as_c_str(nm, |buf| { llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf) }); @@ -5683,8 +5689,8 @@ fn crate_ctxt_to_encode_parms(cx: @crate_ctxt) if !def.reexp { again; } let path = match check cx.tcx.items.get(exp_id) { ast_map::node_export(_, path) => { - - ast_map::path_to_str(*path) + ast_map::path_to_str(*path, + cx.sess.parse_sess.interner) } }; vec::push(reexports, (path, def.id)); @@ -5746,7 +5752,7 @@ fn trans_crate(sess: session::session, // crashes if the module identifer is same as other symbols // such as a function name in the module. // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 - let llmod_id = *link_meta.name + ~".rc"; + let llmod_id = link_meta.name + ~".rc"; let llmod = str::as_c_str(llmod_id, |buf| { llvm::LLVMModuleCreateWithNameInContext @@ -5776,7 +5782,7 @@ fn trans_crate(sess: session::session, lib::llvm::associate_type(tn, ~"tydesc", tydesc_type); let crate_map = decl_crate_map(sess, link_meta, llmod); let dbg_cx = if sess.opts.debuginfo { - option::some(debuginfo::mk_ctxt(llmod_id)) + option::some(debuginfo::mk_ctxt(llmod_id, sess.parse_sess.interner)) } else { option::none }; @@ -5808,7 +5814,7 @@ fn trans_crate(sess: session::session, const_globals: int_hash::<ValueRef>(), module_data: str_hash::<ValueRef>(), lltypes: ty::new_ty_hash(), - names: new_namegen(), + names: new_namegen(sess.parse_sess.interner), symbol_hasher: symbol_hasher, type_hashcodes: ty::new_ty_hash(), type_short_names: ty::new_ty_hash(), diff --git a/src/rustc/middle/trans/closure.rs b/src/rustc/middle/trans/closure.rs index 3304f0c4a54..7b2ba7eee3b 100644 --- a/src/rustc/middle/trans/closure.rs +++ b/src/rustc/middle/trans/closure.rs @@ -263,7 +263,9 @@ fn build_closure(bcx0: block, let lv = trans_local_var(bcx, cap_var.def); let nid = ast_util::def_id_of_def(cap_var.def).node; debug!{"Node id is %s", - syntax::ast_map::node_id_to_str(bcx.ccx().tcx.items, nid)}; + syntax::ast_map::node_id_to_str + (bcx.ccx().tcx.items, nid, + bcx.ccx().sess.parse_sess.interner)}; let mut ty = node_id_type(bcx, nid); match cap_var.mode { capture::cap_ref => { @@ -359,7 +361,8 @@ fn trans_expr_fn(bcx: block, let ccx = bcx.ccx(); let fty = node_id_type(bcx, id); let llfnty = type_of_fn_from_ty(ccx, fty); - let sub_path = vec::append_one(bcx.fcx.path, path_name(@~"anon")); + let sub_path = vec::append_one(bcx.fcx.path, + path_name(special_idents::anon)); let s = mangle_internal_name_by_path(ccx, sub_path); let llfn = decl_internal_cdecl_fn(ccx.llmod, s, llfnty); diff --git a/src/rustc/middle/trans/common.rs b/src/rustc/middle/trans/common.rs index 8414995181f..19f591f4885 100644 --- a/src/rustc/middle/trans/common.rs +++ b/src/rustc/middle/trans/common.rs @@ -20,11 +20,14 @@ import metadata::{csearch}; import metadata::common::link_meta; import syntax::ast_map::path; import util::ppaux::ty_to_str; +import syntax::parse::token::ident_interner; +import syntax::ast::ident; -type namegen = fn@(~str) -> ~str; -fn new_namegen() -> namegen { - let i = @mut 0; - return fn@(prefix: ~str) -> ~str { *i += 1; prefix + int::str(*i) }; +type namegen = fn@(~str) -> ident; +fn new_namegen(intr: ident_interner) -> namegen { + return fn@(prefix: ~str) -> ident { + return intr.gensym(@fmt!("%s_%u", prefix, 
intr.gensym(@prefix))) + }; } type tydesc_info = @@ -873,7 +876,7 @@ fn C_cstr(cx: @crate_ctxt, s: ~str) -> ValueRef { llvm::LLVMConstString(buf, str::len(s) as c_uint, False) }; let g = - str::as_c_str(cx.names(~"str"), + str::as_c_str(fmt!{"str%u", cx.names(~"str")}, |buf| llvm::LLVMAddGlobal(cx.llmod, val_ty(sc), buf)); llvm::LLVMSetInitializer(g, sc); llvm::LLVMSetGlobalConstant(g, True); @@ -927,7 +930,7 @@ fn C_bytes(bytes: ~[u8]) -> ValueRef unsafe { fn C_shape(ccx: @crate_ctxt, bytes: ~[u8]) -> ValueRef { let llshape = C_bytes(bytes); - let llglobal = str::as_c_str(ccx.names(~"shape"), |buf| { + let llglobal = str::as_c_str(fmt!{"shape%u", ccx.names(~"shape")}, |buf| { llvm::LLVMAddGlobal(ccx.llmod, val_ty(llshape), buf) }); llvm::LLVMSetInitializer(llglobal, llshape); @@ -983,13 +986,13 @@ fn align_to(cx: block, off: ValueRef, align: ValueRef) -> ValueRef { return build::And(cx, bumped, build::Not(cx, mask)); } -fn path_str(p: path) -> ~str { +fn path_str(sess: session::session, p: path) -> ~str { let mut r = ~"", first = true; for vec::each(p) |e| { match e { ast_map::path_name(s) | ast_map::path_mod(s) => { if first { first = false; } else { r += ~"::"; } - r += *s; + r += sess.str_of(s); } } } r @@ -1023,7 +1026,7 @@ fn field_idx_strict(cx: ty::ctxt, sp: span, ident: ast::ident, match ty::field_idx(ident, fields) { none => cx.sess.span_bug( sp, fmt!{"base expr doesn't appear to \ - have a field named %s", *ident}), + have a field named %s", cx.sess.str_of(ident)}), some(i) => i } } diff --git a/src/rustc/middle/trans/debuginfo.rs b/src/rustc/middle/trans/debuginfo.rs index 68bdd983d8b..b1b70fe98a8 100644 --- a/src/rustc/middle/trans/debuginfo.rs +++ b/src/rustc/middle/trans/debuginfo.rs @@ -7,6 +7,7 @@ import trans::base; import trans::build::B; import middle::ty; import syntax::{ast, codemap, ast_util, ast_map}; +import syntax::parse::token::ident_interner; import codemap::span; import ast::ty; import pat_util::*; @@ -89,9 +90,9 @@ type debug_ctxt = { crate_file: ~str }; -fn mk_ctxt(crate: ~str) -> debug_ctxt { +fn mk_ctxt(crate: ~str, intr: ident_interner) -> debug_ctxt { {llmetadata: map::int_hash(), - names: new_namegen(), + names: new_namegen(intr), crate_file: crate} } @@ -392,14 +393,15 @@ fn create_record(cx: @crate_ctxt, t: ty::t, fields: ~[ast::ty_field], let fname = filename_from_span(cx, span); let file_node = create_file(cx, fname); let scx = create_structure(file_node, - option::get(cx.dbg_cx).names(~"rec"), + cx.sess.str_of( + option::get(cx.dbg_cx).names(~"rec")), line_from_span(cx.sess.codemap, span) as int); for fields.each |field| { let field_t = ty::get_field(t, field.node.ident).mt.ty; let ty_md = create_ty(cx, field_t, field.node.mt.ty); let (size, align) = size_and_align_of(cx, field_t); - add_member(scx, *field.node.ident, + add_member(scx, cx.sess.str_of(field.node.ident), line_from_span(cx.sess.codemap, field.span) as int, size as int, align as int, ty_md.node); } @@ -635,7 +637,7 @@ fn create_local_var(bcx: block, local: @ast::local) none => create_function(bcx.fcx).node, some(_) => create_block(bcx).node }; - let mdnode = create_var(tg, context, *name, filemd.node, + let mdnode = create_var(tg, context, cx.sess.str_of(name), filemd.node, loc.line as int, tymd.node); let mdval = @{node: mdnode, data: {id: local.node.id}}; update_cache(cache, AutoVariableTag, local_var_metadata(mdval)); @@ -677,8 +679,8 @@ fn create_arg(bcx: block, arg: ast::arg, sp: span) let tymd = create_ty(cx, ty, arg.ty); let filemd = create_file(cx, loc.file.name); let context = 
create_function(bcx.fcx); - let mdnode = create_var(tg, context.node, *arg.ident, filemd.node, - loc.line as int, tymd.node); + let mdnode = create_var(tg, context.node, cx.sess.str_of(arg.ident), + filemd.node, loc.line as int, tymd.node); let mdval = @{node: mdnode, data: {id: arg.id}}; update_cache(cache, tg, argument_metadata(mdval)); @@ -736,10 +738,10 @@ fn create_function(fcx: fn_ctxt) -> @metadata<subprogram_md> { ast_map::node_expr(expr) => { match expr.node { ast::expr_fn(_, decl, _, _) => { - (@dbg_cx.names(~"fn"), decl.output, expr.id) + (dbg_cx.names(~"fn"), decl.output, expr.id) } ast::expr_fn_block(decl, _, _) => { - (@dbg_cx.names(~"fn"), decl.output, expr.id) + (dbg_cx.names(~"fn"), decl.output, expr.id) } _ => fcx.ccx.sess.span_bug(expr.span, ~"create_function: \ @@ -778,8 +780,9 @@ fn create_function(fcx: fn_ctxt) -> @metadata<subprogram_md> { let fn_metadata = ~[lltag(SubprogramTag), llunused(), file_node, - llstr(*ident), - llstr(*ident), //XXX fully-qualified C++ name + llstr(cx.sess.str_of(ident)), + //XXX fully-qualified C++ name: + llstr(cx.sess.str_of(ident)), llstr(~""), //XXX MIPS name????? file_node, lli32(loc.line as int), diff --git a/src/rustc/middle/trans/foreign.rs b/src/rustc/middle/trans/foreign.rs index 50c86ddd86a..3b671b11483 100644 --- a/src/rustc/middle/trans/foreign.rs +++ b/src/rustc/middle/trans/foreign.rs @@ -412,10 +412,10 @@ fn decl_x86_64_fn(tys: x86_64_tys, return llfn; } -fn link_name(i: @ast::foreign_item) -> ~str { +fn link_name(ccx: @crate_ctxt, i: @ast::foreign_item) -> ~str { match attr::first_attr_value_str_by_name(i.attrs, ~"link_name") { - none => return *i.ident, - option::some(ln) => return *ln + none => ccx.sess.str_of(i.ident), + option::some(ln) => ln } } @@ -669,7 +669,7 @@ fn trans_foreign_mod(ccx: @crate_ctxt, } } - let lname = link_name(foreign_item); + let lname = link_name(ccx, foreign_item); let llbasefn = base_fn(ccx, lname, tys, cc); // Name the shim function let shim_name = lname + ~"__c_stack_shim"; @@ -700,7 +700,7 @@ fn trans_foreign_mod(ccx: @crate_ctxt, cc: lib::llvm::CallConv) { let fcx = new_fn_ctxt(ccx, ~[], decl, none); let bcx = top_scope_block(fcx, none), lltop = bcx.llbb; - let llbasefn = base_fn(ccx, link_name(item), tys, cc); + let llbasefn = base_fn(ccx, link_name(ccx, item), tys, cc); let ty = ty::lookup_item_type(ccx.tcx, ast_util::local_def(item.id)).ty; let args = vec::from_fn(ty::ty_fn_args(ty).len(), |i| { @@ -799,7 +799,7 @@ fn trans_intrinsic(ccx: @crate_ctxt, decl: ValueRef, item: @ast::foreign_item, let fcx = new_fn_ctxt_w_id(ccx, path, decl, item.id, some(substs), some(item.span)); let mut bcx = top_scope_block(fcx, none), lltop = bcx.llbb; - match *item.ident { + match ccx.sess.str_of(item.ident) { // NB: Transitionary, de-mode-ing. Remove the first string of each // pattern when the old intrinsics are gone. 
~"atomic_xchng" | ~"atomic_xchg" => { @@ -1001,7 +1001,9 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, let _icx = ccx.insn_ctxt("foreign::foreign::build_rust_fn"); let t = ty::node_id_to_type(ccx.tcx, id); let ps = link::mangle_internal_name_by_path( - ccx, vec::append_one(path, ast_map::path_name(@~"__rust_abi"))); + ccx, vec::append_one(path, ast_map::path_name( + syntax::parse::token::special_idents::clownshoe_abi + ))); let llty = type_of_fn_from_ty(ccx, t); let llfndecl = decl_internal_cdecl_fn(ccx.llmod, ps, llty); trans_fn(ccx, path, decl, body, llfndecl, no_self, none, id); @@ -1038,8 +1040,9 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl, } let shim_name = link::mangle_internal_name_by_path( - ccx, vec::append_one(path, - ast_map::path_name(@~"__rust_stack_shim"))); + ccx, vec::append_one(path, ast_map::path_name( + syntax::parse::token::special_idents::clownshoe_stack_shim + ))); return build_shim_fn_(ccx, shim_name, llrustfn, tys, lib::llvm::CCallConv, build_args, build_ret); diff --git a/src/rustc/middle/trans/impl.rs b/src/rustc/middle/trans/impl.rs index bc0822bec82..2c927d2cc5d 100644 --- a/src/rustc/middle/trans/impl.rs +++ b/src/rustc/middle/trans/impl.rs @@ -163,7 +163,7 @@ fn trans_static_method_callee(bcx: block, method_id: ast::def_id, } }; debug!("trans_static_method_callee: method_id=%?, callee_id=%?, \ - name=%s", method_id, callee_id, *mname); + name=%s", method_id, callee_id, ccx.sess.str_of(mname)); let vtbls = resolve_vtables_in_fn_ctxt( bcx.fcx, ccx.maps.vtable_map.get(callee_id)); @@ -361,7 +361,7 @@ fn get_vtable(ccx: @crate_ctxt, origin: typeck::vtable_origin) fn make_vtable(ccx: @crate_ctxt, ptrs: ~[ValueRef]) -> ValueRef { let _icx = ccx.insn_ctxt("impl::make_vtable"); let tbl = C_struct(ptrs); - let vt_gvar = str::as_c_str(ccx.names(~"vtable"), |buf| { + let vt_gvar = str::as_c_str(ccx.sess.str_of(ccx.names(~"vtable")), |buf| { llvm::LLVMAddGlobal(ccx.llmod, val_ty(tbl), buf) }); llvm::LLVMSetInitializer(vt_gvar, tbl); diff --git a/src/rustc/middle/trans/reachable.rs b/src/rustc/middle/trans/reachable.rs index 62107e76bdb..96169c8a22a 100644 --- a/src/rustc/middle/trans/reachable.rs +++ b/src/rustc/middle/trans/reachable.rs @@ -175,7 +175,8 @@ fn traverse_inline_body(cx: ctx, body: blk) { traverse_def_id(cx, def_id_of_def(d)); } none => cx.tcx.sess.span_bug(e.span, fmt!{"Unbound node \ - id %? while traversing %s", e.id, expr_to_str(e)}) + id %? 
while traversing %s", e.id, + expr_to_str(e, cx.tcx.sess.intr())}) } } expr_field(_, _, _) => { diff --git a/src/rustc/middle/trans/reflect.rs b/src/rustc/middle/trans/reflect.rs index 5829041a54a..7a21bea30d3 100644 --- a/src/rustc/middle/trans/reflect.rs +++ b/src/rustc/middle/trans/reflect.rs @@ -55,8 +55,9 @@ impl reflector { fn visit(ty_name: ~str, args: ~[ValueRef]) { let tcx = self.bcx.tcx(); - let mth_idx = option::get(ty::method_idx(@(~"visit_" + ty_name), - *self.visitor_methods)); + let mth_idx = option::get(ty::method_idx( + tcx.sess.ident_of(~"visit_" + ty_name), + *self.visitor_methods)); let mth_ty = ty::mk_fn(tcx, self.visitor_methods[mth_idx].fty); let v = self.visitor_val; let get_lval = |bcx| { @@ -157,7 +158,8 @@ impl reflector { for fields.eachi |i, field| { self.visit(~"rec_field", ~[self.c_uint(i), - self.c_slice(*field.ident)] + self.c_slice( + bcx.ccx().sess.str_of(field.ident))] + self.c_mt(field.mt)); } } @@ -233,7 +235,8 @@ impl reflector { for fields.eachi |i, field| { self.visit(~"class_field", ~[self.c_uint(i), - self.c_slice(*field.ident)] + self.c_slice( + bcx.ccx().sess.str_of(field.ident))] + self.c_mt(field.mt)); } } @@ -256,7 +259,8 @@ impl reflector { ~[self.c_uint(i), self.c_int(v.disr_val), self.c_uint(vec::len(v.args)), - self.c_slice(*v.name)]) { + self.c_slice( + bcx.ccx().sess.str_of(v.name))]) { for v.args.eachi |j, a| { self.visit(~"enum_variant_field", ~[self.c_uint(j), @@ -291,10 +295,10 @@ impl reflector { fn emit_calls_to_trait_visit_ty(bcx: block, t: ty::t, visitor_val: ValueRef, visitor_trait_id: def_id) -> block { - + import syntax::parse::token::special_idents::tydesc; let final = sub_block(bcx, ~"final"); - assert bcx.ccx().tcx.intrinsic_defs.contains_key(@~"tydesc"); - let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(@~"tydesc"); + assert bcx.ccx().tcx.intrinsic_defs.contains_key(tydesc); + let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get(tydesc); let tydesc_ty = type_of::type_of(bcx.ccx(), tydesc_ty); let r = reflector({ visitor_val: visitor_val, diff --git a/src/rustc/middle/trans/shape.rs b/src/rustc/middle/trans/shape.rs index 5209b6f61f0..1781fdd7931 100644 --- a/src/rustc/middle/trans/shape.rs +++ b/src/rustc/middle/trans/shape.rs @@ -391,7 +391,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef { let variant_shape = shape_of_variant(ccx, v); add_substr(data, variant_shape); - let zname = str::bytes(*v.name) + ~[0u8]; + let zname = str::bytes(ccx.sess.str_of(v.name)) + ~[0u8]; add_substr(data, zname); } enum_variants += ~[variants]; @@ -732,9 +732,9 @@ fn simplify_type(tcx: ty::ctxt, typ: ty::t) -> ty::t { ty::ty_class(did, ref substs) => { let simpl_fields = (if is_some(ty::ty_dtor(tcx, did)) { // remember the drop flag - ~[{ident: @~"drop", mt: {ty: - ty::mk_u8(tcx), - mutbl: ast::m_mutbl}}] } + ~[{ident: syntax::parse::token::special_idents::dtor, + mt: {ty: ty::mk_u8(tcx), + mutbl: ast::m_mutbl}}] } else { ~[] }) + do ty::lookup_class_fields(tcx, did).map |f| { let t = ty::lookup_field_type(tcx, did, f.id, substs); diff --git a/src/rustc/middle/trans/type_use.rs b/src/rustc/middle/trans/type_use.rs index 93f5d4e996f..727bf0d9e08 100644 --- a/src/rustc/middle/trans/type_use.rs +++ b/src/rustc/middle/trans/type_use.rs @@ -81,7 +81,7 @@ fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint) ast_map::node_foreign_item(i@@{node: foreign_item_fn(_, _), _}, abi, _) => { if abi == foreign_abi_rust_intrinsic { - let flags = match *i.ident { + let flags = match cx.ccx.sess.str_of(i.ident) { ~"size_of" | 
~"pref_align_of" | ~"min_align_of" | ~"init" | ~"reinterpret_cast" | ~"move_val" | ~"move_val_init" => { diff --git a/src/rustc/middle/ty.rs b/src/rustc/middle/ty.rs index b0f8746d162..773d8f3a684 100644 --- a/src/rustc/middle/ty.rs +++ b/src/rustc/middle/ty.rs @@ -616,7 +616,7 @@ fn mk_ctxt(s: session::session, node_types: @smallintmap::mk(), node_type_substs: map::int_hash(), items: amap, - intrinsic_defs: map::box_str_hash(), + intrinsic_defs: map::uint_hash(), freevars: freevars, tcache: ast_util::new_def_hash(), rcache: mk_rcache(), @@ -2200,7 +2200,7 @@ pure fn hash_bound_region(br: &bound_region) -> uint { match *br { // no idea if this is any good ty::br_self => 0u, ty::br_anon(idx) => 1u | (idx << 2), - ty::br_named(str) => 2u | (str::hash(str) << 2), + ty::br_named(ident) => 2u | (ident << 2), ty::br_cap_avoid(id, br) => 3u | (id as uint << 2) | hash_bound_region(br) } @@ -2310,10 +2310,13 @@ pure fn hash_type_structure(st: &sty) -> uint { } fn node_id_to_type(cx: ctxt, id: ast::node_id) -> t { + //io::println(fmt!("%?/%?", id, cx.node_types.size())); match smallintmap::find(*cx.node_types, id as uint) { some(t) => t, - none => cx.sess.bug(fmt!{"node_id_to_type: unbound node ID %s", - ast_map::node_id_to_str(cx.items, id)}) + none => cx.sess.bug( + fmt!{"node_id_to_type: unbound node ID %s", + ast_map::node_id_to_str(cx.items, id, + cx.sess.parse_sess.interner)}) } } @@ -2482,7 +2485,7 @@ fn field_idx(id: ast::ident, fields: ~[field]) -> option<uint> { } fn get_field(rec_ty: t, id: ast::ident) -> field { - match check vec::find(get_fields(rec_ty), |f| str::eq(f.ident, id)) { + match check vec::find(get_fields(rec_ty), |f| f.ident == id) { some(f) => f } } @@ -2703,9 +2706,9 @@ fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str { ~"record elements differ in mutability" } terr_record_fields(values) => { - fmt!("expected a record with field `%s` \ - but found one with field `%s`", - *values.expected, *values.found) + fmt!("expected a record with field `%s` but found one with field \ + `%s`", + cx.sess.str_of(values.expected), cx.sess.str_of(values.found)) } terr_arg_count => ~"incorrect number of function parameters", terr_mode_mismatch(values) => { @@ -2734,7 +2737,8 @@ fn type_err_to_str(cx: ctxt, err: &type_err) -> ~str { vstore_to_str(cx, values.found)) } terr_in_field(err, fname) => { - fmt!{"in field `%s`, %s", *fname, type_err_to_str(cx, err)} + fmt!("in field `%s`, %s", cx.sess.str_of(fname), + type_err_to_str(cx, err)) } terr_sorts(values) => { fmt!{"expected %s but found %s", @@ -2844,7 +2848,7 @@ fn substd_enum_variants(cx: ctxt, } fn item_path_str(cx: ctxt, id: ast::def_id) -> ~str { - ast_map::path_to_str(item_path(cx, id)) + ast_map::path_to_str(item_path(cx, id), cx.sess.parse_sess.interner) } /* If class_id names a class with a dtor, return some(the dtor's id). 
@@ -2909,7 +2913,8 @@ fn item_path(cx: ctxt, id: ast::def_id) -> ast_map::path { vec::append_one(*path, ast_map::path_name(nm)) } ast_map::node_dtor(_, _, _, path) => { - vec::append_one(*path, ast_map::path_name(@~"dtor")) + vec::append_one(*path, ast_map::path_name( + syntax::parse::token::special_idents::literally_dtor)) } ast_map::node_stmt(*) | ast_map::node_expr(*) | @@ -3062,8 +3067,10 @@ fn lookup_class_fields(cx: ctxt, did: ast::def_id) -> ~[field_ty] { } } _ => { - cx.sess.bug(fmt!{"class ID not bound to an item: %s", - ast_map::node_id_to_str(cx.items, did.node)}); + cx.sess.bug( + fmt!{"class ID not bound to an item: %s", + ast_map::node_id_to_str(cx.items, did.node, + cx.sess.parse_sess.interner)}); } } } @@ -3129,7 +3136,7 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident, } } cx.sess.span_fatal(sp, fmt!{"Class doesn't have a method \ - named %s", *name}); + named %s", cx.sess.str_of(name)}); } else { csearch::get_class_method(cx.sess.cstore, did, name) diff --git a/src/rustc/middle/typeck/astconv.rs b/src/rustc/middle/typeck/astconv.rs index 145d0fd0bd5..76788bd0555 100644 --- a/src/rustc/middle/typeck/astconv.rs +++ b/src/rustc/middle/typeck/astconv.rs @@ -309,8 +309,9 @@ fn ast_ty_to_ty<AC: ast_conv, RS: region_scope copy owned>( } ast::ty_path(path, id) => { let a_def = match tcx.def_map.find(id) { - none => tcx.sess.span_fatal(ast_ty.span, fmt!{"unbound path %s", - path_to_str(path)}), + none => tcx.sess.span_fatal( + ast_ty.span, fmt!{"unbound path %s", + path_to_str(path, tcx.sess.intr())}), some(d) => d }; match a_def { diff --git a/src/rustc/middle/typeck/check.rs b/src/rustc/middle/typeck/check.rs index ff9bd009498..9dd5af2cf10 100644 --- a/src/rustc/middle/typeck/check.rs +++ b/src/rustc/middle/typeck/check.rs @@ -75,7 +75,7 @@ import rscope::{in_binding_rscope, region_scope, type_rscope}; import syntax::ast::ty_i; import typeck::infer::{resolve_type, force_tvar}; -import std::map::str_hash; +import std::map::{str_hash, uint_hash}; type self_info = { self_ty: ty::t, @@ -335,7 +335,8 @@ fn check_fn(ccx: @crate_ctxt, do vec::iter2(arg_tys, decl.inputs) |arg_ty, input| { assign(input.ty.span, input.id, some(arg_ty)); debug!{"Argument %s is assigned to %s", - *input.ident, fcx.locals.get(input.id).to_str()}; + tcx.sess.str_of(input.ident), + fcx.locals.get(input.id).to_str()}; } // Add explicitly-declared locals. 
@@ -347,7 +348,7 @@ fn check_fn(ccx: @crate_ctxt, }; assign(local.span, local.node.id, o_ty); debug!{"Local variable %s is assigned to %s", - pat_to_str(local.node.pat), + pat_to_str(local.node.pat, tcx.sess.intr()), fcx.locals.get(local.node.id).to_str()}; visit::visit_local(local, e, v); }; @@ -359,7 +360,7 @@ fn check_fn(ccx: @crate_ctxt, if !pat_util::pat_is_variant(fcx.ccx.tcx.def_map, p) => { assign(p.span, p.id, none); debug!{"Pattern binding %s is assigned to %s", - *path.idents[0], + tcx.sess.str_of(path.idents[0]), fcx.locals.get(p.id).to_str()}; } _ => {} @@ -405,15 +406,15 @@ fn check_method(ccx: @crate_ctxt, method: @ast::method, fn check_no_duplicate_fields(tcx: ty::ctxt, fields: ~[(ast::ident, span)]) { - let field_names = hashmap::<@~str, span>(|x| str::hash(*x), - |x,y| str::eq(*x, *y)); + let field_names = uint_hash(); + for fields.each |p| { let (id, sp) = p; match field_names.find(id) { some(orig_sp) => { tcx.sess.span_err(sp, fmt!{"Duplicate field \ name %s in record type declaration", - *id}); + tcx.sess.str_of(id)}); tcx.sess.span_note(orig_sp, ~"First declaration of \ this field occurred here"); break; @@ -479,7 +480,7 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) { ast::item_impl(tps, _, ty, ms) => { let rp = ccx.tcx.region_paramd_items.contains_key(it.id); debug!{"item_impl %s with id %d rp %b", - *it.ident, it.id, rp}; + ccx.tcx.sess.str_of(it.ident), it.id, rp}; let self_ty = ccx.to_ty(rscope::type_rscope(rp), ty); for ms.each |m| { check_method(ccx, m, self_ty, local_def(it.id)); @@ -555,9 +556,11 @@ impl @fn_ctxt: region_scope { do empty_rscope.named_region(span, id).chain_err |_e| { match self.in_scope_regions.find(ty::br_named(id)) { some(r) => result::ok(r), - none if *id == ~"blk" => result::ok(self.block_region()), + none if id == syntax::parse::token::special_idents::blk + => result::ok(self.block_region()), none => { - result::err(fmt!{"named region `%s` not in scope here", *id}) + result::err(fmt!{"named region `%s` not in scope here", + self.ccx.tcx.sess.str_of(id)}) } } } @@ -601,8 +604,10 @@ impl @fn_ctxt { match self.node_types.find(ex.id) { some(t) => t, none => { - self.tcx().sess.bug(fmt!{"no type for expr %d (%s) in fcx %s", - ex.id, expr_to_str(ex), self.tag()}); + self.tcx().sess.bug( + fmt!{"no type for expr %d (%s) in fcx %s", + ex.id, expr_to_str(ex, self.ccx.tcx.sess.intr()), + self.tag()}); } } } @@ -612,7 +617,9 @@ impl @fn_ctxt { none => { self.tcx().sess.bug( fmt!{"no type for node %d: %s in fcx %s", - id, ast_map::node_id_to_str(self.tcx().items, id), + id, ast_map::node_id_to_str( + self.tcx().items, id, + self.tcx().sess.parse_sess.interner), self.tag()}); } } @@ -623,7 +630,9 @@ impl @fn_ctxt { none => { self.tcx().sess.bug( fmt!{"no type substs for node %d: %s in fcx %s", - id, ast_map::node_id_to_str(self.tcx().items, id), + id, ast_map::node_id_to_str( + self.tcx().items, id, + self.tcx().sess.parse_sess.interner), self.tag()}); } } @@ -842,8 +851,10 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expected: option<ty::t>, unifier: fn()) -> bool { - debug!{">> typechecking expr %d (%s)", - expr.id, syntax::print::pprust::expr_to_str(expr)}; + debug!{ + ">> typechecking expr %d (%s)", + expr.id, syntax::print::pprust::expr_to_str(expr, + fcx.ccx.tcx.sess.intr())}; // A generic function to factor out common logic from call and // overloaded operations @@ -1028,10 +1039,10 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, fn lookup_op_method(fcx: @fn_ctxt, op_ex: @ast::expr, self_ex: @ast::expr, self_t: ty::t, - opname: ~str, args: 
~[@ast::expr]) + opname: ast::ident, args: ~[@ast::expr]) -> option<(ty::t, bool)> { let lkup = method::lookup(fcx, op_ex, self_ex, op_ex.id, - op_ex.callee_id, @opname, self_t, ~[], false); + op_ex.callee_id, opname, self_t, ~[], false); match lkup.method() { some(origin) => { let {fty: method_ty, bot: bot} = { @@ -1100,9 +1111,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let tcx = fcx.ccx.tcx; match ast_util::binop_to_method_name(op) { some(name) => { - match lookup_op_method(fcx, ex, - lhs_expr, lhs_resolved_t, - name, ~[rhs]) { + match lookup_op_method(fcx, ex, lhs_expr, lhs_resolved_t, + fcx.tcx().sess.ident_of(name), ~[rhs]) { some(pair) => return pair, _ => () } @@ -1134,7 +1144,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, fn check_user_unop(fcx: @fn_ctxt, op_str: ~str, mname: ~str, ex: @ast::expr, rhs_expr: @ast::expr, rhs_t: ty::t) -> ty::t { - match lookup_op_method(fcx, ex, rhs_expr, rhs_t, mname, ~[]) { + match lookup_op_method(fcx, ex, rhs_expr, rhs_t, + fcx.tcx().sess.ident_of(mname), ~[]) { some((ret_ty, _)) => ret_ty, _ => { fcx.ccx.tcx.sess.span_err( @@ -1221,7 +1232,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let fty = ty::mk_fn(tcx, fn_ty); debug!{"check_expr_fn_with_unifier %s fty=%s", - expr_to_str(expr), fcx.infcx.ty_to_str(fty)}; + expr_to_str(expr, tcx.sess.intr()), fcx.infcx.ty_to_str(fty)}; fcx.write_ty(expr.id, fty); @@ -1315,7 +1326,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, let msg = fmt!{"attempted access of field `%s` on type `%s`, \ but no public field or method with that name \ was found", - *field, fcx.infcx.ty_to_str(t_err)}; + tcx.sess.str_of(field), + fcx.infcx.ty_to_str(t_err)}; tcx.sess.span_err(expr.span, msg); // NB: Adding a bogus type to allow typechecking to continue fcx.write_ty(expr.id, fcx.infcx.next_ty_var()); @@ -1788,7 +1800,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, for fields_t.each |f| { let mut found = false; for base_fields.each |bf| { - if str::eq(f.node.ident, bf.ident) { + if f.node.ident == bf.ident { demand::suptype(fcx, f.span, bf.mt.ty, f.node.mt.ty); found = true; } @@ -1796,7 +1808,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, if !found { tcx.sess.span_fatal(f.span, ~"unknown field in record update: " + - *f.node.ident); + tcx.sess.str_of(f.node.ident)); } } } @@ -1873,27 +1885,27 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, // Look up the class fields and build up a map. let class_fields = ty::lookup_class_fields(tcx, class_id); - let class_field_map = str_hash(); + let class_field_map = uint_hash(); let mut fields_found = 0; for class_fields.each |field| { // XXX: Check visibility here. - class_field_map.insert(*field.ident, (field.id, false)); + class_field_map.insert(field.ident, (field.id, false)); } // Typecheck each field. 
for fields.each |field| { - match class_field_map.find(*field.node.ident) { + match class_field_map.find(field.node.ident) { none => { - tcx.sess.span_err(field.span, - fmt!{"structure has no field named \ - field named `%s`", - *field.node.ident}); + tcx.sess.span_err( + field.span, + fmt!{"structure has no field named field named `%s`", + tcx.sess.str_of(field.node.ident)}); } some((_, true)) => { - tcx.sess.span_err(field.span, - fmt!{"field `%s` specified more than \ - once", - *field.node.ident}); + tcx.sess.span_err( + field.span, + fmt!{"field `%s` specified more than once", + tcx.sess.str_of(field.node.ident)}); } some((field_id, false)) => { let expected_field_type = @@ -1914,11 +1926,11 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, if fields_found < class_fields.len() { let mut missing_fields = ~[]; for class_fields.each |class_field| { - let name = *class_field.ident; + let name = class_field.ident; let (_, seen) = class_field_map.get(name); if !seen { vec::push(missing_fields, - ~"`" + name + ~"`"); + ~"`" + tcx.sess.str_of(name) + ~"`"); } } @@ -1960,7 +1972,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, none => { let resolved = structurally_resolved_type(fcx, expr.span, raw_base_t); - match lookup_op_method(fcx, expr, base, resolved, ~"index", + match lookup_op_method(fcx, expr, base, resolved, + tcx.sess.ident_of(~"index"), ~[idx]) { some((ret_ty, _)) => fcx.write_ty(id, ret_ty), _ => { @@ -1976,7 +1989,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, if bot { fcx.write_bot(expr.id); } debug!{"type of expr %s is %s, expected is %s", - syntax::print::pprust::expr_to_str(expr), + syntax::print::pprust::expr_to_str(expr, tcx.sess.intr()), ty_to_str(tcx, fcx.expr_ty(expr)), match expected { some(t) => ty_to_str(tcx, t), @@ -2456,7 +2469,8 @@ fn check_bounds_are_used(ccx: @crate_ctxt, for tps_used.eachi |i, b| { if !b { ccx.tcx.sess.span_err( - span, fmt!{"type parameter `%s` is unused", *tps[i].ident}); + span, fmt!{"type parameter `%s` is unused", + ccx.tcx.sess.str_of(tps[i].ident)}); } } } @@ -2469,7 +2483,7 @@ fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::foreign_item) { {mode: ast::expl(m), ty: ty} } let tcx = ccx.tcx; - let (n_tps, inputs, output) = match *it.ident { + let (n_tps, inputs, output) = match ccx.tcx.sess.str_of(it.ident) { ~"size_of" | ~"pref_align_of" | ~"min_align_of" => (1u, ~[], ty::mk_uint(ccx.tcx)), ~"init" => (1u, ~[], param(ccx, 0u)), @@ -2511,14 +2525,16 @@ fn check_intrinsic_type(ccx: @crate_ctxt, it: @ast::foreign_item) { (1u, ~[], ty::mk_nil_ptr(tcx)) } ~"visit_tydesc" => { - assert ccx.tcx.intrinsic_defs.contains_key(@~"tydesc"); - assert ccx.tcx.intrinsic_defs.contains_key(@~"ty_visitor"); - let (_, tydesc_ty) = ccx.tcx.intrinsic_defs.get(@~"tydesc"); - let (_, visitor_trait) = ccx.tcx.intrinsic_defs.get(@~"ty_visitor"); - let td_ptr = ty::mk_ptr(ccx.tcx, {ty: tydesc_ty, - mutbl: ast::m_imm}); - (0u, ~[arg(ast::by_val, td_ptr), - arg(ast::by_ref, visitor_trait)], ty::mk_nil(tcx)) + let tydesc_name = syntax::parse::token::special_idents::tydesc; + let ty_visitor_name = tcx.sess.ident_of(~"ty_visitor"); + assert tcx.intrinsic_defs.contains_key(tydesc_name); + assert ccx.tcx.intrinsic_defs.contains_key(ty_visitor_name); + let (_, tydesc_ty) = tcx.intrinsic_defs.get(tydesc_name); + let (_, visitor_trait) = tcx.intrinsic_defs.get(ty_visitor_name); + let td_ptr = ty::mk_ptr(ccx.tcx, {ty: tydesc_ty, + mutbl: ast::m_imm}); + (0u, ~[arg(ast::by_val, td_ptr), + arg(ast::by_ref, visitor_trait)], ty::mk_nil(tcx)) } ~"frame_address" => { let fty = 
ty::mk_fn(ccx.tcx, { diff --git a/src/rustc/middle/typeck/check/alt.rs b/src/rustc/middle/typeck/check/alt.rs index d136b13eaac..83f62b7a6f1 100644 --- a/src/rustc/middle/typeck/check/alt.rs +++ b/src/rustc/middle/typeck/check/alt.rs @@ -215,11 +215,9 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) { fields", ex_f_count, f_count}); } - fn matches(name: ast::ident, f: ty::field) -> bool { - str::eq(name, f.ident) - } + for fields.each |f| { - match vec::find(ex_fields, |a| matches(f.ident, a)) { + match vec::find(ex_fields, |a| f.ident == a.ident) { some(field) => { check_pat(pcx, f.pat, field.mt.ty); } @@ -227,7 +225,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) { tcx.sess.span_fatal(pat.span, fmt!{"mismatched types: did not \ expect a record with a field `%s`", - *f.ident}); + tcx.sess.str_of(f.ident)}); } } } @@ -259,7 +257,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) { // OK. } ast::def_class(*) => { - let name = syntax::print::pprust::path_to_str(path); + let name = pprust::path_to_str(path, tcx.sess.intr()); tcx.sess.span_err(pat.span, fmt!("mismatched types: expected `%s` but \ found `%s`", @@ -278,7 +276,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) { } // Index the class fields. - let field_map = std::map::box_str_hash(); + let field_map = std::map::uint_hash(); for class_fields.eachi |i, class_field| { field_map.insert(class_field.ident, i); } @@ -297,10 +295,11 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) { found_fields.insert(index, ()); } none => { - let name = syntax::print::pprust::path_to_str(path); + let name = pprust::path_to_str(path, tcx.sess.intr()); tcx.sess.span_err(pat.span, fmt!("struct `%s` does not have a field - named `%s`", name, *field.ident)); + named `%s`", name, + tcx.sess.str_of(field.ident))); } } } @@ -313,7 +312,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) { } tcx.sess.span_err(pat.span, fmt!("pattern does not mention field `%s`", - *field.ident)); + tcx.sess.str_of(field.ident))); } } diff --git a/src/rustc/middle/typeck/check/method.rs b/src/rustc/middle/typeck/check/method.rs index a711e2280b4..2b953bcdd8e 100644 --- a/src/rustc/middle/typeck/check/method.rs +++ b/src/rustc/middle/typeck/check/method.rs @@ -107,7 +107,8 @@ struct lookup { // Entrypoint: fn method() -> option<method_map_entry> { debug!{"method lookup(m_name=%s, self_ty=%s, %?)", - *self.m_name, self.fcx.infcx.ty_to_str(self.self_ty), + self.fcx.tcx().sess.str_of(self.m_name), + self.fcx.infcx.ty_to_str(self.self_ty), ty::get(self.self_ty).struct}; // Determine if there are any inherent methods we can call. 
@@ -533,7 +534,9 @@ struct lookup { debug!{"(adding inherent and extension candidates) \ adding candidates from impl: %s", node_id_to_str(self.tcx().items, - implementation.did.node)}; + implementation.did.node, + self.fcx.tcx().sess.parse_sess + .interner)}; self.add_candidates_from_impl(implementation, mode); } } @@ -572,9 +575,11 @@ struct lookup { fn def_id_to_str(def_id: ast::def_id) -> ~str { if def_id.crate == ast::local_crate { - node_id_to_str(self.tcx().items, def_id.node) + node_id_to_str(self.tcx().items, def_id.node, + self.fcx.tcx().sess.parse_sess.interner) } else { - ast_map::path_to_str(csearch::get_item_path(self.tcx(), def_id)) + ast_map::path_to_str(csearch::get_item_path(self.tcx(), def_id), + self.fcx.tcx().sess.parse_sess.interner) } } diff --git a/src/rustc/middle/typeck/check/regionck.rs b/src/rustc/middle/typeck/check/regionck.rs index 4b115f375c4..87701a01bdf 100644 --- a/src/rustc/middle/typeck/check/regionck.rs +++ b/src/rustc/middle/typeck/check/regionck.rs @@ -156,7 +156,8 @@ fn visit_block(b: ast::blk, &&rcx: @rcx, v: rvt) { } fn visit_expr(e: @ast::expr, &&rcx: @rcx, v: rvt) { - debug!{"visit_expr(e=%s)", pprust::expr_to_str(e)}; + debug!{"visit_expr(e=%s)", + pprust::expr_to_str(e, rcx.fcx.tcx().sess.intr())}; match e.node { ast::expr_path(*) => { diff --git a/src/rustc/middle/typeck/check/vtable.rs b/src/rustc/middle/typeck/check/vtable.rs index a2daf107dfe..d6c8d1cd2f7 100644 --- a/src/rustc/middle/typeck/check/vtable.rs +++ b/src/rustc/middle/typeck/check/vtable.rs @@ -287,7 +287,7 @@ fn connect_trait_tps(fcx: @fn_ctxt, expr: @ast::expr, impl_tys: ~[ty::t], fn early_resolve_expr(ex: @ast::expr, &&fcx: @fn_ctxt, is_early: bool) { debug!("vtable: early_resolve_expr() ex with id %?: %s", - ex.id, expr_to_str(ex)); + ex.id, expr_to_str(ex, fcx.tcx().sess.intr())); let cx = fcx.ccx; match ex.node { ast::expr_path(*) => { diff --git a/src/rustc/middle/typeck/check/writeback.rs b/src/rustc/middle/typeck/check/writeback.rs index 4f9bc928860..15251098b1f 100644 --- a/src/rustc/middle/typeck/check/writeback.rs +++ b/src/rustc/middle/typeck/check/writeback.rs @@ -117,7 +117,7 @@ fn visit_pat(p: @ast::pat, wbcx: wb_ctxt, v: wb_vt) { if !wbcx.success { return; } resolve_type_vars_for_node(wbcx, p.span, p.id); debug!{"Type for pattern binding %s (id %d) resolved to %s", - pat_to_str(p), p.id, + pat_to_str(p, wbcx.fcx.ccx.tcx.sess.intr()), p.id, wbcx.fcx.infcx.ty_to_str( ty::node_id_to_type(wbcx.fcx.ccx.tcx, p.id))}; @@ -130,7 +130,7 @@ fn visit_local(l: @ast::local, wbcx: wb_ctxt, v: wb_vt) { match resolve_type(wbcx.fcx.infcx, var_ty, resolve_all | force_all) { result::ok(lty) => { debug!{"Type for local %s (id %d) resolved to %s", - pat_to_str(l.node.pat), l.node.id, + pat_to_str(l.node.pat, wbcx.fcx.ccx.tcx.sess.intr()),l.node.id, wbcx.fcx.infcx.ty_to_str(lty)}; write_ty_to_tcx(wbcx.fcx.ccx.tcx, l.node.id, lty); } diff --git a/src/rustc/middle/typeck/coherence.rs b/src/rustc/middle/typeck/coherence.rs index ffed540323a..b7f39f4ba40 100644 --- a/src/rustc/middle/typeck/coherence.rs +++ b/src/rustc/middle/typeck/coherence.rs @@ -164,6 +164,7 @@ struct CoherenceChecker { // Create a mapping containing a MethodInfo for every provided // method in every trait. 
fn build_provided_methods_map(crate: @crate) { + let sess = self.crate_context.tcx.sess; let pmm = self.crate_context.provided_methods_map; @@ -173,7 +174,8 @@ struct CoherenceChecker { item_trait(_, _, trait_methods) => { for trait_methods.each |trait_method| { debug!{"(building provided methods map) checking \ - trait `%s` with id %d", *item.ident, item.id}; + trait `%s` with id %d", + sess.str_of(item.ident), item.id}; match trait_method { required(_) => { /* fall through */} @@ -193,7 +195,7 @@ struct CoherenceChecker { methods map) adding \ method `%s` to entry for \ existing trait", - *mi.ident}; + sess.str_of(mi.ident)}; let mut method_infos = mis; push(method_infos, mi); pmm.insert(item.id, method_infos); @@ -204,7 +206,7 @@ struct CoherenceChecker { debug!{"(building provided \ methods map) creating new \ entry for method `%s`", - *mi.ident}; + sess.str_of(mi.ident)}; pmm.insert(item.id, ~[mi]); } } @@ -227,7 +229,8 @@ struct CoherenceChecker { // inherent methods and extension methods. visit_crate(*crate, (), mk_simple_visitor(@{ visit_item: |item| { - debug!{"(checking coherence) item '%s'", *item.ident}; + debug!{"(checking coherence) item '%s'", + self.crate_context.tcx.sess.str_of(item.ident)}; match item.node { item_impl(_, associated_traits, _, _) => { @@ -269,7 +272,7 @@ struct CoherenceChecker { if associated_traits.len() == 0 { debug!{"(checking implementation) no associated traits for item \ '%s'", - *item.ident}; + self.crate_context.tcx.sess.str_of(item.ident)}; match get_base_type_def_id(self.inference_context, item.span, @@ -292,9 +295,10 @@ struct CoherenceChecker { self.trait_ref_to_trait_def_id(associated_trait); debug!{"(checking implementation) adding impl for trait \ '%s', item '%s'", - ast_map::node_id_to_str(self.crate_context.tcx.items, - trait_did.node), - *item.ident}; + ast_map::node_id_to_str( + self.crate_context.tcx.items, trait_did.node, + self.crate_context.tcx.sess.parse_sess.interner), + self.crate_context.tcx.sess.str_of(item.ident)}; let implementation = self.create_impl_from_item(item); self.add_trait_method(trait_did, implementation); @@ -567,7 +571,8 @@ struct CoherenceChecker { fn create_impl_from_item(item: @item) -> @Impl { fn add_provided_methods(inherent_methods: ~[@MethodInfo], - all_provided_methods: ~[@MethodInfo]) + all_provided_methods: ~[@MethodInfo], + sess: driver::session::session) -> ~[@MethodInfo] { let mut methods = inherent_methods; @@ -583,8 +588,9 @@ struct CoherenceChecker { } if !method_inherent_to_impl { - debug!{"(creating impl) adding provided method `%s` to \ - impl", *provided_method.ident}; + debug!{ + "(creating impl) adding provided method `%s` to impl", + sess.str_of(provided_method.ident)}; push(methods, provided_method); } } @@ -625,8 +631,9 @@ struct CoherenceChecker { // trait. // XXX: could probably be doing this with filter. - methods = add_provided_methods(methods, - all_provided); + methods = add_provided_methods( + methods, all_provided, + self.crate_context.tcx.sess); } } } @@ -717,11 +724,11 @@ struct CoherenceChecker { self_type.ty) { none => { let session = self.crate_context.tcx.sess; - session.bug(fmt!{"no base type for external impl \ - with no trait: %s (type %s)!", - *implementation.ident, - ty_to_str(self.crate_context.tcx, - self_type.ty)}); + session.bug(fmt!{ + "no base type for external impl \ + with no trait: %s (type %s)!", + session.str_of(implementation.ident), + ty_to_str(self.crate_context.tcx,self_type.ty)}); } some(_) => { // Nothing to do. 
diff --git a/src/rustc/middle/typeck/collect.rs b/src/rustc/middle/typeck/collect.rs index 250f431a1c7..5359e43401c 100644 --- a/src/rustc/middle/typeck/collect.rs +++ b/src/rustc/middle/typeck/collect.rs @@ -30,7 +30,9 @@ fn collect_item_types(ccx: @crate_ctxt, crate: @ast::crate) { // There ought to be a better approach. Attributes? for crate.node.module.items.each |crate_item| { - if *crate_item.ident == ~"intrinsic" { + if crate_item.ident + == syntax::parse::token::special_idents::intrinsic { + match crate_item.node { ast::item_mod(m) => { for m.items.each |intrinsic_item| { @@ -246,7 +248,7 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span, tcx.sess.span_err( sp, fmt!{"method `%s`'s purity does \ not match the trait method's \ - purity", *impl_m.ident}); + purity", tcx.sess.str_of(impl_m.ident)}); } // is this check right? @@ -254,11 +256,11 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span, tcx.sess.span_err( sp, fmt!{"method `%s`'s self type does \ not match the trait method's \ - self type", *impl_m.ident}); + self type", tcx.sess.str_of(impl_m.ident)}); } if impl_m.tps != trait_m.tps { - tcx.sess.span_err(sp, ~"method `" + *trait_m.ident + + tcx.sess.span_err(sp, ~"method `" + tcx.sess.str_of(trait_m.ident) + ~"` has an incompatible set of type parameters"); return; } @@ -266,9 +268,9 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span, if vec::len(impl_m.fty.inputs) != vec::len(trait_m.fty.inputs) { tcx.sess.span_err(sp,fmt!{"method `%s` has %u parameters \ but the trait has %u", - *trait_m.ident, - vec::len(impl_m.fty.inputs), - vec::len(trait_m.fty.inputs)}); + tcx.sess.str_of(trait_m.ident), + vec::len(impl_m.fty.inputs), + vec::len(trait_m.fty.inputs)}); return; } @@ -299,7 +301,8 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span, }; require_same_types( tcx, none, false, sp, impl_fty, trait_fty, - || ~"method `" + *trait_m.ident + ~"` has an incompatible type"); + || ~"method `" + tcx.sess.str_of(trait_m.ident) + + ~"` has an incompatible type"); return; // Replaces bound references to the self region with `with_r`. @@ -351,7 +354,8 @@ fn check_methods_against_trait(ccx: @crate_ctxt, none => { tcx.sess.span_err( a_trait_ty.path.span, - fmt!{"missing method `%s`", *trait_m.ident}); + fmt!{"missing method `%s`", + tcx.sess.str_of(trait_m.ident)}); } } } @@ -402,7 +406,8 @@ fn convert_methods(ccx: @crate_ctxt, fn convert(ccx: @crate_ctxt, it: @ast::item) { let tcx = ccx.tcx; let rp = tcx.region_paramd_items.contains_key(it.id); - debug!{"convert: item %s with id %d rp %b", *it.ident, it.id, rp}; + debug!{"convert: item %s with id %d rp %b", tcx.sess.str_of(it.ident), + it.id, rp}; match it.node { // These don't define types. 
ast::item_foreign_mod(_) | ast::item_mod(_) => {} @@ -607,7 +612,7 @@ fn ty_of_item(ccx: @crate_ctxt, it: @ast::item) rp: false, // functions do not have a self ty: ty::mk_fn(ccx.tcx, tofd)}; debug!{"type of %s (id %d) is %s", - *it.ident, it.id, ty_to_str(tcx, tpt.ty)}; + tcx.sess.str_of(it.ident), it.id, ty_to_str(tcx, tpt.ty)}; ccx.tcx.tcache.insert(local_def(it.id), tpt); return tpt; } diff --git a/src/rustc/middle/typeck/rscope.rs b/src/rustc/middle/typeck/rscope.rs index b3c275e415b..f11b09913af 100644 --- a/src/rustc/middle/typeck/rscope.rs +++ b/src/rustc/middle/typeck/rscope.rs @@ -1,4 +1,5 @@ import result::result; +import syntax::parse::token::special_idents; trait region_scope { fn anon_region(span: span) -> result<ty::region, ~str>; @@ -11,7 +12,7 @@ impl empty_rscope: region_scope { result::ok(ty::re_static) } fn named_region(_span: span, id: ast::ident) -> result<ty::region, ~str> { - if *id == ~"static" { result::ok(ty::re_static) } + if id == special_idents::static { result::ok(ty::re_static) } else { result::err(~"only the static region is allowed here") } } } @@ -28,7 +29,7 @@ impl type_rscope: region_scope { } fn named_region(span: span, id: ast::ident) -> result<ty::region, ~str> { do empty_rscope.named_region(span, id).chain_err |_e| { - if *id == ~"self" { + if id == special_idents::self_ { self.anon_region(span) } else { result::err(~"named regions other than `self` are not \ diff --git a/src/rustc/util/common.rs b/src/rustc/util/common.rs index e34113432ae..42b7bc19cc8 100644 --- a/src/rustc/util/common.rs +++ b/src/rustc/util/common.rs @@ -84,7 +84,9 @@ fn local_rhs_span(l: @ast::local, def: span) -> span { fn is_main_name(path: syntax::ast_map::path) -> bool { // FIXME (#34): path should be a constrained type, so we know // the call to last doesn't fail. 
- vec::last(path) == syntax::ast_map::path_name(@~"main") + vec::last(path) == syntax::ast_map::path_name( + syntax::parse::token::special_idents::main + ) } // diff --git a/src/rustc/util/ppaux.rs b/src/rustc/util/ppaux.rs index f3aee60f291..e22fa9fb173 100644 --- a/src/rustc/util/ppaux.rs +++ b/src/rustc/util/ppaux.rs @@ -106,7 +106,7 @@ fn explain_region_and_span(cx: ctxt, region: ty::region) fn bound_region_to_str(cx: ctxt, br: bound_region) -> ~str { match br { - br_named(str) => fmt!{"&%s", *str}, + br_named(id) => fmt!("&%s", cx.sess.str_of(id)), br_self if cx.sess.ppregions() => ~"&<self>", br_self => ~"&self", @@ -161,7 +161,8 @@ fn re_scope_id_to_str(cx: ctxt, node_id: ast::node_id) -> ~str { } _ => { cx.sess.bug( fmt!{"re_scope refers to %s", - ast_map::node_id_to_str(cx.items, node_id)}) } + ast_map::node_id_to_str(cx.items, node_id, + cx.sess.parse_sess.interner)}) } } } @@ -257,7 +258,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { s += proto_ty_to_str(cx, proto); match ident { - some(i) => { s += ~" "; s += *i; } + some(i) => { s += ~" "; s += cx.sess.str_of(i); } _ => { } } s += ~"("; @@ -280,13 +281,13 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { m.fty.output, m.fty.ret_style) + ~";"; } fn field_to_str(cx: ctxt, f: field) -> ~str { - return *f.ident + ~": " + mt_to_str(cx, f.mt); + return cx.sess.str_of(f.ident) + ~": " + mt_to_str(cx, f.mt); } // if there is an id, print that instead of the structural type: for ty::type_def_id(typ).each |def_id| { // note that this typedef cannot have type parameters - return ast_map::path_to_str(ty::item_path(cx, def_id)); + return ast_map::path_to_str(ty::item_path(cx, def_id),cx.sess.intr()); } // pretty print the structural type representation: @@ -336,12 +337,12 @@ fn ty_to_str(cx: ctxt, typ: t) -> ~str { ty_self => ~"self", ty_enum(did, substs) | ty_class(did, substs) => { let path = ty::item_path(cx, did); - let base = ast_map::path_to_str(path); + let base = ast_map::path_to_str(path, cx.sess.intr()); parameterized(cx, base, substs.self_r, substs.tps) } ty_trait(did, substs, vs) => { let path = ty::item_path(cx, did); - let base = ast_map::path_to_str(path); + let base = ast_map::path_to_str(path, cx.sess.intr()); let result = parameterized(cx, base, substs.self_r, substs.tps); vstore_ty_to_str(cx, result, vs) } diff --git a/src/rustdoc/attr_parser.rs b/src/rustdoc/attr_parser.rs index c590a1ac61e..bb37697ff39 100644 --- a/src/rustdoc/attr_parser.rs +++ b/src/rustdoc/attr_parser.rs @@ -63,8 +63,7 @@ fn parse_crate(attrs: ~[ast::attribute]) -> crate_attrs { let link_metas = attr::find_linkage_metas(attrs); { - name: attr::last_meta_item_value_str_by_name( - link_metas, ~"name").map(|x| *x ) + name: attr::last_meta_item_value_str_by_name(link_metas, ~"name") } } @@ -95,7 +94,7 @@ fn should_not_extract_crate_name_if_no_name_value_in_link_attribute() { fn parse_desc(attrs: ~[ast::attribute]) -> option<~str> { match doc_meta(attrs) { some(meta) => { - attr::get_meta_item_value_str(meta).map(|x| *x ) + attr::get_meta_item_value_str(meta) } none => none } diff --git a/src/rustdoc/attr_pass.rs b/src/rustdoc/attr_pass.rs index ba737a725bf..2439ad9801b 100644 --- a/src/rustdoc/attr_pass.rs +++ b/src/rustdoc/attr_pass.rs @@ -7,6 +7,7 @@ )]; import doc::item_utils; +import extract::to_str; import syntax::ast; import syntax::ast_map; import std::map::hashmap; @@ -150,7 +151,7 @@ fn fold_enum( }, _) => { let ast_variant = option::get( vec::find(enum_definition.variants, |v| { - *v.node.name == variant.name + to_str(v.node.name) == variant.name 
})); attr_parser::parse_desc(ast_variant.node.attrs) @@ -208,10 +209,10 @@ fn merge_method_attrs( vec::map(methods, |method| { match method { ast::required(ty_m) => { - (*ty_m.ident, attr_parser::parse_desc(ty_m.attrs)) + (to_str(ty_m.ident), attr_parser::parse_desc(ty_m.attrs)) } ast::provided(m) => { - (*m.ident, attr_parser::parse_desc(m.attrs)) + (to_str(m.ident), attr_parser::parse_desc(m.attrs)) } } }) @@ -220,7 +221,7 @@ fn merge_method_attrs( node: ast::item_impl(_, _, _, methods), _ }, _) => { vec::map(methods, |method| { - (*method.ident, attr_parser::parse_desc(method.attrs)) + (to_str(method.ident), attr_parser::parse_desc(method.attrs)) }) } _ => fail ~"unexpected item" diff --git a/src/rustdoc/extract.rs b/src/rustdoc/extract.rs index 9fd4cea4bfd..b3e5785c06e 100644 --- a/src/rustdoc/extract.rs +++ b/src/rustdoc/extract.rs @@ -3,7 +3,21 @@ import syntax::ast; import doc::item_utils; -export from_srv, extract; +export from_srv, extract, to_str, interner; + +// Hack; rather than thread an interner through everywhere, rely on +// thread-local data +fn to_str(id: ast::ident) -> ~str { + let intr = unsafe{ task::local_data_get( + syntax::parse::token::interner_key) }; + + return *(*intr.get()).get(id); +} + +fn interner() -> syntax::parse::token::ident_interner { + return *(unsafe{ task::local_data_get( + syntax::parse::token::interner_key) }).get(); +} fn from_srv( srv: astsrv::srv, @@ -34,14 +48,14 @@ fn top_moddoc_from_crate( crate: @ast::crate, default_name: ~str ) -> doc::moddoc { - moddoc_from_mod(mk_itemdoc(ast::crate_node_id, @default_name), + moddoc_from_mod(mk_itemdoc(ast::crate_node_id, default_name), crate.node.module) } -fn mk_itemdoc(id: ast::node_id, name: ast::ident) -> doc::itemdoc { +fn mk_itemdoc(id: ast::node_id, name: ~str) -> doc::itemdoc { { id: id, - name: *name, + name: name, path: ~[], brief: none, desc: none, @@ -57,7 +71,7 @@ fn moddoc_from_mod( doc::moddoc_({ item: itemdoc, items: do vec::filter_map(module_.items) |item| { - let itemdoc = mk_itemdoc(item.id, item.ident); + let itemdoc = mk_itemdoc(item.id, to_str(item.ident)); match item.node { ast::item_mod(m) => { some(doc::modtag( @@ -113,7 +127,7 @@ fn nmoddoc_from_mod( { item: itemdoc, fns: do vec::map(module_.items) |item| { - let itemdoc = mk_itemdoc(item.id, item.ident); + let itemdoc = mk_itemdoc(item.id, to_str(item.ident)); match item.node { ast::foreign_item_fn(_, _) => { fndoc_from_fn(itemdoc) @@ -162,8 +176,9 @@ fn variantdocs_from_variants( } fn variantdoc_from_variant(variant: ast::variant) -> doc::variantdoc { + { - name: *variant.node.name, + name: to_str(variant.node.name), desc: none, sig: none } @@ -192,7 +207,7 @@ fn traitdoc_from_trait( match method { ast::required(ty_m) => { { - name: *ty_m.ident, + name: to_str(ty_m.ident), brief: none, desc: none, sections: ~[], @@ -202,7 +217,7 @@ fn traitdoc_from_trait( } ast::provided(m) => { { - name: *m.ident, + name: to_str(m.ident), brief: none, desc: none, sections: ~[], @@ -237,7 +252,7 @@ fn impldoc_from_impl( self_ty: none, methods: do vec::map(methods) |method| { { - name: *method.ident, + name: to_str(method.ident), brief: none, desc: none, sections: ~[], diff --git a/src/rustdoc/tystr_pass.rs b/src/rustdoc/tystr_pass.rs index 6c89f0dc769..e545870e8cb 100644 --- a/src/rustdoc/tystr_pass.rs +++ b/src/rustdoc/tystr_pass.rs @@ -5,6 +5,7 @@ import syntax::ast; import syntax::print::pprust; import syntax::ast_map; import std::map::hashmap; +import extract::to_str; export mk_pass; @@ -55,7 +56,7 @@ fn get_fn_sig(srv: astsrv::srv, fn_id: 
doc::ast_id) -> option<~str> { ident: ident, node: ast::foreign_item_fn(decl, tys), _ }, _, _) => { - some(pprust::fun_to_str(decl, ident, tys)) + some(pprust::fun_to_str(decl, ident, tys, extract::interner())) } } } @@ -85,7 +86,7 @@ fn fold_const( ast_map::node_item(@{ node: ast::item_const(ty, _), _ }, _) => { - pprust::ty_to_str(ty) + pprust::ty_to_str(ty, extract::interner()) } } }) @@ -115,10 +116,10 @@ fn fold_enum( }, _) => { let ast_variant = option::get( do vec::find(enum_definition.variants) |v| { - *v.node.name == variant.name + to_str(v.node.name) == variant.name }); - pprust::variant_to_str(ast_variant) + pprust::variant_to_str(ast_variant, extract::interner()) } } }; @@ -173,8 +174,8 @@ fn get_method_sig( }, _) => { match check vec::find(methods, |method| { match method { - ast::required(ty_m) => *ty_m.ident == method_name, - ast::provided(m) => *m.ident == method_name, + ast::required(ty_m) => to_str(ty_m.ident) == method_name, + ast::provided(m) => to_str(m.ident) == method_name, } }) { some(method) => { @@ -183,14 +184,16 @@ fn get_method_sig( some(pprust::fun_to_str( ty_m.decl, ty_m.ident, - ty_m.tps + ty_m.tps, + extract::interner() )) } ast::provided(m) => { some(pprust::fun_to_str( m.decl, m.ident, - m.tps + m.tps, + extract::interner() )) } } @@ -201,13 +204,14 @@ fn get_method_sig( node: ast::item_impl(_, _, _, methods), _ }, _) => { match check vec::find(methods, |method| { - *method.ident == method_name + to_str(method.ident) == method_name }) { some(method) => { some(pprust::fun_to_str( method.decl, method.ident, - method.tps + method.tps, + extract::interner() )) } } @@ -236,9 +240,10 @@ fn fold_impl( node: ast::item_impl(_, trait_types, self_ty, _), _ }, _) => { let trait_types = vec::map(trait_types, |p| { - pprust::path_to_str(p.path) + pprust::path_to_str(p.path, extract::interner()) }); - (trait_types, some(pprust::ty_to_str(self_ty))) + (trait_types, some(pprust::ty_to_str(self_ty, + extract::interner()))) } _ => fail ~"expected impl" } @@ -293,9 +298,9 @@ fn fold_type( }, _) => { some(fmt!{ "type %s%s = %s", - *ident, - pprust::typarams_to_str(params), - pprust::ty_to_str(ty) + to_str(ident), + pprust::typarams_to_str(params, extract::interner()), + pprust::ty_to_str(ty, extract::interner()) }) } _ => fail ~"expected type" diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index 4a36e7a1c46..87bf362fe97 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -16,15 +16,15 @@ trait fake_ext_ctxt { fn parse_sess() -> parse::parse_sess; } -type fake_session = (); +type fake_session = parse::parse_sess; impl fake_session: fake_ext_ctxt { fn cfg() -> ast::crate_cfg { ~[] } - fn parse_sess() -> parse::parse_sess { parse::new_parse_sess(none) } + fn parse_sess() -> parse::parse_sess { self } } fn mk_ctxt() -> fake_ext_ctxt { - () as fake_ext_ctxt + parse::new_parse_sess(none) as fake_ext_ctxt } @@ -32,60 +32,61 @@ fn main() { let ext_cx = mk_ctxt(); let abc = #ast{23}; - check_pp(abc, pprust::print_expr, ~"23"); + check_pp(ext_cx, abc, pprust::print_expr, ~"23"); let expr3 = #ast{2 - $(abc) + 7}; - check_pp(expr3, pprust::print_expr, ~"2 - 23 + 7"); + check_pp(ext_cx, expr3, pprust::print_expr, ~"2 - 23 + 7"); let expr4 = #ast{2 - $(#ast{3}) + 9}; - check_pp(expr4, pprust::print_expr, ~"2 - 3 + 9"); + check_pp(ext_cx, expr4, pprust::print_expr, ~"2 - 3 + 9"); let ty = #ast[ty]{int}; - check_pp(ty, pprust::print_type, ~"int"); + check_pp(ext_cx, ty, pprust::print_type, ~"int"); let ty2 
= #ast[ty]{option<$(ty)>}; - check_pp(ty2, pprust::print_type, ~"option<int>"); + check_pp(ext_cx, ty2, pprust::print_type, ~"option<int>"); let item = #ast[item]{const x : int = 10;}; - check_pp(item, pprust::print_item, ~"const x: int = 10;"); + check_pp(ext_cx, item, pprust::print_item, ~"const x: int = 10;"); let item2: @ast::item = #ast[item]{const x : int = $(abc);}; - check_pp(item2, pprust::print_item, ~"const x: int = 23;"); + check_pp(ext_cx, item2, pprust::print_item, ~"const x: int = 23;"); let stmt = #ast[stmt]{let x = 20;}; - check_pp(*stmt, pprust::print_stmt, ~"let x = 20;"); + check_pp(ext_cx, *stmt, pprust::print_stmt, ~"let x = 20;"); let stmt2 = #ast[stmt]{let x : $(ty) = $(abc);}; - check_pp(*stmt2, pprust::print_stmt, ~"let x: int = 23;"); + check_pp(ext_cx, *stmt2, pprust::print_stmt, ~"let x: int = 23;"); let pat = #ast[pat]{some(_)}; - check_pp(pat, pprust::print_pat, ~"some(_)"); + check_pp(ext_cx, pat, pprust::print_pat, ~"some(_)"); // issue #1785 let x = #ast{1}; let test1 = #ast{1+$(x)}; - check_pp(test1, pprust::print_expr, ~"1 + 1"); + check_pp(ext_cx, test1, pprust::print_expr, ~"1 + 1"); let test2 = #ast{$(x)+1}; - check_pp(test2, pprust::print_expr, ~"1 + 1"); + check_pp(ext_cx, test2, pprust::print_expr, ~"1 + 1"); let y = #ast{2}; let test3 = #ast{$(x) + $(y)}; - check_pp(test3, pprust::print_expr, ~"1 + 2"); + check_pp(ext_cx, test3, pprust::print_expr, ~"1 + 2"); let crate = #ast[crate] { fn a() { } }; - check_pp(crate, pprust::print_crate_, ~"fn a() { }\n"); + check_pp(ext_cx, crate, pprust::print_crate_, ~"fn a() { }\n"); // issue #1926 let s = #ast[expr]{__s}; let e = #ast[expr]{__e}; let call = #ast[expr]{$(s).foo(|__e| $(e) )}; - check_pp(call, pprust::print_expr, ~"__s.foo(|__e| __e)") + check_pp(ext_cx, call, pprust::print_expr, ~"__s.foo(|__e| __e)") } -fn check_pp<T>(expr: T, f: fn(pprust::ps, T), expect: ~str) { +fn check_pp<T>(cx: fake_ext_ctxt, + expr: T, f: fn(pprust::ps, T), expect: ~str) { let buf = mem_buffer(); - let pp = pprust::rust_printer(buf as io::Writer); + let pp = pprust::rust_printer(buf as io::Writer,cx.parse_sess().interner); f(pp, expr); pp::eof(pp.s); let str = mem_buffer_str(buf); |
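The hunks above all apply the same change: an `ast::ident` stops being a boxed string (`@~str`) and becomes an interned key, so code that used to dereference it (`*id == ~"self"`, `*it.ident`) now either compares against a pre-interned `special_idents` constant or asks the session's interner for the string back (`cx.sess.str_of(id)`, `ps.interner.get(id)`). The sketch below is not part of the commit; it only illustrates that interning pattern in present-day Rust under hypothetical names (`Interner`, `Ident`, `intern`, `get`), standing in for the 2012 `ident_interner` API used in the diff.

```rust
// Minimal string-interning sketch (illustrative only, not the compiler's API).
use std::collections::HashMap;

/// An interned identifier is just an index into the interner's table,
/// so identifier equality is an integer comparison, not a string compare.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Ident(u32);

#[derive(Default)]
struct Interner {
    map: HashMap<String, u32>,
    strings: Vec<String>,
}

impl Interner {
    /// Return the existing id for `s`, or allocate a fresh one.
    fn intern(&mut self, s: &str) -> Ident {
        if let Some(&idx) = self.map.get(s) {
            return Ident(idx);
        }
        let idx = self.strings.len() as u32;
        self.map.insert(s.to_string(), idx);
        self.strings.push(s.to_string());
        Ident(idx)
    }

    /// Look the original string back up, as `sess.str_of(id)` does above.
    fn get(&self, id: Ident) -> &str {
        &self.strings[id.0 as usize]
    }
}

fn main() {
    let mut intr = Interner::default();
    // Interning well-known names up front gives cheap constants to compare
    // against, in the spirit of `special_idents::self_` in rscope.rs.
    let self_ = intr.intern("self");

    let id = intr.intern("self");
    assert_eq!(id, self_);            // like `id == special_idents::self_`
    assert_eq!(intr.get(id), "self"); // like `cx.sess.str_of(id)`
    println!("{:?} -> {}", id, intr.get(id));
}
```

The rustdoc changes in extract.rs rely on the same lookup, but, as the "Hack" comment there says, fetch the interner from task-local data rather than threading it through every call; in the sketch above that would correspond to keeping the `Interner` in a `thread_local!` slot instead of passing it as a parameter.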
