about summary refs log tree commit diff
path: root/src/libsyntax
diff options
context:
space:
mode:
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast_map.rs            | 38
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs | 23
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs  |  4
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs   | 13
-rw-r--r--  src/libsyntax/parse/common.rs        |  8
-rw-r--r--  src/libsyntax/parse/obsolete.rs      |  4
-rw-r--r--  src/libsyntax/parse/parser.rs        | 12
-rw-r--r--  src/libsyntax/parse/token.rs         | 54
8 files changed, 77 insertions(+), 79 deletions(-)
diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs
index 8989bb88cd7..eef14ac8b7a 100644
--- a/src/libsyntax/ast_map.rs
+++ b/src/libsyntax/ast_map.rs
@@ -23,9 +23,9 @@ use print::pprust;
 use visit;
 
 use core::cmp;
+use core::hashmap::linear::LinearMap;
 use core::str;
 use core::vec;
-use std;
 
 pub enum path_elt {
     path_mod(ident),
@@ -104,10 +104,10 @@ pub enum ast_node {
     node_struct_ctor(@struct_def, @item, @path),
 }
 
-pub type map = std::oldmap::HashMap<node_id, ast_node>;
+pub type map = @mut LinearMap<node_id, ast_node>;
 
 pub struct Ctx {
-    map: @map,
+    map: map,
     path: path,
     local_id: uint,
     diag: @span_handler,
@@ -134,13 +134,13 @@ pub fn mk_ast_map_visitor() -> vt {
 
 pub fn map_crate(diag: @span_handler, c: crate) -> map {
     let cx = @mut Ctx {
-        map: @std::oldmap::HashMap(),
+        map: @mut LinearMap::new(),
         path: ~[],
         local_id: 0u,
         diag: diag,
     };
     visit::visit_crate(c, cx, mk_ast_map_visitor());
-    *cx.map
+    cx.map
 }
 
 // Used for items loaded from external crate that are being inlined into this
@@ -157,7 +157,7 @@ pub fn map_decoded_item(diag: @span_handler,
     // even if we did I think it only needs an ordering between local
     // variables that are simultaneously in scope).
     let cx = @mut Ctx {
-        map: @map,
+        map: map,
         path: copy path,
         local_id: 0,
         diag: diag,
@@ -374,7 +374,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
       None => {
         fmt!("unknown node (id=%d)", id)
       }
-      Some(node_item(item, path)) => {
+      Some(&node_item(item, path)) => {
         let path_str = path_ident_to_str(*path, item.ident, itr);
         let item_str = match item.node {
           item_const(*) => ~"const",
@@ -390,43 +390,43 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
         };
         fmt!("%s %s (id=%?)", item_str, path_str, id)
       }
-      Some(node_foreign_item(item, abi, _, path)) => {
+      Some(&node_foreign_item(item, abi, _, path)) => {
         fmt!("foreign item %s with abi %? (id=%?)",
              path_ident_to_str(*path, item.ident, itr), abi, id)
       }
-      Some(node_method(m, _, path)) => {
+      Some(&node_method(m, _, path)) => {
         fmt!("method %s in %s (id=%?)",
              *itr.get(m.ident), path_to_str(*path, itr), id)
       }
-      Some(node_trait_method(ref tm, _, path)) => {
+      Some(&node_trait_method(ref tm, _, path)) => {
         let m = ast_util::trait_method_to_ty_method(&**tm);
         fmt!("method %s in %s (id=%?)",
              *itr.get(m.ident), path_to_str(*path, itr), id)
       }
-      Some(node_variant(ref variant, _, path)) => {
+      Some(&node_variant(ref variant, _, path)) => {
         fmt!("variant %s in %s (id=%?)",
              *itr.get(variant.node.name), path_to_str(*path, itr), id)
       }
-      Some(node_expr(expr)) => {
+      Some(&node_expr(expr)) => {
         fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id)
       }
-      Some(node_stmt(stmt)) => {
+      Some(&node_stmt(stmt)) => {
         fmt!("stmt %s (id=%?)",
              pprust::stmt_to_str(*stmt, itr), id)
       }
-      Some(node_arg(_, _)) => { // add more info here
+      Some(&node_arg(_, _)) => { // add more info here
         fmt!("arg (id=%?)", id)
       }
-      Some(node_local(_)) => { // add more info here
+      Some(&node_local(_)) => { // add more info here
         fmt!("local (id=%?)", id)
       }
-      Some(node_dtor(*)) => { // add more info here
+      Some(&node_dtor(*)) => { // add more info here
         fmt!("node_dtor (id=%?)", id)
       }
-      Some(node_block(_)) => {
+      Some(&node_block(_)) => {
         fmt!("block")
       }
-      Some(node_struct_ctor(*)) => {
+      Some(&node_struct_ctor(*)) => {
         fmt!("struct_ctor")
       }
     }
@@ -436,7 +436,7 @@ pub fn node_item_query<Result>(items: map, id: node_id,
                                query: &fn(@item) -> Result,
                                +error_msg: ~str) -> Result {
     match items.find(&id) {
-        Some(node_item(it, _)) => query(it),
+        Some(&node_item(it, _)) => query(it),
         _ => fail!(error_msg)
     }
 }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index 688d7a57d91..b7ba9c5c6c0 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -19,11 +19,8 @@ use parse::parser::Parser;
 use parse::token::{Token, EOF, to_str, nonterminal};
 use parse::token;
 
-use core::option::{Option, Some, None};
-use core::str;
-use core::uint;
-use core::vec;
-use std::oldmap::HashMap;
+use core::hashmap::linear::LinearMap;
+use core::prelude::*;
 
 /* This is an Earley-like parser, without support for in-grammar nonterminals,
 only by calling out to the main rust parser for named nonterminals (which it
@@ -189,9 +186,9 @@ pub enum named_match {
 pub type earley_item = ~MatcherPos;
 
 pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match])
-            -> HashMap<ident,@named_match> {
+            -> LinearMap<ident,@named_match> {
     fn n_rec(p_s: @mut ParseSess, m: matcher, res: ~[@named_match],
-             ret_val: HashMap<ident, @named_match>) {
+             ret_val: &mut LinearMap<ident, @named_match>) {
         match m {
           codemap::spanned {node: match_tok(_), _} => (),
           codemap::spanned {node: match_seq(ref more_ms, _, _, _, _), _} => {
@@ -210,13 +207,13 @@ pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match])
           }
         }
     }
-    let ret_val = HashMap();
-    for ms.each() |m| { n_rec(p_s, *m, res, ret_val) }
+    let mut ret_val = LinearMap::new();
+    for ms.each() |m| { n_rec(p_s, *m, res, &mut ret_val) }
     return ret_val;
 }
 
 pub enum parse_result {
-    success(HashMap<ident, @named_match>),
+    success(LinearMap<ident, @named_match>),
     failure(codemap::span, ~str),
     error(codemap::span, ~str)
 }
@@ -226,11 +223,11 @@ pub fn parse_or_else(
     +cfg: ast::crate_cfg,
     rdr: @reader,
     ms: ~[matcher]
-) -> HashMap<ident, @named_match> {
+) -> LinearMap<ident, @named_match> {
     match parse(sess, cfg, rdr, ms) {
       success(m) => m,
-      failure(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)),
-      error(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str))
+      failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str),
+      error(sp, str) => sess.span_diagnostic.span_fatal(sp, str)
     }
 }
 
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 5a6fd6fec58..6bd72b95109 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -63,12 +63,12 @@ pub fn add_new_extension(cx: @ext_ctxt,
                                      argument_gram);
 
     // Extract the arguments:
-    let lhses = match argument_map.get(&lhs_nm) {
+    let lhses = match *argument_map.get(&lhs_nm) {
         @matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s,
         _ => cx.span_bug(sp, ~"wrong-structured lhs")
     };
 
-    let rhses = match argument_map.get(&rhs_nm) {
+    let rhses = match *argument_map.get(&rhs_nm) {
       @matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s,
       _ => cx.span_bug(sp, ~"wrong-structured rhs")
     };
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 49076c74972..67c2f438269 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -18,9 +18,9 @@ use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};
 use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner};
 use parse::lexer::TokenAndSpan;
 
+use core::hashmap::linear::LinearMap;
 use core::option;
 use core::vec;
-use std;
 
 /* FIXME #2811: figure out how to have a uniquely linked stack, and change to
    `~` */
@@ -39,7 +39,7 @@ pub struct TtReader {
     // the unzipped tree:
     cur: @mut TtFrame,
     /* for MBE-style macro transcription */
-    interpolations: std::oldmap::HashMap<ident, @named_match>,
+    interpolations: LinearMap<ident, @named_match>,
     repeat_idx: ~[uint],
     repeat_len: ~[uint],
     /* cached: */
@@ -52,7 +52,7 @@ pub struct TtReader {
  *  should) be none. */
 pub fn new_tt_reader(sp_diag: @span_handler,
                      itr: @ident_interner,
-                     interp: Option<std::oldmap::HashMap<ident,@named_match>>,
+                     interp: Option<LinearMap<ident,@named_match>>,
                      +src: ~[ast::token_tree])
                   -> @mut TtReader {
     let r = @mut TtReader {
@@ -66,7 +66,7 @@ pub fn new_tt_reader(sp_diag: @span_handler,
             up: option::None
         },
         interpolations: match interp { /* just a convienience */
-            None => std::oldmap::HashMap(),
+            None => LinearMap::new(),
             Some(x) => x
         },
         repeat_idx: ~[],
@@ -124,7 +124,10 @@ fn lookup_cur_matched_by_matched(r: &mut TtReader,
 }
 
 fn lookup_cur_matched(r: &mut TtReader, name: ident) -> @named_match {
-    lookup_cur_matched_by_matched(r, r.interpolations.get(&name))
+    // FIXME (#3850): this looks a bit silly with an extra scope.
+    let start;
+    { start = *r.interpolations.get(&name); }
+    return lookup_cur_matched_by_matched(r, start);
 }
 enum lis {
     lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
diff --git a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs
index ea599e8290a..c14c7bed139 100644
--- a/src/libsyntax/parse/common.rs
+++ b/src/libsyntax/parse/common.rs
@@ -129,7 +129,7 @@ pub impl Parser {
 
     // A sanity check that the word we are asking for is a known keyword
     fn require_keyword(&self, word: &~str) {
-        if !self.keywords.contains_key(word) {
+        if !self.keywords.contains(word) {
             self.bug(fmt!("unknown keyword: %s", *word));
         }
     }
@@ -153,7 +153,7 @@ pub impl Parser {
     fn is_any_keyword(&self, tok: &token::Token) -> bool {
         match *tok {
           token::IDENT(sid, false) => {
-            self.keywords.contains_key(self.id_to_str(sid))
+            self.keywords.contains(self.id_to_str(sid))
           }
           _ => false
         }
@@ -183,7 +183,7 @@ pub impl Parser {
     }
 
     fn is_strict_keyword(&self, word: &~str) -> bool {
-        self.strict_keywords.contains_key(word)
+        self.strict_keywords.contains(word)
     }
 
     fn check_strict_keywords(&self) {
@@ -203,7 +203,7 @@ pub impl Parser {
     }
 
     fn is_reserved_keyword(&self, word: &~str) -> bool {
-        self.reserved_keywords.contains_key(word)
+        self.reserved_keywords.contains(word)
     }
 
     fn check_reserved_keywords(&self) {
diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs
index 0f4de9257c9..32c8b88aed8 100644
--- a/src/libsyntax/parse/obsolete.rs
+++ b/src/libsyntax/parse/obsolete.rs
@@ -225,9 +225,9 @@ pub impl Parser {
                    desc: &str) {
         self.span_err(sp, fmt!("obsolete syntax: %s", kind_str));
 
-        if !self.obsolete_set.contains_key(&kind) {
+        if !self.obsolete_set.contains(&kind) {
             self.sess.span_diagnostic.handler().note(fmt!("%s", desc));
-            self.obsolete_set.insert(kind, ());
+            self.obsolete_set.insert(kind);
         }
     }
 
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index af64bf07b7c..171cd90bcd2 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -92,8 +92,8 @@ use opt_vec::OptVec;
 
 use core::either::{Either, Left, Right};
 use core::either;
+use core::hashmap::linear::LinearSet;
 use core::vec;
-use std::oldmap::HashMap;
 
 #[deriving(Eq)]
 enum restriction {
@@ -240,7 +240,7 @@ pub fn Parser(sess: @mut ParseSess,
         keywords: token::keyword_table(),
         strict_keywords: token::strict_keyword_table(),
         reserved_keywords: token::reserved_keyword_table(),
-        obsolete_set: HashMap(),
+        obsolete_set: @mut LinearSet::new(),
         mod_path_stack: @mut ~[],
     }
 }
@@ -259,12 +259,12 @@ pub struct Parser {
     quote_depth: @mut uint, // not (yet) related to the quasiquoter
     reader: @reader,
     interner: @token::ident_interner,
-    keywords: HashMap<~str, ()>,
-    strict_keywords: HashMap<~str, ()>,
-    reserved_keywords: HashMap<~str, ()>,
+    keywords: LinearSet<~str>,
+    strict_keywords: LinearSet<~str>,
+    reserved_keywords: LinearSet<~str>,
     /// The set of seen errors about obsolete syntax. Used to suppress
     /// extra detail when the same error is seen twice
-    obsolete_set: HashMap<ObsoleteSyntax, ()>,
+    obsolete_set: @mut LinearSet<ObsoleteSyntax>,
     /// Used to determine the path to externally loaded source files
     mod_path_stack: @mut ~[~str],
 
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index 074bb13e199..5fdf6f7620c 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -18,9 +18,9 @@ use util::interner;
 
 use core::cast;
 use core::char;
+use core::hashmap::linear::LinearSet;
 use core::str;
 use core::task;
-use std::oldmap::HashMap;
 
 #[auto_encode]
 #[auto_decode]
@@ -458,35 +458,33 @@ pub fn mk_fake_ident_interner() -> @ident_interner {
  * appear as identifiers at all. Reserved keywords are not used anywhere in
  * the language and may not appear as identifiers.
  */
-pub fn keyword_table() -> HashMap<~str, ()> {
-    let keywords = HashMap();
-    for temporary_keyword_table().each_key |&word| {
-        keywords.insert(word, ());
-    }
-    for strict_keyword_table().each_key |&word| {
-        keywords.insert(word, ());
-    }
-    for reserved_keyword_table().each_key |&word| {
-        keywords.insert(word, ());
-    }
-    keywords
+pub fn keyword_table() -> LinearSet<~str> {
+    let mut keywords = LinearSet::new();
+    let mut tmp = temporary_keyword_table();
+    let mut strict = strict_keyword_table();
+    let mut reserved = reserved_keyword_table();
+
+    do tmp.consume |word|      { keywords.insert(word); }
+    do strict.consume |word|   { keywords.insert(word); }
+    do reserved.consume |word| { keywords.insert(word); }
+    return keywords;
 }
 
 /// Keywords that may be used as identifiers
-pub fn temporary_keyword_table() -> HashMap<~str, ()> {
-    let words = HashMap();
+pub fn temporary_keyword_table() -> LinearSet<~str> {
+    let mut words = LinearSet::new();
     let keys = ~[
         ~"self", ~"static",
     ];
-    for keys.each |word| {
-        words.insert(copy *word, ());
+    do vec::consume(keys) |_, s| {
+        words.insert(s);
     }
-    words
+    return words;
 }
 
 /// Full keywords. May not appear anywhere else.
-pub fn strict_keyword_table() -> HashMap<~str, ()> {
-    let words = HashMap();
+pub fn strict_keyword_table() -> LinearSet<~str> {
+    let mut words = LinearSet::new();
     let keys = ~[
         ~"as",
         ~"break",
@@ -505,21 +503,21 @@ pub fn strict_keyword_table() -> HashMap<~str, ()> {
         ~"unsafe", ~"use",
         ~"while"
     ];
-    for keys.each |word| {
-        words.insert(copy *word, ());
+    do vec::consume(keys) |_, w| {
+        words.insert(w);
     }
-    words
+    return words;
 }
 
-pub fn reserved_keyword_table() -> HashMap<~str, ()> {
-    let words = HashMap();
+pub fn reserved_keyword_table() -> LinearSet<~str> {
+    let mut words = LinearSet::new();
     let keys = ~[
         ~"be"
     ];
-    for keys.each |word| {
-        words.insert(copy *word, ());
+    do vec::consume(keys) |_, s| {
+        words.insert(s);
     }
-    words
+    return words;
 }