author    Eduard-Mihai Burtescu <edy.burt@gmail.com>  2018-08-18 13:55:43 +0300
committer Eduard-Mihai Burtescu <edy.burt@gmail.com>  2018-08-28 17:04:04 +0300
commit    93f3f5b1552489dbee03020505d896f01fd53852 (patch)
tree      19ccbcb3c491764f5ae6d3b65e1e0f76f450ce55 /src/libsyntax
parent    83ddc33347cade429fdb47509818e775a67c1af6 (diff)
Use FxHash{Map,Set} instead of the default Hash{Map,Set} everywhere in rustc.
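
Note: FxHashMap and FxHashSet (re-exported from rustc_data_structures::fx) are type aliases over the standard collections that replace the default SipHash-based RandomState with the faster, non-DoS-resistant FxHasher, which tends to pay off for the short keys (Symbols, Spans, NodeIds) the compiler hashes constantly. Because HashMap::new()/HashSet::new() are only defined for the default hasher, construction sites change from ::new() to ::default(); that mechanical substitution is the bulk of this diff. A minimal standalone sketch of the pattern follows; it uses the external rustc-hash crate as a stand-in for the in-tree module, which is an assumption of this sketch only.

use std::collections::HashMap;
use std::hash::BuildHasherDefault;

use rustc_hash::FxHasher; // stand-in for rustc_data_structures::fx in this sketch

// A plain type alias over the std map with a fixed (non-randomized) hasher,
// mirroring what rustc_data_structures::fx provides.
type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;

fn main() {
    // `HashMap::new()` is only implemented for the default RandomState hasher,
    // so maps with a custom hasher are created through `Default`:
    let mut expansions: FxHashMap<u32, Vec<String>> = FxHashMap::default();
    expansions.insert(0, vec!["derive(Clone)".to_owned()]);
    assert_eq!(expansions[&0].len(), 1);
}

Everything else (insertion, lookup, iteration) is unchanged, which is why the diff touches only imports, type annotations, and constructors.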
Diffstat (limited to 'src/libsyntax')
-rw-r--r--  src/libsyntax/ast.rs                    4
-rw-r--r--  src/libsyntax/ext/base.rs               6
-rw-r--r--  src/libsyntax/ext/derive.rs             4
-rw-r--r--  src/libsyntax/ext/expand.rs             4
-rw-r--r--  src/libsyntax/ext/placeholders.rs       6
-rw-r--r--  src/libsyntax/ext/tt/macro_parser.rs   10
-rw-r--r--  src/libsyntax/ext/tt/macro_rules.rs     6
-rw-r--r--  src/libsyntax/ext/tt/transcribe.rs     12
-rw-r--r--  src/libsyntax/parse/lexer/mod.rs        6
-rw-r--r--  src/libsyntax/parse/mod.rs              8
10 files changed, 33 insertions, 33 deletions
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index b6084bcf343..bd0e0d277ee 100644
--- a/src/libsyntax/ast.rs
+++ b/src/libsyntax/ast.rs
@@ -28,8 +28,8 @@ use ThinVec;
 use tokenstream::{ThinTokenStream, TokenStream};
 
 use serialize::{self, Encoder, Decoder};
-use std::collections::HashSet;
 use std::fmt;
+use rustc_data_structures::fx::FxHashSet;
 use rustc_data_structures::sync::Lrc;
 use std::u32;
 
@@ -407,7 +407,7 @@ pub struct WhereEqPredicate {
 
 /// The set of MetaItems that define the compilation environment of the crate,
 /// used to drive conditional compilation
-pub type CrateConfig = HashSet<(Name, Option<Symbol>)>;
+pub type CrateConfig = FxHashSet<(Name, Option<Symbol>)>;
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct Crate {
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index 75d24df7b62..e8a68b6d767 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -26,7 +26,7 @@ use OneVector;
 use symbol::{keywords, Ident, Symbol};
 use ThinVec;
 
-use std::collections::HashMap;
+use rustc_data_structures::fx::FxHashMap;
 use std::iter;
 use std::path::PathBuf;
 use std::rc::Rc;
@@ -800,7 +800,7 @@ pub struct ExtCtxt<'a> {
     pub resolver: &'a mut dyn Resolver,
     pub resolve_err_count: usize,
     pub current_expansion: ExpansionData,
-    pub expansions: HashMap<Span, Vec<String>>,
+    pub expansions: FxHashMap<Span, Vec<String>>,
 }
 
 impl<'a> ExtCtxt<'a> {
@@ -821,7 +821,7 @@ impl<'a> ExtCtxt<'a> {
                 directory_ownership: DirectoryOwnership::Owned { relative: None },
                 crate_span: None,
             },
-            expansions: HashMap::new(),
+            expansions: FxHashMap::default(),
         }
     }
 
diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs
index 80bbc618932..684cee38874 100644
--- a/src/libsyntax/ext/derive.rs
+++ b/src/libsyntax/ext/derive.rs
@@ -17,7 +17,7 @@ use parse::parser::PathStyle;
 use symbol::Symbol;
 use syntax_pos::Span;
 
-use std::collections::HashSet;
+use rustc_data_structures::fx::FxHashSet;
 
 pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
     let mut result = Vec::new();
@@ -48,7 +48,7 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec
 pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: T) -> T
     where T: HasAttrs,
 {
-    let (mut names, mut pretty_name) = (HashSet::new(), "derive(".to_owned());
+    let (mut names, mut pretty_name) = (FxHashSet::default(), "derive(".to_owned());
     for (i, path) in traits.iter().enumerate() {
         if i > 0 {
             pretty_name.push_str(", ");
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index a2e84b508dc..6e38f820586 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -34,7 +34,7 @@ use syntax_pos::hygiene::ExpnFormat;
 use tokenstream::{TokenStream, TokenTree};
 use visit::{self, Visitor};
 
-use std::collections::HashMap;
+use rustc_data_structures::fx::FxHashMap;
 use std::fs::File;
 use std::io::Read;
 use std::iter::FromIterator;
@@ -319,7 +319,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         // Unresolved macros produce dummy outputs as a recovery measure.
         invocations.reverse();
         let mut expanded_fragments = Vec::new();
-        let mut derives: HashMap<Mark, Vec<_>> = HashMap::new();
+        let mut derives: FxHashMap<Mark, Vec<_>> = FxHashMap::default();
         let mut undetermined_invocations = Vec::new();
         let (mut progress, mut force) = (false, !self.monotonic);
         loop {
diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs
index 7a8ccfddf8e..5906412883a 100644
--- a/src/libsyntax/ext/placeholders.rs
+++ b/src/libsyntax/ext/placeholders.rs
@@ -21,7 +21,7 @@ use symbol::keywords;
 use ThinVec;
 use util::move_map::MoveMap;
 
-use std::collections::HashMap;
+use rustc_data_structures::fx::FxHashMap;
 
 pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
     fn mac_placeholder() -> ast::Mac {
@@ -81,7 +81,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
 }
 
 pub struct PlaceholderExpander<'a, 'b: 'a> {
-    expanded_fragments: HashMap<ast::NodeId, AstFragment>,
+    expanded_fragments: FxHashMap<ast::NodeId, AstFragment>,
     cx: &'a mut ExtCtxt<'b>,
     monotonic: bool,
 }
@@ -90,7 +90,7 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> {
     pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
         PlaceholderExpander {
             cx,
-            expanded_fragments: HashMap::new(),
+            expanded_fragments: FxHashMap::default(),
             monotonic,
         }
     }
diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs
index dcdeee5c2e7..c962e7fcbb4 100644
--- a/src/libsyntax/ext/tt/macro_parser.rs
+++ b/src/libsyntax/ext/tt/macro_parser.rs
@@ -96,11 +96,11 @@ use OneVector;
 use symbol::keywords;
 use tokenstream::TokenStream;
 
+use rustc_data_structures::fx::FxHashMap;
+use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::mem;
 use std::ops::{Deref, DerefMut};
 use std::rc::Rc;
-use std::collections::HashMap;
-use std::collections::hash_map::Entry::{Occupied, Vacant};
 
 // To avoid costly uniqueness checks, we require that `MatchSeq` always has a nonempty body.
 
@@ -263,7 +263,7 @@ pub enum ParseResult<T> {
 
 /// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
 /// This represents the mapping of metavars to the token trees they bind to.
-pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
+pub type NamedParseResult = ParseResult<FxHashMap<Ident, Rc<NamedMatch>>>;
 
 /// Count how many metavars are named in the given matcher `ms`.
 pub fn count_names(ms: &[TokenTree]) -> usize {
@@ -351,7 +351,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
         sess: &ParseSess,
         m: &TokenTree,
         res: &mut I,
-        ret_val: &mut HashMap<Ident, Rc<NamedMatch>>,
+        ret_val: &mut FxHashMap<Ident, Rc<NamedMatch>>,
     ) -> Result<(), (syntax_pos::Span, String)> {
         match *m {
             TokenTree::Sequence(_, ref seq) => for next_m in &seq.tts {
@@ -382,7 +382,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
         Ok(())
     }
 
-    let mut ret_val = HashMap::new();
+    let mut ret_val = FxHashMap::default();
     for m in ms {
         match n_rec(sess, m, res.by_ref(), &mut ret_val) {
             Ok(_) => {}
diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 2c738ac2a04..d09127d6b08 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -27,8 +27,8 @@ use parse::token::Token::*;
 use symbol::Symbol;
 use tokenstream::{TokenStream, TokenTree};
 
+use rustc_data_structures::fx::FxHashMap;
 use std::borrow::Cow;
-use std::collections::HashMap;
 use std::collections::hash_map::Entry;
 
 use rustc_data_structures::sync::Lrc;
@@ -451,14 +451,14 @@ struct FirstSets {
     // If two sequences have the same span in a matcher, then map that
     // span to None (invalidating the mapping here and forcing the code to
     // use a slow path).
-    first: HashMap<Span, Option<TokenSet>>,
+    first: FxHashMap<Span, Option<TokenSet>>,
 }
 
 impl FirstSets {
     fn new(tts: &[quoted::TokenTree]) -> FirstSets {
         use self::quoted::TokenTree;
 
-        let mut sets = FirstSets { first: HashMap::new() };
+        let mut sets = FirstSets { first: FxHashMap::default() };
         build_recur(&mut sets, tts);
         return sets;
 
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 67a15b149f6..549e5f00dce 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -19,11 +19,11 @@ use OneVector;
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{TokenStream, TokenTree, Delimited};
 
-use std::rc::Rc;
+use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
 use std::mem;
 use std::ops::Add;
-use std::collections::HashMap;
+use std::rc::Rc;
 
 // An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame {
@@ -67,11 +67,11 @@ impl Iterator for Frame {
 /// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can
 /// (and should) be None.
 pub fn transcribe(cx: &ExtCtxt,
-                  interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
+                  interp: Option<FxHashMap<Ident, Rc<NamedMatch>>>,
                   src: Vec<quoted::TokenTree>)
                   -> TokenStream {
     let mut stack: OneVector<Frame> = smallvec![Frame::new(src)];
-    let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
+    let interpolations = interp.unwrap_or_else(FxHashMap::default); /* just a convenience */
     let mut repeats = Vec::new();
     let mut result: Vec<TokenStream> = Vec::new();
     let mut result_stack = Vec::new();
@@ -187,7 +187,7 @@ pub fn transcribe(cx: &ExtCtxt,
 }
 
 fn lookup_cur_matched(ident: Ident,
-                      interpolations: &HashMap<Ident, Rc<NamedMatch>>,
+                      interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
                       repeats: &[(usize, usize)])
                       -> Option<Rc<NamedMatch>> {
     interpolations.get(&ident).map(|matched| {
@@ -234,7 +234,7 @@ impl Add for LockstepIterSize {
 }
 
 fn lockstep_iter_size(tree: &quoted::TokenTree,
-                      interpolations: &HashMap<Ident, Rc<NamedMatch>>,
+                      interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
                       repeats: &[(usize, usize)])
                       -> LockstepIterSize {
     use self::quoted::TokenTree;
diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs
index 448ff9676c9..96584a580f1 100644
--- a/src/libsyntax/parse/lexer/mod.rs
+++ b/src/libsyntax/parse/lexer/mod.rs
@@ -1831,10 +1831,10 @@ mod tests {
     use errors;
     use feature_gate::UnstableFeatures;
     use parse::token;
-    use std::collections::HashSet;
     use std::io;
     use std::path::PathBuf;
     use diagnostics::plugin::ErrorMap;
+    use rustc_data_structures::fx::FxHashSet;
     use rustc_data_structures::sync::Lock;
     use with_globals;
     fn mk_sess(cm: Lrc<SourceMap>) -> ParseSess {
@@ -1845,10 +1845,10 @@ mod tests {
         ParseSess {
             span_diagnostic: errors::Handler::with_emitter(true, false, Box::new(emitter)),
             unstable_features: UnstableFeatures::from_environment(),
-            config: CrateConfig::new(),
+            config: CrateConfig::default(),
             included_mod_stack: Lock::new(Vec::new()),
             code_map: cm,
-            missing_fragment_specifiers: Lock::new(HashSet::new()),
+            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
             raw_identifier_spans: Lock::new(Vec::new()),
             registered_diagnostics: Lock::new(ErrorMap::new()),
             non_modrs_mods: Lock::new(vec![]),
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 1136cda5ee3..28d63399b44 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -24,8 +24,8 @@ use symbol::Symbol;
 use tokenstream::{TokenStream, TokenTree};
 use diagnostics::plugin::ErrorMap;
 
+use rustc_data_structures::fx::FxHashSet;
 use std::borrow::Cow;
-use std::collections::HashSet;
 use std::iter;
 use std::path::{Path, PathBuf};
 use std::str;
@@ -46,7 +46,7 @@ pub struct ParseSess {
     pub span_diagnostic: Handler,
     pub unstable_features: UnstableFeatures,
     pub config: CrateConfig,
-    pub missing_fragment_specifiers: Lock<HashSet<Span>>,
+    pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
     /// Places where raw identifiers were used. This is used for feature gating
     /// raw identifiers
     pub raw_identifier_spans: Lock<Vec<Span>>,
@@ -75,8 +75,8 @@ impl ParseSess {
         ParseSess {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
-            config: HashSet::new(),
-            missing_fragment_specifiers: Lock::new(HashSet::new()),
+            config: FxHashSet::default(),
+            missing_fragment_specifiers: Lock::new(FxHashSet::default()),
             raw_identifier_spans: Lock::new(Vec::new()),
             registered_diagnostics: Lock::new(ErrorMap::new()),
             included_mod_stack: Lock::new(vec![]),