Replace FnvHasher use with FxHasher.
This speeds up compilation by 3--6% across most of rustc-benchmarks.
nnethercote committed Nov 8, 2016
1 parent eca1cc9 commit 00e48af
Showing 91 changed files with 588 additions and 588 deletions.
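For orientation, the change swaps the hasher behind rustc's maps while keeping the standard HashMap/HashSet API. The sketch below shows the kind of definitions a module such as rustc_data_structures::fx needs to provide for the call sites in this diff to work unchanged: the type aliases, the upper-case FxHashMap()/FxHashSet() constructor functions used as expressions throughout, and a rotate-xor-multiply hasher. This is a minimal sketch rather than the actual rustc source; the mixing constant and the byte-at-a-time loop are assumptions.

// Minimal sketch of fx-style definitions assumed by this diff (not the rustc source).
use std::collections::{HashMap, HashSet};
use std::hash::{BuildHasherDefault, Hash, Hasher};

pub type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxHashSet<V> = HashSet<V, BuildHasherDefault<FxHasher>>;

// Upper-case constructor fns let call sites write `FxHashMap()` / `FxHashSet()`
// as expressions, the pattern used throughout the diff below.
#[allow(non_snake_case)]
pub fn FxHashMap<K: Hash + Eq, V>() -> FxHashMap<K, V> {
    HashMap::default()
}

#[allow(non_snake_case)]
pub fn FxHashSet<V: Hash + Eq>() -> FxHashSet<V> {
    HashSet::default()
}

// A simple multiply-rotate hasher in the Fx style: fast on the short keys that
// dominate rustc's maps, and not intended to resist hash flooding.
#[derive(Default)]
pub struct FxHasher {
    hash: u64,
}

impl Hasher for FxHasher {
    fn write(&mut self, bytes: &[u8]) {
        // Illustrative constant; the real hasher also processes word-sized
        // chunks rather than one byte at a time.
        const SEED: u64 = 0x517cc1b727220a95;
        for &b in bytes {
            self.hash = (self.hash.rotate_left(5) ^ b as u64).wrapping_mul(SEED);
        }
    }

    fn finish(&self) -> u64 {
        self.hash
    }
}

Because FxHashMap and FxHashSet are plain std collections with a different BuildHasher, every change below is a one-for-one rename of imports, type names, and constructor calls; no call-site logic changes.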
6 changes: 3 additions & 3 deletions src/librustc/dep_graph/dep_tracking_map.rs
@@ -9,7 +9,7 @@
// except according to those terms.

use hir::def_id::DefId;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
use std::cell::RefCell;
use std::ops::Index;
use std::hash::Hash;
@@ -24,7 +24,7 @@ use super::{DepNode, DepGraph};
pub struct DepTrackingMap<M: DepTrackingMapConfig> {
phantom: PhantomData<M>,
graph: DepGraph,
-map: FnvHashMap<M::Key, M::Value>,
+map: FxHashMap<M::Key, M::Value>,
}

pub trait DepTrackingMapConfig {
@@ -38,7 +38,7 @@ impl<M: DepTrackingMapConfig> DepTrackingMap<M> {
DepTrackingMap {
phantom: PhantomData,
graph: graph,
-map: FnvHashMap()
+map: FxHashMap()
}
}

10 changes: 5 additions & 5 deletions src/librustc/dep_graph/edges.rs
@@ -8,15 +8,15 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

-use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use std::fmt::Debug;
use std::hash::Hash;
use super::{DepGraphQuery, DepNode};

pub struct DepGraphEdges<D: Clone + Debug + Eq + Hash> {
nodes: Vec<DepNode<D>>,
-indices: FnvHashMap<DepNode<D>, IdIndex>,
-edges: FnvHashSet<(IdIndex, IdIndex)>,
+indices: FxHashMap<DepNode<D>, IdIndex>,
+edges: FxHashSet<(IdIndex, IdIndex)>,
open_nodes: Vec<OpenNode>,
}

@@ -46,8 +46,8 @@ impl<D: Clone + Debug + Eq + Hash> DepGraphEdges<D> {
pub fn new() -> DepGraphEdges<D> {
DepGraphEdges {
nodes: vec![],
-indices: FnvHashMap(),
-edges: FnvHashSet(),
+indices: FxHashMap(),
+edges: FxHashSet(),
open_nodes: Vec::new()
}
}
12 changes: 6 additions & 6 deletions src/librustc/dep_graph/graph.rs
@@ -9,7 +9,7 @@
// except according to those terms.

use hir::def_id::DefId;
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
use session::config::OutputType;
use std::cell::{Ref, RefCell};
use std::rc::Rc;
@@ -34,19 +34,19 @@ struct DepGraphData {
/// things available to us. If we find that they are not dirty, we
/// load the path to the file storing those work-products here into
/// this map. We can later look for and extract that data.
-previous_work_products: RefCell<FnvHashMap<Arc<WorkProductId>, WorkProduct>>,
+previous_work_products: RefCell<FxHashMap<Arc<WorkProductId>, WorkProduct>>,

/// Work-products that we generate in this run.
-work_products: RefCell<FnvHashMap<Arc<WorkProductId>, WorkProduct>>,
+work_products: RefCell<FxHashMap<Arc<WorkProductId>, WorkProduct>>,
}

impl DepGraph {
pub fn new(enabled: bool) -> DepGraph {
DepGraph {
data: Rc::new(DepGraphData {
thread: DepGraphThreadData::new(enabled),
-previous_work_products: RefCell::new(FnvHashMap()),
-work_products: RefCell::new(FnvHashMap()),
+previous_work_products: RefCell::new(FxHashMap()),
+work_products: RefCell::new(FxHashMap()),
})
}
}
@@ -117,7 +117,7 @@ impl DepGraph {

/// Access the map of work-products created during this run. Only
/// used during saving of the dep-graph.
-pub fn work_products(&self) -> Ref<FnvHashMap<Arc<WorkProductId>, WorkProduct>> {
+pub fn work_products(&self) -> Ref<FxHashMap<Arc<WorkProductId>, WorkProduct>> {
self.data.work_products.borrow()
}
}
6 changes: 3 additions & 3 deletions src/librustc/dep_graph/query.rs
@@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph::{Direction, INCOMING, Graph, NodeIndex, OUTGOING};
use std::fmt::Debug;
use std::hash::Hash;
@@ -17,15 +17,15 @@ use super::DepNode;

pub struct DepGraphQuery<D: Clone + Debug + Hash + Eq> {
pub graph: Graph<DepNode<D>, ()>,
-pub indices: FnvHashMap<DepNode<D>, NodeIndex>,
+pub indices: FxHashMap<DepNode<D>, NodeIndex>,
}

impl<D: Clone + Debug + Hash + Eq> DepGraphQuery<D> {
pub fn new(nodes: &[DepNode<D>],
edges: &[(DepNode<D>, DepNode<D>)])
-> DepGraphQuery<D> {
let mut graph = Graph::new();
-let mut indices = FnvHashMap();
+let mut indices = FxHashMap();
for node in nodes {
indices.insert(node.clone(), graph.next_node_index());
graph.add_node(node.clone());
6 changes: 3 additions & 3 deletions src/librustc/hir/map/definitions.rs
@@ -9,7 +9,7 @@
// except according to those terms.

use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
-use rustc_data_structures::fnv::FnvHashMap;
+use rustc_data_structures::fx::FxHashMap;
use std::fmt::Write;
use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
@@ -22,7 +22,7 @@ use util::nodemap::NodeMap;
#[derive(Clone)]
pub struct Definitions {
data: Vec<DefData>,
-key_map: FnvHashMap<DefKey, DefIndex>,
+key_map: FxHashMap<DefKey, DefIndex>,
node_map: NodeMap<DefIndex>,
}

@@ -219,7 +219,7 @@ impl Definitions {
pub fn new() -> Definitions {
Definitions {
data: vec![],
-key_map: FnvHashMap(),
+key_map: FxHashMap(),
node_map: NodeMap(),
}
}
4 changes: 2 additions & 2 deletions src/librustc/hir/mod.rs
@@ -33,7 +33,7 @@ pub use self::PathParameters::*;

use hir::def::Def;
use hir::def_id::DefId;
-use util::nodemap::{NodeMap, FnvHashSet};
+use util::nodemap::{NodeMap, FxHashSet};

use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP};
use syntax::codemap::{self, respan, Spanned};
@@ -1605,4 +1605,4 @@ pub type TraitMap = NodeMap<Vec<TraitCandidate>>;

// Map from the NodeId of a glob import to a list of items which are actually
// imported.
-pub type GlobMap = NodeMap<FnvHashSet<Name>>;
+pub type GlobMap = NodeMap<FxHashSet<Name>>;
6 changes: 3 additions & 3 deletions src/librustc/infer/freshen.rs
@@ -32,7 +32,7 @@
use ty::{self, Ty, TyCtxt, TypeFoldable};
use ty::fold::TypeFolder;
-use util::nodemap::FnvHashMap;
+use util::nodemap::FxHashMap;
use std::collections::hash_map::Entry;

use super::InferCtxt;
@@ -41,7 +41,7 @@ use super::unify_key::ToType;
pub struct TypeFreshener<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
freshen_count: u32,
-freshen_map: FnvHashMap<ty::InferTy, Ty<'tcx>>,
+freshen_map: FxHashMap<ty::InferTy, Ty<'tcx>>,
}

impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> {
@@ -50,7 +50,7 @@ impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> {
TypeFreshener {
infcx: infcx,
freshen_count: 0,
-freshen_map: FnvHashMap(),
+freshen_map: FxHashMap(),
}
}

23 changes: 11 additions & 12 deletions src/librustc/infer/higher_ranked/mod.rs
@@ -24,7 +24,7 @@ use ty::{self, TyCtxt, Binder, TypeFoldable};
use ty::error::TypeError;
use ty::relate::{Relate, RelateResult, TypeRelation};
use syntax_pos::Span;
-use util::nodemap::{FnvHashMap, FnvHashSet};
+use util::nodemap::{FxHashMap, FxHashSet};

pub struct HrMatchResult<U> {
pub value: U,
@@ -135,7 +135,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
// Map each skolemized region to a vector of other regions that it
// must be equated with. (Note that this vector may include other
// skolemized regions from `skol_map`.)
-let skol_resolution_map: FnvHashMap<_, _> =
+let skol_resolution_map: FxHashMap<_, _> =
skol_map
.iter()
.map(|(&br, &skol)| {
@@ -158,7 +158,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
// `skol_map`. There should always be a representative if things
// are properly well-formed.
let mut unconstrained_regions = vec![];
-let skol_representatives: FnvHashMap<_, _> =
+let skol_representatives: FxHashMap<_, _> =
skol_resolution_map
.iter()
.map(|(&skol, &(br, ref regions))| {
@@ -268,7 +268,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot,
debruijn: ty::DebruijnIndex,
new_vars: &[ty::RegionVid],
-a_map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>,
+a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
r0: &'tcx ty::Region)
-> &'tcx ty::Region {
// Regions that pre-dated the LUB computation stay as they are.
@@ -364,8 +364,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot,
debruijn: ty::DebruijnIndex,
new_vars: &[ty::RegionVid],
-a_map: &FnvHashMap<ty::BoundRegion,
-&'tcx ty::Region>,
+a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
a_vars: &[ty::RegionVid],
b_vars: &[ty::RegionVid],
r0: &'tcx ty::Region)
@@ -434,7 +433,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {

fn rev_lookup<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
span: Span,
-a_map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>,
+a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
r: &'tcx ty::Region) -> &'tcx ty::Region
{
for (a_br, a_r) in a_map {
@@ -457,7 +456,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
}

fn var_ids<'a, 'gcx, 'tcx>(fields: &CombineFields<'a, 'gcx, 'tcx>,
-map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>)
+map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>)
-> Vec<ty::RegionVid> {
map.iter()
.map(|(_, &r)| match *r {
@@ -504,7 +503,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot,
r: &'tcx ty::Region,
directions: TaintDirections)
--> FnvHashSet<&'tcx ty::Region> {
+-> FxHashSet<&'tcx ty::Region> {
self.region_vars.tainted(&snapshot.region_vars_snapshot, r, directions)
}

@@ -568,7 +567,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
let escaping_types =
self.type_variables.borrow_mut().types_escaping_snapshot(&snapshot.type_snapshot);

-let mut escaping_region_vars = FnvHashSet();
+let mut escaping_region_vars = FxHashSet();
for ty in &escaping_types {
self.tcx.collect_regions(ty, &mut escaping_region_vars);
}
@@ -764,7 +763,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
// region back to the `ty::BoundRegion` that it originally
// represented. Because `leak_check` passed, we know that
// these taint sets are mutually disjoint.
-let inv_skol_map: FnvHashMap<&'tcx ty::Region, ty::BoundRegion> =
+let inv_skol_map: FxHashMap<&'tcx ty::Region, ty::BoundRegion> =
skol_map
.iter()
.flat_map(|(&skol_br, &skol)| {
@@ -837,7 +836,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot)
{
debug!("pop_skolemized({:?})", skol_map);
-let skol_regions: FnvHashSet<_> = skol_map.values().cloned().collect();
+let skol_regions: FxHashSet<_> = skol_map.values().cloned().collect();
self.region_vars.pop_skolemized(&skol_regions, &snapshot.region_vars_snapshot);
if !skol_map.is_empty() {
self.projection_cache.borrow_mut().rollback_skolemized(
12 changes: 6 additions & 6 deletions src/librustc/infer/mod.rs
@@ -39,7 +39,7 @@ use std::fmt;
use syntax::ast;
use errors::DiagnosticBuilder;
use syntax_pos::{self, Span, DUMMY_SP};
-use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
+use util::nodemap::{FxHashMap, FxHashSet, NodeMap};

use self::combine::CombineFields;
use self::higher_ranked::HrMatchResult;
@@ -134,7 +134,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {

// the set of predicates on which errors have been reported, to
// avoid reporting the same error twice.
-pub reported_trait_errors: RefCell<FnvHashSet<traits::TraitErrorKey<'tcx>>>,
+pub reported_trait_errors: RefCell<FxHashSet<traits::TraitErrorKey<'tcx>>>,

// Sadly, the behavior of projection varies a bit depending on the
// stage of compilation. The specifics are given in the
@@ -170,7 +170,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {

/// A map returned by `skolemize_late_bound_regions()` indicating the skolemized
/// region that each late-bound region was replaced with.
-pub type SkolemizationMap<'tcx> = FnvHashMap<ty::BoundRegion, &'tcx ty::Region>;
+pub type SkolemizationMap<'tcx> = FxHashMap<ty::BoundRegion, &'tcx ty::Region>;

/// Why did we require that the two types be related?
///
@@ -492,7 +492,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'gcx> {
selection_cache: traits::SelectionCache::new(),
evaluation_cache: traits::EvaluationCache::new(),
projection_cache: RefCell::new(traits::ProjectionCache::new()),
-reported_trait_errors: RefCell::new(FnvHashSet()),
+reported_trait_errors: RefCell::new(FxHashSet()),
projection_mode: Reveal::NotSpecializable,
tainted_by_errors_flag: Cell::new(false),
err_count_on_creation: self.sess.err_count(),
@@ -531,7 +531,7 @@ impl<'a, 'gcx, 'tcx> InferCtxtBuilder<'a, 'gcx, 'tcx> {
parameter_environment: param_env,
selection_cache: traits::SelectionCache::new(),
evaluation_cache: traits::EvaluationCache::new(),
-reported_trait_errors: RefCell::new(FnvHashSet()),
+reported_trait_errors: RefCell::new(FxHashSet()),
projection_mode: projection_mode,
tainted_by_errors_flag: Cell::new(false),
err_count_on_creation: tcx.sess.err_count(),
@@ -1530,7 +1530,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
span: Span,
lbrct: LateBoundRegionConversionTime,
value: &ty::Binder<T>)
--> (T, FnvHashMap<ty::BoundRegion, &'tcx ty::Region>)
+-> (T, FxHashMap<ty::BoundRegion, &'tcx ty::Region>)
where T : TypeFoldable<'tcx>
{
self.tcx.replace_late_bound_regions(
12 changes: 6 additions & 6 deletions src/librustc/infer/region_inference/graphviz.rs
@@ -23,7 +23,7 @@ use middle::region::CodeExtent;
use super::Constraint;
use infer::SubregionOrigin;
use infer::region_inference::RegionVarBindings;
-use util::nodemap::{FnvHashMap, FnvHashSet};
+use util::nodemap::{FxHashMap, FxHashSet};

use std::borrow::Cow;
use std::collections::hash_map::Entry::Vacant;
@@ -122,8 +122,8 @@ pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>(
struct ConstraintGraph<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>,
graph_name: String,
-map: &'a FnvHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>,
-node_ids: FnvHashMap<Node, usize>,
+map: &'a FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>,
+node_ids: FxHashMap<Node, usize>,
}

#[derive(Clone, Hash, PartialEq, Eq, Debug, Copy)]
@@ -145,7 +145,7 @@ impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> {
map: &'a ConstraintMap<'tcx>)
-> ConstraintGraph<'a, 'gcx, 'tcx> {
let mut i = 0;
-let mut node_ids = FnvHashMap();
+let mut node_ids = FxHashMap();
{
let mut add_node = |node| {
if let Vacant(e) = node_ids.entry(node) {
@@ -235,7 +235,7 @@ impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
type Node = Node;
type Edge = Edge<'tcx>;
fn nodes(&self) -> dot::Nodes<Node> {
-let mut set = FnvHashSet();
+let mut set = FxHashSet();
for node in self.node_ids.keys() {
set.insert(*node);
}
@@ -261,7 +261,7 @@ impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
}
}

-pub type ConstraintMap<'tcx> = FnvHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>;
+pub type ConstraintMap<'tcx> = FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>;

fn dump_region_constraints_to<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
map: &ConstraintMap<'tcx>,
(Diffs for the remaining 81 changed files are not shown here.)
