Commit 86d5a69d authored by varkor

Use Vec instead of FxHashMap

Parent 688cbad9
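The diff below swaps the per-variable hash maps for a (Range, Vec) pair: variables created after a snapshot get contiguous ids, so their origins can be stored in a Vec and looked up by offset from the start of the id range instead of by hashing. A minimal sketch of that lookup pattern, using illustrative VarId/Origin stand-ins rather than the compiler's real types:

use std::ops::Range;

// Illustrative stand-ins for the compiler's variable-id and origin types.
#[derive(Clone, Copy, Debug)]
struct VarId(u32);

#[derive(Clone, Debug)]
struct Origin(&'static str);

// Variables created since a snapshot: a contiguous id range plus one
// origin per id, stored in allocation order.
struct VarsSinceSnapshot {
    ids: Range<u32>,
    origins: Vec<Origin>,
}

impl VarsSinceSnapshot {
    // The old hash-map `get(&vid)` becomes a range check plus an offset
    // into the Vec.
    fn origin(&self, vid: VarId) -> Option<&Origin> {
        if self.ids.contains(&vid.0) {
            let idx = (vid.0 - self.ids.start) as usize;
            Some(&self.origins[idx])
        } else {
            None
        }
    }
}

fn main() {
    let vars = VarsSinceSnapshot {
        ids: 10..13,
        origins: vec![Origin("a"), Origin("b"), Origin("c")],
    };
    assert!(vars.origin(VarId(11)).is_some()); // created after the snapshot
    assert!(vars.origin(VarId(3)).is_none());  // existed before the snapshot
    println!("{:?}", vars.origin(VarId(11)));
}

Membership becomes a range check and the origin lookup an index, avoiding hashing while keeping origins in allocation order.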
@@ -6,7 +6,6 @@
 use super::type_variable::TypeVariableOrigin;
 use std::ops::Range;
-use rustc_data_structures::fx::FxHashMap;
 impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
     /// This rather funky routine is used while processing expected
@@ -102,10 +101,10 @@ pub fn fudge_inference_if_ok<T, E, F>(
         // Micro-optimization: if no variables have been created, then
         // `value` can't refer to any of them. =) So we can just return it.
-        if fudger.type_vars.is_empty() &&
+        if fudger.type_vars.0.is_empty() &&
             fudger.int_vars.is_empty() &&
             fudger.float_vars.is_empty() &&
-            fudger.region_vars.is_empty() {
+            fudger.region_vars.0.is_empty() {
             Ok(value)
         } else {
             Ok(value.fold_with(&mut fudger))
@@ -115,10 +114,10 @@ pub fn fudge_inference_if_ok<T, E, F>(
 pub struct InferenceFudger<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
-    type_vars: FxHashMap<TyVid, TypeVariableOrigin>,
+    type_vars: (Range<TyVid>, Vec<TypeVariableOrigin>),
     int_vars: Range<IntVid>,
     float_vars: Range<FloatVid>,
-    region_vars: FxHashMap<RegionVid, RegionVariableOrigin>,
+    region_vars: (Range<RegionVid>, Vec<RegionVariableOrigin>),
 }
 impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for InferenceFudger<'a, 'gcx, 'tcx> {
@@ -129,9 +128,11 @@ fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
     fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
         match ty.sty {
             ty::Infer(ty::InferTy::TyVar(vid)) => {
-                if let Some(&origin) = self.type_vars.get(&vid) {
+                if self.type_vars.0.contains(&vid) {
                     // This variable was created during the fudging.
                     // Recreate it with a fresh variable here.
+                    let idx = (vid.index - self.type_vars.0.start.index) as usize;
+                    let origin = self.type_vars.1[idx];
                     self.infcx.next_ty_var(origin)
                 } else {
                     // This variable was created before the
@@ -165,7 +166,9 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
     fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
         if let ty::ReVar(vid) = r {
-            if let Some(&origin) = self.region_vars.get(&vid) {
+            if self.region_vars.0.contains(&vid) {
+                let idx = (vid.index() - self.region_vars.0.start.index()) as usize;
+                let origin = self.region_vars.1[idx];
                 return self.infcx.next_region_var(origin);
             }
         }
......
@@ -16,6 +16,7 @@
 use std::collections::BTreeMap;
 use std::{cmp, fmt, mem, u32};
+use std::ops::Range;
 mod leak_check;
@@ -843,13 +844,11 @@ pub fn universe(&self, region: Region<'tcx>) -> ty::UniverseIndex {
     pub fn vars_since_snapshot(
         &self,
         mark: &RegionSnapshot,
-    ) -> FxHashMap<RegionVid, RegionVariableOrigin> {
+    ) -> (Range<RegionVid>, Vec<RegionVariableOrigin>) {
         let range = self.unification_table.vars_since_snapshot(&mark.region_snapshot);
-        (range.start.index()..range.end.index()).map(|index| {
-            let vid = ty::RegionVid::from(index);
-            let origin = self.var_infos[vid].origin.clone();
-            (vid, origin)
-        }).collect()
+        (range.clone(), (range.start.index()..range.end.index()).map(|index| {
+            self.var_infos[ty::RegionVid::from(index)].origin.clone()
+        }).collect())
     }
     /// See [`RegionInference::region_constraints_added_in_snapshot`].
......
@@ -5,7 +5,7 @@
 use std::cmp;
 use std::marker::PhantomData;
 use std::u32;
-use rustc_data_structures::fx::FxHashMap;
+use std::ops::Range;
 use rustc_data_structures::snapshot_vec as sv;
 use rustc_data_structures::unify as ut;
@@ -294,12 +294,11 @@ pub fn commit(&mut self, s: Snapshot<'tcx>) {
     pub fn vars_since_snapshot(
         &mut self,
         s: &Snapshot<'tcx>,
-    ) -> FxHashMap<TyVid, TypeVariableOrigin> {
+    ) -> (Range<TyVid>, Vec<TypeVariableOrigin>) {
         let range = self.eq_relations.vars_since_snapshot(&s.eq_snapshot);
-        (range.start.vid.index..range.end.vid.index).map(|index| {
-            let origin = self.values.get(index as usize).origin.clone();
-            (TyVid { index }, origin)
-        }).collect()
+        (range.start.vid..range.end.vid, (range.start.vid.index..range.end.vid.index).map(|index| {
+            self.values.get(index as usize).origin.clone()
+        }).collect())
     }
     /// Finds the set of type variables that existed *before* `s`
......