Commit bc1eb677 authored by Ariel Ben-Yehuda, committed by Ariel Ben-Yehuda

introduce the type-safe IdxVec and use it instead of loose indexes

Parent e9003c55
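In short, the change replaces `Vec<T>` fields that were indexed by loose `u32`/`usize` values with `IndexVec<I, T>`, a vector that can only be indexed by its dedicated newtype `I`, so an index from one domain (say a `Var`) can no longer be used against another domain's vector (say `basic_blocks`) by accident. A minimal standalone sketch of the pattern, with illustrative names rather than the actual rustc sources:

    use std::marker::PhantomData;
    use std::ops::Index;

    // Typed indexes: convert to and from a raw usize, as in the commit's Idx trait.
    trait Idx: Copy {
        fn new(i: usize) -> Self;
        fn index(self) -> usize;
    }

    // Two distinct index domains.
    #[derive(Copy, Clone)]
    struct BasicBlock(u32);
    #[derive(Copy, Clone)]
    struct Var(u32);

    impl Idx for BasicBlock {
        fn new(i: usize) -> Self { BasicBlock(i as u32) }
        fn index(self) -> usize { self.0 as usize }
    }
    impl Idx for Var {
        fn new(i: usize) -> Self { Var(i as u32) }
        fn index(self) -> usize { self.0 as usize }
    }

    // A vector that can only be indexed by its dedicated index type I.
    struct IndexVec<I: Idx, T> {
        raw: Vec<T>,
        _marker: PhantomData<fn(&I)>,
    }

    impl<I: Idx, T> IndexVec<I, T> {
        fn new() -> Self { IndexVec { raw: Vec::new(), _marker: PhantomData } }
        // push hands back the typed index of the element it just added.
        fn push(&mut self, t: T) -> I {
            let idx = I::new(self.raw.len());
            self.raw.push(t);
            idx
        }
    }

    impl<I: Idx, T> Index<I> for IndexVec<I, T> {
        type Output = T;
        fn index(&self, i: I) -> &T { &self.raw[i.index()] }
    }

    fn main() {
        let mut basic_blocks: IndexVec<BasicBlock, &str> = IndexVec::new();
        let bb0 = basic_blocks.push("entry block data");
        assert_eq!(basic_blocks[bb0], "entry block data");

        let mut var_decls: IndexVec<Var, &str> = IndexVec::new();
        let v0 = var_decls.push("x");
        assert_eq!(var_decls[v0], "x");

        // basic_blocks[v0] does not compile: a Var cannot index the basic-block
        // vector, which is exactly the mix-up the raw-u32 scheme allowed.
    }

The diff below adds the real `Idx`/`IndexVec` pair to librustc_data_structures, a `newtype_index!` macro to stamp out the index types, and then threads them through the MIR representation, the MIR builder, the dataflow code, and the transform passes.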
......@@ -11,6 +11,7 @@
use graphviz::IntoCow;
use middle::const_val::ConstVal;
use rustc_const_math::{ConstUsize, ConstInt, ConstMathErr};
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use hir::def_id::DefId;
use ty::subst::Substs;
use ty::{self, AdtDef, ClosureSubsts, FnOutput, Region, Ty};
......@@ -25,37 +26,61 @@
use syntax::ast::{self, Name};
use syntax::codemap::Span;
macro_rules! newtype_index {
($name:ident, $debug_name:expr) => (
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
RustcEncodable, RustcDecodable)]
pub struct $name(u32);
impl Idx for $name {
fn new(value: usize) -> Self {
assert!(value < (u32::MAX) as usize);
$name(value as u32)
}
fn index(self) -> usize {
self.0 as usize
}
}
impl Debug for $name {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
write!(fmt, "{}{}", $debug_name, self.0)
}
}
)
}
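Each invocation of this macro below (`BasicBlock`, `Var`, `Temp`, `Arg`, `Field`, `VisibilityScope`, `Promoted`) stamps out a `u32` newtype implementing `Idx`, plus a `Debug` impl that prints the given prefix. An illustrative sketch (the `demo` function is not part of the commit) of what, say, `newtype_index!(Temp, "tmp")` provides at a use site:

    fn demo() {
        let t = Temp::new(3);                    // Idx::new: stored as a u32, asserts it fits
        assert_eq!(t.index(), 3);                // Idx::index: back to a usize
        assert_eq!(format!("{:?}", t), "tmp3");  // Debug prints the "tmp" prefix
    }

The derives also give every index type `Ord`, `Hash`, and `RustcEncodable`/`RustcDecodable`, so it can key maps and round-trip through metadata like the raw integer it replaces.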
/// Lowered representation of a single function.
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Mir<'tcx> {
/// List of basic blocks. References to basic blocks use a newtyped index type `BasicBlock`
/// that indexes into this vector.
pub basic_blocks: Vec<BasicBlockData<'tcx>>,
pub basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
/// List of visibility (lexical) scopes; these are referenced by statements
/// and used (eventually) for debuginfo. Indexed by a `VisibilityScope`.
pub visibility_scopes: Vec<VisibilityScopeData>,
pub visibility_scopes: IndexVec<VisibilityScope, VisibilityScopeData>,
/// Rvalues promoted from this function, such as borrows of constants.
/// Each of them is the Mir of a constant with the fn's type parameters
/// in scope, but no vars or args and a separate set of temps.
pub promoted: Vec<Mir<'tcx>>,
pub promoted: IndexVec<Promoted, Mir<'tcx>>,
/// Return type of the function.
pub return_ty: FnOutput<'tcx>,
/// Variables: these are stack slots corresponding to user variables. They may be
/// assigned many times.
pub var_decls: Vec<VarDecl<'tcx>>,
pub var_decls: IndexVec<Var, VarDecl<'tcx>>,
/// Args: these are stack slots corresponding to the input arguments.
pub arg_decls: Vec<ArgDecl<'tcx>>,
pub arg_decls: IndexVec<Arg, ArgDecl<'tcx>>,
/// Temp declarations: stack slots for temporaries created by
/// the compiler. These are assigned once, but they are not SSA
/// values in that it is possible to borrow them and mutate them
/// through the resulting reference.
pub temp_decls: Vec<TempDecl<'tcx>>,
pub temp_decls: IndexVec<Temp, TempDecl<'tcx>>,
/// Names and capture modes of all the closure upvars, assuming
/// the first argument is either the closure or a reference to it.
......@@ -76,11 +101,11 @@ pub fn all_basic_blocks(&self) -> Vec<BasicBlock> {
}
pub fn basic_block_data(&self, bb: BasicBlock) -> &BasicBlockData<'tcx> {
&self.basic_blocks[bb.index()]
&self.basic_blocks[bb]
}
pub fn basic_block_data_mut(&mut self, bb: BasicBlock) -> &mut BasicBlockData<'tcx> {
&mut self.basic_blocks[bb.index()]
&mut self.basic_blocks[bb]
}
}
......@@ -231,31 +256,7 @@ pub struct UpvarDecl {
///////////////////////////////////////////////////////////////////////////
// BasicBlock
/// The index of a particular basic block. The index is into the `basic_blocks`
/// list of the `Mir`.
///
/// (We use a `u32` internally just to save memory.)
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
RustcEncodable, RustcDecodable)]
pub struct BasicBlock(u32);
impl BasicBlock {
pub fn new(index: usize) -> BasicBlock {
assert!(index < (u32::MAX as usize));
BasicBlock(index as u32)
}
/// Extract the index.
pub fn index(self) -> usize {
self.0 as usize
}
}
impl Debug for BasicBlock {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
write!(fmt, "bb{}", self.0)
}
}
newtype_index!(BasicBlock, "bb");
///////////////////////////////////////////////////////////////////////////
// BasicBlockData and Terminator
......@@ -616,19 +617,23 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
///////////////////////////////////////////////////////////////////////////
// Lvalues
newtype_index!(Var, "var");
newtype_index!(Temp, "tmp");
newtype_index!(Arg, "arg");
/// A path to a value; something that can be evaluated without
/// changing or disturbing program state.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum Lvalue<'tcx> {
/// local variable declared by the user
Var(u32),
Var(Var),
/// temporary introduced during lowering into MIR
Temp(u32),
Temp(Temp),
/// formal parameter of the function; note that these are NOT the
/// bindings that the user declares, which are vars
Arg(u32),
Arg(Arg),
/// static or static mut variable
Static(DefId),
......@@ -696,20 +701,7 @@ pub enum ProjectionElem<'tcx, V> {
/// and the index is an operand.
pub type LvalueElem<'tcx> = ProjectionElem<'tcx, Operand<'tcx>>;
/// Index into the list of fields found in a `VariantDef`
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct Field(u32);
impl Field {
pub fn new(value: usize) -> Field {
assert!(value < (u32::MAX) as usize);
Field(value as u32)
}
pub fn index(self) -> usize {
self.0 as usize
}
}
newtype_index!(Field, "field");
impl<'tcx> Lvalue<'tcx> {
pub fn field(self, f: Field, ty: Ty<'tcx>) -> Lvalue<'tcx> {
......@@ -737,12 +729,9 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
use self::Lvalue::*;
match *self {
Var(id) =>
write!(fmt, "var{:?}", id),
Arg(id) =>
write!(fmt, "arg{:?}", id),
Temp(id) =>
write!(fmt, "tmp{:?}", id),
Var(id) => write!(fmt, "{:?}", id),
Arg(id) => write!(fmt, "{:?}", id),
Temp(id) => write!(fmt, "{:?}", id),
Static(def_id) =>
write!(fmt, "{}", ty::tls::with(|tcx| tcx.item_path_str(def_id))),
ReturnPointer =>
......@@ -777,38 +766,8 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
///////////////////////////////////////////////////////////////////////////
// Scopes
impl Index<VisibilityScope> for Vec<VisibilityScopeData> {
type Output = VisibilityScopeData;
#[inline]
fn index(&self, index: VisibilityScope) -> &VisibilityScopeData {
&self[index.index()]
}
}
impl IndexMut<VisibilityScope> for Vec<VisibilityScopeData> {
#[inline]
fn index_mut(&mut self, index: VisibilityScope) -> &mut VisibilityScopeData {
&mut self[index.index()]
}
}
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub struct VisibilityScope(u32);
/// The visibility scope all arguments go into.
pub const ARGUMENT_VISIBILITY_SCOPE: VisibilityScope = VisibilityScope(0);
impl VisibilityScope {
pub fn new(index: usize) -> VisibilityScope {
assert!(index < (u32::MAX as usize));
VisibilityScope(index as u32)
}
pub fn index(self) -> usize {
self.0 as usize
}
}
newtype_index!(VisibilityScope, "scope");
pub const ARGUMENT_VISIBILITY_SCOPE: VisibilityScope = VisibilityScope(0);
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct VisibilityScopeData {
......@@ -1080,6 +1039,8 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
}
}
newtype_index!(Promoted, "promoted");
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum Literal<'tcx> {
Item {
......@@ -1091,7 +1052,7 @@ pub enum Literal<'tcx> {
},
Promoted {
// Index into the `promoted` vector of `Mir`.
index: usize
index: Promoted
},
}
......@@ -1115,7 +1076,7 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
fmt_const_val(fmt, value)
}
Promoted { index } => {
write!(fmt, "promoted{}", index)
write!(fmt, "{:?}", index)
}
}
}
......
......@@ -154,11 +154,11 @@ pub fn lvalue_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
{
match *lvalue {
Lvalue::Var(index) =>
LvalueTy::Ty { ty: self.var_decls[index as usize].ty },
LvalueTy::Ty { ty: self.var_decls[index].ty },
Lvalue::Temp(index) =>
LvalueTy::Ty { ty: self.temp_decls[index as usize].ty },
LvalueTy::Ty { ty: self.temp_decls[index].ty },
Lvalue::Arg(index) =>
LvalueTy::Ty { ty: self.arg_decls[index as usize].ty },
LvalueTy::Ty { ty: self.arg_decls[index].ty },
Lvalue::Static(def_id) =>
LvalueTy::Ty { ty: tcx.lookup_item_type(def_id).ty },
Lvalue::ReturnPointer =>
......
......@@ -11,6 +11,7 @@
use std::vec;
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::Idx;
use super::repr::*;
......
......@@ -15,6 +15,7 @@
use mir::repr::*;
use rustc_const_math::ConstUsize;
use rustc_data_structures::tuple_slice::TupleSlice;
use rustc_data_structures::indexed_vec::Idx;
use syntax::codemap::Span;
// # The MIR Visitor
......
......@@ -14,4 +14,5 @@ log = { path = "../liblog" }
syntax = { path = "../libsyntax" }
graphviz = { path = "../libgraphviz" }
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_mir = { path = "../librustc_mir" }
......@@ -12,6 +12,7 @@
use syntax::ast::NodeId;
use rustc::mir::repr::{BasicBlock, Mir};
use rustc_data_structures::indexed_vec::Idx;
use dot;
use dot::IntoCow;
......@@ -27,7 +28,7 @@
use super::super::MoveDataParamEnv;
use super::super::MirBorrowckCtxtPreDataflow;
use bitslice::bits_to_string;
use indexed_set::{Idx, IdxSet};
use indexed_set::{IdxSet};
use super::{BitDenotation, DataflowState};
impl<O: BitDenotation> DataflowState<O> {
......
......@@ -10,6 +10,7 @@
use rustc::ty::TyCtxt;
use rustc::mir::repr::{self, Mir};
use rustc_data_structures::indexed_vec::Idx;
use super::super::gather_moves::{Location};
use super::super::gather_moves::{MoveOutIndex, MovePathIndex};
......@@ -23,7 +24,7 @@
use bitslice::BitSlice; // adds set_bit/get_bit to &[usize] bitvector rep.
use bitslice::{BitwiseOperator};
use indexed_set::{Idx, IdxSet};
use indexed_set::{IdxSet};
// Dataflow analyses are built upon some interpretation of the
// bitvectors attached to each basic block, represented via a
......@@ -451,7 +452,7 @@ fn statement_effect(&self,
move_data,
move_path_index,
|mpi| for moi in &path_map[mpi] {
assert!(moi.idx() < bits_per_block);
assert!(moi.index() < bits_per_block);
sets.kill_set.add(&moi);
});
}
......@@ -472,7 +473,7 @@ fn terminator_effect(&self,
term, loc, &loc_map[loc]);
let bits_per_block = self.bits_per_block(ctxt);
for move_index in &loc_map[loc] {
assert!(move_index.idx() < bits_per_block);
assert!(move_index.index() < bits_per_block);
zero_to_one(sets.gen_set.words_mut(), *move_index);
}
}
......@@ -493,14 +494,14 @@ fn propagate_call_return(&self,
move_data,
move_path_index,
|mpi| for moi in &path_map[mpi] {
assert!(moi.idx() < bits_per_block);
assert!(moi.index() < bits_per_block);
in_out.remove(&moi);
});
}
}
fn zero_to_one(bitvec: &mut [usize], move_index: MoveOutIndex) {
let retval = bitvec.set_bit(move_index.idx());
let retval = bitvec.set_bit(move_index.index());
assert!(retval);
}
......
......@@ -8,6 +8,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc_data_structures::indexed_vec::Idx;
use rustc::ty::TyCtxt;
use rustc::mir::repr::{self, Mir};
......@@ -21,7 +23,7 @@
use super::MoveDataParamEnv;
use bitslice::{bitwise, BitwiseOperator};
use indexed_set::{Idx, IdxSet, IdxSetBuf};
use indexed_set::{IdxSet, IdxSetBuf};
pub use self::sanity_check::sanity_check_via_rustc_peek;
pub use self::impls::{MaybeInitializedLvals, MaybeUninitializedLvals};
......
......@@ -14,6 +14,7 @@
use rustc::ty::{self, TyCtxt};
use rustc::mir::repr::{self, Mir};
use rustc_data_structures::indexed_vec::Idx;
use super::super::gather_moves::{MovePathIndex};
use super::super::MoveDataParamEnv;
......
......@@ -22,6 +22,7 @@
use rustc::middle::const_val::ConstVal;
use rustc::middle::lang_items;
use rustc::util::nodemap::FnvHashMap;
use rustc_data_structures::indexed_vec::Idx;
use rustc_mir::pretty;
use syntax::codemap::Span;
......@@ -118,7 +119,7 @@ struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
env: &'a MoveDataParamEnv<'tcx>,
flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
drop_flags: FnvHashMap<MovePathIndex, u32>,
drop_flags: FnvHashMap<MovePathIndex, Temp>,
patch: MirPatch<'tcx>,
}
......
......@@ -12,6 +12,7 @@
use rustc::ty::{FnOutput, TyCtxt};
use rustc::mir::repr::*;
use rustc::util::nodemap::FnvHashMap;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use std::cell::{Cell};
use std::collections::hash_map::Entry;
......@@ -20,7 +21,6 @@
use std::ops::Index;
use super::abs_domain::{AbstractElem, Lift};
use indexed_set::{Idx};
// This submodule holds some newtype'd Index wrappers that are using
// NonZero to ensure that Option<Index> occupies only a single word.
......@@ -29,7 +29,7 @@
// (which is likely to yield a subtle off-by-one error).
mod indexes {
use core::nonzero::NonZero;
use indexed_set::Idx;
use rustc_data_structures::indexed_vec::Idx;
macro_rules! new_index {
($Index:ident) => {
......@@ -43,7 +43,7 @@ impl Idx for $Index {
fn new(idx: usize) -> Self {
unsafe { $Index(NonZero::new(idx + 1)) }
}
fn idx(&self) -> usize {
fn index(self) -> usize {
*self.0 - 1
}
}
......@@ -62,7 +62,7 @@ fn idx(&self) -> usize {
impl self::indexes::MoveOutIndex {
pub fn move_path_index(&self, move_data: &MoveData) -> MovePathIndex {
move_data.moves[self.idx()].path
move_data.moves[self.index()].path
}
}
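The `indexes` submodule above stores `idx + 1` in a `NonZero` so that zero stays free to represent `None`, keeping `Option<MovePathIndex>` and `Option<MoveOutIndex>` one word wide. A standalone sketch of the same trick using today's stable `std::num::NonZeroUsize` in place of the unstable `core::nonzero` used here (names illustrative):

    use std::mem::size_of;
    use std::num::NonZeroUsize;

    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    struct MoveOutIndex(NonZeroUsize);

    impl MoveOutIndex {
        fn new(idx: usize) -> Self {
            // Store idx + 1 so the value is never zero.
            MoveOutIndex(NonZeroUsize::new(idx + 1).expect("index overflowed"))
        }
        fn index(self) -> usize {
            // Undo the +1 shift -- the subtle off-by-one the comment above warns about.
            self.0.get() - 1
        }
    }

    fn main() {
        assert_eq!(MoveOutIndex::new(7).index(), 7);
        // The zero niche makes the Option free; on a 64-bit target both print 8.
        println!("MoveOutIndex: {} bytes, Option<MoveOutIndex>: {} bytes",
                 size_of::<MoveOutIndex>(), size_of::<Option<MoveOutIndex>>());
    }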
......@@ -176,7 +176,7 @@ pub struct PathMap {
impl Index<MovePathIndex> for PathMap {
type Output = [MoveOutIndex];
fn index(&self, index: MovePathIndex) -> &Self::Output {
&self.map[index.idx()]
&self.map[index.index()]
}
}
......@@ -196,7 +196,7 @@ pub struct MoveOut {
impl fmt::Debug for MoveOut {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "p{}@{:?}", self.path.idx(), self.source)
write!(fmt, "p{}@{:?}", self.path.index(), self.source)
}
}
......@@ -227,14 +227,10 @@ pub fn len(&self) -> usize { self.move_paths.len() }
impl<'tcx> Index<MovePathIndex> for MovePathData<'tcx> {
type Output = MovePath<'tcx>;
fn index(&self, i: MovePathIndex) -> &MovePath<'tcx> {
&self.move_paths[i.idx()]
&self.move_paths[i.index()]
}
}
/// MovePathInverseMap maps from a uint in an lvalue-category to the
/// MovePathIndex for the MovePath for that lvalue.
type MovePathInverseMap = Vec<Option<MovePathIndex>>;
struct MovePathDataBuilder<'a, 'tcx: 'a> {
mir: &'a Mir<'tcx>,
pre_move_paths: Vec<PreMovePath<'tcx>>,
......@@ -244,9 +240,9 @@ struct MovePathDataBuilder<'a, 'tcx: 'a> {
/// Tables mapping from an l-value to its MovePathIndex.
#[derive(Debug)]
pub struct MovePathLookup<'tcx> {
vars: MovePathInverseMap,
temps: MovePathInverseMap,
args: MovePathInverseMap,
vars: IndexVec<Var, Option<MovePathIndex>>,
temps: IndexVec<Temp, Option<MovePathIndex>>,
args: IndexVec<Arg, Option<MovePathIndex>>,
/// The move path representing the return value is constructed
/// lazily when we first encounter it in the input MIR.
......@@ -295,15 +291,15 @@ enum LookupKind { Generate, Reuse }
struct Lookup<T>(LookupKind, T);
impl Lookup<MovePathIndex> {
fn idx(&self) -> usize { (self.1).idx() }
fn index(&self) -> usize { (self.1).index() }
}
impl<'tcx> MovePathLookup<'tcx> {
fn new() -> Self {
fn new(mir: &Mir) -> Self {
MovePathLookup {
vars: vec![],
temps: vec![],
args: vec![],
vars: IndexVec::from_elem(None, &mir.var_decls),
temps: IndexVec::from_elem(None, &mir.temp_decls),
args: IndexVec::from_elem(None, &mir.arg_decls),
statics: None,
return_ptr: None,
projections: vec![],
......@@ -313,15 +309,14 @@ fn new() -> Self {
fn next_index(next: &mut MovePathIndex) -> MovePathIndex {
let i = *next;
*next = MovePathIndex::new(i.idx() + 1);
*next = MovePathIndex::new(i.index() + 1);
i
}
fn lookup_or_generate(vec: &mut Vec<Option<MovePathIndex>>,
idx: u32,
next_index: &mut MovePathIndex) -> Lookup<MovePathIndex> {
let idx = idx as usize;
vec.fill_to_with(idx, None);
fn lookup_or_generate<I: Idx>(vec: &mut IndexVec<I, Option<MovePathIndex>>,
idx: I,
next_index: &mut MovePathIndex)
-> Lookup<MovePathIndex> {
let entry = &mut vec[idx];
match *entry {
None => {
......@@ -335,19 +330,19 @@ fn lookup_or_generate(vec: &mut Vec<Option<MovePathIndex>>,
}
}
fn lookup_var(&mut self, var_idx: u32) -> Lookup<MovePathIndex> {
fn lookup_var(&mut self, var_idx: Var) -> Lookup<MovePathIndex> {
Self::lookup_or_generate(&mut self.vars,
var_idx,
&mut self.next_index)
}
fn lookup_temp(&mut self, temp_idx: u32) -> Lookup<MovePathIndex> {
fn lookup_temp(&mut self, temp_idx: Temp) -> Lookup<MovePathIndex> {
Self::lookup_or_generate(&mut self.temps,
temp_idx,
&mut self.next_index)
}
fn lookup_arg(&mut self, arg_idx: u32) -> Lookup<MovePathIndex> {
fn lookup_arg(&mut self, arg_idx: Arg) -> Lookup<MovePathIndex> {
Self::lookup_or_generate(&mut self.args,
arg_idx,
&mut self.next_index)
......@@ -384,8 +379,8 @@ fn lookup_proj(&mut self,
base: MovePathIndex) -> Lookup<MovePathIndex> {
let MovePathLookup { ref mut projections,
ref mut next_index, .. } = *self;
projections.fill_to(base.idx());
match projections[base.idx()].entry(proj.elem.lift()) {
projections.fill_to(base.index());
match projections[base.index()].entry(proj.elem.lift()) {
Entry::Occupied(ent) => {
Lookup(LookupKind::Reuse, *ent.get())
}
......@@ -404,14 +399,14 @@ impl<'tcx> MovePathLookup<'tcx> {
// unknown l-value; it will simply panic.
pub fn find(&self, lval: &Lvalue<'tcx>) -> MovePathIndex {
match *lval {
Lvalue::Var(var_idx) => self.vars[var_idx as usize].unwrap(),
Lvalue::Temp(temp_idx) => self.temps[temp_idx as usize].unwrap(),
Lvalue::Arg(arg_idx) => self.args[arg_idx as usize].unwrap(),
Lvalue::Var(var) => self.vars[var].unwrap(),
Lvalue::Temp(temp) => self.temps[temp].unwrap(),
Lvalue::Arg(arg) => self.args[arg].unwrap(),
Lvalue::Static(ref _def_id) => self.statics.unwrap(),
Lvalue::ReturnPointer => self.return_ptr.unwrap(),
Lvalue::Projection(ref proj) => {
let base_index = self.find(&proj.base);
self.projections[base_index.idx()][&proj.elem.lift()]
self.projections[base_index.index()][&proj.elem.lift()]
}
}
}
......@@ -451,7 +446,7 @@ fn move_path_for(&mut self, lval: &Lvalue<'tcx>) -> MovePathIndex {
// `lookup` is either the previously assigned index or a
// newly-allocated one.
debug_assert!(lookup.idx() <= self.pre_move_paths.len());
debug_assert!(lookup.index() <= self.pre_move_paths.len());
if let Lookup(LookupKind::Generate, mpi) = lookup {
let parent;
......@@ -482,7 +477,7 @@ fn move_path_for(&mut self, lval: &Lvalue<'tcx>) -> MovePathIndex {
let idx = self.move_path_for(&proj.base);
parent = Some(idx);
let parent_move_path = &mut self.pre_move_paths[idx.idx()];
let parent_move_path = &mut self.pre_move_paths[idx.index()];
// At last: Swap in the new first_child.
sibling = parent_move_path.first_child.get();
......@@ -535,7 +530,7 @@ fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveD
let mut builder = MovePathDataBuilder {
mir: mir,
pre_move_paths: Vec::new(),
rev_lookup: MovePathLookup::new(),
rev_lookup: MovePathLookup::new(mir),
};
// Before we analyze the program text, we create the MovePath's
......@@ -546,17 +541,17 @@ fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveD
assert!(mir.var_decls.len() <= ::std::u32::MAX as usize);
assert!(mir.arg_decls.len() <= ::std::u32::MAX as usize);
assert!(mir.temp_decls.len() <= ::std::u32::MAX as usize);
for var_idx in 0..mir.var_decls.len() {
let path_idx = builder.move_path_for(&Lvalue::Var(var_idx as u32));
path_map.fill_to(path_idx.idx());
for (var, _) in mir.var_decls.iter_enumerated() {
let path_idx = builder.move_path_for(&Lvalue::Var(var));
path_map.fill_to(path_idx.index());
}
for arg_idx in 0..mir.arg_decls.len() {
let path_idx = builder.move_path_for(&Lvalue::Arg(arg_idx as u32));
path_map.fill_to(path_idx.idx());
for (arg, _) in mir.arg_decls.iter_enumerated() {
let path_idx = builder.move_path_for(&Lvalue::Arg(arg));
path_map.fill_to(path_idx.index());
}
for temp_idx in 0..mir.temp_decls.len() {
let path_idx = builder.move_path_for(&Lvalue::Temp(temp_idx as u32));
path_map.fill_to(path_idx.idx());
for (temp, _) in mir.temp_decls.iter_enumerated() {
let path_idx = builder.move_path_for(&Lvalue::Temp(temp));
path_map.fill_to(path_idx.index());
}
for bb in bbs {
......@@ -585,7 +580,7 @@ fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveD
// Ensure that the path_map contains entries even
// if the lvalue is assigned and never read.
let assigned_path = bb_ctxt.builder.move_path_for(lval);
bb_ctxt.path_map.fill_to(assigned_path.idx());
bb_ctxt.path_map.fill_to(assigned_path.index());
match *rval {
Rvalue::Use(ref operand) => {
......@@ -679,7 +674,7 @@ fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveD
}
TerminatorKind::DropAndReplace { ref location, ref value, .. } => {
let assigned_path = bb_ctxt.builder.move_path_for(location);
bb_ctxt.path_map.fill_to(assigned_path.idx());
bb_ctxt.path_map.fill_to(assigned_path.index());
let source = Location { block: bb,
index: bb_data.statements.len() };
......@@ -699,7 +694,7 @@ fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveD
// Ensure that the path_map contains entries even
// if the lvalue is assigned and never read.
let assigned_path = bb_ctxt.builder.move_path_for(destination);
bb_ctxt.path_map.fill_to(assigned_path.idx());
bb_ctxt.path_map.fill_to(assigned_path.index());
bb_ctxt.builder.create_move_path(destination);
}
......@@ -729,8 +724,8 @@ fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveD
let mut seen: Vec<_> = move_paths.iter().map(|_| false).collect();
for (j, &MoveOut { ref path, ref source }) in moves.iter().enumerate() {
debug!("MovePathData moves[{}]: MoveOut {{ path: {:?} = {:?}, source: {:?} }}",
j, path, move_paths[path.idx()], source);
seen[path.idx()] = true;
j, path, move_paths[path.index()], source);
seen[path.index()] = true;
}
for (j, path) in move_paths.iter().enumerate() {
if !seen[j] {
......@@ -767,7 +762,7 @@ fn on_move_out_lval(&mut self,
let path = self.builder.move_path_for(lval);
self.moves.push(MoveOut { path: path, source: source.clone() });
self.path_map.fill_to(path.idx());
self.path_map.fill_to(path.index());
debug!("ctxt: {:?} add consume of lval: {:?} \
at index: {:?} \
......@@ -775,12 +770,12 @@ fn on_move_out_lval(&mut self,
to loc_map for loc: {:?}",
stmt_kind, lval, index, path, source);
debug_assert!(path.idx() < self.path_map.len());
debug_assert!(path.index() < self.path_map.len());
// this is actually a questionable assert; at the very
// least, incorrect input code can probably cause it to
// fire.
assert!(self.path_map[path.idx()].iter().find(|idx| **idx == index).is_none());
self.path_map[path.idx()].push(index);
assert!(self.path_map[path.index()].iter().find(|idx| **idx == index).is_none());
self.path_map[path.index()].push(index);
debug_assert!(i < self.loc_map_bb.len());
debug_assert!(self.loc_map_bb[i].iter().find(|idx| **idx == index).is_none());
......
......@@ -298,8 +298,8 @@ fn drop_flag_effects_for_function_entry<'a, 'tcx, F>(
where F: FnMut(MovePathIndex, DropFlagState)
{
let move_data = &ctxt.move_data;
for i in 0..(mir.arg_decls.len() as u32) {
let lvalue = repr::Lvalue::Arg(i);
for (arg, _) in mir.arg_decls.iter_enumerated() {
let lvalue = repr::Lvalue::Arg(arg);
let move_path_index = move_data.rev_lookup.find(&lvalue);
on_all_children_bits(tcx, mir, move_data,
move_path_index,
......
......@@ -11,31 +11,28 @@
use super::gather_moves::Location;
use rustc::ty::Ty;
use rustc::mir::repr::*;
use std::iter;
use std::u32;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
/// This struct represents a patch to MIR, which can add
/// new statements and basic blocks and patch over block
/// terminators.
pub struct MirPatch<'tcx> {
patch_map: Vec<Option<TerminatorKind<'tcx>>>,
patch_map: IndexVec<BasicBlock, Option<TerminatorKind<'tcx>>>,
new_blocks: Vec<BasicBlockData<'tcx>>,
new_statements: Vec<(Location, StatementKind<'tcx>)>,
new_temps: Vec<TempDecl<'tcx>>,
resume_block: BasicBlock,
next_temp: u32,
next_temp: usize,
}
impl<'tcx> MirPatch<'tcx> {
pub fn new(mir: &Mir<'tcx>) -> Self {
let mut result = MirPatch {
patch_map: iter::repeat(None)
.take(mir.basic_blocks.len()).collect(),
patch_map: IndexVec::from_elem(None, &mir.basic_blocks),
new_blocks: vec![],
new_temps: vec![],
new_statements: vec![],
next_temp: mir.temp_decls.len() as u32,
next_temp: mir.temp_decls.len(),
resume_block: START_BLOCK
};
......@@ -83,7 +80,7 @@ pub fn resume_block(&self) -> BasicBlock {
}
pub fn is_patched(&self, bb: BasicBlock) -> bool {
self.patch_map[bb.index()].is_some()
self.patch_map[bb].is_some()
}
pub fn terminator_loc(&self, mir: &Mir<'tcx>, bb: BasicBlock) -> Location {
......@@ -97,12 +94,11 @@ pub fn terminator_loc(&self, mir: &Mir<'tcx>, bb: BasicBlock) -> Location {
}
}
pub fn new_temp(&mut self, ty: Ty<'tcx>) -> u32 {
pub fn new_temp(&mut self, ty: Ty<'tcx>) -> Temp {
let index = self.next_temp;
assert!(self.next_temp < u32::MAX);
self.next_temp += 1;
self.new_temps.push(TempDecl { ty: ty });
index
Temp::new(index as usize)
}
pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock {
......@@ -114,9 +110,9 @@ pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock {
}
pub fn patch_terminator(&mut self, block: BasicBlock, new: TerminatorKind<'tcx>) {
assert!(self.patch_map[block.index()].is_none());
assert!(self.patch_map[block].is_none());
debug!("MirPatch: patch_terminator({:?}, {:?})", block, new);
self.patch_map[block.index()] = Some(new);
self.patch_map[block] = Some(new);
}
pub fn add_statement(&mut self, loc: Location, stmt: StatementKind<'tcx>) {
......@@ -135,7 +131,7 @@ pub fn apply(self, mir: &mut Mir<'tcx>) {
self.new_blocks.len(), mir.basic_blocks.len());
mir.basic_blocks.extend(self.new_blocks);
mir.temp_decls.extend(self.new_temps);
for (src, patch) in self.patch_map.into_iter().enumerate() {
for (src, patch) in self.patch_map.into_iter_enumerated() {
if let Some(patch) = patch {
debug!("MirPatch: patching block {:?}", src);
mir.basic_blocks[src].terminator_mut().kind = patch;
......
......@@ -17,13 +17,7 @@
use bitslice::{BitSlice, Word};
use bitslice::{bitwise, Union, Subtract};
/// Represents some newtyped `usize` wrapper.
///
/// (purpose: avoid mixing indexes for different bitvector domains.)
pub trait Idx: 'static {
fn new(usize) -> Self;
fn idx(&self) -> usize;
}
use rustc_data_structures::indexed_vec::Idx;
/// Represents a set (or packed family of sets), of some element type
/// E, where each E is identified by some unique index type `T`.
......@@ -120,27 +114,27 @@ pub fn to_owned(&self) -> IdxSetBuf<T> {
/// Removes `elem` from the set `self`; returns true iff this changed `self`.
pub fn remove(&mut self, elem: &T) -> bool {
self.bits.clear_bit(elem.idx())
self.bits.clear_bit(elem.index())
}
/// Adds `elem` to the set `self`; returns true iff this changed `self`.
pub fn add(&mut self, elem: &T) -> bool {
self.bits.set_bit(elem.idx())
self.bits.set_bit(elem.index())
}
pub fn range(&self, elems: &Range<T>) -> &Self {
let elems = elems.start.idx()..elems.end.idx();
let elems = elems.start.index()..elems.end.index();
unsafe { Self::from_slice(&self.bits[elems]) }
}
pub fn range_mut(&mut self, elems: &Range<T>) -> &mut Self {
let elems = elems.start.idx()..elems.end.idx();
let elems = elems.start.index()..elems.end.index();
unsafe { Self::from_slice_mut(&mut self.bits[elems]) }
}
/// Returns true iff set `self` contains `elem`.
pub fn contains(&self, elem: &T) -> bool {
self.bits.get_bit(elem.idx())
self.bits.get_bit(elem.index())
}
pub fn words(&self) -> &[Word] {
......
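`IdxSet`/`IdxSetBuf` is the dataflow-side counterpart of the same idea: a packed bit set addressed by a typed index, where `add`, `remove`, and `contains` take the index type `T` and use `T::index()` (previously `idx()`) as the bit position. A minimal sketch with assumed names, not the rustc implementation:

    use std::marker::PhantomData;

    trait Idx: Copy {
        fn index(self) -> usize;
    }

    impl Idx for usize {
        fn index(self) -> usize { self }
    }

    struct IdxSetBuf<T: Idx> {
        bits: Vec<u64>,
        _marker: PhantomData<fn(&T)>,
    }

    impl<T: Idx> IdxSetBuf<T> {
        fn new_empty(universe_size: usize) -> Self {
            IdxSetBuf { bits: vec![0; (universe_size + 63) / 64], _marker: PhantomData }
        }

        /// Adds `elem` to the set; returns true iff this changed the set.
        fn add(&mut self, elem: &T) -> bool {
            let (word, mask) = (elem.index() / 64, 1u64 << (elem.index() % 64));
            let changed = (self.bits[word] & mask) == 0;
            self.bits[word] |= mask;
            changed
        }

        /// Returns true iff the set contains `elem`.
        fn contains(&self, elem: &T) -> bool {
            (self.bits[elem.index() / 64] & (1u64 << (elem.index() % 64))) != 0
        }
    }

    fn main() {
        let mut kill_set: IdxSetBuf<usize> = IdxSetBuf::new_empty(128);
        assert!(kill_set.add(&70));
        assert!(!kill_set.add(&70)); // the second add reports no change
        assert!(kill_set.contains(&70));
    }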
......@@ -34,6 +34,7 @@
extern crate graphviz as dot;
#[macro_use]
extern crate rustc;
extern crate rustc_data_structures;
extern crate rustc_mir;
extern crate core; // for NonZero
......
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::iter::{self, FromIterator};
use std::slice;
use std::marker::PhantomData;
use std::ops::{Index, IndexMut};
use std::fmt;
use std::vec;
use rustc_serialize as serialize;
/// Represents some newtyped `usize` wrapper.
///
/// (purpose: avoid mixing indexes for different bitvector domains.)
pub trait Idx: Copy + 'static {
fn new(usize) -> Self;
fn index(self) -> usize;
}
impl Idx for usize {
fn new(idx: usize) -> Self { idx }
fn index(self) -> usize { self }
}
#[derive(Clone)]
pub struct IndexVec<I: Idx, T> {
pub raw: Vec<T>,
_marker: PhantomData<Fn(&I)>
}
impl<I: Idx, T: serialize::Encodable> serialize::Encodable for IndexVec<I, T> {
fn encode<S: serialize::Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
serialize::Encodable::encode(&self.raw, s)
}
}
impl<I: Idx, T: serialize::Decodable> serialize::Decodable for IndexVec<I, T> {
fn decode<D: serialize::Decoder>(d: &mut D) -> Result<Self, D::Error> {
serialize::Decodable::decode(d).map(|v| {
IndexVec { raw: v, _marker: PhantomData }
})
}
}
impl<I: Idx, T: fmt::Debug> fmt::Debug for IndexVec<I, T> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.raw, fmt)
}
}
pub type Enumerated<I, J> = iter::Map<iter::Enumerate<J>, IntoIdx<I>>;
impl<I: Idx, T> IndexVec<I, T> {
#[inline]
pub fn new() -> Self {
IndexVec { raw: Vec::new(), _marker: PhantomData }
}
#[inline]
pub fn with_capacity(capacity: usize) -> Self {
IndexVec { raw: Vec::with_capacity(capacity), _marker: PhantomData }
}
#[inline]
pub fn from_elem<S>(elem: T, universe: &IndexVec<I, S>) -> Self
where T: Clone
{
IndexVec { raw: vec![elem; universe.len()], _marker: PhantomData }
}
#[inline]
pub fn push(&mut self, d: T) -> I {
let idx = I::new(self.len());
self.raw.push(d);
idx
}
#[inline]
pub fn len(&self) -> usize {
self.raw.len()
}
#[inline]
pub fn is_empty(&self) -> bool {
self.raw.is_empty()
}
#[inline]
pub fn into_iter(self) -> vec::IntoIter<T> {
self.raw.into_iter()
}
#[inline]
pub fn into_iter_enumerated(self) -> Enumerated<I, vec::IntoIter<T>>
{
self.raw.into_iter().enumerate().map(IntoIdx { _marker: PhantomData })
}
#[inline]
pub fn iter(&self) -> slice::Iter<T> {
self.raw.iter()
}
#[inline]
pub fn iter_enumerated(&self) -> Enumerated<I, slice::Iter<T>>
{
self.raw.iter().enumerate().map(IntoIdx { _marker: PhantomData })
}
#[inline]
pub fn iter_mut(&mut self) -> slice::IterMut<T> {
self.raw.iter_mut()
}
#[inline]
pub fn iter_enumerated_mut(&mut self) -> Enumerated<I, slice::IterMut<T>>
{
self.raw.iter_mut().enumerate().map(IntoIdx { _marker: PhantomData })
}
#[inline]
pub fn last(&self) -> Option<I> {
self.len().checked_sub(1).map(I::new)
}
}
impl<I: Idx, T> Index<I> for IndexVec<I, T> {
type Output = T;
#[inline]
fn index(&self, index: I) -> &T {
&self.raw[index.index()]
}
}
impl<I: Idx, T> IndexMut<I> for IndexVec<I, T> {
#[inline]
fn index_mut(&mut self, index: I) -> &mut T {
&mut self.raw[index.index()]
}
}
impl<I: Idx, T> Extend<T> for IndexVec<I, T> {
#[inline]
fn extend<J: IntoIterator<Item = T>>(&mut self, iter: J) {
self.raw.extend(iter);
}
}
impl<I: Idx, T> FromIterator<T> for IndexVec<I, T> {
#[inline]
fn from_iter<J>(iter: J) -> Self where J: IntoIterator<Item=T> {
IndexVec { raw: FromIterator::from_iter(iter), _marker: PhantomData }
}
}
impl<I: Idx, T> IntoIterator for IndexVec<I, T> {
type Item = T;
type IntoIter = vec::IntoIter<T>;
#[inline]
fn into_iter(self) -> vec::IntoIter<T> {
self.raw.into_iter()
}
}
impl<'a, I: Idx, T> IntoIterator for &'a IndexVec<I, T> {
type Item = &'a T;
type IntoIter = slice::Iter<'a, T>;
#[inline]
fn into_iter(self) -> slice::Iter<'a, T> {
self.raw.iter()
}
}
impl<'a, I: Idx, T> IntoIterator for &'a mut IndexVec<I, T> {
type Item = &'a mut T;
type IntoIter = slice::IterMut<'a, T>;
#[inline]
fn into_iter(mut self) -> slice::IterMut<'a, T> {
self.raw.iter_mut()
}
}
pub struct IntoIdx<I: Idx> { _marker: PhantomData<fn(&I)> }
impl<I: Idx, T> FnOnce<((usize, T),)> for IntoIdx<I> {
type Output = (I, T);
extern "rust-call" fn call_once(self, ((n, t),): ((usize, T),)) -> Self::Output {
(I::new(n), t)
}
}
impl<I: Idx, T> FnMut<((usize, T),)> for IntoIdx<I> {
extern "rust-call" fn call_mut(&mut self, ((n, t),): ((usize, T),)) -> Self::Output {
(I::new(n), t)
}
}
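Taken together, this new `rustc_data_structures::indexed_vec` module provides the operations the rest of the commit leans on: `push` returns the typed index of the new element (used by `Builder::temp` and `declare_binding`), `from_elem` sizes one `IndexVec` by the "universe" of another (used by `MovePathLookup::new` and `MirPatch::new`), and `iter_enumerated`/`into_iter_enumerated` yield `(typed index, element)` pairs by mapping `enumerate()` through `IntoIdx`, which is why the unstable `Fn*` impls above exist (enabled by feature gates elsewhere in the crate, not shown in this diff). A hypothetical usage sketch against this module, leaning on the provided `impl Idx for usize` so no newtype is needed for the demo:

    use rustc_data_structures::indexed_vec::{Idx, IndexVec};

    fn demo() {
        let mut decls: IndexVec<usize, char> = IndexVec::new();
        let a = decls.push('a');          // push hands back the new element's index
        let b = decls.push('b');
        assert_eq!((decls[a], decls[b]), ('a', 'b'));

        // Size a second vector by the "universe" of the first, as
        // MovePathLookup::new and MirPatch::new do with from_elem:
        let flags: IndexVec<usize, bool> = IndexVec::from_elem(false, &decls);
        assert_eq!(flags.len(), decls.len());

        // iter_enumerated yields (typed index, &element) pairs:
        for (i, c) in decls.iter_enumerated() {
            println!("{} -> {}", i.index(), c);
        }
    }

In the compiler itself the index parameter is one of the `newtype_index!` types rather than `usize`.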
......@@ -41,6 +41,7 @@
pub mod bitvec;
pub mod graph;
pub mod ivar;
pub mod indexed_vec;
pub mod obligation_forest;
pub mod snapshot_map;
pub mod snapshot_vec;
......
......@@ -18,17 +18,15 @@
impl<'tcx> CFG<'tcx> {
pub fn block_data(&self, blk: BasicBlock) -> &BasicBlockData<'tcx> {
&self.basic_blocks[blk.index()]
&self.basic_blocks[blk]
}
pub fn block_data_mut(&mut self, blk: BasicBlock) -> &mut BasicBlockData<'tcx> {
&mut self.basic_blocks[blk.index()]
&mut self.basic_blocks[blk]
}
pub fn start_new_block(&mut self) -> BasicBlock {
let node_index = self.basic_blocks.len();
self.basic_blocks.push(BasicBlockData::new(None));
BasicBlock::new(node_index)
self.basic_blocks.push(BasicBlockData::new(None))
}
pub fn start_new_cleanup_block(&mut self) -> BasicBlock {
......
......@@ -15,6 +15,8 @@
use hair::*;
use rustc::mir::repr::*;
use rustc_data_structures::indexed_vec::Idx;
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// Compile `expr`, yielding an lvalue that we can move from etc.
pub fn as_lvalue<M>(&mut self,
......@@ -75,7 +77,7 @@ fn expr_as_lvalue(&mut self,
success.and(slice.index(idx))
}
ExprKind::SelfRef => {
block.and(Lvalue::Arg(0))
block.and(Lvalue::Arg(Arg::new(0)))
}
ExprKind::VarRef { id } => {
let index = this.var_indices[&id];
......
......@@ -14,6 +14,7 @@
use rustc_const_math::{ConstMathErr, Op};
use rustc_data_structures::fnv::FnvHashMap;
use rustc_data_structures::indexed_vec::Idx;
use build::{BlockAnd, BlockAndExtension, Builder};
use build::expr::category::{Category, RvalueFunc};
......
......@@ -667,25 +667,23 @@ fn declare_binding(&mut self,
name: Name,
var_id: NodeId,
var_ty: Ty<'tcx>)
-> u32
-> Var
{
debug!("declare_binding(var_id={:?}, name={:?}, var_ty={:?}, source_info={:?})",
var_id, name, var_ty, source_info);
let index = self.var_decls.len();
self.var_decls.push(VarDecl::<'tcx> {
let var = self.var_decls.push(VarDecl::<'tcx> {
source_info: source_info,
mutability: mutability,
name: name,
ty: var_ty.clone(),
});
let index = index as u32;
let extent = self.extent_of_innermost_scope();
self.schedule_drop(source_info.span, extent, &Lvalue::Var(index), var_ty);
self.var_indices.insert(var_id, index);
self.schedule_drop(source_info.span, extent, &Lvalue::Var(var), var_ty);
self.var_indices.insert(var_id, var);
debug!("declare_binding: index={:?}", index);
debug!("declare_binding: var={:?}", var);
index
var
}
}
......@@ -18,7 +18,6 @@
use rustc::ty::{self, Ty};
use rustc::mir::repr::*;
use std::u32;
use syntax::ast;
use syntax::codemap::Span;
......@@ -29,12 +28,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
/// NB: **No cleanup is scheduled for this temporary.** You should
/// call `schedule_drop` once the temporary is initialized.
pub fn temp(&mut self, ty: Ty<'tcx>) -> Lvalue<'tcx> {
let index = self.temp_decls.len();
self.temp_decls.push(TempDecl { ty: ty });
assert!(index < (u32::MAX) as usize);
let lvalue = Lvalue::Temp(index as u32);
let temp = self.temp_decls.push(TempDecl { ty: ty });
let lvalue = Lvalue::Temp(temp);
debug!("temp: created temp {:?} with type {:?}",
lvalue, self.temp_decls.last().unwrap().ty);
lvalue, self.temp_decls[temp].ty);
lvalue
}
......
......@@ -12,15 +12,17 @@
use rustc::middle::region::{CodeExtent, CodeExtentData, ROOT_CODE_EXTENT};
use rustc::ty::{self, Ty};
use rustc::mir::repr::*;
use rustc_data_structures::fnv::FnvHashMap;
use rustc::util::nodemap::NodeMap;
use rustc::hir;
use std::ops::{Index, IndexMut};
use std::u32;
use syntax::abi::Abi;
use syntax::ast;
use syntax::codemap::Span;
use syntax::parse::token::keywords;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use std::u32;
pub struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
hir: Cx<'a, 'gcx, 'tcx>,
cfg: CFG<'tcx>,
......@@ -36,7 +38,7 @@ pub struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
/// but these are liable to get out of date once optimization
/// begins. They are also hopefully temporary, and will be
/// no longer needed when we adopt graph-based regions.
scope_auxiliary: ScopeAuxiliaryVec,
scope_auxiliary: IndexVec<ScopeId, ScopeAuxiliary>,
/// the current set of loops; see the `scope` module for more
/// details
......@@ -44,12 +46,12 @@ pub struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
/// the vector of all scopes that we have created thus far;
/// we track this for debuginfo later
visibility_scopes: Vec<VisibilityScopeData>,
visibility_scopes: IndexVec<VisibilityScope, VisibilityScopeData>,
visibility_scope: VisibilityScope,
var_decls: Vec<VarDecl<'tcx>>,
var_indices: FnvHashMap<ast::NodeId, u32>,
temp_decls: Vec<TempDecl<'tcx>>,
var_decls: IndexVec<Var, VarDecl<'tcx>>,
var_indices: NodeMap<Var>,
temp_decls: IndexVec<Temp, TempDecl<'tcx>>,
unit_temp: Option<Lvalue<'tcx>>,
/// cached block with the RESUME terminator; this is created
......@@ -60,19 +62,19 @@ pub struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
}
struct CFG<'tcx> {
basic_blocks: Vec<BasicBlockData<'tcx>>,
basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct ScopeId(u32);
impl ScopeId {
pub fn new(index: usize) -> ScopeId {
impl Idx for ScopeId {
fn new(index: usize) -> ScopeId {
assert!(index < (u32::MAX as usize));
ScopeId(index as u32)
}
pub fn index(self) -> usize {
fn index(self) -> usize {
self.0 as usize
}
}
......@@ -109,25 +111,7 @@ pub struct Location {
pub statement_index: usize,
}
pub struct ScopeAuxiliaryVec {
pub vec: Vec<ScopeAuxiliary>
}
impl Index<ScopeId> for ScopeAuxiliaryVec {
type Output = ScopeAuxiliary;
#[inline]
fn index(&self, index: ScopeId) -> &ScopeAuxiliary {
&self.vec[index.index()]
}
}
impl IndexMut<ScopeId> for ScopeAuxiliaryVec {
#[inline]
fn index_mut(&mut self, index: ScopeId) -> &mut ScopeAuxiliary {
&mut self.vec[index.index()]
}
}
pub type ScopeAuxiliaryVec = IndexVec<ScopeId, ScopeAuxiliary>;
///////////////////////////////////////////////////////////////////////////
/// The `BlockAnd` "monad" packages up the new basic block along with a
......@@ -213,8 +197,8 @@ pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
match tcx.node_id_to_type(fn_id).sty {
ty::TyFnDef(_, _, f) if f.abi == Abi::RustCall => {
// RustCall pseudo-ABI untuples the last argument.
if let Some(arg_decl) = arg_decls.last_mut() {
arg_decl.spread = true;
if let Some(last_arg) = arg_decls.last() {
arg_decls[last_arg].spread = true;
}
}
_ => {}
......@@ -271,23 +255,23 @@ pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>,
});
let ty = tcx.expr_ty_adjusted(ast_expr);
builder.finish(vec![], vec![], ty::FnConverging(ty))
builder.finish(vec![], IndexVec::new(), ty::FnConverging(ty))
}
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
fn new(hir: Cx<'a, 'gcx, 'tcx>, span: Span) -> Builder<'a, 'gcx, 'tcx> {
let mut builder = Builder {
hir: hir,
cfg: CFG { basic_blocks: vec![] },
cfg: CFG { basic_blocks: IndexVec::new() },
fn_span: span,
scopes: vec![],
visibility_scopes: vec![],
visibility_scopes: IndexVec::new(),
visibility_scope: ARGUMENT_VISIBILITY_SCOPE,
scope_auxiliary: ScopeAuxiliaryVec { vec: vec![] },
scope_auxiliary: IndexVec::new(),
loop_scopes: vec![],
temp_decls: vec![],
var_decls: vec![],
var_indices: FnvHashMap(),
temp_decls: IndexVec::new(),
var_decls: IndexVec::new(),
var_indices: NodeMap(),
unit_temp: None,
cached_resume_block: None,
cached_return_block: None
......@@ -302,7 +286,7 @@ fn new(hir: Cx<'a, 'gcx, 'tcx>, span: Span) -> Builder<'a, 'gcx, 'tcx> {
fn finish(self,
upvar_decls: Vec<UpvarDecl>,
arg_decls: Vec<ArgDecl<'tcx>>,
arg_decls: IndexVec<Arg, ArgDecl<'tcx>>,
return_ty: ty::FnOutput<'tcx>)
-> (Mir<'tcx>, ScopeAuxiliaryVec) {
for (index, block) in self.cfg.basic_blocks.iter().enumerate() {
......@@ -314,7 +298,7 @@ fn finish(self,
(Mir {
basic_blocks: self.cfg.basic_blocks,
visibility_scopes: self.visibility_scopes,
promoted: vec![],
promoted: IndexVec::new(),
var_decls: self.var_decls,
arg_decls: arg_decls,
temp_decls: self.temp_decls,
......@@ -330,13 +314,13 @@ fn args_and_body<A>(&mut self,
arguments: A,
argument_extent: CodeExtent,
ast_block: &'gcx hir::Block)
-> BlockAnd<Vec<ArgDecl<'tcx>>>
-> BlockAnd<IndexVec<Arg, ArgDecl<'tcx>>>
where A: Iterator<Item=(Ty<'gcx>, Option<&'gcx hir::Pat>)>
{
// to start, translate the argument patterns and collect the argument types.
let mut scope = None;
let arg_decls = arguments.enumerate().map(|(index, (ty, pattern))| {
let lvalue = Lvalue::Arg(index as u32);
let lvalue = Lvalue::Arg(Arg::new(index));
if let Some(pattern) = pattern {
let pattern = self.hir.irrefutable_pat(pattern);
scope = self.declare_bindings(scope, ast_block.span, &pattern);
......
......@@ -96,6 +96,7 @@
use syntax::parse::token::intern_and_get_ident;
use rustc::middle::const_val::ConstVal;
use rustc_const_math::ConstInt;
use rustc_data_structures::indexed_vec::Idx;
pub struct Scope<'tcx> {
/// the scope-id within the scope_auxiliary
......@@ -264,7 +265,7 @@ pub fn in_scope<F, R>(&mut self, extent: CodeExtent, mut block: BasicBlock, f: F
/// wrapper maybe preferable.
pub fn push_scope(&mut self, extent: CodeExtent, entry: BasicBlock) {
debug!("push_scope({:?})", extent);
let id = ScopeId::new(self.scope_auxiliary.vec.len());
let id = ScopeId::new(self.scope_auxiliary.len());
let vis_scope = self.visibility_scope;
self.scopes.push(Scope {
id: id,
......@@ -274,7 +275,7 @@ pub fn push_scope(&mut self, extent: CodeExtent, entry: BasicBlock) {
free: None,
cached_block: None,
});
self.scope_auxiliary.vec.push(ScopeAuxiliary {
self.scope_auxiliary.push(ScopeAuxiliary {
extent: extent,
dom: self.cfg.current_location(entry),
postdoms: vec![]
......
......@@ -15,6 +15,8 @@
use std::io::{self, Write};
use syntax::ast::NodeId;
use rustc_data_structures::indexed_vec::Idx;
/// Write a graphviz DOT graph of a list of MIRs.
pub fn write_mir_graphviz<'a, 'b, 'tcx, W, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
iter: I, w: &mut W)
......@@ -130,7 +132,7 @@ fn write_graph_label<'a, 'tcx, W: Write>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
if i > 0 {
write!(w, ", ")?;
}
write!(w, "{:?}: {}", Lvalue::Arg(i as u32), escape(&arg.ty))?;
write!(w, "{:?}: {}", Lvalue::Arg(Arg::new(i)), escape(&arg.ty))?;
}
write!(w, ") -&gt; ")?;
......@@ -150,13 +152,13 @@ fn write_graph_label<'a, 'tcx, W: Write>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
write!(w, "mut ")?;
}
write!(w, r#"{:?}: {}; // {}<br align="left"/>"#,
Lvalue::Var(i as u32), escape(&var.ty), var.name)?;
Lvalue::Var(Var::new(i)), escape(&var.ty), var.name)?;
}
// Compiler-introduced temporary types.
for (i, temp) in mir.temp_decls.iter().enumerate() {
write!(w, r#"let mut {:?}: {};<br align="left"/>"#,
Lvalue::Temp(i as u32), escape(&temp.ty))?;
Lvalue::Temp(Temp::new(i)), escape(&temp.ty))?;
}
writeln!(w, ">;")
......
......@@ -10,6 +10,7 @@
use hair::*;
use rustc_data_structures::fnv::FnvHashMap;
use rustc_data_structures::indexed_vec::Idx;
use rustc_const_math::ConstInt;
use hair::cx::Cx;
use hair::cx::block;
......
......@@ -21,6 +21,7 @@
use rustc::middle::const_val::ConstVal;
use rustc_const_eval as const_eval;
use rustc_data_structures::indexed_vec::Idx;
use rustc::hir::def_id::DefId;
use rustc::hir::intravisit::FnKind;
use rustc::hir::map::blocks::FnLikeNode;
......
......@@ -11,6 +11,7 @@
use hair::*;
use hair::cx::Cx;
use rustc_data_structures::fnv::FnvHashMap;
use rustc_data_structures::indexed_vec::Idx;
use rustc_const_eval as const_eval;
use rustc::hir::def::Def;
use rustc::hir::pat_util::{EnumerateAndAdjustIterator, pat_is_resolved_const};
......
......@@ -14,6 +14,7 @@
use rustc::mir::transform::MirSource;
use rustc::ty::{self, TyCtxt};
use rustc_data_structures::fnv::FnvHashMap;
use rustc_data_structures::indexed_vec::{Idx};
use std::fmt::Display;
use std::fs;
use std::io::{self, Write};
......@@ -111,9 +112,7 @@ fn scope_entry_exit_annotations(auxiliary: Option<&ScopeAuxiliaryVec>)
// compute scope/entry exit annotations
let mut annotations = FnvHashMap();
if let Some(auxiliary) = auxiliary {
for (index, auxiliary) in auxiliary.vec.iter().enumerate() {
let scope_id = ScopeId::new(index);
for (scope_id, auxiliary) in auxiliary.iter_enumerated() {
annotations.entry(auxiliary.dom)
.or_insert(vec![])
.push(Annotation::EnterScope(scope_id));
......@@ -218,7 +217,7 @@ fn write_scope_tree(tcx: TyCtxt,
writeln!(w, "{0:1$}scope {2} {{", "", indent, child.index())?;
// User variable types (including the user's name in a comment).
for (i, var) in mir.var_decls.iter().enumerate() {
for (id, var) in mir.var_decls.iter_enumerated() {
// Skip if not declared in this scope.
if var.source_info.scope != child {
continue;
......@@ -235,7 +234,7 @@ fn write_scope_tree(tcx: TyCtxt,
INDENT,
indent,
mut_str,
Lvalue::Var(i as u32),
id,
var.ty);
writeln!(w, "{0:1$} // \"{2}\" in {3}",
indented_var,
......@@ -297,11 +296,11 @@ fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)
write!(w, "(")?;
// fn argument types.
for (i, arg) in mir.arg_decls.iter().enumerate() {
if i > 0 {
for (i, arg) in mir.arg_decls.iter_enumerated() {
if i.index() != 0 {
write!(w, ", ")?;
}
write!(w, "{:?}: {}", Lvalue::Arg(i as u32), arg.ty)?;
write!(w, "{:?}: {}", Lvalue::Arg(i), arg.ty)?;
}
write!(w, ") -> ")?;
......@@ -319,8 +318,8 @@ fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write)
fn write_mir_decls(mir: &Mir, w: &mut Write) -> io::Result<()> {
// Compiler-introduced temporary types.
for (i, temp) in mir.temp_decls.iter().enumerate() {
writeln!(w, "{}let mut {:?}: {};", INDENT, Lvalue::Temp(i as u32), temp.ty)?;
for (id, temp) in mir.temp_decls.iter_enumerated() {
writeln!(w, "{}let mut {:?}: {};", INDENT, id, temp.ty)?;
}
// Wrote any declaration? Add an empty line before the first block is printed.
......
......@@ -12,6 +12,8 @@
use rustc::mir::repr::*;
use rustc::mir::transform::{MirPass, MirSource, Pass};
use rustc::mir::traversal;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use pretty;
pub struct AddCallGuards;
......@@ -38,13 +40,13 @@
impl<'tcx> MirPass<'tcx> for AddCallGuards {
fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) {
let mut pred_count = vec![0u32; mir.basic_blocks.len()];
let mut pred_count = IndexVec::from_elem(0u32, &mir.basic_blocks);
// Build the predecessor map for the MIR
for (_, data) in traversal::preorder(mir) {
if let Some(ref term) = data.terminator {
for &tgt in term.successors().iter() {
pred_count[tgt.index()] += 1;
pred_count[tgt] += 1;
}
}
}
......@@ -65,7 +67,7 @@ fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mu
cleanup: Some(_),
..
}, source_info
}) if pred_count[destination.index()] > 1 => {
}) if pred_count[*destination] > 1 => {
// It's a critical edge, break it
let call_guard = BasicBlockData {
statements: vec![],
......@@ -88,7 +90,7 @@ fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mu
pretty::dump_mir(tcx, "break_cleanup_edges", &0, src, mir, None);
debug!("Broke {} N edges", new_blocks.len());
mir.basic_blocks.extend_from_slice(&new_blocks);
mir.basic_blocks.extend(new_blocks);
}
}
......
......@@ -30,6 +30,8 @@
use build::Location;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use std::mem;
/// State of a temporary during collection and promotion.
......@@ -74,7 +76,7 @@ pub enum Candidate {
}
struct TempCollector {
temps: Vec<TempState>,
temps: IndexVec<Temp, TempState>,
location: Location,
span: Span
}
......@@ -89,7 +91,7 @@ fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext) {
return;
}
let temp = &mut self.temps[index as usize];
let temp = &mut self.temps[index];
if *temp == TempState::Undefined {
match context {
LvalueContext::Store |
......@@ -134,9 +136,9 @@ fn visit_basic_block_data(&mut self, bb: BasicBlock, data: &BasicBlockData<'tcx>
}
}
pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> Vec<TempState> {
pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> IndexVec<Temp, TempState> {
let mut collector = TempCollector {
temps: vec![TempState::Undefined; mir.temp_decls.len()],
temps: IndexVec::from_elem(TempState::Undefined, &mir.temp_decls),
location: Location {
block: START_BLOCK,
statement_index: 0
......@@ -152,7 +154,7 @@ pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> Vec<TempState> {
struct Promoter<'a, 'tcx: 'a> {
source: &'a mut Mir<'tcx>,
promoted: Mir<'tcx>,
temps: &'a mut Vec<TempState>,
temps: &'a mut IndexVec<Temp, TempState>,
/// If true, all nested temps are also kept in the
/// source MIR, not moved to the promoted MIR.
......@@ -161,7 +163,6 @@ struct Promoter<'a, 'tcx: 'a> {
impl<'a, 'tcx> Promoter<'a, 'tcx> {
fn new_block(&mut self) -> BasicBlock {
let index = self.promoted.basic_blocks.len();
self.promoted.basic_blocks.push(BasicBlockData {
statements: vec![],
terminator: Some(Terminator {
......@@ -172,12 +173,12 @@ fn new_block(&mut self) -> BasicBlock {
kind: TerminatorKind::Return
}),
is_cleanup: false
});
BasicBlock::new(index)
})
}
fn assign(&mut self, dest: Lvalue<'tcx>, rvalue: Rvalue<'tcx>, span: Span) {
let data = self.promoted.basic_blocks.last_mut().unwrap();
let last = self.promoted.basic_blocks.last().unwrap();
let data = &mut self.promoted.basic_blocks[last];
data.statements.push(Statement {
source_info: SourceInfo {
span: span,
......@@ -189,10 +190,9 @@ fn assign(&mut self, dest: Lvalue<'tcx>, rvalue: Rvalue<'tcx>, span: Span) {
/// Copy the initialization of this temp to the
/// promoted MIR, recursing through temps.
fn promote_temp(&mut self, index: u32) -> u32 {
let index = index as usize;
fn promote_temp(&mut self, temp: Temp) -> Temp {
let old_keep_original = self.keep_original;
let (bb, stmt_idx) = match self.temps[index] {
let (bb, stmt_idx) = match self.temps[temp] {
TempState::Defined {
location: Location { block, statement_index },
uses
......@@ -202,13 +202,13 @@ fn promote_temp(&mut self, index: u32) -> u32 {
}
(block, statement_index)
}
temp => {
span_bug!(self.promoted.span, "tmp{} not promotable: {:?}",
index, temp);
state => {
span_bug!(self.promoted.span, "{:?} not promotable: {:?}",
temp, state);
}
};
if !self.keep_original {
self.temps[index] = TempState::PromotedOut;
self.temps[temp] = TempState::PromotedOut;
}
let no_stmts = self.source[bb].statements.len();
......@@ -260,22 +260,20 @@ fn promote_temp(&mut self, index: u32) -> u32 {
self.visit_terminator_kind(bb, call.as_mut().unwrap());
}
let new_index = self.promoted.temp_decls.len() as u32;
let new_temp = Lvalue::Temp(new_index);
self.promoted.temp_decls.push(TempDecl {
ty: self.source.temp_decls[index].ty
let new_temp = self.promoted.temp_decls.push(TempDecl {
ty: self.source.temp_decls[temp].ty
});
// Inject the Rvalue or Call into the promoted MIR.
if stmt_idx < no_stmts {
self.assign(new_temp, rvalue.unwrap(), source_info.span);
self.assign(Lvalue::Temp(new_temp), rvalue.unwrap(), source_info.span);
} else {
let last = self.promoted.basic_blocks.len() - 1;
let last = self.promoted.basic_blocks.last().unwrap();
let new_target = self.new_block();
let mut call = call.unwrap();
match call {
TerminatorKind::Call { ref mut destination, ..} => {
*destination = Some((new_temp, new_target));
*destination = Some((Lvalue::Temp(new_temp), new_target));
}
_ => bug!()
}
......@@ -287,7 +285,7 @@ fn promote_temp(&mut self, index: u32) -> u32 {
// Restore the old duplication state.
self.keep_original = old_keep_original;
new_index
new_temp
}
fn promote_candidate(mut self, candidate: Candidate) {
......@@ -296,7 +294,7 @@ fn promote_candidate(mut self, candidate: Candidate) {
span: span,
ty: self.promoted.return_ty.unwrap(),
literal: Literal::Promoted {
index: self.source.promoted.len()
index: Promoted::new(self.source.promoted.len())
}
});
let mut rvalue = match candidate {
......@@ -325,8 +323,8 @@ fn promote_candidate(mut self, candidate: Candidate) {
/// Replaces all temporaries with their promoted counterparts.
impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> {
fn visit_lvalue(&mut self, lvalue: &mut Lvalue<'tcx>, context: LvalueContext) {
if let Lvalue::Temp(ref mut index) = *lvalue {
*index = self.promote_temp(*index);
if let Lvalue::Temp(ref mut temp) = *lvalue {
*temp = self.promote_temp(*temp);
}
self.super_lvalue(lvalue, context);
}
......@@ -334,7 +332,7 @@ fn visit_lvalue(&mut self, lvalue: &mut Lvalue<'tcx>, context: LvalueContext) {
pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mut temps: Vec<TempState>,
mut temps: IndexVec<Temp, TempState>,
candidates: Vec<Candidate>) {
// Visit candidates in reverse, in case they're nested.
for candidate in candidates.into_iter().rev() {
......@@ -343,7 +341,7 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
let statement = &mir[bb].statements[stmt_idx];
let StatementKind::Assign(ref dest, _) = statement.kind;
if let Lvalue::Temp(index) = *dest {
if temps[index as usize] == TempState::PromotedOut {
if temps[index] == TempState::PromotedOut {
// Already promoted.
continue;
}
......@@ -368,16 +366,16 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
let mut promoter = Promoter {
source: mir,
promoted: Mir {
basic_blocks: vec![],
visibility_scopes: vec![VisibilityScopeData {
basic_blocks: IndexVec::new(),
visibility_scopes: Some(VisibilityScopeData {
span: span,
parent_scope: None
}],
promoted: vec![],
}).into_iter().collect(),
promoted: IndexVec::new(),
return_ty: ty::FnConverging(ty),
var_decls: vec![],
arg_decls: vec![],
temp_decls: vec![],
var_decls: IndexVec::new(),
arg_decls: IndexVec::new(),
temp_decls: IndexVec::new(),
upvar_decls: vec![],
span: span
},
......@@ -389,7 +387,7 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>,
}
// Eliminate assignments to, and drops of promoted temps.
let promoted = |index: u32| temps[index as usize] == TempState::PromotedOut;
let promoted = |index: Temp| temps[index] == TempState::PromotedOut;
for block in &mut mir.basic_blocks {
block.statements.retain(|statement| {
match statement.kind {
......
......@@ -15,6 +15,7 @@
//! diagnostics as to why a constant rvalue wasn't promoted.
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::hir::intravisit::FnKind;
......@@ -141,12 +142,12 @@ struct Qualifier<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
param_env: ty::ParameterEnvironment<'tcx>,
qualif_map: &'a mut DefIdMap<Qualif>,
mir_map: Option<&'a MirMap<'tcx>>,
temp_qualif: Vec<Option<Qualif>>,
temp_qualif: IndexVec<Temp, Option<Qualif>>,
return_qualif: Option<Qualif>,
qualif: Qualif,
const_fn_arg_vars: BitVector,
location: Location,
temp_promotion_state: Vec<TempState>,
temp_promotion_state: IndexVec<Temp, TempState>,
promotion_candidates: Vec<Candidate>
}
......@@ -172,7 +173,7 @@ fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: param_env,
qualif_map: qualif_map,
mir_map: mir_map,
temp_qualif: vec![None; mir.temp_decls.len()],
temp_qualif: IndexVec::from_elem(None, &mir.temp_decls),
return_qualif: None,
qualif: Qualif::empty(),
const_fn_arg_vars: BitVector::new(mir.var_decls.len()),
......@@ -301,22 +302,22 @@ fn assign(&mut self, dest: &Lvalue<'tcx>) {
// Only handle promotable temps in non-const functions.
if self.mode == Mode::Fn {
if let Lvalue::Temp(index) = *dest {
if self.temp_promotion_state[index as usize].is_promotable() {
store(&mut self.temp_qualif[index as usize]);
if self.temp_promotion_state[index].is_promotable() {
store(&mut self.temp_qualif[index]);
}
}
return;
}
match *dest {
Lvalue::Temp(index) => store(&mut self.temp_qualif[index as usize]),
Lvalue::Temp(index) => store(&mut self.temp_qualif[index]),
Lvalue::ReturnPointer => store(&mut self.return_qualif),
Lvalue::Projection(box Projection {
base: Lvalue::Temp(index),
elem: ProjectionElem::Deref
}) if self.mir.temp_decls[index as usize].ty.is_unique()
&& self.temp_qualif[index as usize].map_or(false, |qualif| {
}) if self.mir.temp_decls[index].ty.is_unique()
&& self.temp_qualif[index].map_or(false, |qualif| {
qualif.intersects(Qualif::NOT_CONST)
}) => {
// Part of `box expr`, we should've errored
......@@ -366,7 +367,7 @@ fn qualify_const(&mut self) -> Qualif {
TerminatorKind::Return => {
// Check for unused values. This usually means
// there are extra statements in the AST.
for i in 0..mir.temp_decls.len() {
for (i, _) in mir.temp_decls.iter_enumerated() {
if self.temp_qualif[i].is_none() {
continue;
}
......@@ -393,7 +394,7 @@ fn qualify_const(&mut self) -> Qualif {
self.qualif = Qualif::NOT_CONST;
for index in 0..mir.var_decls.len() {
if !self.const_fn_arg_vars.contains(index) {
self.assign(&Lvalue::Var(index as u32));
self.assign(&Lvalue::Var(Var::new(index)));
}
}
......@@ -448,11 +449,11 @@ fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext) {
self.add(Qualif::NOT_CONST);
}
Lvalue::Temp(index) => {
if !self.temp_promotion_state[index as usize].is_promotable() {
if !self.temp_promotion_state[index].is_promotable() {
self.add(Qualif::NOT_PROMOTABLE);
}
if let Some(qualif) = self.temp_qualif[index as usize] {
if let Some(qualif) = self.temp_qualif[index] {
self.add(qualif);
} else {
self.not_const();
......@@ -822,7 +823,7 @@ fn visit_assign(&mut self, _: BasicBlock, dest: &Lvalue<'tcx>, rvalue: &Rvalue<'
// Check the allowed const fn argument forms.
if let (Mode::ConstFn, &Lvalue::Var(index)) = (self.mode, dest) {
if self.const_fn_arg_vars.insert(index as usize) {
if self.const_fn_arg_vars.insert(index.index()) {
// Direct use of an argument is permitted.
if let Rvalue::Use(Operand::Consume(Lvalue::Arg(_))) = *rvalue {
return;
......@@ -830,7 +831,7 @@ fn visit_assign(&mut self, _: BasicBlock, dest: &Lvalue<'tcx>, rvalue: &Rvalue<'
// Avoid a generic error for other uses of arguments.
if self.qualif.intersects(Qualif::FN_ARGUMENT) {
let decl = &self.mir.var_decls[index as usize];
let decl = &self.mir.var_decls[index];
span_err!(self.tcx.sess, decl.source_info.span, E0022,
"arguments of constant functions can only \
be immutable by-value bindings");
......
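Two `IndexVec` helpers carry most of the weight in the qualification pass above: `from_elem`, which builds a parallel vector with one slot per entry of an existing indexed vector, and `iter_enumerated`, which pairs each element with its index. A rough, self-contained approximation of what they do, written against a plain `Vec` with `usize` indices (the real methods take and yield the newtyped `Temp`/`Var` indices):

fn main() {
    // Stand-in for `mir.temp_decls`: one entry per temporary.
    let temp_decls = vec!["i32", "bool", "&str"];

    // Roughly `IndexVec::from_elem(None, &mir.temp_decls)`: a parallel vector
    // of the same length with every slot set to the given element.
    let mut temp_qualif: Vec<Option<u32>> = temp_decls.iter().map(|_| None).collect();
    assert_eq!(temp_qualif.len(), temp_decls.len());

    temp_qualif[1] = Some(0b10);

    // Roughly `temp_decls.iter_enumerated()`: like `enumerate`, except that
    // the real iterator yields the typed index (`Temp`) instead of a usize.
    for (i, ty) in temp_decls.iter().enumerate() {
        if temp_qualif[i].is_none() {
            println!("temp {} of type {} was never qualified", i, ty);
        }
    }
}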
......@@ -33,6 +33,7 @@
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::middle::const_val::ConstVal;
use rustc::ty::TyCtxt;
use rustc::mir::repr::*;
......@@ -59,7 +60,7 @@ fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mu
pretty::dump_mir(tcx, "simplify_cfg", &format!("{}-after", self.label), src, mir, None);
// FIXME: Should probably be moved into some kind of pass manager
mir.basic_blocks.shrink_to_fit();
mir.basic_blocks.raw.shrink_to_fit();
}
}
......@@ -67,11 +68,11 @@ impl<'l> Pass for SimplifyCfg<'l> {}
fn merge_consecutive_blocks(mir: &mut Mir) {
// Build the predecessor map for the MIR
let mut pred_count = vec![0u32; mir.basic_blocks.len()];
let mut pred_count = IndexVec::from_elem(0u32, &mir.basic_blocks);
for (_, data) in traversal::preorder(mir) {
if let Some(ref term) = data.terminator {
for &tgt in term.successors().iter() {
pred_count[tgt.index()] += 1;
pred_count[tgt] += 1;
}
}
}
......@@ -100,10 +101,10 @@ fn merge_consecutive_blocks(mir: &mut Mir) {
TerminatorKind::Goto { target: new_target } if num_insts == 0 => {
inner_change = true;
terminator.kind = TerminatorKind::Goto { target: new_target };
pred_count[target.index()] -= 1;
pred_count[new_target.index()] += 1;
pred_count[target] -= 1;
pred_count[new_target] += 1;
}
_ if pred_count[target.index()] == 1 => {
_ if pred_count[target] == 1 => {
inner_change = true;
let mut stmts = Vec::new();
{
......@@ -126,8 +127,8 @@ fn merge_consecutive_blocks(mir: &mut Mir) {
};
if *target != new_target {
inner_change = true;
pred_count[target.index()] -= 1;
pred_count[new_target.index()] += 1;
pred_count[*target] -= 1;
pred_count[new_target] += 1;
*target = new_target;
}
}
......@@ -234,18 +235,18 @@ fn remove_dead_blocks(mir: &mut Mir) {
let num_blocks = mir.basic_blocks.len();
let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect();
let mut replacements : Vec<_> = (0..num_blocks).map(BasicBlock::new).collect();
let mut used_blocks = 0;
for alive_index in seen.iter() {
replacements[alive_index] = BasicBlock::new(used_blocks);
if alive_index != used_blocks {
// Swap the next alive block data with the current available slot. Since alive_index is
// non-decreasing, this is a valid operation.
mir.basic_blocks.swap(alive_index, used_blocks);
mir.basic_blocks.raw.swap(alive_index, used_blocks);
}
used_blocks += 1;
}
mir.basic_blocks.truncate(used_blocks);
mir.basic_blocks.raw.truncate(used_blocks);
for bb in mir.all_basic_blocks() {
for target in mir.basic_block_data_mut(bb).terminator_mut().successors_mut() {
......
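The dead-block removal above compacts live blocks to the front and records a renumbering; the positional `swap`, `truncate`, and `shrink_to_fit` calls go through the underlying storage (the `.raw` field) because they operate on positions rather than on any particular typed index. A self-contained sketch of the same compaction over a plain `Vec`, with `seen` standing in for the traversal's liveness set:

fn main() {
    // Stand-in for `mir.basic_blocks`: block payloads by position.
    let mut blocks = vec!["bb0", "bb1 (dead)", "bb2", "bb3 (dead)", "bb4"];
    // Stand-in for the `seen` bitvector: indices of reachable blocks,
    // yielded in increasing order by the traversal.
    let seen = [0usize, 2, 4];

    // replacements[old] = new position after compaction.
    let mut replacements: Vec<usize> = (0..blocks.len()).collect();
    let mut used_blocks = 0;
    for &alive_index in &seen {
        replacements[alive_index] = used_blocks;
        if alive_index != used_blocks {
            // Swap the next live block into the first free slot; valid
            // because `alive_index` never decreases.
            blocks.swap(alive_index, used_blocks);
        }
        used_blocks += 1;
    }
    blocks.truncate(used_blocks);

    assert_eq!(blocks, ["bb0", "bb2", "bb4"]);
    assert_eq!(replacements[2], 1);
    assert_eq!(replacements[4], 2);
}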
......@@ -24,6 +24,8 @@
use std::fmt;
use syntax::codemap::{Span, DUMMY_SP};
use rustc_data_structures::indexed_vec::Idx;
macro_rules! span_mirbug {
($context:expr, $elem:expr, $($message:tt)*) => ({
$context.tcx().sess.span_warn(
......@@ -129,11 +131,9 @@ fn sanitize_type(&mut self, parent: &fmt::Debug, ty: Ty<'tcx>) -> Ty<'tcx> {
fn sanitize_lvalue(&mut self, lvalue: &Lvalue<'tcx>) -> LvalueTy<'tcx> {
debug!("sanitize_lvalue: {:?}", lvalue);
match *lvalue {
Lvalue::Var(index) => LvalueTy::Ty { ty: self.mir.var_decls[index as usize].ty },
Lvalue::Temp(index) =>
LvalueTy::Ty { ty: self.mir.temp_decls[index as usize].ty },
Lvalue::Arg(index) =>
LvalueTy::Ty { ty: self.mir.arg_decls[index as usize].ty },
Lvalue::Var(index) => LvalueTy::Ty { ty: self.mir.var_decls[index].ty },
Lvalue::Temp(index) => LvalueTy::Ty { ty: self.mir.temp_decls[index].ty },
Lvalue::Arg(index) => LvalueTy::Ty { ty: self.mir.arg_decls[index].ty },
Lvalue::Static(def_id) =>
LvalueTy::Ty { ty: self.tcx().lookup_item_type(def_id).ty },
Lvalue::ReturnPointer => {
......
......@@ -26,6 +26,7 @@
use syntax::{ast, codemap};
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::hir::{self, PatKind};
// This procedure builds the *scope map* for a given function, which maps any
......@@ -69,9 +70,9 @@ pub fn create_scope_map(cx: &CrateContext,
/// Produce DIScope DIEs for each MIR Scope which has variables defined in it.
/// If debuginfo is disabled, the returned vector is empty.
pub fn create_mir_scopes(fcx: &FunctionContext) -> Vec<DIScope> {
pub fn create_mir_scopes(fcx: &FunctionContext) -> IndexVec<VisibilityScope, DIScope> {
let mir = fcx.mir.clone().expect("create_mir_scopes: missing MIR for fn");
let mut scopes = vec![ptr::null_mut(); mir.visibility_scopes.len()];
let mut scopes = IndexVec::from_elem(ptr::null_mut(), &mir.visibility_scopes);
let fn_metadata = match fcx.debug_context {
FunctionDebugContext::RegularContext(box ref data) => data.fn_metadata,
......@@ -101,23 +102,22 @@ fn make_mir_scope(ccx: &CrateContext,
has_variables: &BitVector,
fn_metadata: DISubprogram,
scope: VisibilityScope,
scopes: &mut [DIScope]) {
let idx = scope.index();
if !scopes[idx].is_null() {
scopes: &mut IndexVec<VisibilityScope, DIScope>) {
if !scopes[scope].is_null() {
return;
}
let scope_data = &mir.visibility_scopes[scope];
let parent_scope = if let Some(parent) = scope_data.parent_scope {
make_mir_scope(ccx, mir, has_variables, fn_metadata, parent, scopes);
scopes[parent.index()]
scopes[parent]
} else {
// The root is the function itself.
scopes[idx] = fn_metadata;
scopes[scope] = fn_metadata;
return;
};
if !has_variables.contains(idx) {
if !has_variables.contains(scope.index()) {
// Do not create a DIScope if there are no variables
// defined in this MIR Scope, to avoid debuginfo bloat.
......@@ -125,14 +125,14 @@ fn make_mir_scope(ccx: &CrateContext,
// our parent is the root, because we might want to
// put arguments in the root and not have shadowing.
if parent_scope != fn_metadata {
scopes[idx] = parent_scope;
scopes[scope] = parent_scope;
return;
}
}
let loc = span_start(ccx, scope_data.span);
let file_metadata = file_metadata(ccx, &loc.file.name);
scopes[idx] = unsafe {
scopes[scope] = unsafe {
llvm::LLVMDIBuilderCreateLexicalBlock(
DIB(ccx),
parent_scope,
......
......@@ -12,6 +12,7 @@
//! which do not.
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use rustc::mir::repr as mir;
use rustc::mir::repr::TerminatorKind;
use rustc::mir::visit::{Visitor, LvalueContext};
......@@ -94,10 +95,10 @@ fn visit_assign(&mut self,
debug!("visit_assign(block={:?}, lvalue={:?}, rvalue={:?})", block, lvalue, rvalue);
match *lvalue {
mir::Lvalue::Temp(index) => {
self.mark_assigned(index as usize);
mir::Lvalue::Temp(temp) => {
self.mark_assigned(temp.index());
if !rvalue::rvalue_creates_operand(self.mir, self.bcx, rvalue) {
self.mark_as_lvalue(index as usize);
self.mark_as_lvalue(temp.index());
}
}
_ => {
......@@ -115,8 +116,8 @@ fn visit_lvalue(&mut self,
// Allow uses of projections of immediate pair fields.
if let mir::Lvalue::Projection(ref proj) = *lvalue {
if let mir::Lvalue::Temp(index) = proj.base {
let ty = self.mir.temp_decls[index as usize].ty;
if let mir::Lvalue::Temp(temp) = proj.base {
let ty = self.mir.temp_decls[temp].ty;
let ty = self.bcx.monomorphize(&ty);
if common::type_is_imm_pair(self.bcx.ccx(), ty) {
if let mir::ProjectionElem::Field(..) = proj.elem {
......@@ -129,10 +130,10 @@ fn visit_lvalue(&mut self,
}
match *lvalue {
mir::Lvalue::Temp(index) => {
mir::Lvalue::Temp(temp) => {
match context {
LvalueContext::Call => {
self.mark_assigned(index as usize);
self.mark_assigned(temp.index());
}
LvalueContext::Consume => {
}
......@@ -142,7 +143,7 @@ fn visit_lvalue(&mut self,
LvalueContext::Borrow { .. } |
LvalueContext::Slice { .. } |
LvalueContext::Projection => {
self.mark_as_lvalue(index as usize);
self.mark_as_lvalue(temp.index());
}
}
}
......@@ -163,9 +164,10 @@ pub enum CleanupKind {
pub fn cleanup_kinds<'bcx,'tcx>(_bcx: Block<'bcx,'tcx>,
mir: &mir::Mir<'tcx>)
-> Vec<CleanupKind>
-> IndexVec<mir::BasicBlock, CleanupKind>
{
fn discover_masters<'tcx>(result: &mut [CleanupKind], mir: &mir::Mir<'tcx>) {
fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
mir: &mir::Mir<'tcx>) {
for bb in mir.all_basic_blocks() {
let data = mir.basic_block_data(bb);
match data.terminator().kind {
......@@ -184,19 +186,19 @@ fn discover_masters<'tcx>(result: &mut [CleanupKind], mir: &mir::Mir<'tcx>) {
if let Some(unwind) = unwind {
debug!("cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
bb, data, unwind);
result[unwind.index()] = CleanupKind::Funclet;
result[unwind] = CleanupKind::Funclet;
}
}
}
}
}
fn propagate<'tcx>(result: &mut [CleanupKind], mir: &mir::Mir<'tcx>) {
let mut funclet_succs : Vec<_> =
mir.all_basic_blocks().iter().map(|_| None).collect();
fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
mir: &mir::Mir<'tcx>) {
let mut funclet_succs = IndexVec::from_elem(None, &mir.basic_blocks);
let mut set_successor = |funclet: mir::BasicBlock, succ| {
match funclet_succs[funclet.index()] {
match funclet_succs[funclet] {
ref mut s @ None => {
debug!("set_successor: updating successor of {:?} to {:?}",
funclet, succ);
......@@ -210,22 +212,22 @@ fn propagate<'tcx>(result: &mut [CleanupKind], mir: &mir::Mir<'tcx>) {
};
for (bb, data) in traversal::reverse_postorder(mir) {
let funclet = match result[bb.index()] {
let funclet = match result[bb] {
CleanupKind::NotCleanup => continue,
CleanupKind::Funclet => bb,
CleanupKind::Internal { funclet } => funclet,
};
debug!("cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
bb, data, result[bb.index()], funclet);
bb, data, result[bb], funclet);
for &succ in data.terminator().successors().iter() {
let kind = result[succ.index()];
let kind = result[succ];
debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}",
funclet, succ, kind);
match kind {
CleanupKind::NotCleanup => {
result[succ.index()] = CleanupKind::Internal { funclet: funclet };
result[succ] = CleanupKind::Internal { funclet: funclet };
}
CleanupKind::Funclet => {
set_successor(funclet, succ);
......@@ -237,7 +239,7 @@ fn propagate<'tcx>(result: &mut [CleanupKind], mir: &mir::Mir<'tcx>) {
debug!("promoting {:?} to a funclet and updating {:?}", succ,
succ_funclet);
result[succ.index()] = CleanupKind::Funclet;
result[succ] = CleanupKind::Funclet;
set_successor(succ_funclet, succ);
set_successor(funclet, succ);
}
......@@ -247,8 +249,7 @@ fn propagate<'tcx>(result: &mut [CleanupKind], mir: &mir::Mir<'tcx>) {
}
}
let mut result : Vec<_> =
mir.all_basic_blocks().iter().map(|_| CleanupKind::NotCleanup).collect();
let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, &mir.basic_blocks);
discover_masters(&mut result, mir);
propagate(&mut result, mir);
......
......@@ -52,9 +52,9 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
let cleanup_bundle = bcx.lpad().and_then(|l| l.bundle());
let funclet_br = |this: &Self, bcx: BlockAndBuilder, bb: mir::BasicBlock| {
let lltarget = this.blocks[bb.index()].llbb;
let lltarget = this.blocks[bb].llbb;
if let Some(cp) = cleanup_pad {
match this.cleanup_kind(bb) {
match this.cleanup_kinds[bb] {
CleanupKind::Funclet => {
// micro-optimization: generate a `ret` rather than a jump
// to a return block
......@@ -69,10 +69,10 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
};
let llblock = |this: &mut Self, target: mir::BasicBlock| {
let lltarget = this.blocks[target.index()].llbb;
let lltarget = this.blocks[target].llbb;
if let Some(cp) = cleanup_pad {
match this.cleanup_kind(target) {
match this.cleanup_kinds[target] {
CleanupKind::Funclet => {
// MSVC cross-funclet jump - need a trampoline
......@@ -89,7 +89,7 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
}
} else {
if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
(this.cleanup_kind(bb), this.cleanup_kind(target))
(this.cleanup_kinds[bb], this.cleanup_kinds[target])
{
// jump *into* cleanup - need a landing pad if GNU
this.landing_pad_to(target).llbb
......@@ -209,7 +209,7 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
if let Some(unwind) = unwind {
bcx.invoke(drop_fn,
&[llvalue],
self.blocks[target.index()].llbb,
self.blocks[target].llbb,
llblock(self, unwind),
cleanup_bundle);
} else {
......@@ -488,7 +488,7 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
// Many different ways to call a function handled here
if let &Some(cleanup) = cleanup {
let ret_bcx = if let Some((_, target)) = *destination {
self.blocks[target.index()]
self.blocks[target]
} else {
self.unreachable_block()
};
......@@ -693,27 +693,23 @@ fn get_personality_slot(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>) -> ValueRe
}
}
fn cleanup_kind(&self, bb: mir::BasicBlock) -> CleanupKind {
self.cleanup_kinds[bb.index()]
}
/// Return the landingpad wrapper around the given basic block
///
/// No-op in MSVC SEH scheme.
fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> Block<'bcx, 'tcx>
{
if let Some(block) = self.landing_pads[target_bb.index()] {
if let Some(block) = self.landing_pads[target_bb] {
return block;
}
if base::wants_msvc_seh(self.fcx.ccx.sess()) {
return self.blocks[target_bb.index()];
return self.blocks[target_bb];
}
let target = self.bcx(target_bb);
let block = self.fcx.new_block("cleanup", None);
self.landing_pads[target_bb.index()] = Some(block);
self.landing_pads[target_bb] = Some(block);
let bcx = block.build();
let ccx = bcx.ccx();
......@@ -732,7 +728,7 @@ pub fn init_cpad(&mut self, bb: mir::BasicBlock) {
let data = self.mir.basic_block_data(bb);
debug!("init_cpad({:?})", data);
match self.cleanup_kinds[bb.index()] {
match self.cleanup_kinds[bb] {
CleanupKind::NotCleanup => {
bcx.set_lpad(None)
}
......@@ -763,7 +759,7 @@ fn unreachable_block(&mut self) -> Block<'bcx, 'tcx> {
}
fn bcx(&self, bb: mir::BasicBlock) -> BlockAndBuilder<'bcx, 'tcx> {
self.blocks[bb.index()].build()
self.blocks[bb].build()
}
fn make_return_dest(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
......@@ -776,7 +772,7 @@ fn make_return_dest(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
let dest = match *dest {
mir::Lvalue::Temp(idx) => {
let ret_ty = self.lvalue_ty(dest);
match self.temps[idx as usize] {
match self.temps[idx] {
TempRef::Lvalue(dest) => dest,
TempRef::Operand(None) => {
// Handle temporary lvalues, specifically Operand ones, as
......@@ -838,6 +834,7 @@ fn trans_transmute(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
self.store_operand(bcx, cast_ptr, val);
}
// Stores the return value of a function call into its final location.
fn store_return(&mut self,
bcx: &BlockAndBuilder<'bcx, 'tcx>,
......@@ -851,7 +848,7 @@ fn store_return(&mut self,
Store(dst) => ret_ty.store(bcx, op.immediate(), dst),
IndirectOperand(tmp, idx) => {
let op = self.trans_load(bcx, tmp, op.ty);
self.temps[idx as usize] = TempRef::Operand(Some(op));
self.temps[idx] = TempRef::Operand(Some(op));
}
DirectOperand(idx) => {
// If there is a cast, we have to store and reload.
......@@ -864,7 +861,7 @@ fn store_return(&mut self,
} else {
op.unpack_if_pair(bcx)
};
self.temps[idx as usize] = TempRef::Operand(Some(op));
self.temps[idx] = TempRef::Operand(Some(op));
}
}
}
......@@ -876,7 +873,7 @@ enum ReturnDest {
// Store the return value to the pointer
Store(ValueRef),
// Stores an indirect return value to an operand temporary lvalue
IndirectOperand(ValueRef, u32),
IndirectOperand(ValueRef, mir::Temp),
// Stores a direct return value to an operand temporary lvalue
DirectOperand(u32)
DirectOperand(mir::Temp)
}
......@@ -22,6 +22,7 @@
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::cast::{CastTy, IntTy};
use rustc::ty::subst::Substs;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use {abi, adt, base, Disr};
use callee::Callee;
use common::{self, BlockAndBuilder, CrateContext, const_get_elt, val_ty};
......@@ -203,13 +204,13 @@ struct MirConstContext<'a, 'tcx: 'a> {
substs: &'tcx Substs<'tcx>,
/// Arguments passed to a const fn.
args: Vec<Const<'tcx>>,
args: IndexVec<mir::Arg, Const<'tcx>>,
/// Variable values - specifically, argument bindings of a const fn.
vars: Vec<Option<Const<'tcx>>>,
vars: IndexVec<mir::Var, Option<Const<'tcx>>>,
/// Temp values.
temps: Vec<Option<Const<'tcx>>>,
temps: IndexVec<mir::Temp, Option<Const<'tcx>>>,
/// Value assigned to Return, which is the resulting constant.
return_value: Option<Const<'tcx>>
......@@ -220,22 +221,22 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> {
fn new(ccx: &'a CrateContext<'a, 'tcx>,
mir: &'a mir::Mir<'tcx>,
substs: &'tcx Substs<'tcx>,
args: Vec<Const<'tcx>>)
args: IndexVec<mir::Arg, Const<'tcx>>)
-> MirConstContext<'a, 'tcx> {
MirConstContext {
ccx: ccx,
mir: mir,
substs: substs,
args: args,
vars: vec![None; mir.var_decls.len()],
temps: vec![None; mir.temp_decls.len()],
vars: IndexVec::from_elem(None, &mir.var_decls),
temps: IndexVec::from_elem(None, &mir.temp_decls),
return_value: None
}
}
fn trans_def(ccx: &'a CrateContext<'a, 'tcx>,
mut instance: Instance<'tcx>,
args: Vec<Const<'tcx>>)
args: IndexVec<mir::Arg, Const<'tcx>>)
-> Result<Const<'tcx>, ConstEvalFailure> {
// Try to resolve associated constants.
if instance.substs.self_ty().is_some() {
......@@ -342,10 +343,10 @@ fn trans(&mut self) -> Result<Const<'tcx>, ConstEvalFailure> {
func, fn_ty)
};
let mut const_args = Vec::with_capacity(args.len());
let mut const_args = IndexVec::with_capacity(args.len());
for arg in args {
match self.const_operand(arg, span) {
Ok(arg) => const_args.push(arg),
Ok(arg) => { const_args.push(arg); },
Err(err) => if failure.is_ok() { failure = Err(err); }
}
}
......@@ -366,8 +367,8 @@ fn trans(&mut self) -> Result<Const<'tcx>, ConstEvalFailure> {
fn store(&mut self, dest: &mir::Lvalue<'tcx>, value: Const<'tcx>, span: Span) {
let dest = match *dest {
mir::Lvalue::Var(index) => &mut self.vars[index as usize],
mir::Lvalue::Temp(index) => &mut self.temps[index as usize],
mir::Lvalue::Var(var) => &mut self.vars[var],
mir::Lvalue::Temp(temp) => &mut self.temps[temp],
mir::Lvalue::ReturnPointer => &mut self.return_value,
_ => span_bug!(span, "assignment to {:?} in constant", dest)
};
......@@ -378,17 +379,17 @@ fn const_lvalue(&self, lvalue: &mir::Lvalue<'tcx>, span: Span)
-> Result<ConstLvalue<'tcx>, ConstEvalFailure> {
let tcx = self.ccx.tcx();
let lvalue = match *lvalue {
mir::Lvalue::Var(index) => {
self.vars[index as usize].unwrap_or_else(|| {
span_bug!(span, "var{} not initialized", index)
mir::Lvalue::Var(var) => {
self.vars[var].unwrap_or_else(|| {
span_bug!(span, "{:?} not initialized", var)
}).as_lvalue()
}
mir::Lvalue::Temp(index) => {
self.temps[index as usize].unwrap_or_else(|| {
span_bug!(span, "tmp{} not initialized", index)
mir::Lvalue::Temp(temp) => {
self.temps[temp].unwrap_or_else(|| {
span_bug!(span, "{:?} not initialized", temp)
}).as_lvalue()
}
mir::Lvalue::Arg(index) => self.args[index as usize].as_lvalue(),
mir::Lvalue::Arg(arg) => self.args[arg].as_lvalue(),
mir::Lvalue::Static(def_id) => {
ConstLvalue {
base: Base::Static(consts::get_static(self.ccx, def_id).val),
......@@ -489,11 +490,11 @@ fn const_operand(&self, operand: &mir::Operand<'tcx>, span: Span)
let substs = self.monomorphize(&substs);
let instance = Instance::new(def_id, substs);
MirConstContext::trans_def(self.ccx, instance, vec![])
MirConstContext::trans_def(self.ccx, instance, IndexVec::new())
}
mir::Literal::Promoted { index } => {
let mir = &self.mir.promoted[index];
MirConstContext::new(self.ccx, mir, self.substs, vec![]).trans()
MirConstContext::new(self.ccx, mir, self.substs, IndexVec::new()).trans()
}
mir::Literal::Value { value } => {
Ok(Const::from_constval(self.ccx, value, ty))
......@@ -914,11 +915,12 @@ pub fn trans_constant(&mut self,
let substs = bcx.monomorphize(&substs);
let instance = Instance::new(def_id, substs);
MirConstContext::trans_def(bcx.ccx(), instance, vec![])
MirConstContext::trans_def(bcx.ccx(), instance, IndexVec::new())
}
mir::Literal::Promoted { index } => {
let mir = &self.mir.promoted[index];
MirConstContext::new(bcx.ccx(), mir, bcx.fcx().param_substs, vec![]).trans()
MirConstContext::new(bcx.ccx(), mir, bcx.fcx().param_substs,
IndexVec::new()).trans()
}
mir::Literal::Value { value } => {
Ok(Const::from_constval(bcx.ccx(), value, ty))
......@@ -945,5 +947,5 @@ pub fn trans_constant(&mut self,
pub fn trans_static_initializer(ccx: &CrateContext, def_id: DefId)
-> Result<ValueRef, ConstEvalFailure> {
let instance = Instance::mono(ccx.shared(), def_id);
MirConstContext::trans_def(ccx, instance, vec![]).map(|c| c.llval)
MirConstContext::trans_def(ccx, instance, IndexVec::new()).map(|c| c.llval)
}
......@@ -12,6 +12,7 @@
use rustc::ty::{self, Ty, TypeFoldable};
use rustc::mir::repr as mir;
use rustc::mir::tcx::LvalueTy;
use rustc_data_structures::indexed_vec::Idx;
use abi;
use adt;
use base;
......@@ -90,14 +91,14 @@ pub fn trans_lvalue(&mut self,
let ccx = bcx.ccx();
let tcx = bcx.tcx();
let result = match *lvalue {
mir::Lvalue::Var(index) => self.vars[index as usize],
mir::Lvalue::Temp(index) => match self.temps[index as usize] {
mir::Lvalue::Var(var) => self.vars[var],
mir::Lvalue::Temp(temp) => match self.temps[temp] {
TempRef::Lvalue(lvalue) =>
lvalue,
TempRef::Operand(..) =>
bug!("using operand temp {:?} as lvalue", lvalue),
},
mir::Lvalue::Arg(index) => self.args[index as usize],
mir::Lvalue::Arg(arg) => self.args[arg],
mir::Lvalue::Static(def_id) => {
let const_ty = self.lvalue_ty(lvalue);
LvalueRef::new_sized(consts::get_static(ccx, def_id).val,
......@@ -233,8 +234,8 @@ pub fn with_lvalue_ref<F, U>(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
where F: FnOnce(&mut Self, LvalueRef<'tcx>) -> U
{
match *lvalue {
mir::Lvalue::Temp(idx) => {
match self.temps[idx as usize] {
mir::Lvalue::Temp(temp) => {
match self.temps[temp] {
TempRef::Lvalue(lvalue) => f(self, lvalue),
TempRef::Operand(None) => {
let lvalue_ty = self.lvalue_ty(lvalue);
......@@ -243,7 +244,7 @@ pub fn with_lvalue_ref<F, U>(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>,
"lvalue_temp");
let ret = f(self, lvalue);
let op = self.trans_load(bcx, lvalue.llval, lvalue_ty);
self.temps[idx as usize] = TempRef::Operand(Some(op));
self.temps[temp] = TempRef::Operand(Some(op));
ret
}
TempRef::Operand(Some(_)) => {
......
......@@ -30,6 +30,7 @@
use basic_block::BasicBlock;
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
pub use self::constant::trans_static_initializer;
......@@ -71,20 +72,20 @@ pub struct MirContext<'bcx, 'tcx:'bcx> {
llpersonalityslot: Option<ValueRef>,
/// A `Block` for each MIR `BasicBlock`
blocks: Vec<Block<'bcx, 'tcx>>,
blocks: IndexVec<mir::BasicBlock, Block<'bcx, 'tcx>>,
/// The funclet status of each basic block
cleanup_kinds: Vec<analyze::CleanupKind>,
cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
/// This stores the landing-pad block for a given BB, computed lazily on GNU
/// and eagerly on MSVC.
landing_pads: Vec<Option<Block<'bcx, 'tcx>>>,
landing_pads: IndexVec<mir::BasicBlock, Option<Block<'bcx, 'tcx>>>,
/// Cached unreachable block
unreachable_block: Option<Block<'bcx, 'tcx>>,
/// An LLVM alloca for each MIR `VarDecl`
vars: Vec<LvalueRef<'tcx>>,
vars: IndexVec<mir::Var, LvalueRef<'tcx>>,
/// The location where each MIR `TempDecl` is stored. This is
/// usually an `LvalueRef` representing an alloca, but not always:
......@@ -101,20 +102,20 @@ pub struct MirContext<'bcx, 'tcx:'bcx> {
///
/// Avoiding allocs can also be important for certain intrinsics,
/// notably `expect`.
temps: Vec<TempRef<'tcx>>,
temps: IndexVec<mir::Temp, TempRef<'tcx>>,
/// The arguments to the function; as args are lvalues, these are
/// always indirect, though we try to avoid creating an alloca
/// when we can (and just reuse the pointer the caller provided).
args: Vec<LvalueRef<'tcx>>,
args: IndexVec<mir::Arg, LvalueRef<'tcx>>,
/// Debug information for MIR scopes.
scopes: Vec<DIScope>
scopes: IndexVec<mir::VisibilityScope, DIScope>
}
impl<'blk, 'tcx> MirContext<'blk, 'tcx> {
pub fn debug_loc(&self, source_info: mir::SourceInfo) -> DebugLoc {
DebugLoc::ScopeAt(self.scopes[source_info.scope.index()], source_info.span)
DebugLoc::ScopeAt(self.scopes[source_info.scope], source_info.span)
}
}
......@@ -173,7 +174,7 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
.map(|(mty, decl)| {
let lvalue = LvalueRef::alloca(&bcx, mty, &decl.name.as_str());
let scope = scopes[decl.source_info.scope.index()];
let scope = scopes[decl.source_info.scope];
if !scope.is_null() && bcx.sess().opts.debuginfo == FullDebugInfo {
bcx.with_block(|bcx| {
declare_local(bcx, decl.name, mty, scope,
......@@ -200,9 +201,9 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
.collect();
// Allocate a `Block` for every basic block
let block_bcxs: Vec<Block<'blk,'tcx>> =
let block_bcxs: IndexVec<mir::BasicBlock, Block<'blk,'tcx>> =
mir_blocks.iter()
.map(|&bb|{
.map(|&bb| {
if bb == mir::START_BLOCK {
fcx.new_block("start", None)
} else {
......@@ -212,7 +213,7 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
.collect();
// Branch to the START block
let start_bcx = block_bcxs[mir::START_BLOCK.index()];
let start_bcx = block_bcxs[mir::START_BLOCK];
bcx.br(start_bcx.llbb);
// Up until here, IR instructions for this function have explicitly not been annotated with
......@@ -253,7 +254,7 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
// Remove blocks that haven't been visited, or have no
// predecessors.
for &bb in &mir_blocks {
let block = mircx.blocks[bb.index()];
let block = mircx.blocks[bb];
let block = BasicBlock(block.llbb);
// Unreachable block
if !visited.contains(bb.index()) {
......@@ -271,15 +272,15 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) {
/// indirect.
fn arg_value_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>,
mir: &mir::Mir<'tcx>,
scopes: &[DIScope])
-> Vec<LvalueRef<'tcx>> {
scopes: &IndexVec<mir::VisibilityScope, DIScope>)
-> IndexVec<mir::Arg, LvalueRef<'tcx>> {
let fcx = bcx.fcx();
let tcx = bcx.tcx();
let mut idx = 0;
let mut llarg_idx = fcx.fn_ty.ret.is_indirect() as usize;
// Get the argument scope, if it exists and if we need it.
let arg_scope = scopes[mir::ARGUMENT_VISIBILITY_SCOPE.index()];
let arg_scope = scopes[mir::ARGUMENT_VISIBILITY_SCOPE];
let arg_scope = if !arg_scope.is_null() && bcx.sess().opts.debuginfo == FullDebugInfo {
Some(arg_scope)
} else {
......
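The `temps` field documented above can hold a temporary either as a stack slot (`TempRef::Lvalue`) or as an immediate operand (`TempRef::Operand`), and the files that follow consult it on every use of a temp. A loose, self-contained sketch of that split, with made-up stand-ins (`TempSlot`, string allocas, plain `usize` indices) rather than the real `TempRef`/`LvalueRef`:

// Two ways a MIR temporary can be materialized: a stack slot (alloca) that
// must be loaded from, or an immediate operand value that is reused as-is.
enum TempSlot {
    Lvalue { alloca: &'static str },
    Operand(Option<i64>),
}

fn read_temp(temps: &[TempSlot], idx: usize) -> i64 {
    match &temps[idx] {
        // Backed by memory: "load" from the alloca (simulated by a print).
        TempSlot::Lvalue { alloca } => {
            println!("load from {}", alloca);
            0
        }
        // Already an immediate value: just reuse it.
        TempSlot::Operand(Some(v)) => *v,
        // Declared as an operand but never written yet.
        TempSlot::Operand(None) => panic!("use of uninitialized operand temp"),
    }
}

fn main() {
    let temps = vec![
        TempSlot::Lvalue { alloca: "%tmp0" },
        TempSlot::Operand(Some(42)),
    ];
    assert_eq!(read_temp(&temps, 1), 42);
    read_temp(&temps, 0);
}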
......@@ -11,6 +11,8 @@
use llvm::ValueRef;
use rustc::ty::Ty;
use rustc::mir::repr as mir;
use rustc_data_structures::indexed_vec::Idx;
use base;
use common::{self, Block, BlockAndBuilder};
use value::Value;
......@@ -174,7 +176,7 @@ pub fn trans_operand(&mut self,
// watch out for temporaries that do not have an
// alloca; they are handled somewhat differently
if let &mir::Lvalue::Temp(index) = lvalue {
match self.temps[index as usize] {
match self.temps[index] {
TempRef::Operand(Some(o)) => {
return o;
}
......@@ -190,7 +192,7 @@ pub fn trans_operand(&mut self,
// Moves out of pair fields are trivial.
if let &mir::Lvalue::Projection(ref proj) = lvalue {
if let mir::Lvalue::Temp(index) = proj.base {
let temp_ref = &self.temps[index as usize];
let temp_ref = &self.temps[index];
if let &TempRef::Operand(Some(o)) = temp_ref {
match (o.val, &proj.elem) {
(OperandValue::Pair(a, b),
......
......@@ -9,6 +9,7 @@
// except according to those terms.
use rustc::mir::repr as mir;
use common::{self, BlockAndBuilder};
use super::MirContext;
......@@ -28,8 +29,7 @@ pub fn trans_statement(&mut self,
mir::StatementKind::Assign(ref lvalue, ref rvalue) => {
match *lvalue {
mir::Lvalue::Temp(index) => {
let index = index as usize;
match self.temps[index as usize] {
match self.temps[index] {
TempRef::Lvalue(tr_dest) => {
self.trans_rvalue(bcx, tr_dest, rvalue, debug_loc)
}
......