Commit c6bf11cf authored by kennytm, committed by GitHub

Rollup merge of #47205 - eddyb:alloc-id, r=oli-obk

miri: use AllocId instead of u64.

This makes @alexreg's miri allocation -> LLVM global translation more straight-forward.

r? @oli-obk
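
In short, the change replaces raw `u64` allocation ids with the `AllocId` newtype throughout the interpreter. A minimal, self-contained sketch of the pattern (the `AllocId` declaration matches the diff below; the map and `main` are illustrative only):

```rust
use std::collections::HashMap;
use std::fmt;

// Newtype over u64: same runtime representation, but a distinct type, so an
// allocation id can no longer be mixed up with an unrelated integer.
#[derive(Copy, Clone, Default, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
pub struct AllocId(pub u64);

impl fmt::Display for AllocId {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    // Maps keyed by AllocId reject plain integers at compile time.
    let mut allocs: HashMap<AllocId, &str> = HashMap::new();
    let id = AllocId(0);
    allocs.insert(id, "bytes of some allocation");
    // allocs.get(&0u64); // type error: expected &AllocId, found &u64
    assert_eq!(allocs.get(&id), Some(&"bytes of some allocation"));
    println!("alloc {} is live", id);
}
```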
......
@@ -145,7 +145,7 @@ pub fn offset<C: HasDataLayout>(self, i: u64, cx: C) -> EvalResult<'tcx, Self> {
 }
-#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
+#[derive(Copy, Clone, Default, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
 pub struct AllocId(pub u64);
 impl fmt::Display for AllocId {
......
......
@@ -895,31 +895,29 @@ pub struct InterpretInterner<'tcx> {
 allocs: FxHashSet<&'tcx interpret::Allocation>,
 /// Allows obtaining function instance handles via a unique identifier
-functions: FxHashMap<u64, Instance<'tcx>>,
+functions: FxHashMap<interpret::AllocId, Instance<'tcx>>,
 /// Inverse map of `interpret_functions`.
 /// Used so we don't allocate a new pointer every time we need one
-function_cache: FxHashMap<Instance<'tcx>, u64>,
+function_cache: FxHashMap<Instance<'tcx>, interpret::AllocId>,
 /// Allows obtaining const allocs via a unique identifier
-alloc_by_id: FxHashMap<u64, &'tcx interpret::Allocation>,
+alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,
 /// The AllocId to assign to the next new regular allocation.
 /// Always incremented, never gets smaller.
-next_id: u64,
+next_id: interpret::AllocId,
 /// Allows checking whether a constant already has an allocation
 ///
 /// The pointers are to the beginning of an `alloc_by_id` allocation
-alloc_cache: FxHashMap<interpret::GlobalId<'tcx>, interpret::Pointer>,
+alloc_cache: FxHashMap<interpret::GlobalId<'tcx>, interpret::AllocId>,
 /// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
 /// allocations for string and bytestring literals.
-literal_alloc_cache: FxHashMap<Vec<u8>, u64>,
+literal_alloc_cache: FxHashMap<Vec<u8>, interpret::AllocId>,
 }
 impl<'tcx> InterpretInterner<'tcx> {
-pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> u64 {
+pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> interpret::AllocId {
 if let Some(&alloc_id) = self.function_cache.get(&instance) {
 return alloc_id;
 }
......
@@ -932,14 +930,14 @@ pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> u64 {
 pub fn get_fn(
 &self,
-id: u64,
+id: interpret::AllocId,
 ) -> Option<Instance<'tcx>> {
 self.functions.get(&id).cloned()
 }
 pub fn get_alloc(
 &self,
-id: u64,
+id: interpret::AllocId,
 ) -> Option<&'tcx interpret::Allocation> {
 self.alloc_by_id.get(&id).cloned()
 }
......
@@ -947,14 +945,14 @@ pub fn get_alloc(
 pub fn get_cached(
 &self,
 global_id: interpret::GlobalId<'tcx>,
-) -> Option<interpret::Pointer> {
+) -> Option<interpret::AllocId> {
 self.alloc_cache.get(&global_id).cloned()
 }
 pub fn cache(
 &mut self,
 global_id: interpret::GlobalId<'tcx>,
-ptr: interpret::Pointer,
+ptr: interpret::AllocId,
 ) {
 if let Some(old) = self.alloc_cache.insert(global_id, ptr) {
 bug!("tried to cache {:?}, but was already existing as {:#?}", global_id, old);
......
@@ -963,7 +961,7 @@ pub fn cache(
 pub fn intern_at_reserved(
 &mut self,
-id: u64,
+id: interpret::AllocId,
 alloc: &'tcx interpret::Allocation,
 ) {
 if let Some(old) = self.alloc_by_id.insert(id, alloc) {
......
@@ -975,9 +973,9 @@ pub fn intern_at_reserved(
 /// yet have an allocation backing it.
 pub fn reserve(
 &mut self,
-) -> u64 {
+) -> interpret::AllocId {
 let next = self.next_id;
-self.next_id = self.next_id
+self.next_id.0 = self.next_id.0
 .checked_add(1)
 .expect("You overflowed a u64 by incrementing by 1... \
 You've just earned yourself a free drink if we ever meet. \
......
@@ -1069,7 +1067,7 @@ pub fn intern_const_alloc(
 }
 /// Allocates a byte or string literal for `mir::interpret`
-pub fn allocate_cached(self, bytes: &[u8]) -> u64 {
+pub fn allocate_cached(self, bytes: &[u8]) -> interpret::AllocId {
 // check whether we already allocated this literal or a constant with the same memory
 if let Some(&alloc_id) = self.interpret_interner.borrow().literal_alloc_cache.get(bytes) {
 return alloc_id;
......
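
The interner hunks above follow a reserve-then-intern protocol: `reserve` hands out a fresh `AllocId` before any bytes exist, and `intern_at_reserved` later binds the allocation to it exactly once. A simplified stand-alone sketch of that flow (method names mirror the diff; the `String` payload and `panic!` stand in for `&'tcx interpret::Allocation` and `bug!`). It also suggests why `Default` joins `AllocId`'s derive list: presumably so a struct holding `next_id: interpret::AllocId` can still derive `Default`.

```rust
use std::collections::HashMap;

#[derive(Copy, Clone, Default, Eq, Hash, PartialEq, Debug)]
struct AllocId(u64);

// Stripped-down stand-in for InterpretInterner: same method names,
// but a String payload instead of &'tcx interpret::Allocation.
#[derive(Default)]
struct Interner {
    alloc_by_id: HashMap<AllocId, String>,
    next_id: AllocId, // derive(Default) works because AllocId derives Default
}

impl Interner {
    // Hand out a fresh id that does not yet have a backing allocation.
    fn reserve(&mut self) -> AllocId {
        let next = self.next_id;
        self.next_id.0 = self.next_id.0
            .checked_add(1)
            .expect("ran out of allocation ids");
        next
    }

    // Bind a reserved id to its allocation, exactly once.
    fn intern_at_reserved(&mut self, id: AllocId, alloc: String) {
        if let Some(old) = self.alloc_by_id.insert(id, alloc) {
            panic!("tried to intern {:?} twice, old value {:?}", id, old);
        }
    }

    fn get_alloc(&self, id: AllocId) -> Option<&str> {
        self.alloc_by_id.get(&id).map(String::as_str)
    }
}

fn main() {
    let mut interner = Interner::default();
    // Reserving before interning supports cyclic data: the id can be written
    // into other allocations before its own bytes are finalized.
    let id = interner.reserve();
    interner.intern_at_reserved(id, "static bytes".to_string());
    assert_eq!(interner.get_alloc(id), Some("static bytes"));
}
```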
......
@@ -12,7 +12,7 @@
 use syntax::ast::Mutability;
 use syntax::codemap::Span;
-use rustc::mir::interpret::{EvalResult, EvalError, EvalErrorKind, GlobalId, Value, Pointer, PrimVal};
+use rustc::mir::interpret::{EvalResult, EvalError, EvalErrorKind, GlobalId, Value, MemoryPointer, Pointer, PrimVal};
 use super::{Place, EvalContext, StackPopCleanup, ValTy};
 use rustc_const_math::ConstInt;
......
@@ -67,7 +67,7 @@ pub fn eval_body<'a, 'tcx>(
 layout.align,
 None,
 )?;
-tcx.interpret_interner.borrow_mut().cache(cid, ptr.into());
+tcx.interpret_interner.borrow_mut().cache(cid, ptr.alloc_id);
 let cleanup = StackPopCleanup::MarkStatic(Mutability::Immutable);
 let name = ty::tls::with(|tcx| tcx.item_path_str(instance.def_id()));
 trace!("const_eval: pushing stack frame for global: {}", name);
......
@@ -81,8 +81,8 @@ pub fn eval_body<'a, 'tcx>(
 while ecx.step()? {}
 }
-let value = tcx.interpret_interner.borrow().get_cached(cid).expect("global not cached");
-Ok((value, instance_ty))
+let alloc = tcx.interpret_interner.borrow().get_cached(cid).expect("global not cached");
+Ok((MemoryPointer::new(alloc, 0).into(), instance_ty))
 }
 pub fn eval_body_as_integer<'a, 'tcx>(
......
......
@@ -950,8 +950,8 @@ pub(crate) fn write_discriminant_value(
 }
 pub fn read_global_as_value(&self, gid: GlobalId, layout: TyLayout) -> Value {
-Value::ByRef(self.tcx.interpret_interner.borrow().get_cached(gid).expect("global not cached"),
-layout.align)
+let alloc = self.tcx.interpret_interner.borrow().get_cached(gid).expect("global not cached");
+Value::ByRef(MemoryPointer::new(alloc, 0).into(), layout.align)
 }
 pub fn force_allocation(&mut self, place: Place) -> EvalResult<'tcx, Place> {
......
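
The `const_eval.rs` and `eval_context.rs` hunks all apply the same transformation: the global cache now stores a bare `AllocId` instead of a full pointer, and readers rebuild a base pointer at offset 0. That is lossless because a cached global always refers to the start of its allocation. A toy sketch of the invariant (both types here are simplified stand-ins for miri's `AllocId`/`MemoryPointer`):

```rust
#[derive(Copy, Clone, PartialEq, Debug)]
struct AllocId(u64);

#[derive(Copy, Clone, PartialEq, Debug)]
struct MemoryPointer {
    alloc_id: AllocId,
    offset: u64,
}

impl MemoryPointer {
    fn new(alloc_id: AllocId, offset: u64) -> Self {
        MemoryPointer { alloc_id, offset }
    }
}

fn main() {
    let ptr = MemoryPointer::new(AllocId(7), 0); // globals sit at offset 0
    let cached = ptr.alloc_id;                   // what cache(cid, ptr.alloc_id) stores
    let rebuilt = MemoryPointer::new(cached, 0); // what get_cached callers reconstruct
    assert_eq!(ptr, rebuilt);
}
```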
......
@@ -2,7 +2,7 @@
 //! This separation exists to ensure that no fancy miri features like
 //! interpreting common C functions leak into CTFE.
-use rustc::mir::interpret::{EvalResult, PrimVal, MemoryPointer, AccessKind};
+use rustc::mir::interpret::{AllocId, EvalResult, PrimVal, MemoryPointer, AccessKind};
 use super::{EvalContext, Place, ValTy, Memory};
 use rustc::mir;
......
@@ -89,12 +89,12 @@ fn check_locks<'a>(
 fn add_lock<'a>(
 _mem: &mut Memory<'a, 'tcx, Self>,
-_id: u64,
+_id: AllocId,
 ) {}
 fn free_lock<'a>(
 _mem: &mut Memory<'a, 'tcx, Self>,
-_id: u64,
+_id: AllocId,
 _len: u64,
 ) -> EvalResult<'tcx> {
 Ok(())
......
......
@@ -34,15 +34,15 @@ pub struct Memory<'a, 'tcx: 'a, M: Machine<'tcx>> {
 pub data: M::MemoryData,
 /// Helps guarantee that stack allocations aren't deallocated via `rust_deallocate`
-alloc_kind: HashMap<u64, MemoryKind<M::MemoryKinds>>,
+alloc_kind: HashMap<AllocId, MemoryKind<M::MemoryKinds>>,
 /// Actual memory allocations (arbitrary bytes, may contain pointers into other allocations).
-alloc_map: HashMap<u64, Allocation>,
+alloc_map: HashMap<AllocId, Allocation>,
 /// Actual memory allocations (arbitrary bytes, may contain pointers into other allocations).
 ///
 /// Stores statics while they are being processed, before they are interned and thus frozen
-uninitialized_statics: HashMap<u64, Allocation>,
+uninitialized_statics: HashMap<AllocId, Allocation>,
 /// Number of virtual bytes allocated.
 memory_usage: u64,
......
@@ -73,17 +73,17 @@ pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, max_memory: u64, data: M::MemoryData) ->
 pub fn allocations<'x>(
 &'x self,
 ) -> impl Iterator<Item = (AllocId, &'x Allocation)> {
-self.alloc_map.iter().map(|(&id, alloc)| (AllocId(id), alloc))
+self.alloc_map.iter().map(|(&id, alloc)| (id, alloc))
 }
 pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> MemoryPointer {
 let id = self.tcx.interpret_interner.borrow_mut().create_fn_alloc(instance);
-MemoryPointer::new(AllocId(id), 0)
+MemoryPointer::new(id, 0)
 }
 pub fn allocate_cached(&mut self, bytes: &[u8]) -> MemoryPointer {
 let id = self.tcx.allocate_cached(bytes);
-MemoryPointer::new(AllocId(id), 0)
+MemoryPointer::new(id, 0)
 }
 /// kind is `None` for statics
......
@@ -121,7 +121,7 @@ pub fn allocate(
 },
 Some(MemoryKind::MutableStatic) => bug!("don't allocate mutable statics directly")
 }
-Ok(MemoryPointer::new(AllocId(id), 0))
+Ok(MemoryPointer::new(id, 0))
 }
 pub fn reallocate(
......
@@ -136,8 +136,8 @@ pub fn reallocate(
 if ptr.offset != 0 {
 return err!(ReallocateNonBasePtr);
 }
-if self.alloc_map.contains_key(&ptr.alloc_id.0) {
-let alloc_kind = self.alloc_kind[&ptr.alloc_id.0];
+if self.alloc_map.contains_key(&ptr.alloc_id) {
+let alloc_kind = self.alloc_kind[&ptr.alloc_id];
 if alloc_kind != kind {
 return err!(ReallocatedWrongMemoryKind(
 format!("{:?}", alloc_kind),
......
@@ -163,7 +163,7 @@
 }
 pub fn deallocate_local(&mut self, ptr: MemoryPointer) -> EvalResult<'tcx> {
-match self.alloc_kind.get(&ptr.alloc_id.0).cloned() {
+match self.alloc_kind.get(&ptr.alloc_id).cloned() {
 // for a constant like `const FOO: &i32 = &1;` the local containing
 // the `1` is referred to by the global. We transitively marked everything
 // the global refers to as static itself, so we don't free it here
......
@@ -185,19 +185,19 @@ pub fn deallocate(
 return err!(DeallocateNonBasePtr);
 }
-let alloc = match self.alloc_map.remove(&ptr.alloc_id.0) {
+let alloc = match self.alloc_map.remove(&ptr.alloc_id) {
 Some(alloc) => alloc,
-None => if self.uninitialized_statics.contains_key(&ptr.alloc_id.0) {
+None => if self.uninitialized_statics.contains_key(&ptr.alloc_id) {
 return err!(DeallocatedWrongMemoryKind(
 "uninitializedstatic".to_string(),
 format!("{:?}", kind),
 ))
-} else if self.tcx.interpret_interner.borrow().get_fn(ptr.alloc_id.0).is_some() {
+} else if self.tcx.interpret_interner.borrow().get_fn(ptr.alloc_id).is_some() {
 return err!(DeallocatedWrongMemoryKind(
 "function".to_string(),
 format!("{:?}", kind),
 ))
-} else if self.tcx.interpret_interner.borrow().get_alloc(ptr.alloc_id.0).is_some() {
+} else if self.tcx.interpret_interner.borrow().get_alloc(ptr.alloc_id).is_some() {
 return err!(DeallocatedWrongMemoryKind(
 "static".to_string(),
 format!("{:?}", kind),
......
@@ -207,14 +207,14 @@ pub fn deallocate(
 },
 };
-let alloc_kind = self.alloc_kind.remove(&ptr.alloc_id.0).expect("alloc_map out of sync with alloc_kind");
+let alloc_kind = self.alloc_kind.remove(&ptr.alloc_id).expect("alloc_map out of sync with alloc_kind");
 // It is okay for us to still holds locks on deallocation -- for example, we could store data we own
 // in a local, and the local could be deallocated (from StorageDead) before the function returns.
 // However, we should check *something*. For now, we make sure that there is no conflicting write
 // lock by another frame. We *have* to permit deallocation if we hold a read lock.
 // TODO: Figure out the exact rules here.
-M::free_lock(self, ptr.alloc_id.0, alloc.bytes.len() as u64)?;
+M::free_lock(self, ptr.alloc_id, alloc.bytes.len() as u64)?;
 if alloc_kind != kind {
 return err!(DeallocatedWrongMemoryKind(
......
@@ -295,17 +295,17 @@ pub fn check_bounds(&self, ptr: MemoryPointer, access: bool) -> EvalResult<'tcx>
 impl<'a, 'tcx, M: Machine<'tcx>> Memory<'a, 'tcx, M> {
 pub fn get(&self, id: AllocId) -> EvalResult<'tcx, &Allocation> {
 // normal alloc?
-match self.alloc_map.get(&id.0) {
+match self.alloc_map.get(&id) {
 Some(alloc) => Ok(alloc),
 // uninitialized static alloc?
-None => match self.uninitialized_statics.get(&id.0) {
+None => match self.uninitialized_statics.get(&id) {
 Some(alloc) => Ok(alloc),
 None => {
 let int = self.tcx.interpret_interner.borrow();
 // static alloc?
-int.get_alloc(id.0)
+int.get_alloc(id)
 // no alloc? produce an error
-.ok_or_else(|| if int.get_fn(id.0).is_some() {
+.ok_or_else(|| if int.get_fn(id).is_some() {
 EvalErrorKind::DerefFunctionPointer.into()
 } else {
 EvalErrorKind::DanglingPointerDeref.into()
......
@@ -320,17 +320,17 @@ fn get_mut(
 id: AllocId,
 ) -> EvalResult<'tcx, &mut Allocation> {
 // normal alloc?
-match self.alloc_map.get_mut(&id.0) {
+match self.alloc_map.get_mut(&id) {
 Some(alloc) => Ok(alloc),
 // uninitialized static alloc?
-None => match self.uninitialized_statics.get_mut(&id.0) {
+None => match self.uninitialized_statics.get_mut(&id) {
 Some(alloc) => Ok(alloc),
 None => {
 let int = self.tcx.interpret_interner.borrow();
 // no alloc or immutable alloc? produce an error
-if int.get_alloc(id.0).is_some() {
+if int.get_alloc(id).is_some() {
 err!(ModifiedConstantMemory)
-} else if int.get_fn(id.0).is_some() {
+} else if int.get_fn(id).is_some() {
 err!(DerefFunctionPointer)
 } else {
 err!(DanglingPointerDeref)
......
@@ -348,7 +348,7 @@ pub fn get_fn(&self, ptr: MemoryPointer) -> EvalResult<'tcx, Instance<'tcx>> {
 self.tcx
 .interpret_interner
 .borrow()
-.get_fn(ptr.alloc_id.0)
+.get_fn(ptr.alloc_id)
 .ok_or(EvalErrorKind::ExecuteMemory.into())
 }
......
@@ -372,21 +372,21 @@ pub fn dump_allocs(&self, mut allocs: Vec<AllocId>) {
 let (alloc, immutable) =
 // normal alloc?
-match self.alloc_map.get(&id.0) {
-Some(a) => (a, match self.alloc_kind[&id.0] {
+match self.alloc_map.get(&id) {
+Some(a) => (a, match self.alloc_kind[&id] {
 MemoryKind::Stack => " (stack)".to_owned(),
 MemoryKind::Machine(m) => format!(" ({:?})", m),
 MemoryKind::MutableStatic => " (static mut)".to_owned(),
 }),
 // uninitialized static alloc?
-None => match self.uninitialized_statics.get(&id.0) {
+None => match self.uninitialized_statics.get(&id) {
 Some(a) => (a, " (static in the process of initialization)".to_owned()),
 None => {
 let int = self.tcx.interpret_interner.borrow();
 // static alloc?
-match int.get_alloc(id.0) {
+match int.get_alloc(id) {
 Some(a) => (a, "(immutable)".to_owned()),
-None => if let Some(func) = int.get_fn(id.0) {
+None => if let Some(func) = int.get_fn(id) {
 trace!("{} {}", msg, func);
 continue;
 } else {
......
@@ -445,7 +445,7 @@ pub fn leak_report(&self) -> usize {
 let leaks: Vec<_> = self.alloc_map
 .keys()
 .filter_map(|key| if kinds[key] != MemoryKind::MutableStatic {
-Some(AllocId(*key))
+Some(*key)
 } else {
 None
 })
......
@@ -528,7 +528,7 @@ fn mark_inner_allocation_initialized(
 alloc: AllocId,
 mutability: Mutability,
 ) -> EvalResult<'tcx> {
-match self.alloc_kind.get(&alloc.0) {
+match self.alloc_kind.get(&alloc) {
 // do not go into immutable statics
 None |
 // or mutable statics
......
@@ -550,13 +550,13 @@ pub fn mark_static_initalized(
 mutability
 );
 if mutability == Mutability::Immutable {
-let alloc = self.alloc_map.remove(&alloc_id.0);
-let kind = self.alloc_kind.remove(&alloc_id.0);
+let alloc = self.alloc_map.remove(&alloc_id);
+let kind = self.alloc_kind.remove(&alloc_id);
 assert_ne!(kind, Some(MemoryKind::MutableStatic));
-let uninit = self.uninitialized_statics.remove(&alloc_id.0);
+let uninit = self.uninitialized_statics.remove(&alloc_id);
 if let Some(alloc) = alloc.or(uninit) {
 let alloc = self.tcx.intern_const_alloc(alloc);
-self.tcx.interpret_interner.borrow_mut().intern_at_reserved(alloc_id.0, alloc);
+self.tcx.interpret_interner.borrow_mut().intern_at_reserved(alloc_id, alloc);
 // recurse into inner allocations
 for &alloc in alloc.relocations.values() {
 self.mark_inner_allocation_initialized(alloc, mutability)?;
......
@@ -565,17 +565,17 @@ pub fn mark_static_initalized(
 return Ok(());
 }
 // We are marking the static as initialized, so move it out of the uninit map
-if let Some(uninit) = self.uninitialized_statics.remove(&alloc_id.0) {
-self.alloc_map.insert(alloc_id.0, uninit);
+if let Some(uninit) = self.uninitialized_statics.remove(&alloc_id) {
+self.alloc_map.insert(alloc_id, uninit);
 }
 // do not use `self.get_mut(alloc_id)` here, because we might have already marked a
 // sub-element or have circular pointers (e.g. `Rc`-cycles)
-let relocations = match self.alloc_map.get_mut(&alloc_id.0) {
+let relocations = match self.alloc_map.get_mut(&alloc_id) {
 Some(&mut Allocation {
 ref mut relocations,
 ..
 }) => {
-match self.alloc_kind.get(&alloc_id.0) {
+match self.alloc_kind.get(&alloc_id) {
 // const eval results can refer to "locals".
 // E.g. `const Foo: &u32 = &1;` refers to the temp local that stores the `1`
 None |
......
@@ -587,7 +587,7 @@ pub fn mark_static_initalized(
 },
 }
 // overwrite or insert
-self.alloc_kind.insert(alloc_id.0, MemoryKind::MutableStatic);
+self.alloc_kind.insert(alloc_id, MemoryKind::MutableStatic);
 // take out the relocations vector to free the borrow on self, so we can call
 // mark recursively
 mem::replace(relocations, Default::default())
......
@@ -600,7 +600,7 @@ pub fn mark_static_initalized(
 }
 // put back the relocations
 self.alloc_map
-.get_mut(&alloc_id.0)
+.get_mut(&alloc_id)
 .expect("checked above")
 .relocations = relocations;
 Ok(())
......
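
The `memory.rs` hunks are mechanical (`&id.0` becomes `&id` once the maps are keyed by `AllocId`), but they also encode a lookup order worth keeping in mind: regular allocations first, then statics still being initialized, then interned allocations, and finally function ids, which are only consulted to pick the right error. A toy model of that dispatch (field and error names follow the diff; `Vec<u8>` and the flat maps are simplifications):

```rust
use std::collections::HashMap;

#[derive(Copy, Clone, Eq, Hash, PartialEq, Debug)]
struct AllocId(u64);

#[derive(Debug, PartialEq)]
enum EvalErrorKind {
    DerefFunctionPointer,
    DanglingPointerDeref,
}

struct Memory {
    alloc_map: HashMap<AllocId, Vec<u8>>,             // regular allocations
    uninitialized_statics: HashMap<AllocId, Vec<u8>>, // statics mid-initialization
    interned: HashMap<AllocId, Vec<u8>>,              // frozen statics/consts
    functions: HashMap<AllocId, &'static str>,        // function "allocations"
}

impl Memory {
    fn get(&self, id: AllocId) -> Result<&Vec<u8>, EvalErrorKind> {
        // normal alloc? then uninitialized static? then interned static?
        self.alloc_map.get(&id)
            .or_else(|| self.uninitialized_statics.get(&id))
            .or_else(|| self.interned.get(&id))
            // no data alloc: distinguish "that's a function" from "dangling"
            .ok_or(if self.functions.contains_key(&id) {
                EvalErrorKind::DerefFunctionPointer
            } else {
                EvalErrorKind::DanglingPointerDeref
            })
    }
}

fn main() {
    let mut mem = Memory {
        alloc_map: HashMap::new(),
        uninitialized_statics: HashMap::new(),
        interned: HashMap::new(),
        functions: HashMap::new(),
    };
    mem.functions.insert(AllocId(1), "main");
    assert_eq!(mem.get(AllocId(1)).unwrap_err(), EvalErrorKind::DerefFunctionPointer);
    assert_eq!(mem.get(AllocId(2)).unwrap_err(), EvalErrorKind::DanglingPointerDeref);
}
```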
......
@@ -194,8 +194,9 @@ pub fn eval_place(&mut self, mir_place: &mir::Place<'tcx>) -> EvalResult<'tcx, P
 promoted: None,
 };
 let layout = self.layout_of(self.place_ty(mir_place))?;
+let alloc = self.tcx.interpret_interner.borrow().get_cached(gid).expect("uncached global");
 Place::Ptr {
-ptr: self.tcx.interpret_interner.borrow().get_cached(gid).expect("uncached global"),
+ptr: MemoryPointer::new(alloc, 0).into(),
 align: layout.align,
 extra: PlaceExtra::None,
 }
......
......
@@ -180,7 +180,7 @@ fn global_item(
 layout.align,
 None,
 )?;
-self.tcx.interpret_interner.borrow_mut().cache(cid, ptr.into());
+self.tcx.interpret_interner.borrow_mut().cache(cid, ptr.alloc_id);
 let internally_mutable = !layout.ty.is_freeze(self.tcx, self.param_env, span);
 let mutability = if mutability == Mutability::Mutable || internally_mutable {
 Mutability::Mutable
......
@@ -265,7 +265,7 @@ fn visit_constant(&mut self, constant: &mir::Constant<'tcx>, location: mir::Loca
 layout.align,
 None,
 )?;
-this.ecx.tcx.interpret_interner.borrow_mut().cache(cid, ptr.into());
+this.ecx.tcx.interpret_interner.borrow_mut().cache(cid, ptr.alloc_id);
 trace!("pushing stack frame for {:?}", index);
 this.ecx.push_stack_frame(
 this.instance,
......