Commit 4c090fe3 authored by Ralf Jung

bring back MemoryExtra

Parent 37961dbd
@@ -53,6 +53,94 @@ pub struct Allocation<Tag=(),Extra=()> {
pub extra: Extra,
}
pub trait AllocationExtra<Tag, MemoryExtra>: ::std::fmt::Debug + Clone {
/// Hook to initialize the extra data when an allocation gets created.
fn memory_allocated(
_size: Size,
_memory_extra: &MemoryExtra
) -> EvalResult<'tcx, Self>;
/// Hook for performing extra checks on a memory read access.
///
/// Takes read-only access to the allocation so that all the memory read
/// operations can take `&self`. Use a `RefCell` in `AllocExtra` if you
/// need to mutate (a sketch of that pattern follows this trait).
#[inline(always)]
fn memory_read(
_alloc: &Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
/// Hook for performing extra checks on a memory write access.
#[inline(always)]
fn memory_written(
_alloc: &mut Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
/// Hook for performing extra checks on a memory deallocation.
/// `size` will be the size of the allocation.
#[inline(always)]
fn memory_deallocated(
_alloc: &mut Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
}
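The read hooks above only receive `&Allocation`, so mutable per-allocation state has to go through interior mutability, as the doc comment notes. Below is a minimal self-contained sketch of that pattern; all types here are simplified stand-ins for the interpreter's, not the real rustc API:

```rust
use std::cell::RefCell;

/// Simplified stand-in for the interpreter's `Allocation`.
struct Allocation<Extra> {
    bytes: Vec<u8>,
    extra: Extra,
}

/// Hypothetical per-allocation extra: a read counter. `memory_read` only
/// gets a shared reference to the allocation, so the counter lives in a `RefCell`.
#[derive(Debug, Default)]
struct CountingExtra {
    reads: RefCell<u64>,
}

impl CountingExtra {
    /// Mirrors the shape of `AllocationExtra::memory_read`: `&Allocation`, not `&mut`.
    fn memory_read(alloc: &Allocation<Self>, offset: usize, len: usize) -> Result<(), String> {
        if offset.checked_add(len).map_or(true, |end| end > alloc.bytes.len()) {
            return Err(format!("out-of-bounds read of {} bytes at offset {}", len, offset));
        }
        *alloc.extra.reads.borrow_mut() += 1; // mutation through a shared reference
        Ok(())
    }
}

fn main() {
    let alloc = Allocation { bytes: vec![0u8; 16], extra: CountingExtra::default() };
    CountingExtra::memory_read(&alloc, 0, 8).unwrap();
    CountingExtra::memory_read(&alloc, 8, 8).unwrap();
    assert_eq!(*alloc.extra.reads.borrow(), 2);
}
```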
impl AllocationExtra<(), ()> for () {
#[inline(always)]
fn memory_allocated(
_size: Size,
_memory_extra: &()
) -> EvalResult<'tcx, Self> {
Ok(())
}
}
impl<Tag, Extra> Allocation<Tag, Extra> {
/// Creates a read-only allocation initialized by the given bytes
pub fn from_bytes(slice: &[u8], align: Align, extra: Extra) -> Self {
let mut undef_mask = UndefMask::new(Size::ZERO);
undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
Self {
bytes: slice.to_owned(),
relocations: Relocations::new(),
undef_mask,
align,
mutability: Mutability::Immutable,
extra,
}
}
pub fn from_byte_aligned_bytes(slice: &[u8], extra: Extra) -> Self {
Allocation::from_bytes(slice, Align::from_bytes(1).unwrap(), extra)
}
pub fn undef(size: Size, align: Align, extra: Extra) -> Self {
assert_eq!(size.bytes() as usize as u64, size.bytes());
Allocation {
bytes: vec![0; size.bytes() as usize],
relocations: Relocations::new(),
undef_mask: UndefMask::new(size),
align,
mutability: Mutability::Mutable,
extra,
}
}
}
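Note the API change in this impl: the `Extra: Default` bound is gone, and every constructor takes the `extra` value explicitly, so a machine whose extra needs runtime input (and thus has no sensible `Default`) can still use these constructors. A self-contained analogue with simplified stand-in types:

```rust
/// Simplified stand-in: the constructor takes `extra` by value instead of
/// requiring `Extra: Default`, so an extra that needs runtime input still works.
struct Allocation<Extra> {
    bytes: Vec<u8>,
    extra: Extra,
}

impl<Extra> Allocation<Extra> {
    fn undef(size: usize, extra: Extra) -> Self {
        Allocation { bytes: vec![0; size], extra }
    }
}

fn main() {
    // The CTFE machine keeps using `()`, as `allocate_bytes` does further down.
    let alloc = Allocation::undef(8, ());
    assert_eq!(alloc.bytes.len(), 8);
    let () = alloc.extra;
}
```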
impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
/// Alignment and bounds checks
impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
/// Check if the pointer is "in-bounds". Notice that a pointer pointing at the end
@@ -81,7 +169,7 @@ pub fn check_bounds(
}
/// Byte accessors
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// The last argument controls whether we error out when there are undefined
/// or pointer bytes. You should never call this directly; call `get_bytes` or
/// `get_bytes_with_undef_and_ptr` instead.
@@ -89,13 +177,16 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
/// This function also guarantees that the resulting pointer will remain stable
/// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies
/// on that.
fn get_bytes_internal(
fn get_bytes_internal<MemoryExtra>(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
check_defined_and_ptr: bool,
) -> EvalResult<'tcx, &[u8]> {
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.check_bounds(cx, ptr, size)?;
if check_defined_and_ptr {
@@ -115,35 +206,44 @@ fn get_bytes_internal(
}
#[inline]
pub fn get_bytes(
pub fn get_bytes<MemoryExtra>(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &[u8]> {
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.get_bytes_internal(cx, ptr, size, true)
}
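The `FIXME` comments that recur through this file all point at rust-lang/rust#56209. What is observable from the diff: the old code put `Extra: AllocationExtra<Tag>` on the impl block, while the new code leaves the impl unbounded and restates the bound on every method, with `MemoryExtra` as a free type parameter filled in at each call site. A minimal sketch of that shape, with hypothetical trait and types:

```rust
/// Hypothetical stand-in for `AllocationExtra<Tag, MemoryExtra>`.
trait Hook<MemoryExtra> {
    fn on_read(_memory_extra: &MemoryExtra) {}
}

struct Alloc<Extra> {
    extra: Extra,
}

impl<Extra> Alloc<Extra> {
    // The bound lives on the method, not the impl block; `MemoryExtra` is a
    // free parameter that callers (usually via inference) pick per call.
    fn get_bytes<MemoryExtra>(&self, memory_extra: &MemoryExtra) -> &Extra
    where
        Extra: Hook<MemoryExtra>,
    {
        Extra::on_read(memory_extra);
        &self.extra
    }
}

struct MyExtra;
impl Hook<()> for MyExtra {}

fn main() {
    let a = Alloc { extra: MyExtra };
    let _my_extra: &MyExtra = a.get_bytes(&());
}
```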
/// It is the caller's responsibility to handle undefined and pointer bytes.
/// However, this still checks that there are no relocations on the *edges*.
#[inline]
pub fn get_bytes_with_undef_and_ptr(
pub fn get_bytes_with_undef_and_ptr<MemoryExtra>(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &[u8]> {
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.get_bytes_internal(cx, ptr, size, false)
}
/// Just calling this already marks everything as defined and removes relocations,
/// so be sure to actually put data there!
pub fn get_bytes_mut(
pub fn get_bytes_mut<MemoryExtra>(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
) -> EvalResult<'tcx, &mut [u8]> {
) -> EvalResult<'tcx, &mut [u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
assert_ne!(size.bytes(), 0, "0-sized accesses should never even get a `Pointer`");
self.check_bounds(cx, ptr, size)?;
@@ -160,14 +260,17 @@ pub fn get_bytes_mut(
}
/// Reading and writing
impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// Reads bytes until a `0` is encountered. Will error if the end of the allocation is reached
/// before a `0` is found.
pub fn read_c_str(
pub fn read_c_str<MemoryExtra>(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
) -> EvalResult<'tcx, &[u8]> {
) -> EvalResult<'tcx, &[u8]>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
let offset = ptr.offset.bytes() as usize;
match self.bytes[offset..].iter().position(|&c| c == 0) {
@@ -184,13 +287,16 @@ pub fn read_c_str(
/// Validates that `ptr.offset` and `ptr.offset + size` do not point to the middle of a
/// relocation. If `allow_ptr_and_undef` is `false`, also enforces that the memory in the
/// given range contains neither relocations nor undef bytes.
pub fn check_bytes(
pub fn check_bytes<MemoryExtra>(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size,
allow_ptr_and_undef: bool,
) -> EvalResult<'tcx> {
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
// Check bounds and relocations on the edges
self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
// Check undef and ptr
@@ -204,25 +310,31 @@ pub fn check_bytes(
/// Writes `src` to the memory starting at `ptr.offset`.
///
/// Will do bounds checks on the allocation.
pub fn write_bytes(
pub fn write_bytes<MemoryExtra>(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
src: &[u8],
) -> EvalResult<'tcx> {
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(src.len() as u64))?;
bytes.clone_from_slice(src);
Ok(())
}
/// Sets `count` bytes starting at `ptr.offset` with `val`. Basically `memset`.
pub fn write_repeat(
pub fn write_repeat<MemoryExtra>(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: u8,
count: Size
) -> EvalResult<'tcx> {
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let bytes = self.get_bytes_mut(cx, ptr, count)?;
for b in bytes {
*b = val;
@@ -238,12 +350,15 @@ pub fn write_repeat(
/// being valid for ZSTs
///
/// Note: This function does not do *any* alignment checks; you need to do these before calling.
pub fn read_scalar(
pub fn read_scalar<MemoryExtra>(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
size: Size
) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>> {
) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
// `get_bytes_with_undef_and_ptr` tests relocation edges
let bytes = self.get_bytes_with_undef_and_ptr(cx, ptr, size)?;
// Undef check happens *after* we established that the alignment is correct.
@@ -273,11 +388,14 @@ pub fn read_scalar(
}
/// Note: This function does not do *any* alignment checks; you need to do these before calling.
pub fn read_ptr_sized(
pub fn read_ptr_sized<MemoryExtra>(
&self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>> {
) -> EvalResult<'tcx, ScalarMaybeUndef<Tag>>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
self.read_scalar(cx, ptr, cx.data_layout().pointer_size)
}
@@ -289,13 +407,16 @@ pub fn read_ptr_sized(
/// being valid for ZSTs
///
/// Note: This function does not do *any* alignment checks; you need to do these before calling.
pub fn write_scalar(
pub fn write_scalar<MemoryExtra>(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: ScalarMaybeUndef<Tag>,
type_size: Size,
) -> EvalResult<'tcx> {
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let val = match val {
ScalarMaybeUndef::Scalar(scalar) => scalar,
ScalarMaybeUndef::Undef => return self.mark_definedness(ptr, type_size, false),
@@ -336,12 +457,15 @@ pub fn write_scalar(
}
/// Note: This function does not do *any* alignment checks; you need to do these before calling.
pub fn write_ptr_sized(
pub fn write_ptr_sized<MemoryExtra>(
&mut self,
cx: &impl HasDataLayout,
ptr: Pointer<Tag>,
val: ScalarMaybeUndef<Tag>
) -> EvalResult<'tcx> {
) -> EvalResult<'tcx>
// FIXME: Working around https://github.com/rust-lang/rust/issues/56209
where Extra: AllocationExtra<Tag, MemoryExtra>
{
let ptr_size = cx.data_layout().pointer_size;
self.write_scalar(cx, ptr.into(), val, ptr_size)
}
@@ -465,79 +589,7 @@ pub fn mark_definedness(
}
}
pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Default + Clone {
/// Hook for performing extra checks on a memory read access.
///
/// Takes read-only access to the allocation so that all the memory read
/// operations can take `&self`. Use a `RefCell` in `AllocExtra` if you
/// need to mutate.
#[inline]
fn memory_read(
_alloc: &Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
/// Hook for performing extra checks on a memory write access.
#[inline]
fn memory_written(
_alloc: &mut Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
/// Hook for performing extra checks on a memory deallocation.
/// `size` will be the size of the allocation.
#[inline]
fn memory_deallocated(
_alloc: &mut Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
}
impl AllocationExtra<()> for () {}
impl<Tag, Extra: Default> Allocation<Tag, Extra> {
/// Creates a read-only allocation initialized by the given bytes
pub fn from_bytes(slice: &[u8], align: Align) -> Self {
let mut undef_mask = UndefMask::new(Size::ZERO);
undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
Self {
bytes: slice.to_owned(),
relocations: Relocations::new(),
undef_mask,
align,
mutability: Mutability::Immutable,
extra: Extra::default(),
}
}
pub fn from_byte_aligned_bytes(slice: &[u8]) -> Self {
Allocation::from_bytes(slice, Align::from_bytes(1).unwrap())
}
pub fn undef(size: Size, align: Align) -> Self {
assert_eq!(size.bytes() as usize as u64, size.bytes());
Allocation {
bytes: vec![0; size.bytes() as usize],
relocations: Relocations::new(),
undef_mask: UndefMask::new(size),
align,
mutability: Mutability::Mutable,
extra: Extra::default(),
}
}
}
impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
/// Relocations
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);
......
@@ -1055,7 +1055,7 @@ pub fn intern_const_alloc(
/// Allocates a byte or string literal for `mir::interpret`, read-only
pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
// create an allocation that just contains these bytes
let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes);
let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes, ());
let alloc = self.intern_const_alloc(alloc);
self.alloc_map.lock().allocate(alloc)
}
......
@@ -353,6 +353,7 @@ impl<'a, 'mir, 'tcx> interpret::Machine<'a, 'mir, 'tcx>
for CompileTimeInterpreter<'a, 'mir, 'tcx>
{
type MemoryKinds = !;
type MemoryExtra = ();
type AllocExtra = ();
type PointerTag = ();
......
@@ -77,8 +77,13 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
/// The `default()` is used for pointers to consts, statics, vtables and functions.
type PointerTag: ::std::fmt::Debug + Default + Copy + Eq + Hash + 'static;
/// Extra data stored in the interpreter's `Memory`. A reference to this is
/// available when `AllocExtra` gets initialized, so you can e.g. keep an `Rc`
/// here if there is global state you need access to in the `AllocExtra` hooks
/// (a sketch of that pattern follows this hunk).
type MemoryExtra: Default;
/// Extra data stored in every allocation.
type AllocExtra: AllocationExtra<Self::PointerTag>;
type AllocExtra: AllocationExtra<Self::PointerTag, Self::MemoryExtra>;
/// Memory's allocation map
type MemoryMap:
......
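The `MemoryExtra` doc comment above suggests keeping an `Rc` to global state that the `AllocExtra` hooks need. A self-contained sketch of that arrangement, using hypothetical stand-in types:

```rust
use std::cell::RefCell;
use std::rc::Rc;

/// Hypothetical global state shared by the whole interpreter run.
#[derive(Default)]
struct GlobalState {
    next_alloc_id: u64,
}

/// Machine-wide extra (`Machine::MemoryExtra`): an `Rc` so that every
/// allocation's extra can reach the same `GlobalState`.
#[derive(Default)]
struct MemoryExtra {
    global: Rc<RefCell<GlobalState>>,
}

/// Per-allocation extra (`Machine::AllocExtra`), built from the memory-wide
/// extra when the allocation is created.
struct AllocExtra {
    id: u64,
    global: Rc<RefCell<GlobalState>>,
}

impl AllocExtra {
    /// Mirrors the shape of `AllocationExtra::memory_allocated`: it only gets
    /// `&MemoryExtra`, hence the `RefCell` inside the `Rc`.
    fn memory_allocated(_size: u64, memory_extra: &MemoryExtra) -> AllocExtra {
        let mut global = memory_extra.global.borrow_mut();
        let id = global.next_alloc_id;
        global.next_alloc_id += 1;
        AllocExtra { id, global: Rc::clone(&memory_extra.global) }
    }
}

fn main() {
    let memory_extra = MemoryExtra::default();
    let a = AllocExtra::memory_allocated(16, &memory_extra);
    let b = AllocExtra::memory_allocated(32, &memory_extra);
    assert_eq!((a.id, b.id), (0, 1));
    assert!(Rc::ptr_eq(&a.global, &b.global)); // both see the same global state
}
```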
@@ -73,6 +73,9 @@ pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
/// that do not exist any more.
dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,
/// Extra data added by the machine.
pub extra: M::MemoryExtra,
/// Lets us implement `HasDataLayout`, which is awfully convenient.
pub(super) tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
}
@@ -88,13 +91,19 @@ fn data_layout(&self) -> &TargetDataLayout {
// FIXME: Really we shouldn't clone memory, ever. Snapshot machinery should instead
// carefully copy only the reachable parts.
impl<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>>
Clone for Memory<'a, 'mir, 'tcx, M>
impl<'a, 'mir, 'tcx, M>
Clone
for
Memory<'a, 'mir, 'tcx, M>
where
M: Machine<'a, 'mir, 'tcx, PointerTag=(), AllocExtra=(), MemoryExtra=()>,
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
{
fn clone(&self) -> Self {
Memory {
alloc_map: self.alloc_map.clone(),
dead_alloc_map: self.dead_alloc_map.clone(),
extra: (),
tcx: self.tcx,
}
}
@@ -103,8 +112,9 @@ fn clone(&self) -> Self {
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
pub fn new(tcx: TyCtxtAt<'a, 'tcx, 'tcx>) -> Self {
Memory {
alloc_map: Default::default(),
alloc_map: M::MemoryMap::default(),
dead_alloc_map: FxHashMap::default(),
extra: M::MemoryExtra::default(),
tcx,
}
}
@@ -133,7 +143,8 @@ pub fn allocate(
align: Align,
kind: MemoryKind<M::MemoryKinds>,
) -> EvalResult<'tcx, Pointer> {
Ok(Pointer::from(self.allocate_with(Allocation::undef(size, align), kind)?))
let extra = AllocationExtra::memory_allocated(size, &self.extra)?;
Ok(Pointer::from(self.allocate_with(Allocation::undef(size, align, extra), kind)?))
}
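`allocate` now runs the hook before the allocation exists: `memory_allocated` gets `&self.extra` and produces the per-allocation extra, and a hook error aborts the allocation. A small self-contained analogue; the byte-budget policy is invented purely for illustration:

```rust
use std::cell::Cell;

/// Stand-in for `Memory`, holding only the machine-wide extra.
struct Memory {
    /// Stand-in for `M::MemoryExtra`: a hypothetical byte budget. The hook only
    /// sees `&MemoryExtra`, so updating it requires interior mutability.
    extra: Cell<u64>,
}

struct Allocation<AllocExtra> {
    bytes: Vec<u8>,
    extra: AllocExtra,
}

/// Mirrors the shape of `AllocationExtra::memory_allocated`: it runs before the
/// allocation exists and either produces its extra (here just `()`) or fails.
fn memory_allocated(size: u64, memory_extra: &Cell<u64>) -> Result<(), String> {
    let budget = memory_extra.get();
    if size > budget {
        return Err(format!("allocation of {} bytes exceeds remaining budget {}", size, budget));
    }
    memory_extra.set(budget - size);
    Ok(())
}

impl Memory {
    fn allocate(&mut self, size: u64) -> Result<Allocation<()>, String> {
        // As in the diff: consult the hook first, then build the allocation
        // with the extra it produced.
        let extra = memory_allocated(size, &self.extra)?;
        Ok(Allocation { bytes: vec![0; size as usize], extra })
    }
}

fn main() {
    let mut mem = Memory { extra: Cell::new(64) };
    assert_eq!(mem.allocate(48).unwrap().bytes.len(), 48);
    assert!(mem.allocate(48).is_err()); // hypothetical budget exhausted
}
```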
pub fn reallocate(
@@ -601,7 +612,8 @@ pub fn read_bytes(
/// Interning (for CTFE)
impl<'a, 'mir, 'tcx, M> Memory<'a, 'mir, 'tcx, M>
where
M: Machine<'a, 'mir, 'tcx, PointerTag=(), AllocExtra=()>,
M: Machine<'a, 'mir, 'tcx, PointerTag=(), AllocExtra=(), MemoryExtra=()>,
// FIXME: Working around https://github.com/rust-lang/rust/issues/24159
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
{
/// Marks an allocation as static and initialized, either mutable or not
......
@@ -295,10 +295,12 @@ pub fn to_mem_place(self) -> MPlaceTy<'tcx, Tag> {
// separating the pointer tag for `impl Trait`, see https://github.com/rust-lang/rust/issues/54385
impl<'a, 'mir, 'tcx, Tag, M> EvalContext<'a, 'mir, 'tcx, M>
where
// FIXME: Working around https://github.com/rust-lang/rust/issues/54385
Tag: ::std::fmt::Debug+Default+Copy+Eq+Hash+'static,
M: Machine<'a, 'mir, 'tcx, PointerTag=Tag>,
// FIXME: Working around https://github.com/rust-lang/rust/issues/24159
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation<Tag, M::AllocExtra>)>,
M::AllocExtra: AllocationExtra<Tag>,
M::AllocExtra: AllocationExtra<Tag, M::MemoryExtra>,
{
/// Take a value, which represents a (thin or fat) reference, and make it a place.
/// Alignment is just based on the type. This is the inverse of `MemPlace::to_ref()`.
......