Commit 32453ce4 authored by Ralf Jung

remove to_scalar_ptr and use ref_to_mplace everywhere

Parent 14ee66ac
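This commit deletes the Immediate::to_scalar_ptr and Immediate::to_meta helpers, each of which threw away half of a wide pointer, and routes every caller through ref_to_mplace, which decodes a reference-typed immediate into a place carrying the pointer, the metadata, and the pointee layout together. The sketch below is a minimal stand-alone model of that call-site change; the types are simplified stand-ins (plain integers, no tags, layouts, or interpreter errors), not the real rustc API.

// Minimal sketch of the refactoring pattern, using simplified stand-in types.
#[derive(Clone, Copy)]
enum Immediate {
    Scalar(u64),          // thin pointer: just an address
    ScalarPair(u64, u64), // wide pointer: address plus metadata (slice length or vtable)
}

impl Immediate {
    // Analogue of the removed to_scalar_ptr: keeps the address, drops the metadata.
    fn to_scalar_ptr(self) -> u64 {
        match self {
            Immediate::Scalar(ptr) | Immediate::ScalarPair(ptr, _) => ptr,
        }
    }

    // Analogue of the removed to_meta: keeps the metadata, drops the address.
    fn to_meta(self) -> Option<u64> {
        match self {
            Immediate::Scalar(_) => None,
            Immediate::ScalarPair(_, meta) => Some(meta),
        }
    }
}

struct MemPlace {
    ptr: u64,
    meta: Option<u64>,
}

// Analogue of the reworked ref_to_mplace: decodes both halves in one place.
fn ref_to_mplace(val: Immediate) -> MemPlace {
    let (ptr, meta) = match val {
        Immediate::Scalar(ptr) => (ptr, None),
        Immediate::ScalarPair(ptr, meta) => (ptr, Some(meta)),
    };
    MemPlace { ptr, meta }
}

fn main() {
    let value = Immediate::ScalarPair(0x1000, 3); // e.g. a slice reference of length 3

    // Before this commit: call sites pulled the two halves out separately.
    let (ptr, meta) = (value.to_scalar_ptr(), value.to_meta());
    println!("old style: ptr = {:#x}, meta = {:?}", ptr, meta);

    // After this commit: decode once, then read ptr and meta off the place.
    let place = ref_to_mplace(value);
    println!("new style: ptr = {:#x}, meta = {:?}", place.ptr, place.meta);
}

The diffs below show that change at each call site: code that previously extracted ptr, meta, and layout separately now reads all three off the single place returned by ref_to_mplace.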
......
@@ -192,20 +192,18 @@ fn visit_primitive(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> {
         let ty = mplace.layout.ty;
         if let ty::Ref(_, referenced_ty, mutability) = ty.kind {
             let value = self.ecx.read_immediate(mplace.into())?;
+            let mplace = self.ecx.ref_to_mplace(value)?;
             // Handle trait object vtables
-            if let Ok(meta) = value.to_meta() {
-                if let ty::Dynamic(..) =
-                    self.ecx.tcx.struct_tail_erasing_lifetimes(
-                        referenced_ty, self.ecx.param_env).kind
-                {
-                    if let Ok(vtable) = meta.unwrap().to_ptr() {
-                        // explitly choose `Immutable` here, since vtables are immutable, even
-                        // if the reference of the fat pointer is mutable
-                        self.intern_shallow(vtable.alloc_id, Mutability::Immutable, None)?;
-                    }
+            if let ty::Dynamic(..) =
+                self.ecx.tcx.struct_tail_erasing_lifetimes(
+                    referenced_ty, self.ecx.param_env).kind
+            {
+                if let Ok(vtable) = mplace.meta.unwrap().to_ptr() {
+                    // explitly choose `Immutable` here, since vtables are immutable, even
+                    // if the reference of the fat pointer is mutable
+                    self.intern_shallow(vtable.alloc_id, Mutability::Immutable, None)?;
                 }
             }
-            let mplace = self.ecx.ref_to_mplace(value)?;
             // Check if we have encountered this pointer+layout combination before.
             // Only recurse for allocation-backed pointers.
             if let Scalar::Ptr(ptr) = mplace.ptr {
......
@@ -230,7 +228,7 @@ fn visit_primitive(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> {
                 ty::Array(_, n)
                     if n.eval_usize(self.ecx.tcx.tcx, self.ecx.param_env) == 0 => {}
                 ty::Slice(_)
-                    if value.to_meta().unwrap().unwrap().to_usize(self.ecx)? == 0 => {}
+                    if mplace.meta.unwrap().to_usize(self.ecx)? == 0 => {}
                 _ => bug!("const qualif failed to prevent mutable references"),
             }
         },
......
......
@@ -82,26 +82,6 @@ pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
             Immediate::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?))
         }
     }
-
-    /// Converts the immediate into a pointer (or a pointer-sized integer).
-    /// Throws away the second half of a ScalarPair!
-    #[inline]
-    pub fn to_scalar_ptr(self) -> InterpResult<'tcx, Scalar<Tag>> {
-        match self {
-            Immediate::Scalar(ptr) |
-            Immediate::ScalarPair(ptr, _) => ptr.not_undef(),
-        }
-    }
-
-    /// Converts the value into its metadata.
-    /// Throws away the first half of a ScalarPair!
-    #[inline]
-    pub fn to_meta(self) -> InterpResult<'tcx, Option<Scalar<Tag>>> {
-        Ok(match self {
-            Immediate::Scalar(_) => None,
-            Immediate::ScalarPair(_, meta) => Some(meta.not_undef()?),
-        })
-    }
 }

 // ScalarPair needs a type to interpret, so we often have an immediate and a type together
......
......
@@ -291,15 +291,19 @@ pub fn ref_to_mplace(
             .expect("`ref_to_mplace` called on non-ptr type")
             .ty;
         let layout = self.layout_of(pointee_type)?;
+        let (ptr, meta) = match *val {
+            Immediate::Scalar(ptr) => (ptr.not_undef()?, None),
+            Immediate::ScalarPair(ptr, meta) => (ptr.not_undef()?, Some(meta.not_undef()?)),
+        };
         let mplace = MemPlace {
-            ptr: val.to_scalar_ptr()?,
+            ptr,
             // We could use the run-time alignment here. For now, we do not, because
             // the point of tracking the alignment here is to make sure that the *static*
             // alignment information emitted with the loads is correct. The run-time
             // alignment can only be more restrictive.
             align: layout.align.abi,
-            meta: val.to_meta()?,
+            meta,
         };
         Ok(MPlaceTy { mplace, layout })
     }
......
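With the helpers gone, the undef checks move into ref_to_mplace itself: the match above calls not_undef() on both halves, so a wide pointer with an uninitialized address or uninitialized metadata now fails at that single point. That is why the validation changes below can replace the separate "undefined address in pointer" / "uninitialized data in wide pointer metadata" checks with one "undefined pointer" message. A rough, self-contained model of that behavior (toy types, with None standing in for undef; not the interpreter's real Scalar or error machinery):

// Toy model: None stands in for an undefined (uninitialized) scalar half.
type Scalar = Option<u64>;

enum Immediate {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar), // wide pointer: address + metadata
}

// Stand-in for the interpreter's not_undef(): reject an uninitialized half.
fn not_undef(s: Scalar) -> Result<u64, &'static str> {
    s.ok_or("undefined pointer")
}

// Stand-in for the reworked ref_to_mplace: both halves are checked here.
fn ref_to_mplace(val: Immediate) -> Result<(u64, Option<u64>), &'static str> {
    Ok(match val {
        Immediate::Scalar(ptr) => (not_undef(ptr)?, None),
        Immediate::ScalarPair(ptr, meta) => (not_undef(ptr)?, Some(not_undef(meta)?)),
    })
}

fn main() {
    // Thin pointer: only the address half exists.
    let thin = Immediate::Scalar(Some(0x2000));
    assert_eq!(ref_to_mplace(thin), Ok((0x2000, None)));

    // Wide pointer whose metadata half was never initialized: rejected up front.
    let bad = Immediate::ScalarPair(Some(0x1000), None);
    assert_eq!(ref_to_mplace(bad), Err("undefined pointer"));

    // Fully initialized wide pointer: passes, and the caller gets both halves.
    let good = Immediate::ScalarPair(Some(0x1000), Some(3));
    assert_eq!(ref_to_mplace(good), Ok((0x1000, Some(3))));
}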
......
@@ -388,44 +388,31 @@ fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> InterpResult<
                 }
             }
             ty::RawPtr(..) => {
-                // Check pointer part.
-                if self.ref_tracking_for_consts.is_some() {
-                    // Integers/floats in CTFE: For consistency with integers, we do not
-                    // accept undef.
-                    let _ptr = try_validation!(value.to_scalar_ptr(),
-                        "undefined address in raw pointer", self.path);
-                } else {
-                    // Remain consistent with `usize`: Accept anything.
-                }
-                // Check metadata.
-                let meta = try_validation!(value.to_meta(),
-                    "uninitialized data in wide pointer metadata", self.path);
-                let layout = self.ecx.layout_of(value.layout.ty.builtin_deref(true).unwrap().ty)?;
-                if layout.is_unsized() {
-                    self.check_wide_ptr_meta(meta, layout)?;
+                // We are conservative with undef for integers, but try to
+                // actually enforce our current rules for raw pointers.
+                let place = try_validation!(self.ecx.ref_to_mplace(value),
+                    "undefined pointer", self.path);
+                if place.layout.is_unsized() {
+                    self.check_wide_ptr_meta(place.meta, place.layout)?;
                 }
             }
             _ if ty.is_box() || ty.is_region_ptr() => {
                 // Handle wide pointers.
                 // Check metadata early, for better diagnostics
-                let ptr = try_validation!(value.to_scalar_ptr(),
-                    "undefined address in pointer", self.path);
-                let meta = try_validation!(value.to_meta(),
-                    "uninitialized data in wide pointer metadata", self.path);
-                let layout = self.ecx.layout_of(value.layout.ty.builtin_deref(true).unwrap().ty)?;
-                if layout.is_unsized() {
-                    self.check_wide_ptr_meta(meta, layout)?;
+                let place = try_validation!(self.ecx.ref_to_mplace(value),
+                    "undefined pointer", self.path);
+                if place.layout.is_unsized() {
+                    self.check_wide_ptr_meta(place.meta, place.layout)?;
                 }
                 // Make sure this is dereferencable and all.
-                let (size, align) = self.ecx.size_and_align_of(meta, layout)?
+                let (size, align) = self.ecx.size_and_align_of(place.meta, place.layout)?
                     // for the purpose of validity, consider foreign types to have
                     // alignment and size determined by the layout (size will be 0,
                     // alignment should take attributes into account).
-                    .unwrap_or_else(|| (layout.size, layout.align.abi));
+                    .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
                 let ptr: Option<_> = match
                     self.ecx.memory.check_ptr_access_align(
-                        ptr,
+                        place.ptr,
                         size,
                         Some(align),
                         CheckInAllocMsg::InboundsTest,
......
@@ -435,7 +422,7 @@ fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> InterpResult<
                     Err(err) => {
                         info!(
                             "{:?} did not pass access check for size {:?}, align {:?}",
-                            ptr, size, align
+                            place.ptr, size, align
                         );
                         match err.kind {
                             err_unsup!(InvalidNullPointerUsage) =>
......
@@ -459,7 +446,6 @@ fn visit_primitive(&mut self, value: OpTy<'tcx, M::PointerTag>) -> InterpResult<
                 };
                 // Recursive checking
                 if let Some(ref mut ref_tracking) = self.ref_tracking_for_consts {
-                    let place = self.ecx.ref_to_mplace(value)?;
                     if let Some(ptr) = ptr { // not a ZST
                         // Skip validation entirely for some external statics
                         let alloc_kind = self.ecx.tcx.alloc_map.lock().get(ptr.alloc_id);
......