Commit 524e575b authored by Smitty

Support allocation failures when interpreting MIR

Note that this breaks Miri.

Closes #79601
Parent 6e0b5546
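Before the diff itself, here is a minimal sketch of the shape this change gives the interpreter; the items below (`InterpError`, `Allocation`, `allocate_stack_slot`) are illustrative stand-ins for this description, not the real rustc items touched in the patch. Allocation becomes fallible, memory exhaustion is reported as a dedicated error variant, interpreter-internal callers propagate it with `?`, and a consumer that cannot recover (such as codegen) turns it into a hard error.

```rust
// Illustrative only: toy stand-ins for the rustc types changed in this commit.
#[derive(Debug)]
enum InterpError {
    MemoryExhausted,
}

type InterpResult<T> = Result<T, InterpError>;

struct Allocation {
    bytes: Vec<u8>,
}

impl Allocation {
    /// Fallible constructor: out-of-memory becomes an error value instead of an abort.
    fn uninit(size: usize) -> InterpResult<Self> {
        let mut bytes = Vec::new();
        bytes.try_reserve(size).map_err(|_| InterpError::MemoryExhausted)?;
        bytes.resize(size, 0);
        Ok(Allocation { bytes })
    }
}

/// Interpreter-internal callers just propagate the failure with `?`.
fn allocate_stack_slot(size: usize) -> InterpResult<Allocation> {
    let alloc = Allocation::uninit(size)?;
    Ok(alloc)
}

fn main() {
    // A consumer that cannot recover (codegen in the real patch) reports a hard error.
    // Assumes a 64-bit target, mirroring the `only-64bit` test added below.
    match allocate_stack_slot((1usize << 47) - 1) {
        Ok(alloc) => println!("allocated {} bytes", alloc.bytes.len()),
        Err(err) => eprintln!("fatal: allocation of MIR memory failed: {:?}", err),
    }
}
```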
......@@ -72,7 +72,10 @@ pub(crate) fn get_vtable<'tcx>(
let vtable_ptr = if let Some(vtable_ptr) = fx.vtables.get(&(ty, trait_ref)) {
*vtable_ptr
} else {
let vtable_alloc_id = fx.tcx.vtable_allocation(ty, trait_ref);
let vtable_alloc_id = match fx.tcx.vtable_allocation(ty, trait_ref) {
Ok(alloc) => alloc,
Err(_) => fx.tcx.sess().fatal("allocation of constant vtable failed"),
};
let vtable_allocation = fx.tcx.global_alloc(vtable_alloc_id).unwrap_memory();
let vtable_ptr = pointer_for_allocation(fx, vtable_allocation);
......
......@@ -70,7 +70,10 @@ pub fn get_vtable<'tcx, Cx: CodegenMethods<'tcx>>(
return val;
}
let vtable_alloc_id = tcx.vtable_allocation(ty, trait_ref);
let vtable_alloc_id = match tcx.vtable_allocation(ty, trait_ref) {
Ok(alloc) => alloc,
Err(_) => tcx.sess.fatal("allocation of constant vtable failed"),
};
let vtable_allocation = tcx.global_alloc(vtable_alloc_id).unwrap_memory();
let vtable_const = cx.const_data_from_alloc(vtable_allocation);
let align = cx.data_layout().pointer_align.abi;
......
......@@ -48,6 +48,7 @@
#![feature(associated_type_defaults)]
#![feature(iter_zip)]
#![feature(thread_local_const_init)]
#![feature(try_reserve)]
#![recursion_limit = "512"]
#[macro_use]
......
......@@ -11,8 +11,9 @@
use rustc_target::abi::{Align, HasDataLayout, Size};
use super::{
read_target_uint, write_target_uint, AllocId, InterpError, Pointer, Scalar, ScalarMaybeUninit,
UndefinedBehaviorInfo, UninitBytesAccess, UnsupportedOpInfo,
read_target_uint, write_target_uint, AllocId, InterpError, InterpResult, Pointer,
ResourceExhaustionInfo, Scalar, ScalarMaybeUninit, UndefinedBehaviorInfo, UninitBytesAccess,
UnsupportedOpInfo,
};
/// This type represents an Allocation in the Miri/CTFE core engine.
......@@ -121,15 +122,23 @@ pub fn from_bytes_byte_aligned_immutable<'a>(slice: impl Into<Cow<'a, [u8]>>) ->
Allocation::from_bytes(slice, Align::ONE, Mutability::Not)
}
pub fn uninit(size: Size, align: Align) -> Self {
Allocation {
bytes: vec![0; size.bytes_usize()],
/// Try to create an Allocation of `size` bytes, failing if there is not enough memory
/// available to the compiler to do so.
pub fn uninit(size: Size, align: Align) -> InterpResult<'static, Self> {
let mut bytes = Vec::new();
bytes.try_reserve(size.bytes_usize()).map_err(|_| {
InterpError::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted)
})?;
bytes.resize(size.bytes_usize(), 0);
bytes.fill(0);
Ok(Allocation {
bytes: bytes,
relocations: Relocations::new(),
init_mask: InitMask::new(size, false),
align,
mutability: Mutability::Mut,
extra: (),
}
})
}
}
......
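For context on the std API that the new `uninit` relies on (a standalone illustration assuming a 64-bit host, not part of the diff): `Vec::try_reserve` reports allocator failure as a `Result` instead of taking the out-of-memory abort path that `vec![0; n]` or `Vec::with_capacity` would, which is what lets the interpreter surface `MemoryExhausted` as an ordinary `InterpError`. At the time of this commit the method was still unstable, hence the `#![feature(try_reserve)]` additions to the two crates' `lib.rs` files in this diff.

```rust
use std::collections::TryReserveError;

/// Standalone sketch: build a zeroed buffer without aborting on allocation failure.
fn zeroed(size: usize) -> Result<Vec<u8>, TryReserveError> {
    let mut bytes: Vec<u8> = Vec::new();
    // Ask the allocator up front; an impossible request comes back as `Err`
    // rather than killing the process.
    bytes.try_reserve(size)?;
    // Capacity is already reserved, so filling with zeros cannot reallocate.
    bytes.resize(size, 0);
    Ok(bytes)
}

fn main() {
    // (1 << 47) - 1 bytes (~128 TiB) is the same size the new test uses; it is
    // expected to fail on typical 64-bit machines.
    match zeroed((1usize << 47) - 1) {
        Ok(buf) => println!("unexpectedly allocated {} bytes", buf.len()),
        Err(err) => println!("allocation failed as expected: {err}"),
    }
}
```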
......@@ -423,6 +423,8 @@ pub enum ResourceExhaustionInfo {
///
/// The exact limit is set by the `const_eval_limit` attribute.
StepLimitReached,
/// There is not enough memory to perform an allocation.
MemoryExhausted,
}
impl fmt::Display for ResourceExhaustionInfo {
......@@ -435,6 +437,9 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
StepLimitReached => {
write!(f, "exceeded interpreter step limit (see `#[const_eval_limit]`)")
}
MemoryExhausted => {
write!(f, "tried to allocate more memory than available to compiler")
}
}
}
}
......
use std::convert::TryFrom;
use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar};
use crate::mir::interpret::{alloc_range, AllocId, Allocation, Pointer, Scalar, InterpResult};
use crate::ty::fold::TypeFoldable;
use crate::ty::{self, DefId, SubstsRef, Ty, TyCtxt};
use rustc_ast::Mutability;
......@@ -28,11 +28,11 @@ pub fn vtable_allocation(
self,
ty: Ty<'tcx>,
poly_trait_ref: Option<ty::PolyExistentialTraitRef<'tcx>>,
) -> AllocId {
) -> InterpResult<'tcx, AllocId> {
let tcx = self;
let vtables_cache = tcx.vtables_cache.lock();
if let Some(alloc_id) = vtables_cache.get(&(ty, poly_trait_ref)).cloned() {
return alloc_id;
return Ok(alloc_id);
}
drop(vtables_cache);
......@@ -60,7 +60,7 @@ pub fn vtable_allocation(
let ptr_align = tcx.data_layout.pointer_align.abi;
let vtable_size = ptr_size * u64::try_from(vtable_entries.len()).unwrap();
let mut vtable = Allocation::uninit(vtable_size, ptr_align);
let mut vtable = Allocation::uninit(vtable_size, ptr_align)?;
// No need to do any alignment checks on the memory accesses below, because we know the
// allocation is correctly aligned as we created it above. Also we're only offsetting by
......@@ -101,6 +101,6 @@ pub fn vtable_allocation(
let alloc_id = tcx.create_memory_alloc(tcx.intern_const_alloc(vtable));
let mut vtables_cache = self.vtables_cache.lock();
vtables_cache.insert((ty, poly_trait_ref), alloc_id);
alloc_id
Ok(alloc_id)
}
}
......@@ -48,7 +48,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
);
let layout = ecx.layout_of(body.return_ty().subst(tcx, cid.instance.substs))?;
assert!(!layout.is_unsized());
let ret = ecx.allocate(layout, MemoryKind::Stack);
let ret = ecx.allocate(layout, MemoryKind::Stack)?;
let name =
with_no_trimmed_paths(|| ty::tls::with(|tcx| tcx.def_path_str(cid.instance.def_id())));
......
......@@ -306,7 +306,7 @@ fn call_intrinsic(
Size::from_bytes(size as u64),
align,
interpret::MemoryKind::Machine(MemoryKind::Heap),
);
)?;
ecx.write_scalar(Scalar::Ptr(ptr), dest)?;
}
_ => {
......
......@@ -31,7 +31,11 @@ pub(crate) fn const_caller_location(
trace!("const_caller_location: {}:{}:{}", file, line, col);
let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all(), false);
let loc_place = ecx.alloc_caller_location(file, line, col);
// This can fail if rustc runs out of memory right here. Trying to emit an error would be
// pointless, since that would require allocating more memory than a Location.
let loc_place = ecx
.alloc_caller_location(file, line, col)
.expect("not enough memory to allocate location?");
if intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &loc_place).is_err() {
bug!("intern_const_alloc_recursive should not error in this case")
}
......
......@@ -428,7 +428,7 @@ pub(crate) fn intern_with_temp_alloc(
&MPlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx, ()>,
) -> InterpResult<'tcx, &'tcx Allocation> {
let dest = self.allocate(layout, MemoryKind::Stack);
let dest = self.allocate(layout, MemoryKind::Stack)?;
f(self, &dest)?;
let ptr = dest.ptr.assert_ptr();
assert_eq!(ptr.offset, Size::ZERO);
......
......@@ -137,7 +137,7 @@ pub fn emulate_intrinsic(
match intrinsic_name {
sym::caller_location => {
let span = self.find_closest_untracked_caller_location();
let location = self.alloc_caller_location_for_span(span);
let location = self.alloc_caller_location_for_span(span)?;
self.write_scalar(location.ptr, dest)?;
}
......
......@@ -9,7 +9,7 @@
use crate::interpret::{
intrinsics::{InterpCx, Machine},
MPlaceTy, MemoryKind, Scalar,
InterpResult, MPlaceTy, MemoryKind, Scalar,
};
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
......@@ -79,7 +79,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
filename: Symbol,
line: u32,
col: u32,
) -> MPlaceTy<'tcx, M::PointerTag> {
) -> InterpResult<'static, MPlaceTy<'tcx, M::PointerTag>> {
let file =
self.allocate_str(&filename.as_str(), MemoryKind::CallerLocation, Mutability::Not);
let line = Scalar::from_u32(line);
......@@ -91,7 +91,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
.type_of(self.tcx.require_lang_item(LangItem::PanicLocation, None))
.subst(*self.tcx, self.tcx.mk_substs([self.tcx.lifetimes.re_erased.into()].iter()));
let loc_layout = self.layout_of(loc_ty).unwrap();
let location = self.allocate(loc_layout, MemoryKind::CallerLocation);
let location = self.allocate(loc_layout, MemoryKind::CallerLocation)?;
// Initialize fields.
self.write_immediate(file.to_ref(), &self.mplace_field(&location, 0).unwrap().into())
......@@ -101,7 +101,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.write_scalar(col, &self.mplace_field(&location, 2).unwrap().into())
.expect("writing to memory we just allocated cannot fail");
location
Ok(location)
}
crate fn location_triple_for_span(&self, span: Span) -> (Symbol, u32, u32) {
......@@ -114,7 +114,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
)
}
pub fn alloc_caller_location_for_span(&mut self, span: Span) -> MPlaceTy<'tcx, M::PointerTag> {
pub fn alloc_caller_location_for_span(
&mut self,
span: Span,
) -> InterpResult<'static, MPlaceTy<'tcx, M::PointerTag>> {
let (file, line, column) = self.location_triple_for_span(span);
self.alloc_caller_location(file, line, column)
}
......
......@@ -207,9 +207,9 @@ pub fn allocate(
size: Size,
align: Align,
kind: MemoryKind<M::MemoryKind>,
) -> Pointer<M::PointerTag> {
let alloc = Allocation::uninit(size, align);
self.allocate_with(alloc, kind)
) -> InterpResult<'static, Pointer<M::PointerTag>> {
let alloc = Allocation::uninit(size, align)?;
Ok(self.allocate_with(alloc, kind))
}
pub fn allocate_bytes(
......@@ -257,7 +257,7 @@ pub fn reallocate(
// For simplicities' sake, we implement reallocate as "alloc, copy, dealloc".
// This happens so rarely, the perf advantage is outweighed by the maintenance cost.
let new_ptr = self.allocate(new_size, new_align, kind);
let new_ptr = self.allocate(new_size, new_align, kind)?;
let old_size = match old_size_and_align {
Some((size, _align)) => size,
None => self.get_raw(ptr.alloc_id)?.size(),
......
......@@ -982,7 +982,7 @@ pub fn force_allocation_maybe_sized(
let (size, align) = self
.size_and_align_of(&meta, &local_layout)?
.expect("Cannot allocate for non-dyn-sized type");
let ptr = self.memory.allocate(size, align, MemoryKind::Stack);
let ptr = self.memory.allocate(size, align, MemoryKind::Stack)?;
let mplace = MemPlace { ptr: ptr.into(), align, meta };
if let LocalValue::Live(Operand::Immediate(value)) = local_val {
// Preserve old value.
......@@ -1018,9 +1018,9 @@ pub fn allocate(
&mut self,
layout: TyAndLayout<'tcx>,
kind: MemoryKind<M::MemoryKind>,
) -> MPlaceTy<'tcx, M::PointerTag> {
let ptr = self.memory.allocate(layout.size, layout.align.abi, kind);
MPlaceTy::from_aligned_ptr(ptr, layout)
) -> InterpResult<'static, MPlaceTy<'tcx, M::PointerTag>> {
let ptr = self.memory.allocate(layout.size, layout.align.abi, kind)?;
Ok(MPlaceTy::from_aligned_ptr(ptr, layout))
}
/// Returns a wide MPlace of type `&'static [mut] str` to a new 1-aligned allocation.
......
......@@ -30,7 +30,7 @@ pub fn get_vtable(
ensure_monomorphic_enough(*self.tcx, ty)?;
ensure_monomorphic_enough(*self.tcx, poly_trait_ref)?;
let vtable_allocation = self.tcx.vtable_allocation(ty, poly_trait_ref);
let vtable_allocation = self.tcx.vtable_allocation(ty, poly_trait_ref)?;
let vtable_ptr = self.memory.global_base_pointer(Pointer::from(vtable_allocation))?;
......
......@@ -29,6 +29,7 @@
#![feature(option_get_or_insert_default)]
#![feature(once_cell)]
#![feature(control_flow_enum)]
#![feature(try_reserve)]
#![recursion_limit = "256"]
#[macro_use]
......
......@@ -385,15 +385,19 @@ fn new(
(),
);
let ret = ecx
.layout_of(body.return_ty().subst(tcx, substs))
.ok()
let ret = if let Ok(layout) = ecx.layout_of(body.return_ty().subst(tcx, substs)) {
// Don't bother allocating memory for ZST types which have no values
// or for large values.
.filter(|ret_layout| {
!ret_layout.is_zst() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
})
.map(|ret_layout| ecx.allocate(ret_layout, MemoryKind::Stack).into());
if !layout.is_zst() && layout.size < Size::from_bytes(MAX_ALLOC_LIMIT) {
// hopefully all types will allocate, since large types have already been removed,
// but check anyways
ecx.allocate(layout, MemoryKind::Stack).ok().map(Into::into)
} else {
None
}
} else {
None
};
ecx.push_stack_frame(
Instance::new(def_id, substs),
......
// only-64bit
// on 32bit and 16bit platforms it is plausible that the maximum allocation size will succeed
const FOO: () = {
// 128 TiB, unlikely anyone has that much RAM
let x = [0_u8; (1 << 47) - 1];
//~^ ERROR any use of this value will cause an error
//~| WARNING this was previously accepted by the compiler but is being phased out
};
fn main() {
let _ = FOO;
}
error: any use of this value will cause an error
--> $DIR/large_const_alloc.rs:6:13
|
LL | / const FOO: () = {
LL | | // 128 TiB, unlikely anyone has that much RAM
LL | | let x = [0_u8; (1 << 47) - 1];
| | ^^^^^^^^^^^^^^^^^^^^^ tried to allocate more memory than available to compiler
LL | |
LL | |
LL | | };
| |__-
|
= note: `#[deny(const_err)]` on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #71800 <https://github.com/rust-lang/rust/issues/71800>
error: aborting due to previous error