Commit e8a22b69 authored by Oliver Scherer

Rename `Value` to `Immediate` for miri

Parent 87a3c1ee
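
In short: the interpreter's `Value` type (a value held outside of memory, i.e. a scalar or a pair of scalars) becomes `Immediate`, `ValTy` becomes `ImmTy`, and the `*_value` helpers (`read_value`, `write_value`, `try_read_value`, `str_to_value`, ...) are renamed to match. A condensed sketch of the renamed types, taken from the operand.rs hunks below (imports and helper methods elided):

    #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
    pub enum Immediate<Tag = (), Id = AllocId> {
        Scalar(ScalarMaybeUndef<Tag, Id>),
        ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
    }

    // An `Immediate` paired with the layout of the type it represents.
    #[derive(Copy, Clone, Debug)]
    pub struct ImmTy<'tcx, Tag = ()> {
        immediate: Immediate<Tag>,
        pub layout: TyLayout<'tcx>,
    }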
......@@ -32,7 +32,7 @@
use syntax::source_map::{Span, DUMMY_SP};
use interpret::{self,
PlaceTy, MemPlace, OpTy, Operand, Value, Scalar, ConstValue, Pointer,
PlaceTy, MemPlace, OpTy, Operand, Immediate, Scalar, ConstValue, Pointer,
EvalResult, EvalError, EvalErrorKind, GlobalId, EvalContext, StackPopCleanup,
Allocation, AllocId, MemoryKind,
snapshot, RefTracking,
......@@ -115,7 +115,7 @@ pub fn op_to_const<'tcx>(
_ => false,
};
let normalized_op = if normalize {
ecx.try_read_value(op)?
ecx.try_read_immediate(op)?
} else {
match op.op {
Operand::Indirect(mplace) => Err(mplace),
......@@ -137,9 +137,9 @@ pub fn op_to_const<'tcx>(
let alloc = ecx.tcx.intern_const_alloc(alloc);
ConstValue::ByRef(ptr.alloc_id, alloc, ptr.offset)
},
Ok(Value::Scalar(x)) =>
Ok(Immediate::Scalar(x)) =>
ConstValue::Scalar(x.not_undef()?),
Ok(Value::ScalarPair(a, b)) =>
Ok(Immediate::ScalarPair(a, b)) =>
ConstValue::ScalarPair(a.not_undef()?, b.not_undef()?),
};
Ok(ty::Const::from_const_value(ecx.tcx.tcx, val, op.layout.ty))
......
......@@ -19,7 +19,7 @@
use rustc::mir::CastKind;
use rustc_apfloat::Float;
use super::{EvalContext, Machine, PlaceTy, OpTy, Value};
use super::{EvalContext, Machine, PlaceTy, OpTy, Immediate};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
......@@ -45,7 +45,7 @@ pub fn cast(
Misc => {
let src_layout = src.layout;
let src = self.read_value(src)?;
let src = self.read_immediate(src)?;
let src = if M::ENABLE_PTR_TRACKING_HOOKS && src_layout.ty.is_region_ptr() {
// The only `Misc` casts on references are those creating raw pointers.
......@@ -61,14 +61,14 @@ pub fn cast(
if self.type_is_fat_ptr(src_layout.ty) {
match (src, self.type_is_fat_ptr(dest.layout.ty)) {
// pointers to extern types
(Value::Scalar(_),_) |
(Immediate::Scalar(_),_) |
// slices and trait objects to other slices/trait objects
(Value::ScalarPair(..), true) => {
// No change to value
self.write_value(src, dest)?;
(Immediate::ScalarPair(..), true) => {
// No change to immediate
self.write_immediate(src, dest)?;
}
// slices and trait objects to thin pointers (dropping the metadata)
(Value::ScalarPair(data, _), false) => {
(Immediate::ScalarPair(data, _), false) => {
self.write_scalar(data, dest)?;
}
}
......@@ -118,11 +118,11 @@ pub fn cast(
}
UnsafeFnPointer => {
let src = self.read_value(src)?;
let src = self.read_immediate(src)?;
match dest.layout.ty.sty {
ty::FnPtr(_) => {
// No change to value
self.write_value(*src, dest)?;
self.write_immediate(*src, dest)?;
}
ref other => bug!("fn to unsafe fn cast on {:?}", other),
}
......@@ -144,8 +144,8 @@ pub fn cast(
ty::ClosureKind::FnOnce,
);
let fn_ptr = self.memory.create_fn_alloc(instance).with_default_tag();
let val = Value::Scalar(Scalar::Ptr(fn_ptr.into()).into());
self.write_value(val, dest)?;
let val = Immediate::Scalar(Scalar::Ptr(fn_ptr.into()).into());
self.write_immediate(val, dest)?;
}
ref other => bug!("closure fn pointer on {:?}", other),
}
......@@ -326,24 +326,24 @@ fn unsize_into_ptr(
match (&src_pointee_ty.sty, &dest_pointee_ty.sty) {
(&ty::Array(_, length), &ty::Slice(_)) => {
let ptr = self.read_value(src)?.to_scalar_ptr()?;
let ptr = self.read_immediate(src)?.to_scalar_ptr()?;
// u64 cast is from usize to u64, which is always good
let val = Value::new_slice(ptr, length.unwrap_usize(self.tcx.tcx), self.tcx.tcx);
self.write_value(val, dest)
let val = Immediate::new_slice(ptr, length.unwrap_usize(self.tcx.tcx), self.tcx.tcx);
self.write_immediate(val, dest)
}
(&ty::Dynamic(..), &ty::Dynamic(..)) => {
// For now, upcasts are limited to changes in marker
// traits, and hence never require an actual
// change to the vtable.
let val = self.read_value(src)?;
self.write_value(*val, dest)
let val = self.read_immediate(src)?;
self.write_immediate(*val, dest)
}
(_, &ty::Dynamic(ref data, _)) => {
// Initial cast from sized to dyn trait
let vtable = self.get_vtable(src_pointee_ty, data.principal())?;
let ptr = self.read_value(src)?.to_scalar_ptr()?;
let val = Value::new_dyn_trait(ptr, vtable);
self.write_value(val, dest)
let ptr = self.read_immediate(src)?.to_scalar_ptr()?;
let val = Immediate::new_dyn_trait(ptr, vtable);
self.write_immediate(val, dest)
}
_ => bug!("invalid unsizing {:?} -> {:?}", src.layout.ty, dest.layout.ty),
......
......@@ -32,7 +32,7 @@
use rustc_data_structures::fx::FxHashMap;
use super::{
Value, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
Memory, Machine
};
......@@ -333,9 +333,9 @@ pub fn layout_of_local(
self.layout_of(local_ty)
}
pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value<M::PointerTag>> {
pub fn str_to_immediate(&mut self, s: &str) -> EvalResult<'tcx, Immediate<M::PointerTag>> {
let ptr = self.memory.allocate_static_bytes(s.as_bytes()).with_default_tag();
Ok(Value::new_slice(Scalar::Ptr(ptr), s.len() as u64, self.tcx.tcx))
Ok(Immediate::new_slice(Scalar::Ptr(ptr), s.len() as u64, self.tcx.tcx))
}
/// Return the actual dynamic size and alignment of the place at the given type.
......@@ -457,11 +457,11 @@ pub fn push_stack_frame(
// don't allocate at all for trivial constants
if mir.local_decls.len() > 1 {
// We put some marker value into the locals that we later want to initialize.
// We put some marker immediate into the locals that we later want to initialize.
// This can be anything except for LocalValue::Dead -- because *that* is the
// value we use for things that we know are initially dead.
let dummy =
LocalValue::Live(Operand::Immediate(Value::Scalar(ScalarMaybeUndef::Undef)));
LocalValue::Live(Operand::Immediate(Immediate::Scalar(ScalarMaybeUndef::Undef)));
let mut locals = IndexVec::from_elem(dummy, &mir.local_decls);
// Return place is handled specially by the `eval_place` functions, and the
// entry in `locals` should never be used. Make it dead, to be sure.
......@@ -652,13 +652,13 @@ pub fn dump_place(&self, place: Place<M::PointerTag>) {
ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
}
}
Ok(Operand::Immediate(Value::Scalar(val))) => {
Ok(Operand::Immediate(Immediate::Scalar(val))) => {
write!(msg, " {:?}", val).unwrap();
if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
allocs.push(ptr.alloc_id);
}
}
Ok(Operand::Immediate(Value::ScalarPair(val1, val2))) => {
Ok(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
allocs.push(ptr.alloc_id);
......
......@@ -115,8 +115,8 @@ pub fn emulate_intrinsic(
| "add_with_overflow"
| "sub_with_overflow"
| "mul_with_overflow" => {
let lhs = self.read_value(args[0])?;
let rhs = self.read_value(args[1])?;
let lhs = self.read_immediate(args[0])?;
let rhs = self.read_immediate(args[1])?;
let (bin_op, ignore_overflow) = match intrinsic_name {
"overflowing_add" => (BinOp::Add, true),
"overflowing_sub" => (BinOp::Sub, true),
......@@ -133,8 +133,8 @@ pub fn emulate_intrinsic(
}
}
"unchecked_shl" | "unchecked_shr" => {
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
let l = self.read_immediate(args[0])?;
let r = self.read_immediate(args[1])?;
let bin_op = match intrinsic_name {
"unchecked_shl" => BinOp::Shl,
"unchecked_shr" => BinOp::Shr,
......@@ -172,8 +172,8 @@ pub fn hook_fn(
// Some fn calls are actually BinOp intrinsics
if let Some((op, oflo)) = self.tcx.is_binop_lang_item(def_id) {
let dest = dest.expect("128 lowerings can't diverge");
let l = self.read_value(args[0])?;
let r = self.read_value(args[1])?;
let l = self.read_immediate(args[0])?;
let r = self.read_immediate(args[1])?;
if oflo {
self.binop_with_overflow(op, l, r, dest)?;
} else {
......@@ -183,7 +183,7 @@ pub fn hook_fn(
} else if Some(def_id) == self.tcx.lang_items().panic_fn() {
assert!(args.len() == 1);
// &(&'static str, &'static str, u32, u32)
let ptr = self.read_value(args[0])?;
let ptr = self.read_immediate(args[0])?;
let place = self.ref_to_mplace(ptr)?;
let (msg, file, line, col) = (
self.mplace_field(place, 0)?,
......@@ -192,9 +192,9 @@ pub fn hook_fn(
self.mplace_field(place, 3)?,
);
let msg_place = self.ref_to_mplace(self.read_value(msg.into())?)?;
let msg_place = self.ref_to_mplace(self.read_immediate(msg.into())?)?;
let msg = Symbol::intern(self.read_str(msg_place)?);
let file_place = self.ref_to_mplace(self.read_value(file.into())?)?;
let file_place = self.ref_to_mplace(self.read_immediate(file.into())?)?;
let file = Symbol::intern(self.read_str(file_place)?);
let line = self.read_scalar(line.into())?.to_u32()?;
let col = self.read_scalar(col.into())?.to_u32()?;
......@@ -203,7 +203,7 @@ pub fn hook_fn(
assert!(args.len() == 2);
// &'static str, &(&'static str, u32, u32)
let msg = args[0];
let ptr = self.read_value(args[1])?;
let ptr = self.read_immediate(args[1])?;
let place = self.ref_to_mplace(ptr)?;
let (file, line, col) = (
self.mplace_field(place, 0)?,
......@@ -211,9 +211,9 @@ pub fn hook_fn(
self.mplace_field(place, 2)?,
);
let msg_place = self.ref_to_mplace(self.read_value(msg.into())?)?;
let msg_place = self.ref_to_mplace(self.read_immediate(msg.into())?)?;
let msg = Symbol::intern(self.read_str(msg_place)?);
let file_place = self.ref_to_mplace(self.read_value(file.into())?)?;
let file_place = self.ref_to_mplace(self.read_immediate(file.into())?)?;
let file = Symbol::intern(self.read_str(file_place)?);
let line = self.read_scalar(line.into())?.to_u32()?;
let col = self.read_scalar(col.into())?.to_u32()?;
......
......@@ -36,6 +36,6 @@
pub use self::machine::{Machine, AllocMap, MayLeak};
pub use self::operand::{ScalarMaybeUndef, Value, ValTy, Operand, OpTy};
pub use self::operand::{ScalarMaybeUndef, Immediate, ImmTy, Operand, OpTy};
pub use self::validity::RefTracking;
......@@ -146,32 +146,32 @@ pub fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, i64> {
/// In particular, thanks to `ScalarPair`, arithmetic operations and casts can be entirely
/// defined on `Value`, and do not have to work with a `Place`.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum Value<Tag=(), Id=AllocId> {
pub enum Immediate<Tag=(), Id=AllocId> {
Scalar(ScalarMaybeUndef<Tag, Id>),
ScalarPair(ScalarMaybeUndef<Tag, Id>, ScalarMaybeUndef<Tag, Id>),
}
impl Value {
impl Immediate {
#[inline]
pub fn with_default_tag<Tag>(self) -> Value<Tag>
pub fn with_default_tag<Tag>(self) -> Immediate<Tag>
where Tag: Default
{
match self {
Value::Scalar(x) => Value::Scalar(x.with_default_tag()),
Value::ScalarPair(x, y) =>
Value::ScalarPair(x.with_default_tag(), y.with_default_tag()),
Immediate::Scalar(x) => Immediate::Scalar(x.with_default_tag()),
Immediate::ScalarPair(x, y) =>
Immediate::ScalarPair(x.with_default_tag(), y.with_default_tag()),
}
}
}
impl<'tcx, Tag> Value<Tag> {
impl<'tcx, Tag> Immediate<Tag> {
#[inline]
pub fn erase_tag(self) -> Value
pub fn erase_tag(self) -> Immediate
{
match self {
Value::Scalar(x) => Value::Scalar(x.erase_tag()),
Value::ScalarPair(x, y) =>
Value::ScalarPair(x.erase_tag(), y.erase_tag()),
Immediate::Scalar(x) => Immediate::Scalar(x.erase_tag()),
Immediate::ScalarPair(x, y) =>
Immediate::ScalarPair(x.erase_tag(), y.erase_tag()),
}
}
......@@ -180,18 +180,21 @@ pub fn new_slice(
len: u64,
cx: impl HasDataLayout
) -> Self {
Value::ScalarPair(val.into(), Scalar::from_uint(len, cx.data_layout().pointer_size).into())
Immediate::ScalarPair(
val.into(),
Scalar::from_uint(len, cx.data_layout().pointer_size).into(),
)
}
pub fn new_dyn_trait(val: Scalar<Tag>, vtable: Pointer<Tag>) -> Self {
Value::ScalarPair(val.into(), Scalar::Ptr(vtable).into())
Immediate::ScalarPair(val.into(), Scalar::Ptr(vtable).into())
}
#[inline]
pub fn to_scalar_or_undef(self) -> ScalarMaybeUndef<Tag> {
match self {
Value::Scalar(val) => val,
Value::ScalarPair(..) => bug!("Got a fat pointer where a scalar was expected"),
Immediate::Scalar(val) => val,
Immediate::ScalarPair(..) => bug!("Got a fat pointer where a scalar was expected"),
}
}
......@@ -203,18 +206,18 @@ pub fn to_scalar(self) -> EvalResult<'tcx, Scalar<Tag>> {
#[inline]
pub fn to_scalar_pair(self) -> EvalResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
match self {
Value::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
Value::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?))
Immediate::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
Immediate::ScalarPair(a, b) => Ok((a.not_undef()?, b.not_undef()?))
}
}
/// Convert the value into a pointer (or a pointer-sized integer).
/// Convert the immediate into a pointer (or a pointer-sized integer).
/// Throws away the second half of a ScalarPair!
#[inline]
pub fn to_scalar_ptr(self) -> EvalResult<'tcx, Scalar<Tag>> {
match self {
Value::Scalar(ptr) |
Value::ScalarPair(ptr, _) => ptr.not_undef(),
Immediate::Scalar(ptr) |
Immediate::ScalarPair(ptr, _) => ptr.not_undef(),
}
}
......@@ -229,19 +232,19 @@ pub fn to_meta(self) -> EvalResult<'tcx, Option<Scalar<Tag>>> {
}
}
// ScalarPair needs a type to interpret, so we often have a value and a type together
// ScalarPair needs a type to interpret, so we often have an immediate and a type together
// as input for binary and cast operations.
#[derive(Copy, Clone, Debug)]
pub struct ValTy<'tcx, Tag=()> {
value: Value<Tag>,
pub struct ImmTy<'tcx, Tag=()> {
immediate: Immediate<Tag>,
pub layout: TyLayout<'tcx>,
}
impl<'tcx, Tag> ::std::ops::Deref for ValTy<'tcx, Tag> {
type Target = Value<Tag>;
impl<'tcx, Tag> ::std::ops::Deref for ImmTy<'tcx, Tag> {
type Target = Immediate<Tag>;
#[inline(always)]
fn deref(&self) -> &Value<Tag> {
&self.value
fn deref(&self) -> &Immediate<Tag> {
&self.immediate
}
}
......@@ -250,7 +253,7 @@ fn deref(&self) -> &Value<Tag> {
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum Operand<Tag=(), Id=AllocId> {
Immediate(Value<Tag, Id>),
Immediate(Immediate<Tag, Id>),
Indirect(MemPlace<Tag, Id>),
}
......@@ -288,11 +291,11 @@ pub fn to_mem_place(self) -> MemPlace<Tag>
}
#[inline]
pub fn to_immediate(self) -> Value<Tag>
pub fn to_immediate(self) -> Immediate<Tag>
where Tag: ::std::fmt::Debug
{
match self {
Operand::Immediate(val) => val,
Operand::Immediate(imm) => imm,
_ => bug!("to_immediate: expected Operand::Immediate, got {:?}", self),
}
......@@ -323,11 +326,11 @@ fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self {
}
}
impl<'tcx, Tag> From<ValTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
impl<'tcx, Tag> From<ImmTy<'tcx, Tag>> for OpTy<'tcx, Tag> {
#[inline(always)]
fn from(val: ValTy<'tcx, Tag>) -> Self {
fn from(val: ImmTy<'tcx, Tag>) -> Self {
OpTy {
op: Operand::Immediate(val.value),
op: Operand::Immediate(val.immediate),
layout: val.layout
}
}
......@@ -367,12 +370,12 @@ fn from_known_layout<'tcx>(
}
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
/// Try reading a value in memory; this is interesting particularly for ScalarPair.
/// Try reading an immediate in memory; this is interesting particularly for ScalarPair.
/// Return None if the layout does not permit loading this as a value.
pub(super) fn try_read_value_from_mplace(
pub(super) fn try_read_immediate_from_mplace(
&self,
mplace: MPlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, Option<Value<M::PointerTag>>> {
) -> EvalResult<'tcx, Option<Immediate<M::PointerTag>>> {
if mplace.layout.is_unsized() {
// Don't touch unsized
return Ok(None);
......@@ -383,14 +386,14 @@ pub(super) fn try_read_value_from_mplace(
// Not all ZSTs have a layout we would handle below, so just short-circuit them
// all here.
self.memory.check_align(ptr, ptr_align)?;
return Ok(Some(Value::Scalar(Scalar::zst().into())));
return Ok(Some(Immediate::Scalar(Scalar::zst().into())));
}
let ptr = ptr.to_ptr()?;
match mplace.layout.abi {
layout::Abi::Scalar(..) => {
let scalar = self.memory.read_scalar(ptr, ptr_align, mplace.layout.size)?;
Ok(Some(Value::Scalar(scalar)))
Ok(Some(Immediate::Scalar(scalar)))
}
layout::Abi::ScalarPair(ref a, ref b) => {
let (a, b) = (&a.value, &b.value);
......@@ -401,25 +404,25 @@ pub(super) fn try_read_value_from_mplace(
let b_ptr = ptr.offset(b_offset, self)?.into();
let a_val = self.memory.read_scalar(a_ptr, ptr_align, a_size)?;
let b_val = self.memory.read_scalar(b_ptr, ptr_align, b_size)?;
Ok(Some(Value::ScalarPair(a_val, b_val)))
Ok(Some(Immediate::ScalarPair(a_val, b_val)))
}
_ => Ok(None),
}
}
/// Try returning an immediate value for the operand.
/// If the layout does not permit loading this as a value, return where in memory
/// Try returning an immediate for the operand.
/// If the layout does not permit loading this as an immediate, return where in memory
/// we can find the data.
/// Note that for a given layout, this operation will either always fail or always
/// succeed! Whether it succeeds depends on whether the layout can be represented
/// in a `Value`, not on which data is stored there currently.
pub(crate) fn try_read_value(
/// in an `Immediate`, not on which data is stored there currently.
pub(crate) fn try_read_immediate(
&self,
src: OpTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, Result<Value<M::PointerTag>, MemPlace<M::PointerTag>>> {
) -> EvalResult<'tcx, Result<Immediate<M::PointerTag>, MemPlace<M::PointerTag>>> {
Ok(match src.try_as_mplace() {
Ok(mplace) => {
if let Some(val) = self.try_read_value_from_mplace(mplace)? {
if let Some(val) = self.try_read_immediate_from_mplace(mplace)? {
Ok(val)
} else {
Err(*mplace)
......@@ -429,14 +432,14 @@ pub(crate) fn try_read_value(
})
}
/// Read a value from a place, asserting that that is possible with the given layout.
/// Read an immediate from a place, asserting that that is possible with the given layout.
#[inline(always)]
pub fn read_value(
pub fn read_immediate(
&self,
op: OpTy<'tcx, M::PointerTag>
) -> EvalResult<'tcx, ValTy<'tcx, M::PointerTag>> {
if let Ok(value) = self.try_read_value(op)? {
Ok(ValTy { value, layout: op.layout })
) -> EvalResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
if let Ok(immediate) = self.try_read_immediate(op)? {
Ok(ImmTy { immediate, layout: op.layout })
} else {
bug!("primitive read failed for type: {:?}", op.layout.ty);
}
......@@ -447,10 +450,7 @@ pub fn read_scalar(
&self,
op: OpTy<'tcx, M::PointerTag>
) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
match *self.read_value(op)? {
Value::ScalarPair(..) => bug!("got ScalarPair for type: {:?}", op.layout.ty),
Value::Scalar(val) => Ok(val),
}
Ok(self.read_immediate(op)?.to_scalar_or_undef())
}
// Turn the MPlace into a string (must already be dereferenced!)
......@@ -470,16 +470,16 @@ pub fn uninit_operand(
layout: TyLayout<'tcx>
) -> EvalResult<'tcx, Operand<M::PointerTag>> {
// This decides which types we will use the Immediate optimization for, and hence should
// match what `try_read_value` and `eval_place_to_op` support.
// match what `try_read_immediate` and `eval_place_to_op` support.
if layout.is_zst() {
return Ok(Operand::Immediate(Value::Scalar(Scalar::zst().into())));
return Ok(Operand::Immediate(Immediate::Scalar(Scalar::zst().into())));
}
Ok(match layout.abi {
layout::Abi::Scalar(..) =>
Operand::Immediate(Value::Scalar(ScalarMaybeUndef::Undef)),
Operand::Immediate(Immediate::Scalar(ScalarMaybeUndef::Undef)),
layout::Abi::ScalarPair(..) =>
Operand::Immediate(Value::ScalarPair(
Operand::Immediate(Immediate::ScalarPair(
ScalarMaybeUndef::Undef,
ScalarMaybeUndef::Undef,
)),
......@@ -510,22 +510,22 @@ pub fn operand_field(
let field = field.try_into().unwrap();
let field_layout = op.layout.field(self, field)?;
if field_layout.is_zst() {
let val = Value::Scalar(Scalar::zst().into());
return Ok(OpTy { op: Operand::Immediate(val), layout: field_layout });
let immediate = Immediate::Scalar(Scalar::zst().into());
return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout });
}
let offset = op.layout.fields.offset(field);
let value = match base {
let immediate = match base {
// the field covers the entire type
_ if offset.bytes() == 0 && field_layout.size == op.layout.size => base,
// extract fields from types with `ScalarPair` ABI
Value::ScalarPair(a, b) => {
Immediate::ScalarPair(a, b) => {
let val = if offset.bytes() == 0 { a } else { b };
Value::Scalar(val)
Immediate::Scalar(val)
},
Value::Scalar(val) =>
Immediate::Scalar(val) =>
bug!("field access on non aggregate {:#?}, {:#?}", val, op.layout),
};
Ok(OpTy { op: Operand::Immediate(value), layout: field_layout })
Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout })
}
pub fn operand_downcast(
......@@ -551,7 +551,7 @@ pub(super) fn deref_operand(
&self,
src: OpTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
let val = self.read_value(src)?;
let val = self.read_immediate(src)?;
trace!("deref to {} on {:?}", val.layout.ty, *val);
Ok(self.ref_to_mplace(val)?)
}
......@@ -568,7 +568,7 @@ pub fn operand_projection(
Deref => self.deref_operand(base)?.into(),
Subslice { .. } | ConstantIndex { .. } | Index(_) => if base.layout.is_zst() {
OpTy {
op: Operand::Immediate(Value::Scalar(Scalar::zst().into())),
op: Operand::Immediate(Immediate::Scalar(Scalar::zst().into())),
// the actual index doesn't matter, so we just pick a convenient one like 0
layout: base.layout.field(self, 0)?,
}
......@@ -682,9 +682,12 @@ pub(super) fn const_value_to_op(
).with_default_tag())
},
ConstValue::ScalarPair(a, b) =>
Ok(Operand::Immediate(Value::ScalarPair(a.into(), b.into())).with_default_tag()),
Ok(Operand::Immediate(Immediate::ScalarPair(
a.into(),
b.into(),
)).with_default_tag()),
ConstValue::Scalar(x) =>
Ok(Operand::Immediate(Value::Scalar(x.into())).with_default_tag()),
Ok(Operand::Immediate(Immediate::Scalar(x.into())).with_default_tag()),
}
}
pub fn const_to_op(
......@@ -722,7 +725,7 @@ pub fn read_discriminant(
}
// read raw discriminant value
let discr_op = self.operand_field(rval, 0)?;
let discr_val = self.read_value(discr_op)?;
let discr_val = self.read_immediate(discr_op)?;
let raw_discr = discr_val.to_scalar()?;
trace!("discr value: {:?}", raw_discr);
// post-process
......
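
A hypothetical call site (not part of this commit) showing how the renamed operand API reads after the change: `read_immediate` yields an `ImmTy`, which derefs to the underlying `Immediate`. The signatures match the hunks above; the helper itself is invented for illustration:

    // Hypothetical sketch: copy a primitive operand via the renamed API.
    // `Machine`, `EvalContext`, `OpTy`, `PlaceTy`, `EvalResult` as above.
    fn copy_primitive<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>(
        ecx: &mut EvalContext<'a, 'mir, 'tcx, M>,
        op: OpTy<'tcx, M::PointerTag>,
        dest: PlaceTy<'tcx, M::PointerTag>,
    ) -> EvalResult<'tcx> {
        let imm = ecx.read_immediate(op)?;   // previously: ecx.read_value(op)?
        match *imm {                         // ImmTy derefs to its Immediate
            Immediate::Scalar(s) => ecx.write_scalar(s, dest),
            Immediate::ScalarPair(..) => ecx.write_immediate(*imm, dest),
        }
    }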
......@@ -15,7 +15,7 @@
use rustc_apfloat::Float;
use rustc::mir::interpret::{EvalResult, Scalar};
use super::{EvalContext, PlaceTy, Value, Machine, ValTy};
use super::{EvalContext, PlaceTy, Immediate, Machine, ImmTy};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
......@@ -24,13 +24,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
pub fn binop_with_overflow(
&mut self,
op: mir::BinOp,
left: ValTy<'tcx, M::PointerTag>,
right: ValTy<'tcx, M::PointerTag>,
left: ImmTy<'tcx, M::PointerTag>,
right: ImmTy<'tcx, M::PointerTag>,
dest: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
let (val, overflowed) = self.binary_op_val(op, left, right)?;
let val = Value::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
self.write_value(val, dest)
let val = Immediate::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
self.write_immediate(val, dest)
}
/// Applies the binary operation `op` to the arguments and writes the result to the
......@@ -38,8 +38,8 @@ pub fn binop_with_overflow(
pub fn binop_ignore_overflow(
&mut self,
op: mir::BinOp,
left: ValTy<'tcx, M::PointerTag>,
right: ValTy<'tcx, M::PointerTag>,
left: ImmTy<'tcx, M::PointerTag>,
right: ImmTy<'tcx, M::PointerTag>,
dest: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
let (val, _overflowed) = self.binary_op_val(op, left, right)?;
......@@ -288,8 +288,8 @@ fn binary_int_op(
pub fn binary_op_val(
&self,
bin_op: mir::BinOp,
left: ValTy<'tcx, M::PointerTag>,
right: ValTy<'tcx, M::PointerTag>,
left: ImmTy<'tcx, M::PointerTag>,
right: ImmTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
self.binary_op(
bin_op,
......
......@@ -25,7 +25,7 @@
};
use super::{
EvalContext, Machine, AllocMap,
Value, ValTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind
Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind
};
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
......@@ -201,10 +201,10 @@ pub(super) fn vtable(self) -> EvalResult<'tcx, Pointer<Tag>> {
impl<'tcx, Tag: ::std::fmt::Debug> OpTy<'tcx, Tag> {
#[inline(always)]
pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, Value<Tag>> {
pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, Immediate<Tag>> {
match self.op {
Operand::Indirect(mplace) => Ok(MPlaceTy { mplace, layout: self.layout }),
Operand::Immediate(value) => Err(value),
Operand::Immediate(imm) => Err(imm),
}
}
......@@ -269,7 +269,7 @@ impl<'a, 'mir, 'tcx, Tag, M> EvalContext<'a, 'mir, 'tcx, M>
/// Alignment is just based on the type. This is the inverse of `create_ref`.
pub fn ref_to_mplace(
&self,
val: ValTy<'tcx, M::PointerTag>,
val: ImmTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
let pointee_type = val.layout.ty.builtin_deref(true).unwrap().ty;
let layout = self.layout_of(pointee_type)?;
......@@ -304,7 +304,7 @@ pub fn create_ref(
&mut self,
place: MPlaceTy<'tcx, M::PointerTag>,
mutbl: Option<hir::Mutability>,
) -> EvalResult<'tcx, Value<M::PointerTag>> {
) -> EvalResult<'tcx, Immediate<M::PointerTag>> {
// Pointer tag tracking might want to adjust the tag
let place = if M::ENABLE_PTR_TRACKING_HOOKS {
let (size, _) = self.size_and_align_of_mplace(place)?
......@@ -315,8 +315,8 @@ pub fn create_ref(
*place
};
Ok(match place.meta {
None => Value::Scalar(place.ptr.into()),
Some(meta) => Value::ScalarPair(place.ptr.into(), meta.into()),
None => Immediate::Scalar(place.ptr.into()),
Some(meta) => Immediate::ScalarPair(place.ptr.into(), meta.into()),
})
}
......@@ -629,17 +629,17 @@ pub fn write_scalar(
val: impl Into<ScalarMaybeUndef<M::PointerTag>>,
dest: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
self.write_value(Value::Scalar(val.into()), dest)
self.write_immediate(Immediate::Scalar(val.into()), dest)
}
/// Write a value to a place
/// Write an immediate to a place
#[inline(always)]
pub fn write_value(
pub fn write_immediate(
&mut self,
src_val: Value<M::PointerTag>,
src: Immediate<M::PointerTag>,
dest: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
self.write_value_no_validate(src_val, dest)?;
self.write_immediate_no_validate(src, dest)?;
if M::enforce_validity(self) {
// Data got changed, better make sure it matches the type!
......@@ -649,40 +649,40 @@ pub fn write_value(
Ok(())
}
/// Write a value to a place.
/// Write an immediate to a place.
/// If you use this you are responsible for validating that things got copied at the
/// right type.
fn write_value_no_validate(
fn write_immediate_no_validate(
&mut self,
src_val: Value<M::PointerTag>,
src: Immediate<M::PointerTag>,
dest: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
if cfg!(debug_assertions) {
// This is a very common path, avoid some checks in release mode
assert!(!dest.layout.is_unsized(), "Cannot write unsized data");
match src_val {
Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Ptr(_))) =>
match src {
Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Ptr(_))) =>
assert_eq!(self.pointer_size(), dest.layout.size,
"Size mismatch when writing pointer"),
Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size, .. })) =>
Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size, .. })) =>
assert_eq!(Size::from_bytes(size.into()), dest.layout.size,
"Size mismatch when writing bits"),
Value::Scalar(ScalarMaybeUndef::Undef) => {}, // undef can have any size
Value::ScalarPair(_, _) => {
Immediate::Scalar(ScalarMaybeUndef::Undef) => {}, // undef can have any size
Immediate::ScalarPair(_, _) => {
// FIXME: Can we check anything here?
}
}
}
trace!("write_value: {:?} <- {:?}: {}", *dest, src_val, dest.layout.ty);
trace!("write_immediate: {:?} <- {:?}: {}", *dest, src, dest.layout.ty);
// See if we can avoid an allocation. This is the counterpart to `try_read_value`,
// See if we can avoid an allocation. This is the counterpart to `try_read_immediate`,
// but not factored as a separate function.
let mplace = match dest.place {
Place::Local { frame, local } => {
match *self.stack[frame].locals[local].access_mut()? {
Operand::Immediate(ref mut dest_val) => {
// Yay, we can just change the local directly.
*dest_val = src_val;
*dest_val = src;
return Ok(());
},
Operand::Indirect(mplace) => mplace, // already in memory
......@@ -693,15 +693,15 @@ fn write_value_no_validate(
let dest = MPlaceTy { mplace, layout: dest.layout };
// This is already in memory, write there.
self.write_value_to_mplace_no_validate(src_val, dest)
self.write_immediate_to_mplace_no_validate(src, dest)
}
/// Write a value to memory.
/// Write an immediate to memory.
/// If you use this you are responsible for validating that things got copied at the
/// right type.
fn write_value_to_mplace_no_validate(
fn write_immediate_to_mplace_no_validate(
&mut self,
value: Value<M::PointerTag>,
value: Immediate<M::PointerTag>,
dest: MPlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
let (ptr, ptr_align) = dest.to_scalar_ptr_align();
......@@ -721,10 +721,10 @@ fn write_value_to_mplace_no_validate(
// memory. The code below is not sufficient, with enough padding it might not
// cover all the bytes!
match value {
Value::Scalar(scalar) => {
Immediate::Scalar(scalar) => {
match dest.layout.abi {
layout::Abi::Scalar(_) => {}, // fine
_ => bug!("write_value_to_mplace: invalid Scalar layout: {:#?}",
_ => bug!("write_immediate_to_mplace: invalid Scalar layout: {:#?}",
dest.layout)
}
......@@ -732,10 +732,10 @@ fn write_value_to_mplace_no_validate(
ptr, ptr_align.min(dest.layout.align), scalar, dest.layout.size
)
}
Value::ScalarPair(a_val, b_val) => {
Immediate::ScalarPair(a_val, b_val) => {
let (a, b) = match dest.layout.abi {
layout::Abi::ScalarPair(ref a, ref b) => (&a.value, &b.value),
_ => bug!("write_value_to_mplace: invalid ScalarPair layout: {:#?}",
_ => bug!("write_immediate_to_mplace: invalid ScalarPair layout: {:#?}",
dest.layout)
};
let (a_size, b_size) = (a.size(&self), b.size(&self));
......@@ -788,10 +788,10 @@ fn copy_op_no_validate(
"Layout mismatch when copying!\nsrc: {:#?}\ndest: {:#?}", src, dest);
// Let us see if the layout is simple so we take a shortcut, avoid force_allocation.
let src = match self.try_read_value(src)? {
let src = match self.try_read_immediate(src)? {
Ok(src_val) => {
// Yay, we got a value that we can write directly.
return self.write_value_no_validate(src_val, dest);
return self.write_immediate_no_validate(src_val, dest);
}
Err(mplace) => mplace,
};
......@@ -873,7 +873,7 @@ pub fn force_allocation(
let ptr = self.allocate(local_layout, MemoryKind::Stack)?;
// We don't have to validate as we can assume the local
// was already valid for its type.
self.write_value_to_mplace_no_validate(value, ptr)?;
self.write_immediate_to_mplace_no_validate(value, ptr)?;
let mplace = ptr.mplace;
// Update the local
*self.stack[frame].locals[local].access_mut()? =
......
......@@ -24,7 +24,7 @@
use syntax::source_map::Span;
use super::eval_context::{LocalValue, StackPopCleanup};
use super::{Frame, Memory, Operand, MemPlace, Place, Value, ScalarMaybeUndef};
use super::{Frame, Memory, Operand, MemPlace, Place, Immediate, ScalarMaybeUndef};
use const_eval::CompileTimeInterpreter;
#[derive(Default)]
......@@ -237,11 +237,11 @@ fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
}
}
impl_stable_hash_for!(enum ::interpret::Value {
impl_stable_hash_for!(enum ::interpret::Immediate {
Scalar(x),
ScalarPair(x, y),
});
impl_snapshot_for!(enum Value {
impl_snapshot_for!(enum Immediate {
Scalar(s),
ScalarPair(s, t),
});
......
......@@ -159,9 +159,9 @@ fn eval_rvalue_into_place(
BinaryOp(bin_op, ref left, ref right) => {
let layout = if binop_left_homogeneous(bin_op) { Some(dest.layout) } else { None };
let left = self.read_value(self.eval_operand(left, layout)?)?;
let left = self.read_immediate(self.eval_operand(left, layout)?)?;
let layout = if binop_right_homogeneous(bin_op) { Some(left.layout) } else { None };
let right = self.read_value(self.eval_operand(right, layout)?)?;
let right = self.read_immediate(self.eval_operand(right, layout)?)?;
self.binop_ignore_overflow(
bin_op,
left,
......@@ -172,9 +172,9 @@ fn eval_rvalue_into_place(
CheckedBinaryOp(bin_op, ref left, ref right) => {
// Due to the extra boolean in the result, we can never reuse the `dest.layout`.
let left = self.read_value(self.eval_operand(left, None)?)?;
let left = self.read_immediate(self.eval_operand(left, None)?)?;
let layout = if binop_right_homogeneous(bin_op) { Some(left.layout) } else { None };
let right = self.read_value(self.eval_operand(right, layout)?)?;
let right = self.read_immediate(self.eval_operand(right, layout)?)?;
self.binop_with_overflow(
bin_op,
left,
......@@ -185,7 +185,7 @@ fn eval_rvalue_into_place(
UnaryOp(un_op, ref operand) => {
// The operand always has the same type as the result.
let val = self.read_value(self.eval_operand(operand, Some(dest.layout))?)?;
let val = self.read_immediate(self.eval_operand(operand, Some(dest.layout))?)?;
let val = self.unary_op(un_op, val.to_scalar()?, dest.layout)?;
self.write_scalar(val, dest)?;
}
......@@ -259,7 +259,7 @@ fn eval_rvalue_into_place(
hir::MutImmutable,
};
let val = self.create_ref(val, Some(mutbl))?;
self.write_value(val, dest)?;
self.write_immediate(val, dest)?;
}
NullaryOp(mir::NullOp::Box, _) => {
......
......@@ -17,7 +17,7 @@
use rustc::mir::interpret::{EvalResult, PointerArithmetic, EvalErrorKind, Scalar};
use super::{
EvalContext, Machine, Value, OpTy, PlaceTy, MPlaceTy, Operand, StackPopCleanup
EvalContext, Machine, Immediate, OpTy, PlaceTy, MPlaceTy, Operand, StackPopCleanup
};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
......@@ -51,7 +51,7 @@ pub(super) fn eval_terminator(
ref targets,
..
} => {
let discr = self.read_value(self.eval_operand(discr, None)?)?;
let discr = self.read_immediate(self.eval_operand(discr, None)?)?;
trace!("SwitchInt({:?})", *discr);
// Branch to the `otherwise` case by default, if no match is found.
......@@ -138,7 +138,7 @@ pub(super) fn eval_terminator(
target,
..
} => {
let cond_val = self.read_value(self.eval_operand(cond, None)?)?
let cond_val = self.read_immediate(self.eval_operand(cond, None)?)?
.to_scalar()?.to_bool()?;
if expected == cond_val {
self.goto_block(Some(target))?;
......@@ -147,10 +147,10 @@ pub(super) fn eval_terminator(
use rustc::mir::interpret::EvalErrorKind::*;
return match *msg {
BoundsCheck { ref len, ref index } => {
let len = self.read_value(self.eval_operand(len, None)?)
let len = self.read_immediate(self.eval_operand(len, None)?)
.expect("can't eval len").to_scalar()?
.to_bits(self.memory().pointer_size())? as u64;
let index = self.read_value(self.eval_operand(index, None)?)
let index = self.read_immediate(self.eval_operand(index, None)?)
.expect("can't eval index").to_scalar()?
.to_bits(self.memory().pointer_size())? as u64;
err!(BoundsCheck { len, index })
......@@ -402,7 +402,7 @@ fn eval_fn_call(
ty::InstanceDef::Virtual(_, idx) => {
let ptr_size = self.pointer_size();
let ptr_align = self.tcx.data_layout.pointer_align;
let ptr = self.ref_to_mplace(self.read_value(args[0])?)?;
let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
let vtable = ptr.vtable()?;
let fn_ptr = self.memory.read_ptr_sized(
vtable.offset(ptr_size * (idx as u64 + 3), &self)?,
......@@ -417,7 +417,7 @@ fn eval_fn_call(
let pointee = args[0].layout.ty.builtin_deref(true).unwrap().ty;
let fake_fat_ptr_ty = self.tcx.mk_mut_ptr(pointee);
args[0].layout = self.layout_of(fake_fat_ptr_ty)?.field(&self, 0)?;
args[0].op = Operand::Immediate(Value::Scalar(ptr.ptr.into())); // strip vtable
args[0].op = Operand::Immediate(Immediate::Scalar(ptr.ptr.into())); // strip vtable
trace!("Patched self operand to {:#?}", args[0]);
// recurse with concrete function
self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
......
......@@ -20,7 +20,7 @@
};
use super::{
ValTy, OpTy, MPlaceTy, Machine, EvalContext, ScalarMaybeUndef
ImmTy, OpTy, MPlaceTy, Machine, EvalContext, ScalarMaybeUndef
};
macro_rules! validation_failure {
......@@ -144,7 +144,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
/// Make sure that `value` is valid for `ty`, *assuming* `ty` is a primitive type.
fn validate_primitive_type(
&self,
value: ValTy<'tcx, M::PointerTag>,
value: ImmTy<'tcx, M::PointerTag>,
path: &Vec<PathElem>,
ref_tracking: Option<&mut RefTracking<'tcx, M::PointerTag>>,
const_mode: bool,
......@@ -465,7 +465,7 @@ pub fn validate_operand(
_ => dest.layout.ty.builtin_deref(true).is_some(),
};
if primitive {
let value = try_validation!(self.read_value(dest),
let value = try_validation!(self.read_immediate(dest),
"uninitialized or unrepresentable data", path);
return self.validate_primitive_type(
value,
......
......@@ -28,7 +28,7 @@
HasTyCtxt, TargetDataLayout, HasDataLayout,
};
use interpret::{self, EvalContext, ScalarMaybeUndef, Value, OpTy, MemoryKind};
use interpret::{self, EvalContext, ScalarMaybeUndef, Immediate, OpTy, MemoryKind};
use const_eval::{CompileTimeInterpreter, error_to_const_error, eval_promoted, mk_borrowck_eval_cx};
use transform::{MirPass, MirSource};
......@@ -354,7 +354,7 @@ fn const_prop(
Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some((
OpTy {
op: interpret::Operand::Immediate(Value::Scalar(
op: interpret::Operand::Immediate(Immediate::Scalar(
Scalar::Bits {
bits: n as u128,
size: self.tcx.data_layout.pointer_size.bytes() as u8,
......@@ -397,7 +397,7 @@ fn const_prop(
this.ecx.unary_op(op, prim, arg.layout)
})?;
let res = OpTy {
op: interpret::Operand::Immediate(Value::Scalar(val.into())),
op: interpret::Operand::Immediate(Immediate::Scalar(val.into())),
layout: place_layout,
};
Some((res, span))
......@@ -418,7 +418,7 @@ fn const_prop(
}
let r = self.use_ecx(source_info, |this| {
this.ecx.read_value(right.0)
this.ecx.read_immediate(right.0)
})?;
if op == BinOp::Shr || op == BinOp::Shl {
let left_ty = left.ty(self.mir, self.tcx);
......@@ -451,14 +451,14 @@ fn const_prop(
}
let left = self.eval_operand(left, source_info)?;
let l = self.use_ecx(source_info, |this| {
this.ecx.read_value(left.0)
this.ecx.read_immediate(left.0)
})?;
trace!("const evaluating {:?} for {:?} and {:?}", op, left, right);
let (val, overflow) = self.use_ecx(source_info, |this| {
this.ecx.binary_op_val(op, l, r)
})?;
let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue {
Value::ScalarPair(
Immediate::ScalarPair(
val.into(),
Scalar::from_bool(overflow).into(),
)
......@@ -468,7 +468,7 @@ fn const_prop(
let _: Option<()> = self.use_ecx(source_info, |_| Err(err));
return None;
}
Value::Scalar(val.into())
Immediate::Scalar(val.into())
};
let res = OpTy {
op: interpret::Operand::Immediate(val),
......@@ -591,7 +591,7 @@ fn visit_terminator_kind(
if let TerminatorKind::Assert { expected, msg, cond, .. } = kind {
if let Some(value) = self.eval_operand(cond, source_info) {
trace!("assertion on {:?} should be {:?}", value, expected);
let expected = Value::Scalar(Scalar::from_bool(*expected).into());
let expected = Immediate::Scalar(Scalar::from_bool(*expected).into());
if expected != value.0.to_immediate() {
// poison all places this operand references so that further code
// doesn't use the invalid value
......@@ -629,7 +629,7 @@ fn visit_terminator_kind(
.eval_operand(len, source_info)
.expect("len must be const");
let len = match len.0.to_immediate() {
Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, ..
})) => bits,
_ => bug!("const len not primitive: {:?}", len),
......@@ -638,7 +638,7 @@ fn visit_terminator_kind(
.eval_operand(index, source_info)
.expect("index must be const");
let index = match index.0.to_immediate() {
Value::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits {
bits, ..
})) => bits,
_ => bug!("const index not primitive: {:?}", index),
......
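
For reference, a minimal hypothetical form of the destructuring pattern const_prop uses above, assuming `op` is an `Operand` known to hold a primitive integer (the `bug!` message is illustrative):

    // Hypothetical sketch of the match in visit_terminator_kind above.
    let bits = match op.to_immediate() {
        Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { bits, .. })) => bits,
        other => bug!("const not a primitive scalar: {:?}", other),
    };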
......@@ -133,7 +133,7 @@ fn check(cache: &mut Cache,
// whitelists to get this past `make check` today.
// FIXME(#32129)
if file.ends_with("std/string/struct.String.html") ||
file.ends_with("interpret/struct.ValTy.html") ||
file.ends_with("interpret/struct.ImmTy.html") ||
file.ends_with("symbol/struct.InternedString.html") ||
file.ends_with("ast/struct.ThinVec.html") ||
file.ends_with("util/struct.ThinVec.html") ||
......