Commit c488d59a authored by Masaki Hara

Integrate OperandValue::UnsizedRef into OperandValue::Ref.

Parent 6e15e7c1
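This commit collapses the two by-ref operand representations into a single variant: the extra pointer that `UnsizedRef` used to carry becomes an `Option` on `Ref`. The sketch below is a minimal, self-contained illustration of that shape change, not the compiler's own code: `Value` and `Align` are hypothetical stand-ins for rustc's `&'ll Value` handle and alignment type, and the `Before`/`After` enum names are invented only for side-by-side comparison; the real change edits `OperandValue<'ll>` in place, as the diff below shows.

```rust
// Minimal sketch only; `Value` and `Align` are hypothetical stand-ins for the
// compiler's `&'ll Value` handle and layout alignment type.
#[derive(Clone, Copy, Debug)]
struct Value(u32);
#[derive(Clone, Copy, Debug)]
struct Align(u64);

/// Shape before the commit: sized and unsized by-ref operands are separate variants.
#[allow(dead_code)]
enum OperandValueBefore {
    Ref(Value, Align),
    /// The second field is the extra (vtable or length).
    UnsizedRef(Value, Value),
    Immediate(Value),
    Pair(Value, Value),
}

/// Shape after the commit: `UnsizedRef` is folded into `Ref`, whose optional
/// second field carries the extra data for unsized operands.
#[allow(dead_code)]
enum OperandValueAfter {
    Ref(Value, Option<Value>, Align),
    Immediate(Value),
    Pair(Value, Value),
}

/// Call sites now tell the two cases apart by matching on the `Option`,
/// mirroring the `Ref(_, None, _)` / `Ref(_, Some(_), _)` patterns in the diff.
fn describe(val: &OperandValueAfter) -> &'static str {
    match val {
        OperandValueAfter::Ref(_, None, _) => "sized by-ref operand",
        OperandValueAfter::Ref(_, Some(_), _) => "unsized by-ref operand (extra = vtable/len)",
        OperandValueAfter::Immediate(_) => "immediate",
        OperandValueAfter::Pair(_, _) => "pair of immediates",
    }
}

fn main() {
    let sized = OperandValueAfter::Ref(Value(0), None, Align(8));
    let unsized_ref = OperandValueAfter::Ref(Value(0), Some(Value(1)), Align(8));
    println!("{}", describe(&sized));
    println!("{}", describe(&unsized_ref));
}
```

Accordingly, call sites that previously matched `UnsizedRef(..)` now match `Ref(_, Some(_), _)`, and sized by-ref uses become `Ref(_, None, _)`; that pattern repeats throughout the hunks below.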
@@ -188,7 +188,7 @@ fn store(&self, bx: &Builder<'_, 'll, 'tcx>, val: &'ll Value, dst: PlaceRef<'ll,
}
let cx = bx.cx;
if self.is_sized_indirect() {
- OperandValue::Ref(val, self.layout.align).store(bx, dst)
+ OperandValue::Ref(val, None, self.layout.align).store(bx, dst)
} else if self.is_unsized_indirect() {
bug!("unsized ArgType must be handled through store_fn_arg");
} else if let PassMode::Cast(cast) = self.mode {
@@ -249,7 +249,7 @@ fn store_fn_arg(&self, bx: &Builder<'a, 'll, 'tcx>, idx: &mut usize, dst: PlaceR
OperandValue::Pair(next(), next()).store(bx, dst);
}
PassMode::Indirect(_, Some(_)) => {
- OperandValue::UnsizedRef(next(), next()).store(bx, dst);
+ OperandValue::Ref(next(), Some(next()), self.layout.align).store(bx, dst);
}
PassMode::Direct(_) | PassMode::Indirect(_, None) | PassMode::Cast(_) => {
self.store(bx, next(), dst);
@@ -295,7 +295,7 @@ pub fn coerce_unsized_into(
OperandValue::Immediate(base) => {
unsize_thin_ptr(bx, base, src_ty, dst_ty)
}
- OperandValue::Ref(..) | OperandValue::UnsizedRef(..) => bug!()
+ OperandValue::Ref(..) => bug!()
};
OperandValue::Pair(base, info).store(bx, dst);
};
@@ -605,7 +605,7 @@ fn modify_as_needed(
// etc.
assert!(!bx.cx.type_needs_drop(arg.layout.ty));
let (ptr, align) = match arg.val {
- OperandValue::Ref(ptr, align) => (ptr, align),
+ OperandValue::Ref(ptr, None, align) => (ptr, align),
_ => bug!()
};
let arg = PlaceRef::new_sized(ptr, arg.layout, align);
@@ -32,7 +32,7 @@
use super::{FunctionCx, LocalRef};
use super::place::PlaceRef;
use super::operand::OperandRef;
- use super::operand::OperandValue::{Pair, Ref, UnsizedRef, Immediate};
+ use super::operand::OperandValue::{Pair, Ref, Immediate};
impl FunctionCx<'a, 'll, 'tcx> {
pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
@@ -232,10 +232,8 @@ fn codegen_terminator(&mut self,
PassMode::Direct(_) | PassMode::Pair(..) => {
let op = self.codegen_consume(&bx, &mir::Place::Local(mir::RETURN_PLACE));
- if let Ref(llval, align) = op.val {
+ if let Ref(llval, _, align) = op.val {
bx.load(llval, align)
- } else if let UnsizedRef(..) = op.val {
- bug!("return type must be sized");
} else {
op.immediate_or_packed_pair(&bx)
}
@@ -247,7 +245,7 @@ fn codegen_terminator(&mut self,
LocalRef::Operand(None) => bug!("use of return before def"),
LocalRef::Place(cg_place) => {
OperandRef {
- val: Ref(cg_place.llval, cg_place.align),
+ val: Ref(cg_place.llval, None, cg_place.align),
layout: cg_place.layout
}
}
@@ -259,12 +257,11 @@ fn codegen_terminator(&mut self,
op.val.store(&bx, scratch);
scratch.llval
}
- Ref(llval, align) => {
+ Ref(llval, _, align) => {
assert_eq!(align.abi(), op.layout.align.abi(),
"return place is unaligned!");
llval
}
UnsizedRef(..) => bug!("return type must be sized"),
};
bx.load(
bx.pointercast(llslot, cast_ty.llvm_type(bx.cx).ptr_to()),
@@ -605,15 +602,11 @@ fn codegen_terminator(&mut self,
// The callee needs to own the argument memory if we pass it
// by-ref, so make a local copy of non-immediate constants.
match (arg, op.val) {
(&mir::Operand::Copy(_), Ref(..)) |
(&mir::Operand::Constant(_), Ref(..)) => {
(&mir::Operand::Copy(_), Ref(_, None, _)) |
(&mir::Operand::Constant(_), Ref(_, None, _)) => {
let tmp = PlaceRef::alloca(&bx, op.layout, "const");
op.val.store(&bx, tmp);
op.val = Ref(tmp.llval, tmp.align);
}
(&mir::Operand::Copy(_), UnsizedRef(..)) |
(&mir::Operand::Constant(_), UnsizedRef(..)) => {
bug!("tried to pass an unsized argument by copy or constant")
op.val = Ref(tmp.llval, None, tmp.align);
}
_ => {}
}
@@ -667,7 +660,7 @@ fn codegen_argument(&mut self,
}
} else if arg.is_unsized_indirect() {
match op.val {
- UnsizedRef(a, b) => {
+ Ref(a, Some(b), _) => {
llargs.push(a);
llargs.push(b);
return;
@@ -690,7 +683,7 @@ fn codegen_argument(&mut self,
}
}
}
- Ref(llval, align) => {
+ Ref(llval, _, align) => {
if arg.is_indirect() && align.abi() < arg.layout.align.abi() {
// `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
// think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
@@ -703,8 +696,6 @@ fn codegen_argument(&mut self,
(llval, align, true)
}
}
- UnsizedRef(..) =>
- bug!("codegen_argument: tried to pass unsized operand to sized argument"),
};
if by_ref && !arg.is_indirect() {
@@ -740,13 +731,13 @@ fn codegen_arguments_untupled(&mut self,
let tuple = self.codegen_operand(bx, operand);
// Handle both by-ref and immediate tuples.
- if let Ref(llval, align) = tuple.val {
+ if let Ref(llval, None, align) = tuple.val {
let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
for i in 0..tuple.layout.fields.count() {
let field_ptr = tuple_ptr.project_field(bx, i);
self.codegen_argument(bx, field_ptr.load(bx), llargs, &args[i]);
}
- } else if let UnsizedRef(..) = tuple.val {
+ } else if let Ref(_, Some(_), _) = tuple.val {
bug!("closure arguments must be sized")
} else {
// If the tuple is immediate, the elements are as well.
@@ -37,11 +37,9 @@
pub enum OperandValue<'ll> {
/// A reference to the actual operand. The data is guaranteed
/// to be valid for the operand's lifetime.
Ref(&'ll Value, Align),
/// A reference to the unsized operand. The data is guaranteed
/// to be valid for the operand's lifetime.
/// The second field is the extra.
UnsizedRef(&'ll Value, &'ll Value),
/// The second value, if any, is the extra data (vtable or length)
/// which indicates that it refers to an unsized rvalue.
Ref(&'ll Value, Option<&'ll Value>, Align),
/// A single LLVM value.
Immediate(&'ll Value),
/// A pair of immediate LLVM values. Used by fat pointers too.
@@ -154,8 +152,7 @@ pub fn deref(self, cx: &CodegenCx<'ll, 'tcx>) -> PlaceRef<'ll, 'tcx> {
let (llptr, llextra) = match self.val {
OperandValue::Immediate(llptr) => (llptr, None),
OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
- OperandValue::Ref(..) |
- OperandValue::UnsizedRef(..) => bug!("Deref of by-Ref operand {:?}", self)
+ OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self)
};
let layout = cx.layout_of(projected_ty);
PlaceRef {
@@ -250,8 +247,7 @@ pub fn extract_field(&self, bx: &Builder<'a, 'll, 'tcx>, i: usize) -> OperandRef
*a = bx.bitcast(*a, field.scalar_pair_element_llvm_type(bx.cx, 0, true));
*b = bx.bitcast(*b, field.scalar_pair_element_llvm_type(bx.cx, 1, true));
}
- OperandValue::Ref(..) |
- OperandValue::UnsizedRef(..) => bug!()
+ OperandValue::Ref(..) => bug!()
}
OperandRef {
@@ -291,11 +287,11 @@ fn store_with_flags(
return;
}
match self {
- OperandValue::Ref(r, source_align) => {
+ OperandValue::Ref(r, None, source_align) => {
base::memcpy_ty(bx, dest.llval, r, dest.layout,
source_align.min(dest.align), flags)
}
- OperandValue::UnsizedRef(..) => {
+ OperandValue::Ref(_, Some(_), _) => {
bug!("cannot directly store unsized values");
}
OperandValue::Immediate(s) => {
@@ -321,7 +317,7 @@ pub fn store_unsized(self, bx: &Builder<'a, 'll, 'tcx>, indirect_dest: PlaceRef<
.unwrap_or_else(|| bug!("indirect_dest has non-pointer type: {:?}", indirect_dest)).ty;
let (llptr, llextra) =
- if let OperandValue::UnsizedRef(llptr, llextra) = self {
+ if let OperandValue::Ref(llptr, Some(llextra), _) = self {
(llptr, llextra)
} else {
bug!("store_unsized called with a sized value")
@@ -132,7 +132,7 @@ pub fn load(&self, bx: &Builder<'a, 'll, 'tcx>) -> OperandRef<'ll, 'tcx> {
};
let val = if let Some(llextra) = self.llextra {
- OperandValue::UnsizedRef(self.llval, llextra)
+ OperandValue::Ref(self.llval, Some(llextra), self.align)
} else if self.layout.is_llvm_immediate() {
let mut const_llval = None;
unsafe {
@@ -163,7 +163,7 @@ pub fn load(&self, bx: &Builder<'a, 'll, 'tcx>) -> OperandRef<'ll, 'tcx> {
};
OperandValue::Pair(load(0, a), load(1, b))
} else {
- OperandValue::Ref(self.llval, self.align)
+ OperandValue::Ref(self.llval, None, self.align)
};
OperandRef { val, layout: self.layout }
@@ -83,11 +83,11 @@ pub fn codegen_rvalue(&mut self,
base::coerce_unsized_into(&bx, scratch, dest);
scratch.storage_dead(&bx);
}
- OperandValue::Ref(llref, align) => {
+ OperandValue::Ref(llref, None, align) => {
let source = PlaceRef::new_sized(llref, operand.layout, align);
base::coerce_unsized_into(&bx, source, dest);
}
- OperandValue::UnsizedRef(..) => {
+ OperandValue::Ref(_, Some(_), _) => {
bug!("unsized coercion on an unsized rvalue")
}
}
@@ -268,9 +268,6 @@ pub fn codegen_rvalue_operand(&mut self,
bug!("by-ref operand {:?} in codegen_rvalue_operand",
operand);
}
- OperandValue::UnsizedRef(..) => {
- bug!("unsized coercion on an unsized rvalue")
- }
}
}
mir::CastKind::Misc if operand.layout.is_llvm_scalar_pair() => {