Commit 5a9e6b87 authored by Irina Popa, committed by Eduard-Mihai Burtescu

rustc_codegen_llvm: begin generalizing over backend values.

Parent 6b9b97bd
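
As context for the hunks below: the pattern this commit applies is to replace the hard-coded LLVM value type `&'ll Value` inside `PlaceRef`, `OperandRef`, `OperandValue`, `ReturnDest`, and `LocalRef` with a type parameter `V`, with the LLVM backend instantiating `V = &'ll Value`. Here is a minimal, self-contained sketch of that move; `Layout` and `LlvmValue` are placeholder stand-ins for rustc's `TyLayout<'tcx>` and LLVM's `Value`, not the actual definitions:

```rust
// Sketch only: placeholder types so this compiles on its own.
#[derive(Copy, Clone, Debug)]
struct Layout<'tcx> {
    name: &'tcx str,
}

#[derive(Debug)]
struct LlvmValue;

// Before this commit: `PlaceRef<'ll, 'tcx>` stored `llval: &'ll Value`.
// After: the backend value type becomes a parameter `V`.
#[derive(Copy, Clone, Debug)]
struct PlaceRef<'tcx, V> {
    llval: V,             // pointer to the contents of the place
    llextra: Option<V>,   // extra data (vtable/length) if unsized
    layout: Layout<'tcx>, // monomorphized type of the place
}

// LLVM-specific code keeps its old shape by pinning `V`.
type LlvmPlaceRef<'ll, 'tcx> = PlaceRef<'tcx, &'ll LlvmValue>;

fn main() {
    let val = LlvmValue;
    let place: LlvmPlaceRef<'_, 'static> = PlaceRef {
        llval: &val,
        llextra: None,
        layout: Layout { name: "i32" },
    };
    println!("{:?}", place);
}
```
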
@@ -167,8 +167,13 @@ fn llvm_type(&self, cx: &CodegenCx<'ll, '_>) -> &'ll Type {
pub trait ArgTypeExt<'ll, 'tcx> {
fn memory_ty(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type;
- fn store(&self, bx: &Builder<'_, 'll, 'tcx>, val: &'ll Value, dst: PlaceRef<'ll, 'tcx>);
- fn store_fn_arg(&self, bx: &Builder<'_, 'll, 'tcx>, idx: &mut usize, dst: PlaceRef<'ll, 'tcx>);
+ fn store(&self, bx: &Builder<'_, 'll, 'tcx>, val: &'ll Value, dst: PlaceRef<'tcx, &'ll Value>);
+ fn store_fn_arg(
+ &self,
+ bx: &Builder<'_, 'll, 'tcx>,
+ idx: &mut usize,
+ dst: PlaceRef<'tcx, &'ll Value>,
+ );
}
impl ArgTypeExt<'ll, 'tcx> for ArgType<'tcx, Ty<'tcx>> {
@@ -182,7 +187,7 @@ fn memory_ty(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
/// place for the original Rust type of this argument/return.
/// Can be used for both storing formal arguments into Rust variables
/// or results of call/invoke instructions into their destinations.
- fn store(&self, bx: &Builder<'_, 'll, 'tcx>, val: &'ll Value, dst: PlaceRef<'ll, 'tcx>) {
+ fn store(&self, bx: &Builder<'_, 'll, 'tcx>, val: &'ll Value, dst: PlaceRef<'tcx, &'ll Value>) {
if self.is_ignore() {
return;
}
@@ -238,7 +243,12 @@ fn store(&self, bx: &Builder<'_, 'll, 'tcx>, val: &'ll Value, dst: PlaceRef<'ll,
}
}
- fn store_fn_arg(&self, bx: &Builder<'a, 'll, 'tcx>, idx: &mut usize, dst: PlaceRef<'ll, 'tcx>) {
+ fn store_fn_arg(
+ &self,
+ bx: &Builder<'a, 'll, 'tcx>,
+ idx: &mut usize,
+ dst: PlaceRef<'tcx, &'ll Value>,
+ ) {
let mut next = || {
let val = llvm::get_param(bx.llfn(), *idx as c_uint);
*idx += 1;
......
@@ -28,7 +28,7 @@
pub fn codegen_inline_asm(
bx: &Builder<'a, 'll, 'tcx>,
ia: &hir::InlineAsm,
- outputs: Vec<PlaceRef<'ll, 'tcx>>,
+ outputs: Vec<PlaceRef<'tcx, &'ll Value>>,
mut inputs: Vec<&'ll Value>
) -> bool {
let mut ext_constraints = vec![];
......
@@ -275,8 +275,8 @@ pub fn unsize_thin_ptr(
/// to a value of type `dst_ty` and store the result in `dst`
pub fn coerce_unsized_into(
bx: &Builder<'a, 'll, 'tcx>,
- src: PlaceRef<'ll, 'tcx>,
- dst: PlaceRef<'ll, 'tcx>
+ src: PlaceRef<'tcx, &'ll Value>,
+ dst: PlaceRef<'tcx, &'ll Value>
) {
let src_ty = src.layout.ty;
let dst_ty = dst.layout.ty;
......
@@ -91,7 +91,7 @@ pub fn codegen_intrinsic_call(
bx: &Builder<'a, 'll, 'tcx>,
callee_ty: Ty<'tcx>,
fn_ty: &FnType<'tcx, Ty<'tcx>>,
- args: &[OperandRef<'ll, 'tcx>],
+ args: &[OperandRef<'tcx, &'ll Value>],
llresult: &'ll Value,
span: Span,
) {
@@ -614,7 +614,7 @@ fn ty_to_type(cx: &CodegenCx<'ll, '_>, t: &intrinsics::Type) -> Vec<&'ll Type> {
fn modify_as_needed(
bx: &Builder<'a, 'll, 'tcx>,
t: &intrinsics::Type,
- arg: &OperandRef<'ll, 'tcx>,
+ arg: &OperandRef<'tcx, &'ll Value>,
) -> Vec<&'ll Value> {
match *t {
intrinsics::Type::Aggregate(true, ref contents) => {
@@ -992,7 +992,7 @@ fn generic_simd_intrinsic(
bx: &Builder<'a, 'll, 'tcx>,
name: &str,
callee_ty: Ty<'tcx>,
- args: &[OperandRef<'ll, 'tcx>],
+ args: &[OperandRef<'tcx, &'ll Value>],
ret_ty: Ty<'tcx>,
llret_ty: &'ll Type,
span: Span
@@ -1167,7 +1167,7 @@ fn simd_simple_float_intrinsic(
in_len: usize,
bx: &Builder<'a, 'll, 'tcx>,
span: Span,
- args: &[OperandRef<'ll, 'tcx>],
+ args: &[OperandRef<'tcx, &'ll Value>],
) -> Result<&'ll Value, ()> {
macro_rules! emit_error {
($msg: tt) => {
......
@@ -115,7 +115,7 @@ fn codegen_terminator(&mut self,
fn_ty: FnType<'tcx, Ty<'tcx>>,
fn_ptr: &'ll Value,
llargs: &[&'ll Value],
- destination: Option<(ReturnDest<'ll, 'tcx>, mir::BasicBlock)>,
+ destination: Option<(ReturnDest<'tcx, &'ll Value>, mir::BasicBlock)>,
cleanup: Option<mir::BasicBlock>
| {
if let Some(cleanup) = cleanup {
@@ -731,7 +731,7 @@ fn codegen_terminator(&mut self,
fn codegen_argument(&mut self,
bx: &Builder<'a, 'll, 'tcx>,
- op: OperandRef<'ll, 'tcx>,
+ op: OperandRef<'tcx, &'ll Value>,
llargs: &mut Vec<&'ll Value>,
arg: &ArgType<'tcx, Ty<'tcx>>) {
// Fill padding with undef value, where applicable.
@@ -843,7 +843,7 @@ fn codegen_arguments_untupled(&mut self,
}
}
- fn get_personality_slot(&mut self, bx: &Builder<'a, 'll, 'tcx>) -> PlaceRef<'ll, 'tcx> {
+ fn get_personality_slot(&mut self, bx: &Builder<'a, 'll, 'tcx>) -> PlaceRef<'tcx, &'ll Value> {
let cx = bx.cx;
if let Some(slot) = self.personality_slot {
slot
@@ -919,7 +919,7 @@ pub fn build_block(&self, bb: mir::BasicBlock) -> Builder<'a, 'll, 'tcx> {
fn make_return_dest(&mut self, bx: &Builder<'a, 'll, 'tcx>,
dest: &mir::Place<'tcx>, fn_ret: &ArgType<'tcx, Ty<'tcx>>,
llargs: &mut Vec<&'ll Value>, is_intrinsic: bool)
- -> ReturnDest<'ll, 'tcx> {
+ -> ReturnDest<'tcx, &'ll Value> {
// If the return is ignored, we can just return a do-nothing ReturnDest
if fn_ret.is_ignore() {
return ReturnDest::Nothing;
@@ -1003,7 +1003,7 @@ fn codegen_transmute(&mut self, bx: &Builder<'a, 'll, 'tcx>,
fn codegen_transmute_into(&mut self, bx: &Builder<'a, 'll, 'tcx>,
src: &mir::Operand<'tcx>,
- dst: PlaceRef<'ll, 'tcx>) {
+ dst: PlaceRef<'tcx, &'ll Value>) {
let src = self.codegen_operand(bx, src);
let llty = src.layout.llvm_type(bx.cx);
let cast_ptr = bx.pointercast(dst.llval, llty.ptr_to());
@@ -1015,7 +1015,7 @@ fn codegen_transmute_into(&mut self, bx: &Builder<'a, 'll, 'tcx>,
// Stores the return value of a function call into its final location.
fn store_return(&mut self,
bx: &Builder<'a, 'll, 'tcx>,
- dest: ReturnDest<'ll, 'tcx>,
+ dest: ReturnDest<'tcx, &'ll Value>,
ret_ty: &ArgType<'tcx, Ty<'tcx>>,
llval: &'ll Value) {
use self::ReturnDest::*;
@@ -1046,13 +1046,13 @@ fn store_return(&mut self,
}
}
- enum ReturnDest<'ll, 'tcx> {
+ enum ReturnDest<'tcx, V> {
// Do nothing, the return value is indirect or ignored
Nothing,
// Store the return value to the pointer
- Store(PlaceRef<'ll, 'tcx>),
+ Store(PlaceRef<'tcx, V>),
// Stores an indirect return value to an operand local place
- IndirectOperand(PlaceRef<'ll, 'tcx>, mir::Local),
+ IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
// Stores a direct return value to an operand local place
DirectOperand(mir::Local)
}
@@ -64,7 +64,7 @@ pub struct FunctionCx<'a, 'll: 'a, 'tcx: 'll> {
/// don't really care about it very much. Anyway, this value
/// contains an alloca into which the personality is stored and
/// then later loaded when generating the DIVERGE_BLOCK.
- personality_slot: Option<PlaceRef<'ll, 'tcx>>,
+ personality_slot: Option<PlaceRef<'tcx, &'ll Value>>,
/// A `Block` for each MIR `BasicBlock`
blocks: IndexVec<mir::BasicBlock, &'ll BasicBlock>,
@@ -98,7 +98,7 @@ pub struct FunctionCx<'a, 'll: 'a, 'tcx: 'll> {
///
/// Avoiding allocs can also be important for certain intrinsics,
/// notably `expect`.
- locals: IndexVec<mir::Local, LocalRef<'ll, 'tcx>>,
+ locals: IndexVec<mir::Local, LocalRef<'tcx, &'ll Value>>,
/// Debug information for MIR scopes.
scopes: IndexVec<mir::SourceScope, debuginfo::MirDebugScope<'ll>>,
@@ -179,18 +179,21 @@ fn scope_metadata_for_loc(&self, scope_id: mir::SourceScope, pos: BytePos)
}
}
- enum LocalRef<'ll, 'tcx> {
- Place(PlaceRef<'ll, 'tcx>),
+ enum LocalRef<'tcx, V> {
+ Place(PlaceRef<'tcx, V>),
/// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
/// `*p` is the fat pointer that references the actual unsized place.
/// Every time it is initialized, we have to reallocate the place
/// and update the fat pointer. That's the reason why it is indirect.
- UnsizedPlace(PlaceRef<'ll, 'tcx>),
- Operand(Option<OperandRef<'ll, 'tcx>>),
+ UnsizedPlace(PlaceRef<'tcx, V>),
+ Operand(Option<OperandRef<'tcx, V>>),
}
- impl LocalRef<'ll, 'tcx> {
- fn new_operand(cx: &CodegenCx<'ll, 'tcx>, layout: TyLayout<'tcx>) -> LocalRef<'ll, 'tcx> {
+ impl LocalRef<'tcx, &'ll Value> {
+ fn new_operand(
+ cx: &CodegenCx<'ll, 'tcx>,
+ layout: TyLayout<'tcx>,
+ ) -> LocalRef<'tcx, &'ll Value> {
if layout.is_zst() {
// Zero-size temporaries aren't always initialized, which
// doesn't matter because they don't contain data, but
@@ -437,7 +440,7 @@ fn arg_local_refs(
fx: &FunctionCx<'a, 'll, 'tcx>,
scopes: &IndexVec<mir::SourceScope, debuginfo::MirDebugScope<'ll>>,
memory_locals: &BitSet<mir::Local>,
- ) -> Vec<LocalRef<'ll, 'tcx>> {
+ ) -> Vec<LocalRef<'tcx, &'ll Value>> {
let mir = fx.mir;
let tcx = bx.tcx();
let mut idx = 0;
......
@@ -31,16 +31,16 @@
/// uniquely determined by the value's type, but is kept as a
/// safety check.
#[derive(Copy, Clone, Debug)]
- pub enum OperandValue<'ll> {
+ pub enum OperandValue<V> {
/// A reference to the actual operand. The data is guaranteed
/// to be valid for the operand's lifetime.
/// The second value, if any, is the extra data (vtable or length)
/// which indicates that it refers to an unsized rvalue.
- Ref(&'ll Value, Option<&'ll Value>, Align),
+ Ref(V, Option<V>, Align),
/// A single LLVM value.
- Immediate(&'ll Value),
+ Immediate(V),
/// A pair of immediate LLVM values. Used by fat pointers too.
- Pair(&'ll Value, &'ll Value)
+ Pair(V, V)
}
/// An `OperandRef` is an "SSA" reference to a Rust value, along with
@@ -52,23 +52,23 @@ pub enum OperandValue<'ll> {
/// directly is sure to cause problems -- use `OperandRef::store`
/// instead.
#[derive(Copy, Clone)]
- pub struct OperandRef<'ll, 'tcx> {
+ pub struct OperandRef<'tcx, V> {
// The value.
- pub val: OperandValue<'ll>,
+ pub val: OperandValue<V>,
// The layout of value, based on its Rust type.
pub layout: TyLayout<'tcx>,
}
- impl fmt::Debug for OperandRef<'ll, 'tcx> {
+ impl fmt::Debug for OperandRef<'tcx, &'ll Value> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "OperandRef({:?} @ {:?})", self.val, self.layout)
}
}
- impl OperandRef<'ll, 'tcx> {
+ impl OperandRef<'tcx, &'ll Value> {
pub fn new_zst(cx: &CodegenCx<'ll, 'tcx>,
- layout: TyLayout<'tcx>) -> OperandRef<'ll, 'tcx> {
+ layout: TyLayout<'tcx>) -> OperandRef<'tcx, &'ll Value> {
assert!(layout.is_zst());
OperandRef {
val: OperandValue::Immediate(C_undef(layout.immediate_llvm_type(cx))),
@@ -78,7 +78,7 @@ pub fn new_zst(cx: &CodegenCx<'ll, 'tcx>,
pub fn from_const(bx: &Builder<'a, 'll, 'tcx>,
val: &'tcx ty::Const<'tcx>)
- -> Result<OperandRef<'ll, 'tcx>, ErrorHandled> {
+ -> Result<OperandRef<'tcx, &'ll Value>, ErrorHandled> {
let layout = bx.cx.layout_of(val.ty);
if layout.is_zst() {
@@ -140,7 +140,7 @@ pub fn immediate(self) -> &'ll Value {
}
}
- pub fn deref(self, cx: &CodegenCx<'ll, 'tcx>) -> PlaceRef<'ll, 'tcx> {
+ pub fn deref(self, cx: &CodegenCx<'ll, 'tcx>) -> PlaceRef<'tcx, &'ll Value> {
let projected_ty = self.layout.ty.builtin_deref(true)
.unwrap_or_else(|| bug!("deref of non-pointer {:?}", self)).ty;
let (llptr, llextra) = match self.val {
@@ -178,7 +178,7 @@ pub fn immediate_or_packed_pair(self, bx: &Builder<'a, 'll, 'tcx>) -> &'ll Value
pub fn from_immediate_or_packed_pair(bx: &Builder<'a, 'll, 'tcx>,
llval: &'ll Value,
layout: TyLayout<'tcx>)
- -> OperandRef<'ll, 'tcx> {
+ -> OperandRef<'tcx, &'ll Value> {
let val = if let layout::Abi::ScalarPair(ref a, ref b) = layout.abi {
debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}",
llval, layout);
@@ -193,7 +193,11 @@ pub fn from_immediate_or_packed_pair(bx: &Builder<'a, 'll, 'tcx>,
OperandRef { val, layout }
}
- pub fn extract_field(&self, bx: &Builder<'a, 'll, 'tcx>, i: usize) -> OperandRef<'ll, 'tcx> {
+ pub fn extract_field(
+ &self,
+ bx: &Builder<'a, 'll, 'tcx>,
+ i: usize,
+ ) -> OperandRef<'tcx, &'ll Value> {
let field = self.layout.field(bx.cx, i);
let offset = self.layout.fields.offset(i);
@@ -251,27 +255,31 @@ pub fn extract_field(&self, bx: &Builder<'a, 'll, 'tcx>, i: usize) -> OperandRef
}
}
- impl OperandValue<'ll> {
- pub fn store(self, bx: &Builder<'a, 'll, 'tcx>, dest: PlaceRef<'ll, 'tcx>) {
+ impl OperandValue<&'ll Value> {
+ pub fn store(self, bx: &Builder<'a, 'll, 'tcx>, dest: PlaceRef<'tcx, &'ll Value>) {
self.store_with_flags(bx, dest, MemFlags::empty());
}
- pub fn volatile_store(self, bx: &Builder<'a, 'll, 'tcx>, dest: PlaceRef<'ll, 'tcx>) {
+ pub fn volatile_store(self, bx: &Builder<'a, 'll, 'tcx>, dest: PlaceRef<'tcx, &'ll Value>) {
self.store_with_flags(bx, dest, MemFlags::VOLATILE);
}
- pub fn unaligned_volatile_store(self, bx: &Builder<'a, 'll, 'tcx>, dest: PlaceRef<'ll, 'tcx>) {
+ pub fn unaligned_volatile_store(
+ self,
+ bx: &Builder<'a, 'll, 'tcx>,
+ dest: PlaceRef<'tcx, &'ll Value>,
+ ) {
self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
}
- pub fn nontemporal_store(self, bx: &Builder<'a, 'll, 'tcx>, dest: PlaceRef<'ll, 'tcx>) {
+ pub fn nontemporal_store(self, bx: &Builder<'a, 'll, 'tcx>, dest: PlaceRef<'tcx, &'ll Value>) {
self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
}
fn store_with_flags(
self,
bx: &Builder<'a, 'll, 'tcx>,
- dest: PlaceRef<'ll, 'tcx>,
+ dest: PlaceRef<'tcx, &'ll Value>,
flags: MemFlags,
) {
debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
@@ -302,7 +310,11 @@ fn store_with_flags(
}
}
- pub fn store_unsized(self, bx: &Builder<'a, 'll, 'tcx>, indirect_dest: PlaceRef<'ll, 'tcx>) {
+ pub fn store_unsized(
+ self,
+ bx: &Builder<'a, 'll, 'tcx>,
+ indirect_dest: PlaceRef<'tcx, &'ll Value>,
+ ) {
debug!("OperandRef::store_unsized: operand={:?}, indirect_dest={:?}", self, indirect_dest);
let flags = MemFlags::empty();
@@ -336,7 +348,7 @@ impl FunctionCx<'a, 'll, 'tcx> {
fn maybe_codegen_consume_direct(&mut self,
bx: &Builder<'a, 'll, 'tcx>,
place: &mir::Place<'tcx>)
- -> Option<OperandRef<'ll, 'tcx>>
+ -> Option<OperandRef<'tcx, &'ll Value>>
{
debug!("maybe_codegen_consume_direct(place={:?})", place);
@@ -384,7 +396,7 @@ fn maybe_codegen_consume_direct(&mut self,
pub fn codegen_consume(&mut self,
bx: &Builder<'a, 'll, 'tcx>,
place: &mir::Place<'tcx>)
- -> OperandRef<'ll, 'tcx>
+ -> OperandRef<'tcx, &'ll Value>
{
debug!("codegen_consume(place={:?})", place);
@@ -408,7 +420,7 @@ pub fn codegen_consume(&mut self,
pub fn codegen_operand(&mut self,
bx: &Builder<'a, 'll, 'tcx>,
operand: &mir::Operand<'tcx>)
- -> OperandRef<'ll, 'tcx>
+ -> OperandRef<'tcx, &'ll Value>
{
debug!("codegen_operand(operand={:?})", operand);
......
@@ -27,12 +27,12 @@
use super::operand::{OperandRef, OperandValue};
#[derive(Copy, Clone, Debug)]
- pub struct PlaceRef<'ll, 'tcx> {
+ pub struct PlaceRef<'tcx, V> {
/// Pointer to the contents of the place
- pub llval: &'ll Value,
+ pub llval: V,
/// This place's extra data if it is unsized, or null
- pub llextra: Option<&'ll Value>,
+ pub llextra: Option<V>,
/// Monomorphized type of this place, including variant information
pub layout: TyLayout<'tcx>,
@@ -41,12 +41,12 @@ pub struct PlaceRef<'ll, 'tcx> {
pub align: Align,
}
- impl PlaceRef<'ll, 'tcx> {
+ impl PlaceRef<'tcx, &'ll Value> {
pub fn new_sized(
llval: &'ll Value,
layout: TyLayout<'tcx>,
align: Align,
- ) -> PlaceRef<'ll, 'tcx> {
+ ) -> PlaceRef<'tcx, &'ll Value> {
assert!(!layout.is_unsized());
PlaceRef {
llval,
@@ -61,7 +61,7 @@ pub fn from_const_alloc(
layout: TyLayout<'tcx>,
alloc: &mir::interpret::Allocation,
offset: Size,
- ) -> PlaceRef<'ll, 'tcx> {
+ ) -> PlaceRef<'tcx, &'ll Value> {
let init = const_alloc_to_llvm(bx.cx, alloc);
let base_addr = consts::addr_of(bx.cx, init, layout.align, None);
@@ -75,7 +75,7 @@ pub fn from_const_alloc(
}
pub fn alloca(bx: &Builder<'a, 'll, 'tcx>, layout: TyLayout<'tcx>, name: &str)
- -> PlaceRef<'ll, 'tcx> {
+ -> PlaceRef<'tcx, &'ll Value> {
debug!("alloca({:?}: {:?})", name, layout);
assert!(!layout.is_unsized(), "tried to statically allocate unsized place");
let tmp = bx.alloca(layout.llvm_type(bx.cx), name, layout.align);
@@ -83,8 +83,11 @@ pub fn alloca(bx: &Builder<'a, 'll, 'tcx>, layout: TyLayout<'tcx>, name: &str)
}
/// Returns a place for an indirect reference to an unsized place.
- pub fn alloca_unsized_indirect(bx: &Builder<'a, 'll, 'tcx>, layout: TyLayout<'tcx>, name: &str)
- -> PlaceRef<'ll, 'tcx> {
+ pub fn alloca_unsized_indirect(
+ bx: &Builder<'a, 'll, 'tcx>,
+ layout: TyLayout<'tcx>,
+ name: &str,
+ ) -> PlaceRef<'tcx, &'ll Value> {
debug!("alloca_unsized_indirect({:?}: {:?})", name, layout);
assert!(layout.is_unsized(), "tried to allocate indirect place for sized values");
let ptr_ty = bx.cx.tcx.mk_mut_ptr(layout.ty);
@@ -105,7 +108,7 @@ pub fn len(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Value {
}
}
- pub fn load(&self, bx: &Builder<'a, 'll, 'tcx>) -> OperandRef<'ll, 'tcx> {
+ pub fn load(&self, bx: &Builder<'a, 'll, 'tcx>) -> OperandRef<'tcx, &'ll Value> {
debug!("PlaceRef::load: {:?}", self);
assert_eq!(self.llextra.is_some(), self.layout.is_unsized());
@@ -169,7 +172,11 @@ pub fn load(&self, bx: &Builder<'a, 'll, 'tcx>) -> OperandRef<'ll, 'tcx> {
}
/// Access a field, at a point when the value's case is known.
- pub fn project_field(self, bx: &Builder<'a, 'll, 'tcx>, ix: usize) -> PlaceRef<'ll, 'tcx> {
+ pub fn project_field(
+ self,
+ bx: &Builder<'a, 'll, 'tcx>,
+ ix: usize,
+ ) -> PlaceRef<'tcx, &'ll Value> {
let cx = bx.cx;
let field = self.layout.field(cx, ix);
let offset = self.layout.fields.offset(ix);
@@ -393,7 +400,7 @@ pub fn codegen_set_discr(&self, bx: &Builder<'a, 'll, 'tcx>, variant_index: Vari
}
pub fn project_index(&self, bx: &Builder<'a, 'll, 'tcx>, llindex: &'ll Value)
- -> PlaceRef<'ll, 'tcx> {
+ -> PlaceRef<'tcx, &'ll Value> {
PlaceRef {
llval: bx.inbounds_gep(self.llval, &[C_usize(bx.cx, 0), llindex]),
llextra: None,
@@ -403,7 +410,7 @@ pub fn project_index(&self, bx: &Builder<'a, 'll, 'tcx>, llindex: &'ll Value)
}
pub fn project_downcast(&self, bx: &Builder<'a, 'll, 'tcx>, variant_index: VariantIdx)
- -> PlaceRef<'ll, 'tcx> {
+ -> PlaceRef<'tcx, &'ll Value> {
let mut downcast = *self;
downcast.layout = self.layout.for_variant(bx.cx, variant_index);
@@ -427,7 +434,7 @@ impl FunctionCx<'a, 'll, 'tcx> {
pub fn codegen_place(&mut self,
bx: &Builder<'a, 'll, 'tcx>,
place: &mir::Place<'tcx>)
- -> PlaceRef<'ll, 'tcx> {
+ -> PlaceRef<'tcx, &'ll Value> {
debug!("codegen_place(place={:?})", place);
let cx = bx.cx;
......
@@ -35,7 +35,7 @@
impl FunctionCx<'a, 'll, 'tcx> {
pub fn codegen_rvalue(&mut self,
bx: Builder<'a, 'll, 'tcx>,
- dest: PlaceRef<'ll, 'tcx>,
+ dest: PlaceRef<'tcx, &'ll Value>,
rvalue: &mir::Rvalue<'tcx>)
-> Builder<'a, 'll, 'tcx>
{
@@ -178,12 +178,12 @@ pub fn codegen_rvalue(&mut self,
}
}
- pub fn codegen_rvalue_unsized(&mut self,
- bx: Builder<'a, 'll, 'tcx>,
- indirect_dest: PlaceRef<'ll, 'tcx>,
- rvalue: &mir::Rvalue<'tcx>)
- -> Builder<'a, 'll, 'tcx>
- {
+ pub fn codegen_rvalue_unsized(
+ &mut self,
+ bx: Builder<'a, 'll, 'tcx>,
+ indirect_dest: PlaceRef<'tcx, &'ll Value>,
+ rvalue: &mir::Rvalue<'tcx>,
+ ) -> Builder<'a, 'll, 'tcx> {
debug!("codegen_rvalue_unsized(indirect_dest.llval={:?}, rvalue={:?})",
indirect_dest.llval, rvalue);
@@ -201,7 +201,7 @@ pub fn codegen_rvalue_unsized(&mut self,
pub fn codegen_rvalue_operand(&mut self,
bx: Builder<'a, 'll, 'tcx>,
rvalue: &mir::Rvalue<'tcx>)
- -> (Builder<'a, 'll, 'tcx>, OperandRef<'ll, 'tcx>)
+ -> (Builder<'a, 'll, 'tcx>, OperandRef<'tcx, &'ll Value>)
{
assert!(self.rvalue_creates_operand(rvalue), "cannot codegen {:?} to operand", rvalue);
@@ -677,7 +677,7 @@ pub fn codegen_scalar_checked_binop(&mut self,
op: mir::BinOp,
lhs: &'ll Value,
rhs: &'ll Value,
- input_ty: Ty<'tcx>) -> OperandValue<'ll> {
+ input_ty: Ty<'tcx>) -> OperandValue<&'ll Value> {
// This case can currently arise only from functions marked
// with #[rustc_inherit_overflow_checks] and inlined from
// another crate (mostly core::num generic/#[inline] fns),
......
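
The same parameterization appears in the `OperandValue` hunk above: the enum's payloads switch from `&'ll Value` to a generic `V`. A matching minimal sketch (simplified placeholders, not the rustc enum; the real `Ref` variant also carries an `Align`, omitted here):

```rust
// Sketch only: enum payloads become generic over the backend value `V`.
#[derive(Copy, Clone, Debug)]
enum OperandValue<V> {
    // Pointer to the operand, plus optional unsized metadata.
    Ref(V, Option<V>),
    // A single immediate backend value.
    Immediate(V),
    // A pair of immediates, e.g. a fat pointer.
    Pair(V, V),
}

fn main() {
    // Any `V` works; strings stand in for backend values here.
    let by_ref: OperandValue<&str> = OperandValue::Ref("%slot", None);
    let imm: OperandValue<&str> = OperandValue::Immediate("%0");
    let pair: OperandValue<&str> = OperandValue::Pair("%ptr", "%len");
    println!("{:?} {:?} {:?}", by_ref, imm, pair);
}
```
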