Commit 6f633ef1, authored by Ariel Ben-Yehuda, committed by Ariel Ben-Yehuda

tuple arguments to overloaded calls

also fix translation of "rust-call" functions, although that could use
more optimizations
Parent b7cbbc37
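
The first hunk below changes MIR construction (the HAIR lowering of call expressions) so that an overloaded call f(u, v) is rebuilt as a single UFCS trait-method call whose two operands are the callee-object and a freshly built argument tuple. A minimal source-level sketch of that rewrite, assuming a nightly compiler with the same feature gates the test file in this commit enables; the names demo, f, u and v are placeholders, not part of the commit:

#![feature(fn_traits, unboxed_closures)]

// What the programmer writes is f(u, v); after this change the MIR builder
// produces the explicit form below: one call taking the callee-object plus a
// tuple of the original arguments.
fn demo<F: FnOnce(i32, i32) -> i32>(f: F, u: i32, v: i32) -> i32 {
    FnOnce::call_once(f, (u, v))
}

fn main() {
    assert_eq!(demo(|a, b| a + b, 2, 3), 5);
}
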
@@ -54,14 +54,35 @@ fn make_mirror<'a>(self, cx: &mut Cx<'a, 'tcx>) -> Expr<'tcx> {
// Find the actual method implementation being called and
// build the appropriate UFCS call expression with the
// callee-object as self parameter.
// rewrite f(u, v) into FnOnce::call_once(f, (u, v))
let method = method_callee(cx, self, ty::MethodCall::expr(self.id));
let mut argrefs = vec![fun.to_ref()];
argrefs.extend(args.iter().map(|a| a.to_ref()));
let sig = match method.ty.sty {
ty::TyBareFn(_, fn_ty) => &fn_ty.sig,
_ => cx.tcx.sess.span_bug(self.span, "type of method is not an fn")
};
let sig = cx.tcx.no_late_bound_regions(sig).unwrap_or_else(|| {
cx.tcx.sess.span_bug(self.span, "method call has late-bound regions")
});
assert_eq!(sig.inputs.len(), 2);
let tupled_args = Expr {
ty: sig.inputs[1],
temp_lifetime: cx.tcx.region_maps.temporary_scope(self.id),
span: self.span,
kind: ExprKind::Tuple {
fields: args.iter().map(ToRef::to_ref).collect()
}
};
ExprKind::Call {
ty: method.ty,
fun: method.to_ref(),
args: argrefs,
args: vec![fun.to_ref(), tupled_args.to_ref()]
}
} else {
let adt_data = if let hir::ExprPath(..) = fun.node {
......
@@ -9,7 +9,7 @@
// except according to those terms.
use llvm::{BasicBlockRef, ValueRef, OperandBundleDef};
use rustc::middle::ty;
use rustc::middle::ty::{self, Ty};
use rustc::mir::repr as mir;
use syntax::abi::Abi;
use trans::adt;
@@ -26,8 +26,55 @@
use super::MirContext;
use super::operand::OperandValue::{FatPtr, Immediate, Ref};
use super::operand::OperandRef;
#[derive(PartialEq, Eq)]
enum AbiStyle {
Foreign,
RustCall,
Rust
}
impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
fn abi_style(&self, fn_ty: Ty<'tcx>) -> AbiStyle {
if let ty::TyBareFn(_, ref f) = fn_ty.sty {
// We do not translate intrinsics here (they shouldn’t be functions)
assert!(f.abi != Abi::RustIntrinsic && f.abi != Abi::PlatformIntrinsic);
match f.abi {
Abi::Rust => AbiStyle::Rust,
Abi::RustCall => AbiStyle::RustCall,
_ => AbiStyle::Foreign
}
} else {
unreachable!()
}
}
fn arg_operands(&mut self,
bcx: &BlockAndBuilder<'bcx, 'tcx>,
abi_style: AbiStyle,
args: &[mir::Operand<'tcx>])
-> Vec<OperandRef<'tcx>>
{
match abi_style {
AbiStyle::Foreign | AbiStyle::Rust => {
args.iter().map(|arg| self.trans_operand(bcx, arg)).collect()
}
AbiStyle::RustCall => match args.split_last() {
None => vec![],
Some((tup, self_ty)) => {
// we can reorder safely because of MIR
let untupled_args = self.trans_operand_untupled(bcx, tup);
self_ty
.iter().map(|arg| self.trans_operand(bcx, arg))
.chain(untupled_args.into_iter())
.collect()
}
}
}
}
pub fn trans_block(&mut self, bb: mir::BasicBlock) {
debug!("trans_block({:?})", bb);
@@ -159,13 +206,8 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
let mut arg_tys = Vec::new();
// Foreign-ABI functions are translated differently
let is_foreign = if let ty::TyBareFn(_, ref f) = callee.ty.sty {
// We do not translate intrinsics here (they shouldn’t be functions)
assert!(f.abi != Abi::RustIntrinsic && f.abi != Abi::PlatformIntrinsic);
f.abi != Abi::Rust && f.abi != Abi::RustCall
} else {
false
};
let abi_style = self.abi_style(callee.ty);
let is_foreign = abi_style == AbiStyle::Foreign;
// Prepare the return value destination
let (ret_dest_ty, must_copy_dest) = if let Some((ref d, _)) = *destination {
@@ -182,8 +224,7 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
};
// Process the rest of the args.
for arg in args {
let operand = self.trans_operand(&bcx, arg);
for operand in self.arg_operands(&bcx, abi_style, args) {
match operand.val {
Ref(llval) | Immediate(llval) => llargs.push(llval),
FatPtr(b, e) => {
......
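
The hunks above (from the MIR trans block code) add an AbiStyle enum and an arg_operands helper: for a "rust-call" ABI function, the last MIR operand is the argument tuple, and its fields are passed to the generated call as separate arguments after the leading operands. A hedged sketch of that reshuffling in ordinary Rust, not compiler code; flatten_rust_call_args is a hypothetical stand-in for what arg_operands plus trans_operand_untupled produce for a 2-tuple:

// The MIR call site holds [callee-object, (a, b)]; the emitted call receives
// (callee-object, a, b), i.e. the trailing tuple is split into its fields.
fn flatten_rust_call_args<S, A, B>(self_arg: S, tupled: (A, B)) -> (S, A, B) {
    let (a, b) = tupled;
    (self_arg, a, b)
}

fn main() {
    assert_eq!(flatten_rust_call_args("closure", (2, 3)), ("closure", 2, 3));
}
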
@@ -9,13 +9,16 @@
// except according to those terms.
use llvm::ValueRef;
use rustc::middle::ty::{Ty, TypeFoldable};
use rustc::middle::ty::{self, Ty};
use rustc::mir::repr as mir;
use trans::adt;
use trans::base;
use trans::common::{self, Block, BlockAndBuilder};
use trans::datum;
use trans::Disr;
use super::{MirContext, TempRef};
use super::lvalue::LvalueRef;
/// The representation of a Rust value. The enum variant is in fact
/// uniquely determined by the value's type, but is kept as a
@@ -90,6 +93,32 @@ pub fn from_rvalue_datum(datum: datum::Datum<'tcx, datum::Rvalue>) -> OperandRef
}
impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
pub fn trans_load(&mut self,
bcx: &BlockAndBuilder<'bcx, 'tcx>,
llval: ValueRef,
ty: Ty<'tcx>)
-> OperandRef<'tcx>
{
debug!("trans_load: {} @ {:?}", bcx.val_to_string(llval), ty);
let val = match datum::appropriate_rvalue_mode(bcx.ccx(), ty) {
datum::ByValue => {
bcx.with_block(|bcx| {
OperandValue::Immediate(base::load_ty(bcx, llval, ty))
})
}
datum::ByRef if common::type_is_fat_ptr(bcx.tcx(), ty) => {
let (lldata, llextra) = bcx.with_block(|bcx| {
base::load_fat_ptr(bcx, llval, ty)
});
OperandValue::FatPtr(lldata, llextra)
}
datum::ByRef => OperandValue::Ref(llval)
};
OperandRef { val: val, ty: ty }
}
pub fn trans_operand(&mut self,
bcx: &BlockAndBuilder<'bcx, 'tcx>,
operand: &mir::Operand<'tcx>)
@@ -120,30 +149,7 @@ pub fn trans_operand(&mut self,
// out from their home
let tr_lvalue = self.trans_lvalue(bcx, lvalue);
let ty = tr_lvalue.ty.to_ty(bcx.tcx());
debug!("trans_operand: tr_lvalue={} @ {:?}",
bcx.val_to_string(tr_lvalue.llval),
ty);
let val = match datum::appropriate_rvalue_mode(bcx.ccx(), ty) {
datum::ByValue => {
bcx.with_block(|bcx| {
OperandValue::Immediate(base::load_ty(bcx, tr_lvalue.llval, ty))
})
}
datum::ByRef if common::type_is_fat_ptr(bcx.tcx(), ty) => {
let (lldata, llextra) = bcx.with_block(|bcx| {
base::load_fat_ptr(bcx, tr_lvalue.llval, ty)
});
OperandValue::FatPtr(lldata, llextra)
}
datum::ByRef => OperandValue::Ref(tr_lvalue.llval)
};
assert!(!ty.has_erasable_regions());
OperandRef {
val: val,
ty: ty
}
self.trans_load(bcx, tr_lvalue.llval, ty)
}
mir::Operand::Constant(ref constant) => {
@@ -197,4 +203,46 @@ pub fn store_operand_direct(&mut self,
}
}
}
pub fn trans_operand_untupled(&mut self,
bcx: &BlockAndBuilder<'bcx, 'tcx>,
operand: &mir::Operand<'tcx>)
-> Vec<OperandRef<'tcx>>
{
// FIXME: consider having some optimization to avoid tupling/untupling
// (and storing/loading in the case of immediates)
// avoid trans_operand for pointless copying
let lv = match *operand {
mir::Operand::Consume(ref lvalue) => self.trans_lvalue(bcx, lvalue),
mir::Operand::Constant(ref constant) => {
// FIXME: consider being less pessimized
if constant.ty.is_nil() {
return vec![];
}
let ty = bcx.monomorphize(&constant.ty);
let lv = LvalueRef::alloca(bcx, ty, "__untuple_alloca");
let constant = self.trans_constant(bcx, constant);
self.store_operand(bcx, lv.llval, constant);
lv
}
};
let lv_ty = lv.ty.to_ty(bcx.tcx());
let result_types = match lv_ty.sty {
ty::TyTuple(ref tys) => tys,
_ => bcx.tcx().sess.span_bug(
self.mir.span,
&format!("bad final argument to \"rust-call\" fn {:?}", lv_ty))
};
let base_repr = adt::represent_type(bcx.ccx(), lv_ty);
let base = adt::MaybeSizedValue::sized(lv.llval);
result_types.iter().enumerate().map(|(n, &ty)| {
self.trans_load(bcx, bcx.with_block(|bcx| {
adt::trans_field_ptr(bcx, &base_repr, base, Disr(0), n)
}), ty)
}).collect()
}
}
@@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_attrs)]
#![feature(rustc_attrs, unboxed_closures, fn_traits)]
#[rustc_mir]
fn test1(a: isize, b: (i32, i32), c: &[i32]) -> (isize, (i32, i32), &[i32]) {
@@ -117,6 +117,27 @@ fn test_fn_impl(f: &&Fn(i32, i32) -> i32, x: i32, y: i32) -> i32 {
f(x, y)
}
#[rustc_mir]
fn test_fn_direct_call<F>(f: &F, x: i32, y: i32) -> i32
where F: Fn(i32, i32) -> i32
{
f.call((x, y))
}
#[rustc_mir]
fn test_fn_const_call<F>(f: &F) -> i32
where F: Fn(i32, i32) -> i32
{
f.call((100, -1))
}
#[rustc_mir]
fn test_fn_nil_call<F>(f: &F) -> i32
where F: Fn() -> i32
{
f()
}
fn main() {
assert_eq!(test1(1, (2, 3), &[4, 5, 6]), (1, (2, 3), &[4, 5, 6][..]));
assert_eq!(test2(98), 98);
@@ -128,9 +149,14 @@ fn main() {
assert_eq!(test8(), 2);
assert_eq!(test9(), 41 + 42 * 43);
let closure = |x: i32, y: i32| { x + y };
assert_eq!(test_closure(&closure, 100, 1), 101);
let r = 3;
let closure = |x: i32, y: i32| { r*(x + (y*2)) };
assert_eq!(test_fn_const_call(&closure), 294);
assert_eq!(test_closure(&closure, 100, 1), 306);
let function_object = &closure as &Fn(i32, i32) -> i32;
assert_eq!(test_fn_object(function_object, 100, 2), 102);
assert_eq!(test_fn_impl(&function_object, 100, 3), 103);
assert_eq!(test_fn_object(function_object, 100, 2), 312);
assert_eq!(test_fn_impl(&function_object, 100, 3), 318);
assert_eq!(test_fn_direct_call(&closure, 100, 4), 324);
assert_eq!(test_fn_nil_call(&(|| 42)), 42);
}