Commit 6e15e7c1 authored by Masaki Hara

Integrate PassMode::UnsizedIndirect into PassMode::Indirect.

Parent a0c422a7
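
For orientation, a minimal sketch of the enum change this commit makes (simplified; `ArgAttributes` is stubbed here purely for illustration and is not the compiler's real type):

```rust
// Stand-in for the compiler's ArgAttributes, for illustration only.
#[derive(Clone, Copy, Debug, Default)]
pub struct ArgAttributes;

// Before this commit: separate variants for sized and unsized indirect passing.
pub enum OldPassMode {
    Indirect(ArgAttributes),
    UnsizedIndirect(ArgAttributes, ArgAttributes),
}

// After this commit: a single variant. The second field holds the attributes for
// the extra data (vtable or length) and is Some(..) only for unsized rvalues.
pub enum NewPassMode {
    Indirect(ArgAttributes, Option<ArgAttributes>),
}
```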
@@ -187,7 +187,7 @@ fn store(&self, bx: &Builder<'_, 'll, 'tcx>, val: &'ll Value, dst: PlaceRef<'ll,
return;
}
let cx = bx.cx;
if self.is_indirect() {
if self.is_sized_indirect() {
OperandValue::Ref(val, self.layout.align).store(bx, dst)
} else if self.is_unsized_indirect() {
bug!("unsized ArgType must be handled through store_fn_arg");
@@ -248,10 +248,10 @@ fn store_fn_arg(&self, bx: &Builder<'a, 'll, 'tcx>, idx: &mut usize, dst: PlaceR
PassMode::Pair(..) => {
OperandValue::Pair(next(), next()).store(bx, dst);
}
PassMode::UnsizedIndirect(..) => {
PassMode::Indirect(_, Some(_)) => {
OperandValue::UnsizedRef(next(), next()).store(bx, dst);
}
PassMode::Direct(_) | PassMode::Indirect(_) | PassMode::Cast(_) => {
PassMode::Direct(_) | PassMode::Indirect(_, None) | PassMode::Cast(_) => {
self.store(bx, next(), dst);
}
}
@@ -547,9 +547,7 @@ fn adjust_for_abi(&mut self,
}
let size = arg.layout.size;
if arg.layout.is_unsized() {
arg.make_unsized_indirect(None);
} else if size > layout::Pointer.size(cx) {
if arg.layout.is_unsized() || size > layout::Pointer.size(cx) {
arg.make_indirect();
} else {
// We want to pass small aggregates as immediates, but using
@@ -565,7 +563,7 @@ fn adjust_for_abi(&mut self,
for arg in &mut self.args {
fixup(arg);
}
if let PassMode::Indirect(ref mut attrs) = self.ret.mode {
if let PassMode::Indirect(ref mut attrs, _) = self.ret.mode {
attrs.set(ArgAttribute::StructRet);
}
return;
@@ -582,7 +580,7 @@ fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
if let PassMode::Pair(_, _) = arg.mode { 2 } else { 1 }
).sum();
let mut llargument_tys = Vec::with_capacity(
if let PassMode::Indirect(_) = self.ret.mode { 1 } else { 0 } + args_capacity
if let PassMode::Indirect(..) = self.ret.mode { 1 } else { 0 } + args_capacity
);
let llreturn_ty = match self.ret.mode {
@@ -591,11 +589,10 @@ fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
self.ret.layout.immediate_llvm_type(cx)
}
PassMode::Cast(cast) => cast.llvm_type(cx),
PassMode::Indirect(_) => {
PassMode::Indirect(..) => {
llargument_tys.push(self.ret.memory_ty(cx).ptr_to());
Type::void(cx)
}
PassMode::UnsizedIndirect(..) => bug!("return type must be sized"),
};
for arg in &self.args {
@@ -612,7 +609,7 @@ fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
llargument_tys.push(arg.layout.scalar_pair_element_llvm_type(cx, 1, true));
continue;
}
PassMode::UnsizedIndirect(..) => {
PassMode::Indirect(_, Some(_)) => {
let ptr_ty = cx.tcx.mk_mut_ptr(arg.layout.ty);
let ptr_layout = cx.layout_of(ptr_ty);
llargument_tys.push(ptr_layout.scalar_pair_element_llvm_type(cx, 0, true));
@@ -620,7 +617,7 @@ fn llvm_type(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
continue;
}
PassMode::Cast(cast) => cast.llvm_type(cx),
PassMode::Indirect(_) => arg.memory_ty(cx).ptr_to(),
PassMode::Indirect(_, None) => arg.memory_ty(cx).ptr_to(),
};
llargument_tys.push(llarg_ty);
}
@@ -659,7 +656,7 @@ fn apply_attrs_llfn(&self, llfn: &'ll Value) {
PassMode::Direct(ref attrs) => {
attrs.apply_llfn(llvm::AttributePlace::ReturnValue, llfn);
}
PassMode::Indirect(ref attrs) => apply(attrs),
PassMode::Indirect(ref attrs, _) => apply(attrs),
_ => {}
}
for arg in &self.args {
@@ -669,8 +666,8 @@ fn apply_attrs_llfn(&self, llfn: &'ll Value) {
match arg.mode {
PassMode::Ignore => {}
PassMode::Direct(ref attrs) |
PassMode::Indirect(ref attrs) => apply(attrs),
PassMode::UnsizedIndirect(ref attrs, ref extra_attrs) => {
PassMode::Indirect(ref attrs, None) => apply(attrs),
PassMode::Indirect(ref attrs, Some(ref extra_attrs)) => {
apply(attrs);
apply(extra_attrs);
}
@@ -693,7 +690,7 @@ fn apply_attrs_callsite(&self, bx: &Builder<'a, 'll, 'tcx>, callsite: &'ll Value
PassMode::Direct(ref attrs) => {
attrs.apply_callsite(llvm::AttributePlace::ReturnValue, callsite);
}
PassMode::Indirect(ref attrs) => apply(attrs),
PassMode::Indirect(ref attrs, _) => apply(attrs),
_ => {}
}
if let layout::Abi::Scalar(ref scalar) = self.ret.layout.abi {
@@ -717,8 +714,8 @@ fn apply_attrs_callsite(&self, bx: &Builder<'a, 'll, 'tcx>, callsite: &'ll Value
match arg.mode {
PassMode::Ignore => {}
PassMode::Direct(ref attrs) |
PassMode::Indirect(ref attrs) => apply(attrs),
PassMode::UnsizedIndirect(ref attrs, ref extra_attrs) => {
PassMode::Indirect(ref attrs, None) => apply(attrs),
PassMode::Indirect(ref attrs, Some(ref extra_attrs)) => {
apply(attrs);
apply(extra_attrs);
}
......
@@ -225,7 +225,7 @@ fn codegen_terminator(&mut self,
mir::TerminatorKind::Return => {
let llval = match self.fn_ty.ret.mode {
PassMode::Ignore | PassMode::Indirect(_) => {
PassMode::Ignore | PassMode::Indirect(..) => {
bx.ret_void();
return;
}
@@ -270,8 +270,6 @@ fn codegen_terminator(&mut self,
bx.pointercast(llslot, cast_ty.llvm_type(bx.cx).ptr_to()),
self.fn_ty.ret.layout.align)
}
PassMode::UnsizedIndirect(..) => bug!("return value must be sized"),
};
bx.ret(llval);
}
@@ -667,7 +665,7 @@ fn codegen_argument(&mut self,
}
_ => bug!("codegen_argument: {:?} invalid for pair arugment", op)
}
} else if let PassMode::UnsizedIndirect(..) = arg.mode {
} else if arg.is_unsized_indirect() {
match op.val {
UnsizedRef(a, b) => {
llargs.push(a);
@@ -682,7 +680,7 @@ fn codegen_argument(&mut self,
let (mut llval, align, by_ref) = match op.val {
Immediate(_) | Pair(..) => {
match arg.mode {
PassMode::Indirect(_) | PassMode::Cast(_) => {
PassMode::Indirect(..) | PassMode::Cast(_) => {
let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
op.val.store(bx, scratch);
(scratch.llval, scratch.align, true)
......
@@ -541,7 +541,7 @@ fn arg_local_refs(
}
}
let place = if arg.is_indirect() {
let place = if arg.is_sized_indirect() {
// Don't copy an indirect argument to an alloca, the caller
// already put it in a temporary alloca and gave it up.
// FIXME: lifetimes
......
@@ -44,9 +44,9 @@ pub enum PassMode {
/// a single uniform or a pair of registers.
Cast(CastTarget),
/// Pass the argument indirectly via a hidden pointer.
Indirect(ArgAttributes),
/// Pass the unsized argument indirectly via a hidden pointer.
UnsizedIndirect(ArgAttributes, ArgAttributes),
/// The second value, if any, is for the extra data (vtable or length)
/// which indicates that it refers to an unsized rvalue.
Indirect(ArgAttributes, Option<ArgAttributes>),
}
// Hack to disable non_upper_case_globals only for the bitflags! and not for the rest
@@ -370,38 +370,25 @@ pub fn make_indirect(&mut self) {
// i686-pc-windows-msvc, it results in wrong stack offsets.
// attrs.pointee_align = Some(self.layout.align);
self.mode = PassMode::Indirect(attrs);
let extra_attrs = if self.layout.is_unsized() {
Some(ArgAttributes::new())
} else {
None
};
self.mode = PassMode::Indirect(attrs, extra_attrs);
}
pub fn make_indirect_byval(&mut self) {
self.make_indirect();
match self.mode {
PassMode::Indirect(ref mut attrs) => {
PassMode::Indirect(ref mut attrs, _) => {
attrs.set(ArgAttribute::ByVal);
}
_ => unreachable!()
}
}
pub fn make_unsized_indirect(&mut self, vtable_size: Option<Size>) {
self.make_indirect();
let attrs = if let PassMode::Indirect(attrs) = self.mode {
attrs
} else {
unreachable!()
};
let mut extra_attrs = ArgAttributes::new();
if let Some(vtable_size) = vtable_size {
extra_attrs.set(ArgAttribute::NoAlias)
.set(ArgAttribute::NonNull);
extra_attrs.pointee_size = vtable_size;
}
self.mode = PassMode::UnsizedIndirect(attrs, extra_attrs);
}
pub fn extend_integer_width_to(&mut self, bits: u64) {
// Only integers have signedness
if let Abi::Scalar(ref scalar) = self.layout.abi {
@@ -430,14 +417,21 @@ pub fn pad_with(&mut self, reg: Reg) {
pub fn is_indirect(&self) -> bool {
match self.mode {
PassMode::Indirect(_) => true,
PassMode::Indirect(..) => true,
_ => false
}
}
pub fn is_sized_indirect(&self) -> bool {
match self.mode {
PassMode::Indirect(_, None) => true,
_ => false
}
}
pub fn is_unsized_indirect(&self) -> bool {
match self.mode {
PassMode::UnsizedIndirect(..) => true,
PassMode::Indirect(_, Some(_)) => true,
_ => false
}
}
@@ -534,7 +528,7 @@ pub fn adjust_for_cabi<C>(&mut self, cx: C, abi: ::spec::abi::Abi) -> Result<(),
a => return Err(format!("unrecognized arch \"{}\" in target specification", a))
}
if let PassMode::Indirect(ref mut attrs) = self.ret.mode {
if let PassMode::Indirect(ref mut attrs, _) = self.ret.mode {
attrs.set(ArgAttribute::StructRet);
}
......
@@ -99,10 +99,10 @@ pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>, flavor: Flav
for arg in &mut fty.args {
let attrs = match arg.mode {
PassMode::Ignore |
PassMode::Indirect(_) => continue,
PassMode::Indirect(_, None) => continue,
PassMode::Direct(ref mut attrs) => attrs,
PassMode::Pair(..) |
PassMode::UnsizedIndirect(..) |
PassMode::Indirect(_, Some(_)) |
PassMode::Cast(_) => {
unreachable!("x86 shouldn't be passing arguments by {:?}", arg.mode)
}
......
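
As a closing orientation aid, a hedged sketch of how call sites distinguish the two cases after the merge, mirroring the `is_sized_indirect`/`is_unsized_indirect` helpers added in the hunks above (the types here are simplified stand-ins, not the compiler's own):

```rust
#[derive(Clone, Copy)]
struct ArgAttributes;

enum PassMode {
    Ignore,
    Indirect(ArgAttributes, Option<ArgAttributes>),
}

struct ArgType {
    mode: PassMode,
}

impl ArgType {
    // Indirect with no extra attributes: the pointee is sized.
    fn is_sized_indirect(&self) -> bool {
        match self.mode {
            PassMode::Indirect(_, None) => true,
            _ => false,
        }
    }

    // Indirect with extra attributes for the vtable/length word: unsized pointee.
    fn is_unsized_indirect(&self) -> bool {
        match self.mode {
            PassMode::Indirect(_, Some(_)) => true,
            _ => false,
        }
    }
}

fn main() {
    let sized = ArgType { mode: PassMode::Indirect(ArgAttributes, None) };
    let unsized_arg = ArgType { mode: PassMode::Indirect(ArgAttributes, Some(ArgAttributes)) };
    assert!(sized.is_sized_indirect() && !sized.is_unsized_indirect());
    assert!(unsized_arg.is_unsized_indirect() && !unsized_arg.is_sized_indirect());
    let _ = PassMode::Ignore; // keep the variant constructed so the sketch compiles cleanly
}
```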