提交 8657fb11 编写于 作者: D Dante-Broggi

`new_sized` is mostly used without an explicit align,

so rename the align-taking constructor to `new_sized_aligned`, and make `new_sized` default to `layout.align.abi`.
6 of the 11 call sites passed `align = layout.align.abi`.
`from_const_alloc` passed `alloc.align`, but that is already `assert_eq!`-checked to equal `layout.align.abi`.
Only the remaining 4 of the 11 call sites pass an align that differs from the layout's ABI alignment.
上级 0cc1c8d2
...@@ -561,7 +561,7 @@ fn write_operand_repeatedly( ...@@ -561,7 +561,7 @@ fn write_operand_repeatedly(
let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size); let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
cg_elem.val.store(&mut body_bx, cg_elem.val.store(&mut body_bx,
PlaceRef::new_sized(current, cg_elem.layout, align)); PlaceRef::new_sized_aligned(current, cg_elem.layout, align));
let next = body_bx.inbounds_gep(current, &[self.const_usize(1)]); let next = body_bx.inbounds_gep(current, &[self.const_usize(1)]);
body_bx.br(header_bx.llbb()); body_bx.br(header_bx.llbb());
......
...@@ -348,7 +348,7 @@ fn from_const_alloc( ...@@ -348,7 +348,7 @@ fn from_const_alloc(
)}; )};
self.const_bitcast(llval, llty) self.const_bitcast(llval, llty)
}; };
PlaceRef::new_sized(llval, layout, alloc.align) PlaceRef::new_sized(llval, layout)
} }
fn const_ptrcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value { fn const_ptrcast(&self, val: &'ll Value, ty: &'ll Type) -> &'ll Value {
......
...@@ -101,7 +101,7 @@ fn codegen_intrinsic_call( ...@@ -101,7 +101,7 @@ fn codegen_intrinsic_call(
let name = &*tcx.item_name(def_id).as_str(); let name = &*tcx.item_name(def_id).as_str();
let llret_ty = self.layout_of(ret_ty).llvm_type(self); let llret_ty = self.layout_of(ret_ty).llvm_type(self);
let result = PlaceRef::new_sized(llresult, fn_ty.ret.layout, fn_ty.ret.layout.align.abi); let result = PlaceRef::new_sized(llresult, fn_ty.ret.layout);
let simple = get_simple_intrinsic(self, name); let simple = get_simple_intrinsic(self, name);
let llval = match name { let llval = match name {
......
...@@ -989,7 +989,7 @@ fn codegen_arguments_untupled( ...@@ -989,7 +989,7 @@ fn codegen_arguments_untupled(
// Handle both by-ref and immediate tuples. // Handle both by-ref and immediate tuples.
if let Ref(llval, None, align) = tuple.val { if let Ref(llval, None, align) = tuple.val {
let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align); let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
for i in 0..tuple.layout.fields.count() { for i in 0..tuple.layout.fields.count() {
let field_ptr = tuple_ptr.project_field(bx, i); let field_ptr = tuple_ptr.project_field(bx, i);
let field = bx.load_operand(field_ptr); let field = bx.load_operand(field_ptr);
...@@ -1203,7 +1203,7 @@ fn codegen_transmute_into( ...@@ -1203,7 +1203,7 @@ fn codegen_transmute_into(
let llty = bx.backend_type(src.layout); let llty = bx.backend_type(src.layout);
let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty)); let cast_ptr = bx.pointercast(dst.llval, bx.type_ptr_to(llty));
let align = src.layout.align.abi.min(dst.align); let align = src.layout.align.abi.min(dst.align);
src.val.store(bx, PlaceRef::new_sized(cast_ptr, src.layout, align)); src.val.store(bx, PlaceRef::new_sized_aligned(cast_ptr, src.layout, align));
} }
......
...@@ -289,7 +289,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( ...@@ -289,7 +289,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
if local == mir::RETURN_PLACE && fx.fn_ty.ret.is_indirect() { if local == mir::RETURN_PLACE && fx.fn_ty.ret.is_indirect() {
debug!("alloc: {:?} (return place) -> place", local); debug!("alloc: {:?} (return place) -> place", local);
let llretptr = bx.get_param(0); let llretptr = bx.get_param(0);
LocalRef::Place(PlaceRef::new_sized(llretptr, layout, layout.align.abi)) LocalRef::Place(PlaceRef::new_sized(llretptr, layout))
} else if memory_locals.contains(local) { } else if memory_locals.contains(local) {
debug!("alloc: {:?} -> place", local); debug!("alloc: {:?} -> place", local);
if layout.is_unsized() { if layout.is_unsized() {
...@@ -548,7 +548,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( ...@@ -548,7 +548,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
let llarg = bx.get_param(llarg_idx); let llarg = bx.get_param(llarg_idx);
bx.set_value_name(llarg, &name); bx.set_value_name(llarg, &name);
llarg_idx += 1; llarg_idx += 1;
PlaceRef::new_sized(llarg, arg.layout, arg.layout.align.abi) PlaceRef::new_sized(llarg, arg.layout)
} else if arg.is_unsized_indirect() { } else if arg.is_unsized_indirect() {
// As the storage for the indirect argument lives during // As the storage for the indirect argument lives during
// the whole function call, we just copy the fat pointer. // the whole function call, we just copy the fat pointer.
......
...@@ -485,7 +485,6 @@ pub fn codegen_operand( ...@@ -485,7 +485,6 @@ pub fn codegen_operand(
bx.load_operand(PlaceRef::new_sized( bx.load_operand(PlaceRef::new_sized(
bx.cx().const_undef(bx.cx().type_ptr_to(bx.cx().backend_type(layout))), bx.cx().const_undef(bx.cx().type_ptr_to(bx.cx().backend_type(layout))),
layout, layout,
layout.align.abi,
)) ))
}) })
} }
......
...@@ -30,6 +30,19 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { ...@@ -30,6 +30,19 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
pub fn new_sized( pub fn new_sized(
llval: V, llval: V,
layout: TyLayout<'tcx>, layout: TyLayout<'tcx>,
) -> PlaceRef<'tcx, V> {
assert!(!layout.is_unsized());
PlaceRef {
llval,
llextra: None,
layout,
align: layout.align.abi
}
}
pub fn new_sized_aligned(
llval: V,
layout: TyLayout<'tcx>,
align: Align, align: Align,
) -> PlaceRef<'tcx, V> { ) -> PlaceRef<'tcx, V> {
assert!(!layout.is_unsized()); assert!(!layout.is_unsized());
...@@ -63,7 +76,7 @@ pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>( ...@@ -63,7 +76,7 @@ pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
debug!("alloca({:?}: {:?})", name, layout); debug!("alloca({:?}: {:?})", name, layout);
assert!(!layout.is_unsized(), "tried to statically allocate unsized place"); assert!(!layout.is_unsized(), "tried to statically allocate unsized place");
let tmp = bx.alloca(bx.cx().backend_type(layout), name, layout.align.abi); let tmp = bx.alloca(bx.cx().backend_type(layout), name, layout.align.abi);
Self::new_sized(tmp, layout, layout.align.abi) Self::new_sized(tmp, layout)
} }
/// Returns a place for an indirect reference to an unsized place. /// Returns a place for an indirect reference to an unsized place.
...@@ -481,7 +494,7 @@ pub fn codegen_place( ...@@ -481,7 +494,7 @@ pub fn codegen_place(
let llval = bx.cx().const_undef( let llval = bx.cx().const_undef(
bx.cx().type_ptr_to(bx.cx().backend_type(layout)) bx.cx().type_ptr_to(bx.cx().backend_type(layout))
); );
PlaceRef::new_sized(llval, layout, layout.align.abi) PlaceRef::new_sized(llval, layout)
} }
} }
} }
......
...@@ -71,7 +71,7 @@ pub fn codegen_rvalue( ...@@ -71,7 +71,7 @@ pub fn codegen_rvalue(
scratch.storage_dead(&mut bx); scratch.storage_dead(&mut bx);
} }
OperandValue::Ref(llref, None, align) => { OperandValue::Ref(llref, None, align) => {
let source = PlaceRef::new_sized(llref, operand.layout, align); let source = PlaceRef::new_sized_aligned(llref, operand.layout, align);
base::coerce_unsized_into(&mut bx, source, dest); base::coerce_unsized_into(&mut bx, source, dest);
} }
OperandValue::Ref(_, Some(_), _) => { OperandValue::Ref(_, Some(_), _) => {
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册