block.rs 47.7 KB
Newer Older
1 2 3 4 5 6 7 8 9 10
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

11
use llvm::{self, BasicBlock};
12
use rustc::middle::lang_items;
13
use rustc::ty::{self, Ty, TypeFoldable};
14
use rustc::ty::layout::{self, LayoutOf};
E
Eduard Burtescu 已提交
15
use rustc::mir;
16
use rustc::mir::interpret::EvalErrorKind;
17
use abi::{Abi, ArgType, ArgTypeExt, FnType, FnTypeExt, LlvmType, PassMode};
18
use base;
19
use callee;
20
use builder::{Builder, MemFlags};
D
Denis Merigoux 已提交
21
use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_uint_big, C_undef, IntPredicate};
22
use consts;
23
use meth;
24
use monomorphize;
25
use type_of::LayoutLlvmExt;
26
use type_::Type;
27
use value::Value;
28

D
Denis Merigoux 已提交
29
use interfaces::BuilderMethods;
30

31
use syntax::symbol::Symbol;
E
est31 已提交
32
use syntax_pos::Pos;
33

34
use super::{FunctionCx, LocalRef};
35
use super::place::PlaceRef;
36
use super::operand::OperandRef;
37
use super::operand::OperandValue::{Pair, Ref, Immediate};
38

D
Denis Merigoux 已提交
39
impl FunctionCx<'a, 'll, 'tcx, &'ll Value> {
I
Irina Popa 已提交
40
    pub fn codegen_block(&mut self, bb: mir::BasicBlock) {
41
        let mut bx = self.build_block(bb);
42
        let data = &self.mir[bb];
43

I
Irina Popa 已提交
44
        debug!("codegen_block({:?}={:?})", bb, data);
45

46
        for statement in &data.statements {
I
Irina Popa 已提交
47
            bx = self.codegen_statement(bx, statement);
48 49
        }

I
Irina Popa 已提交
50
        self.codegen_terminator(bx, bb, data.terminator());
51 52
    }

I
Irina Popa 已提交
53
    fn codegen_terminator(&mut self,
54 55 56
                          mut bx: Builder<'a, 'll, 'tcx>,
                          bb: mir::BasicBlock,
                          terminator: &mir::Terminator<'tcx>)
57
    {
I
Irina Popa 已提交
58
        debug!("codegen_terminator: {:?}", terminator);
59

60
        // Create the cleanup bundle, if needed.
61
        let tcx = bx.tcx();
62 63 64 65
        let span = terminator.source_info.span;
        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
        let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());

M
Mark-Simulacrum 已提交
66 67
        let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
        let cleanup_bundle = funclet.map(|l| l.bundle());
68

69 70 71 72 73 74 75 76 77 78 79 80 81 82 83
        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
            let lltarget = this.blocks[target];
            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
            match (funclet_bb, target_funclet) {
                (None, None) => (lltarget, false),
                (Some(f), Some(t_f))
                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
                    => (lltarget, false),
                (None, Some(_)) => {
                    // jump *into* cleanup - need a landing pad if GNU
                    (this.landing_pad_to(target), false)
                }
                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
                (Some(_), Some(_)) => {
                    (this.landing_pad_to(target), true)
84 85
                }
            }
86
        };
87

88
        let llblock = |this: &mut Self, target: mir::BasicBlock| {
89 90 91 92 93 94 95 96 97 98 99 100 101
            let (lltarget, is_cleanupret) = lltarget(this, target);
            if is_cleanupret {
                // MSVC cross-funclet jump - need a trampoline

                debug!("llblock: creating cleanup trampoline for {:?}", target);
                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
                let trampoline = this.new_block(name);
                trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
                trampoline.llbb()
            } else {
                lltarget
            }
        };
102

103
        let funclet_br =
104
            |this: &mut Self, bx: Builder<'_, 'll, '_>, target: mir::BasicBlock| {
105 106 107 108 109 110 111 112 113
                let (lltarget, is_cleanupret) = lltarget(this, target);
                if is_cleanupret {
                    // micro-optimization: generate a `ret` rather than a jump
                    // to a trampoline.
                    bx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
                } else {
                    bx.br(lltarget);
                }
            };
114

115 116
        let do_call = |
            this: &mut Self,
117
            bx: Builder<'a, 'll, 'tcx>,
118
            fn_ty: FnType<'tcx, Ty<'tcx>>,
119 120
            fn_ptr: &'ll Value,
            llargs: &[&'ll Value],
121
            destination: Option<(ReturnDest<'tcx, &'ll Value>, mir::BasicBlock)>,
122 123 124
            cleanup: Option<mir::BasicBlock>
        | {
            if let Some(cleanup) = cleanup {
125
                let ret_bx = if let Some((_, target)) = destination {
126 127 128 129
                    this.blocks[target]
                } else {
                    this.unreachable_block()
                };
130
                let invokeret = bx.invoke(fn_ptr,
131 132 133 134
                                          &llargs,
                                          ret_bx,
                                          llblock(this, cleanup),
                                          cleanup_bundle);
135
                fn_ty.apply_attrs_callsite(&bx, invokeret);
136

137
                if let Some((ret_dest, target)) = destination {
138 139 140
                    let ret_bx = this.build_block(target);
                    this.set_debug_loc(&ret_bx, terminator.source_info);
                    this.store_return(&ret_bx, ret_dest, &fn_ty.ret, invokeret);
141 142
                }
            } else {
143
                let llret = bx.call(fn_ptr, &llargs, cleanup_bundle);
144
                fn_ty.apply_attrs_callsite(&bx, llret);
145 146 147 148 149 150 151
                if this.mir[bb].is_cleanup {
                    // Cleanup is always the cold path. Don't inline
                    // drop glue. Also, when there is a deeply-nested
                    // struct, there are "symmetry" issues that cause
                    // exponential inlining - see issue #41696.
                    llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
                }
152

153
                if let Some((ret_dest, target)) = destination {
154 155
                    this.store_return(&bx, ret_dest, &fn_ty.ret, llret);
                    funclet_br(this, bx, target);
156
                } else {
157
                    bx.unreachable();
158 159 160
                }
            }
        };
161

162
        self.set_debug_loc(&bx, terminator.source_info);
163
        match terminator.kind {
164
            mir::TerminatorKind::Resume => {
165
                if let Some(cleanup_pad) = cleanup_pad {
166
                    bx.cleanup_ret(cleanup_pad, None);
167
                } else {
168 169 170 171
                    let slot = self.get_personality_slot(&bx);
                    let lp0 = slot.project_field(&bx, 0).load(&bx).immediate();
                    let lp1 = slot.project_field(&bx, 1).load(&bx).immediate();
                    slot.storage_dead(&bx);
172

173
                    if !bx.sess().target.target.options.custom_unwind_resume {
174
                        let mut lp = C_undef(self.landing_pad_type());
175 176 177
                        lp = bx.insert_value(lp, lp0, 0);
                        lp = bx.insert_value(lp, lp1, 1);
                        bx.resume(lp);
M
Mark-Simulacrum 已提交
178
                    } else {
179 180
                        bx.call(bx.cx.eh_unwind_resume(), &[lp0], cleanup_bundle);
                        bx.unreachable();
M
Mark-Simulacrum 已提交
181
                    }
182 183 184
                }
            }

D
David Henningsson 已提交
185 186
            mir::TerminatorKind::Abort => {
                // Call core::intrinsics::abort()
187 188 189
                let fnname = bx.cx.get_intrinsic(&("llvm.trap"));
                bx.call(fnname, &[], None);
                bx.unreachable();
D
David Henningsson 已提交
190 191
            }

192
            mir::TerminatorKind::Goto { target } => {
193
                funclet_br(self, bx, target);
194 195
            }

196
            mir::TerminatorKind::SwitchInt { ref discr, switch_ty, ref values, ref targets } => {
I
Irina Popa 已提交
197
                let discr = self.codegen_operand(&bx, discr);
198 199
                if targets.len() == 2 {
                    // If there are two targets, emit br instead of switch
200 201
                    let lltrue = llblock(self, targets[0]);
                    let llfalse = llblock(self, targets[1]);
202 203 204 205 206 207 208 209
                    if switch_ty == bx.tcx().types.bool {
                        // Don't generate trivial icmps when switching on bool
                        if let [0] = values[..] {
                            bx.cond_br(discr.immediate(), llfalse, lltrue);
                        } else {
                            assert_eq!(&values[..], &[1]);
                            bx.cond_br(discr.immediate(), lltrue, llfalse);
                        }
210
                    } else {
211 212
                        let switch_llty = bx.cx.layout_of(switch_ty).immediate_llvm_type(bx.cx);
                        let llval = C_uint_big(switch_llty, values[0]);
213
                        let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval);
214
                        bx.cond_br(cmp, lltrue, llfalse);
215 216 217
                    }
                } else {
                    let (otherwise, targets) = targets.split_last().unwrap();
218
                    let switch = bx.switch(discr.immediate(),
219 220
                                           llblock(self, *otherwise),
                                           values.len());
221
                    let switch_llty = bx.cx.layout_of(switch_ty).immediate_llvm_type(bx.cx);
O
Oliver Schneider 已提交
222
                    for (&value, target) in values.iter().zip(targets) {
O
Oliver Schneider 已提交
223
                        let llval = C_uint_big(switch_llty, value);
224
                        let llbb = llblock(self, *target);
O
Oliver Schneider 已提交
225
                        bx.add_case(switch, llval, llbb)
226
                    }
227
                }
228 229
            }

230
            mir::TerminatorKind::Return => {
231
                let llval = match self.fn_ty.ret.mode {
232
                    PassMode::Ignore | PassMode::Indirect(..) => {
233
                        bx.ret_void();
234 235
                        return;
                    }
236

237
                    PassMode::Direct(_) | PassMode::Pair(..) => {
I
Irina Popa 已提交
238
                        let op = self.codegen_consume(&bx, &mir::Place::Local(mir::RETURN_PLACE));
239
                        if let Ref(llval, _, align) = op.val {
240
                            bx.load(llval, align)
241
                        } else {
242
                            op.immediate_or_packed_pair(&bx)
243
                        }
244 245 246
                    }

                    PassMode::Cast(cast_ty) => {
247
                        let op = match self.locals[mir::RETURN_PLACE] {
248 249
                            LocalRef::Operand(Some(op)) => op,
                            LocalRef::Operand(None) => bug!("use of return before def"),
I
Irina Popa 已提交
250
                            LocalRef::Place(cg_place) => {
251
                                OperandRef {
252
                                    val: Ref(cg_place.llval, None, cg_place.align),
I
Irina Popa 已提交
253
                                    layout: cg_place.layout
254 255
                                }
                            }
256
                            LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
257 258 259
                        };
                        let llslot = match op.val {
                            Immediate(_) | Pair(..) => {
260 261
                                let scratch = PlaceRef::alloca(&bx, self.fn_ty.ret.layout, "ret");
                                op.val.store(&bx, scratch);
262 263
                                scratch.llval
                            }
264
                            Ref(llval, _, align) => {
265
                                assert_eq!(align.abi(), op.layout.align.abi(),
266
                                           "return place is unaligned!");
267 268 269
                                llval
                            }
                        };
270 271
                        bx.load(
                            bx.pointercast(llslot, cast_ty.llvm_type(bx.cx).ptr_to()),
272
                            self.fn_ty.ret.layout.align)
273
                    }
274
                };
275
                bx.ret(llval);
276 277
            }

278
            mir::TerminatorKind::Unreachable => {
279
                bx.unreachable();
280 281
            }

282
            mir::TerminatorKind::Drop { ref location, target, unwind } => {
283
                let ty = location.ty(self.mir, bx.tcx()).to_ty(bx.tcx());
284
                let ty = self.monomorphize(&ty);
285
                let drop_fn = monomorphize::resolve_drop_in_place(bx.cx.tcx, ty);
286

A
Ariel Ben-Yehuda 已提交
287 288
                if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
                    // we don't actually need to drop anything.
289
                    funclet_br(self, bx, target);
A
Ariel Ben-Yehuda 已提交
290
                    return
291
                }
292

I
Irina Popa 已提交
293
                let place = self.codegen_place(&bx, location);
294 295 296 297 298 299 300 301
                let (args1, args2);
                let mut args = if let Some(llextra) = place.llextra {
                    args2 = [place.llval, llextra];
                    &args2[..]
                } else {
                    args1 = [place.llval];
                    &args1[..]
                };
302
                let (drop_fn, fn_ty) = match ty.sty {
V
varkor 已提交
303
                    ty::Dynamic(..) => {
304
                        let sig = drop_fn.fn_sig(bx.cx.tcx);
305 306 307 308
                        let sig = bx.tcx().normalize_erasing_late_bound_regions(
                            ty::ParamEnv::reveal_all(),
                            &sig,
                        );
309
                        let fn_ty = FnType::new_vtable(bx.cx, sig, &[]);
310
                        let vtable = args[1];
311
                        args = &args[..1];
312
                        (meth::DESTRUCTOR.get_fn(&bx, vtable, &fn_ty), fn_ty)
313 314
                    }
                    _ => {
315 316
                        (callee::get_fn(bx.cx, drop_fn),
                         FnType::of_instance(bx.cx, &drop_fn))
317
                    }
A
Ariel Ben-Yehuda 已提交
318
                };
319
                do_call(self, bx, fn_ty, drop_fn, args,
320
                        Some((ReturnDest::Nothing, target)),
321
                        unwind);
322 323
            }

324
            mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
I
Irina Popa 已提交
325
                let cond = self.codegen_operand(&bx, cond).immediate();
326
                let mut const_cond = common::const_to_opt_u128(cond, false).map(|c| c == 1);
327 328 329 330 331 332 333 334

                // This case can currently arise only from functions marked
                // with #[rustc_inherit_overflow_checks] and inlined from
                // another crate (mostly core::num generic/#[inline] fns),
                // while the current crate doesn't use overflow checks.
                // NOTE: Unlike binops, negation doesn't have its own
                // checked operation, just a comparison with the minimum
                // value, so we have to check for the assert message.
335
                if !bx.cx.check_overflow {
336
                    if let mir::interpret::EvalErrorKind::OverflowNeg = *msg {
337 338 339
                        const_cond = Some(expected);
                    }
                }
340

I
Irina Popa 已提交
341
                // Don't codegen the panic block if success if known.
342
                if const_cond == Some(expected) {
343
                    funclet_br(self, bx, target);
344 345 346
                    return;
                }

347
                // Pass the condition through llvm.expect for branch hinting.
348 349
                let expect = bx.cx.get_intrinsic(&"llvm.expect.i1");
                let cond = bx.call(expect, &[cond, C_bool(bx.cx, expected)], None);
350 351

                // Create the failure block and the conditional branch to it.
352 353
                let lltarget = llblock(self, target);
                let panic_block = self.new_block("panic");
354
                if expected {
355
                    bx.cond_br(cond, lltarget, panic_block.llbb());
356
                } else {
357
                    bx.cond_br(cond, panic_block.llbb(), lltarget);
358 359
                }

360 361 362
                // After this point, bx is the block for the call to panic.
                bx = panic_block;
                self.set_debug_loc(&bx, terminator.source_info);
363

364
                // Get the location information.
D
Donato Sciarra 已提交
365
                let loc = bx.sess().source_map().lookup_char_pos(span.lo());
366
                let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
367 368 369
                let filename = C_str_slice(bx.cx, filename);
                let line = C_u32(bx.cx, loc.line as u32);
                let col = C_u32(bx.cx, loc.col.to_usize() as u32 + 1);
370 371 372
                let align = tcx.data_layout.aggregate_align
                    .max(tcx.data_layout.i32_align)
                    .max(tcx.data_layout.pointer_align);
373 374

                // Put together the arguments to the panic entry point.
375
                let (lang_item, args) = match *msg {
376
                    EvalErrorKind::BoundsCheck { ref len, ref index } => {
I
Irina Popa 已提交
377 378
                        let len = self.codegen_operand(&mut bx, len).immediate();
                        let index = self.codegen_operand(&mut bx, index).immediate();
379

380 381
                        let file_line_col = C_struct(bx.cx, &[filename, line, col], false);
                        let file_line_col = consts::addr_of(bx.cx,
E
est31 已提交
382 383
                                                            file_line_col,
                                                            align,
384
                                                            Some("panic_bounds_check_loc"));
385
                        (lang_items::PanicBoundsCheckFnLangItem,
386
                         vec![file_line_col, index, len])
387
                    }
388 389
                    _ => {
                        let str = msg.description();
J
John Kåre Alsaker 已提交
390
                        let msg_str = Symbol::intern(str).as_str();
391 392
                        let msg_str = C_str_slice(bx.cx, msg_str);
                        let msg_file_line_col = C_struct(bx.cx,
393 394
                                                         &[msg_str, filename, line, col],
                                                         false);
395
                        let msg_file_line_col = consts::addr_of(bx.cx,
396 397
                                                                msg_file_line_col,
                                                                align,
398
                                                                Some("panic_loc"));
J
John Kåre Alsaker 已提交
399
                        (lang_items::PanicFnLangItem,
400
                         vec![msg_file_line_col])
J
John Kåre Alsaker 已提交
401
                    }
402 403 404
                };

                // Obtain the panic entry point.
405 406 407 408
                let def_id = common::langcall(bx.tcx(), Some(span), "", lang_item);
                let instance = ty::Instance::mono(bx.tcx(), def_id);
                let fn_ty = FnType::of_instance(bx.cx, &instance);
                let llfn = callee::get_fn(bx.cx, instance);
409

I
Irina Popa 已提交
410
                // Codegen the actual panic invoke/call.
411
                do_call(self, bx, fn_ty, llfn, &args, None, cleanup);
412 413
            }

414
            mir::TerminatorKind::DropAndReplace { .. } => {
I
Irina Popa 已提交
415
                bug!("undesugared DropAndReplace in codegen: {:?}", terminator);
416 417
            }

418 419 420 421 422 423 424
            mir::TerminatorKind::Call {
                ref func,
                ref args,
                ref destination,
                cleanup,
                from_hir_call: _
            } => {
425
                // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
I
Irina Popa 已提交
426
                let callee = self.codegen_operand(&bx, func);
427

428
                let (instance, mut llfn) = match callee.layout.ty.sty {
V
varkor 已提交
429
                    ty::FnDef(def_id, substs) => {
430
                        (Some(ty::Instance::resolve(bx.cx.tcx,
431
                                                    ty::ParamEnv::reveal_all(),
432 433
                                                    def_id,
                                                    substs).unwrap()),
434
                         None)
435
                    }
V
varkor 已提交
436
                    ty::FnPtr(_) => {
437
                        (None, Some(callee.immediate()))
438
                    }
439
                    _ => bug!("{} is not callable", callee.layout.ty)
440
                };
441
                let def = instance.map(|i| i.def);
442
                let sig = callee.layout.ty.fn_sig(bx.tcx());
443 444 445 446
                let sig = bx.tcx().normalize_erasing_late_bound_regions(
                    ty::ParamEnv::reveal_all(),
                    &sig,
                );
447
                let abi = sig.abi;
J
James Miller 已提交
448

I
Irina Popa 已提交
449
                // Handle intrinsics old codegen wants Expr's for, ourselves.
450 451
                let intrinsic = match def {
                    Some(ty::InstanceDef::Intrinsic(def_id))
452
                        => Some(bx.tcx().item_name(def_id).as_str()),
453 454
                    _ => None
                };
455
                let intrinsic = intrinsic.as_ref().map(|s| &s[..]);
456 457

                if intrinsic == Some("transmute") {
458 459
                    if let Some(destination_ref) = destination.as_ref() {
                        let &(ref dest, target) = destination_ref;
460
                        self.codegen_transmute(&bx, &args[0], dest);
461 462 463 464 465 466 467 468
                        funclet_br(self, bx, target);
                    } else {
                        // If we are trying to transmute to an uninhabited type,
                        // it is likely there is no allotted destination. In fact,
                        // transmuting to an uninhabited type is UB, which means
                        // we can do what we like. Here, we declare that transmuting
                        // into an uninhabited type is impossible, so anything following
                        // it must be unreachable.
469
                        assert_eq!(bx.cx.layout_of(sig.output()).abi, layout::Abi::Uninhabited);
470 471
                        bx.unreachable();
                    }
472 473
                    return;
                }
474

475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494
                let extra_args = &args[sig.inputs().len()..];
                let extra_args = extra_args.iter().map(|op_arg| {
                    let op_ty = op_arg.ty(self.mir, bx.tcx());
                    self.monomorphize(&op_ty)
                }).collect::<Vec<_>>();

                let fn_ty = match def {
                    Some(ty::InstanceDef::Virtual(..)) => {
                        FnType::new_vtable(bx.cx, sig, &extra_args)
                    }
                    Some(ty::InstanceDef::DropGlue(_, None)) => {
                        // empty drop glue - a nop.
                        let &(_, target) = destination.as_ref().unwrap();
                        funclet_br(self, bx, target);
                        return;
                    }
                    _ => FnType::new(bx.cx, sig, &extra_args)
                };

                // emit a panic instead of instantiating an uninhabited type
495
                if (intrinsic == Some("init") || intrinsic == Some("uninit")) &&
496
                    fn_ty.ret.layout.abi.is_uninhabited()
497
                {
J
Jorge Aparicio 已提交
498
                    let loc = bx.sess().source_map().lookup_char_pos(span.lo());
499 500 501 502 503 504 505 506
                    let filename = Symbol::intern(&loc.file.name.to_string()).as_str();
                    let filename = C_str_slice(bx.cx, filename);
                    let line = C_u32(bx.cx, loc.line as u32);
                    let col = C_u32(bx.cx, loc.col.to_usize() as u32 + 1);
                    let align = tcx.data_layout.aggregate_align
                        .max(tcx.data_layout.i32_align)
                        .max(tcx.data_layout.pointer_align);

J
Jorge Aparicio 已提交
507 508 509 510 511 512
                    let str = format!(
                        "Attempted to instantiate uninhabited type {} using mem::{}",
                        sig.output(),
                        if intrinsic == Some("init") { "zeroed" } else { "uninitialized" }
                    );
                    let msg_str = Symbol::intern(&str).as_str();
513 514
                    let msg_str = C_str_slice(bx.cx, msg_str);
                    let msg_file_line_col = C_struct(bx.cx,
515 516
                                                     &[msg_str, filename, line, col],
                                                     false);
517 518 519 520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539 540 541
                    let msg_file_line_col = consts::addr_of(bx.cx,
                                                            msg_file_line_col,
                                                            align,
                                                            Some("panic_loc"));

                    // Obtain the panic entry point.
                    let def_id =
                        common::langcall(bx.tcx(), Some(span), "", lang_items::PanicFnLangItem);
                    let instance = ty::Instance::mono(bx.tcx(), def_id);
                    let fn_ty = FnType::of_instance(bx.cx, &instance);
                    let llfn = callee::get_fn(bx.cx, instance);

                    // Codegen the actual panic invoke/call.
                    do_call(
                        self,
                        bx,
                        fn_ty,
                        llfn,
                        &[msg_file_line_col],
                        destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
                        cleanup,
                    );
                    return;
                }

542 543 544
                // The arguments we'll be passing. Plus one to account for outptr, if used.
                let arg_count = fn_ty.args.len() + fn_ty.ret.is_indirect() as usize;
                let mut llargs = Vec::with_capacity(arg_count);
545 546

                // Prepare the return value destination
547
                let ret_dest = if let Some((ref dest, _)) = *destination {
548
                    let is_intrinsic = intrinsic.is_some();
549
                    self.make_return_dest(&bx, dest, &fn_ty.ret, &mut llargs,
550
                                          is_intrinsic)
551
                } else {
552
                    ReturnDest::Nothing
553 554
                };

555
                if intrinsic.is_some() && intrinsic != Some("drop_in_place") {
I
Irina Popa 已提交
556
                    use intrinsic::codegen_intrinsic_call;
557

558 559
                    let dest = match ret_dest {
                        _ if fn_ty.ret.is_indirect() => llargs[0],
560
                        ReturnDest::Nothing => {
561
                            C_undef(fn_ty.ret.memory_ty(bx.cx).ptr_to())
J
James Miller 已提交
562
                        }
563
                        ReturnDest::IndirectOperand(dst, _) |
564
                        ReturnDest::Store(dst) => dst.llval,
565 566 567
                        ReturnDest::DirectOperand(_) =>
                            bug!("Cannot use direct operand with an intrinsic call")
                    };
J
James Miller 已提交
568

569 570 571 572 573 574 575
                    let args: Vec<_> = args.iter().enumerate().map(|(i, arg)| {
                        // The indices passed to simd_shuffle* in the
                        // third argument must be constant. This is
                        // checked by const-qualification, which also
                        // promotes any complex rvalues to constants.
                        if i == 2 && intrinsic.unwrap().starts_with("simd_shuffle") {
                            match *arg {
576 577 578
                                // The shuffle array argument is usually not an explicit constant,
                                // but specified directly in the code. This means it gets promoted
                                // and we can then extract the value by evaluating the promoted.
579 580 581 582 583 584 585 586 587 588 589 590 591 592 593 594 595 596 597 598
                                mir::Operand::Copy(mir::Place::Promoted(box(index, ty))) |
                                mir::Operand::Move(mir::Place::Promoted(box(index, ty))) => {
                                    let param_env = ty::ParamEnv::reveal_all();
                                    let cid = mir::interpret::GlobalId {
                                        instance: self.instance,
                                        promoted: Some(index),
                                    };
                                    let c = bx.tcx().const_eval(param_env.and(cid));
                                    let (llval, ty) = self.simd_shuffle_indices(
                                        &bx,
                                        terminator.source_info.span,
                                        ty,
                                        c,
                                    );
                                    return OperandRef {
                                        val: Immediate(llval),
                                        layout: bx.cx.layout_of(ty),
                                    };

                                },
599 600
                                mir::Operand::Copy(_) |
                                mir::Operand::Move(_) => {
601 602 603
                                    span_bug!(span, "shuffle indices must be constant");
                                }
                                mir::Operand::Constant(ref constant) => {
604
                                    let c = self.eval_mir_constant(&bx, constant);
605
                                    let (llval, ty) = self.simd_shuffle_indices(
O
Oliver Schneider 已提交
606
                                        &bx,
607 608 609
                                        constant.span,
                                        constant.ty,
                                        c,
O
Oliver Schneider 已提交
610
                                    );
611
                                    return OperandRef {
O
Oliver Schneider 已提交
612 613
                                        val: Immediate(llval),
                                        layout: bx.cx.layout_of(ty)
614 615 616 617 618
                                    };
                                }
                            }
                        }

I
Irina Popa 已提交
619
                        self.codegen_operand(&bx, arg)
620 621 622
                    }).collect();


623
                    let callee_ty = instance.as_ref().unwrap().ty(bx.cx.tcx);
I
Irina Popa 已提交
624
                    codegen_intrinsic_call(&bx, callee_ty, &fn_ty, &args, dest,
625
                                           terminator.source_info.span);
626 627

                    if let ReturnDest::IndirectOperand(dst, _) = ret_dest {
628
                        self.store_return(&bx, ret_dest, &fn_ty.ret, dst.llval);
629 630 631
                    }

                    if let Some((_, target)) = *destination {
632
                        funclet_br(self, bx, target);
633
                    } else {
634
                        bx.unreachable();
635
                    }
636 637 638 639

                    return;
                }

640 641 642 643 644 645 646 647
                // Split the rust-call tupled arguments off.
                let (first_args, untuple) = if abi == Abi::RustCall && !args.is_empty() {
                    let (tup, args) = args.split_last().unwrap();
                    (args, Some(tup))
                } else {
                    (&args[..], None)
                };

648
                'make_args: for (i, arg) in first_args.iter().enumerate() {
I
Irina Popa 已提交
649
                    let mut op = self.codegen_operand(&bx, arg);
650

651
                    if let (0, Some(ty::InstanceDef::Virtual(_, idx))) = (i, def) {
652
                        if let Pair(..) = op.val {
653 654 655 656 657 658
                            // In the case of Rc<Self>, we need to explicitly pass a
                            // *mut RcBox<Self> with a Scalar (not ScalarPair) ABI. This is a hack
                            // that is understood elsewhere in the compiler as a method on
                            // `dyn Trait`.
                            // To get a `*mut RcBox<Self>`, we just keep unwrapping newtypes until
                            // we get a value of a built-in pointer type
659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687
                            'descend_newtypes: while !op.layout.ty.is_unsafe_ptr()
                                            && !op.layout.ty.is_region_ptr()
                            {
                                'iter_fields: for i in 0..op.layout.fields.count() {
                                    let field = op.extract_field(&bx, i);
                                    if !field.layout.is_zst() {
                                        // we found the one non-zero-sized field that is allowed
                                        // now find *its* non-zero-sized field, or stop if it's a
                                        // pointer
                                        op = field;
                                        continue 'descend_newtypes
                                    }
                                }

                                span_bug!(span, "receiver has no non-zero-sized fields {:?}", op);
                            }

                            // now that we have `*dyn Trait` or `&dyn Trait`, split it up into its
                            // data pointer and vtable. Look up the method in the vtable, and pass
                            // the data pointer as the first argument
                            match op.val {
                                Pair(data_ptr, meta) => {
                                    llfn = Some(meth::VirtualIndex::from_index(idx)
                                        .get_fn(&bx, meta, &fn_ty));
                                    llargs.push(data_ptr);
                                    continue 'make_args
                                }
                                other => bug!("expected a Pair, got {:?}", other)
                            }
688 689 690
                        } else if let Ref(data_ptr, Some(meta), _) = op.val {
                            // by-value dynamic dispatch
                            llfn = Some(meth::VirtualIndex::from_index(idx)
691
                                .get_fn(&bx, meta, &fn_ty));
692 693
                            llargs.push(data_ptr);
                            continue;
694 695
                        } else {
                            span_bug!(span, "can't codegen a virtual call on {:?}", op);
696 697
                        }
                    }
698 699 700

                    // The callee needs to own the argument memory if we pass it
                    // by-ref, so make a local copy of non-immediate constants.
701
                    match (arg, op.val) {
702 703
                        (&mir::Operand::Copy(_), Ref(_, None, _)) |
                        (&mir::Operand::Constant(_), Ref(_, None, _)) => {
704 705
                            let tmp = PlaceRef::alloca(&bx, op.layout, "const");
                            op.val.store(&bx, tmp);
706
                            op.val = Ref(tmp.llval, None, tmp.align);
707
                        }
708
                        _ => {}
709 710
                    }

I
Irina Popa 已提交
711
                    self.codegen_argument(&bx, op, &mut llargs, &fn_ty.args[i]);
712 713
                }
                if let Some(tup) = untuple {
I
Irina Popa 已提交
714
                    self.codegen_arguments_untupled(&bx, tup, &mut llargs,
715
                        &fn_ty.args[first_args.len()..])
716 717
                }

718 719
                let fn_ptr = match (llfn, instance) {
                    (Some(llfn), _) => llfn,
720
                    (None, Some(instance)) => callee::get_fn(bx.cx, instance),
721
                    _ => span_bug!(span, "no llfn for call"),
722
                };
723

724
                do_call(self, bx, fn_ty, fn_ptr, &llargs,
725
                        destination.as_ref().map(|&(_, target)| (ret_dest, target)),
726
                        cleanup);
727
            }
J
John Kåre Alsaker 已提交
728
            mir::TerminatorKind::GeneratorDrop |
I
Irina Popa 已提交
729
            mir::TerminatorKind::Yield { .. } => bug!("generator ops in codegen"),
730
            mir::TerminatorKind::FalseEdges { .. } |
I
Irina Popa 已提交
731
            mir::TerminatorKind::FalseUnwind { .. } => bug!("borrowck false edges in codegen"),
732 733 734
        }
    }

I
Irina Popa 已提交
735
    /// Lower a single call argument `op` into the flat list of LLVM values
    /// (`llargs`) for a call, following the ABI classification in `arg`.
    fn codegen_argument(&mut self,
                      bx: &Builder<'a, 'll, 'tcx>,
                      op: OperandRef<'tcx, &'ll Value>,
                      llargs: &mut Vec<&'ll Value>,
                      arg: &ArgType<'tcx, Ty<'tcx>>) {
        // Fill padding with undef value, where applicable.
        if let Some(ty) = arg.pad {
            llargs.push(C_undef(ty.llvm_type(bx.cx)));
        }

        // Ignored arguments contribute no LLVM values at all.
        if arg.is_ignore() {
            return;
        }

        // A scalar pair is passed as two separate LLVM arguments.
        if let PassMode::Pair(..) = arg.mode {
            match op.val {
                Pair(a, b) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for pair argument", op)
            }
        } else if arg.is_unsized_indirect() {
            // Unsized values are passed by-ref as (data pointer, metadata).
            match op.val {
                Ref(a, Some(b), _) => {
                    llargs.push(a);
                    llargs.push(b);
                    return;
                }
                _ => bug!("codegen_argument: {:?} invalid for unsized indirect argument", op)
            }
        }

        // Force by-ref if we have to load through a cast pointer.
        let (mut llval, align, by_ref) = match op.val {
            Immediate(_) | Pair(..) => {
                match arg.mode {
                    PassMode::Indirect(..) | PassMode::Cast(_) => {
                        // Spill the immediate to a stack slot so it can be
                        // passed (or reloaded through a cast) by pointer.
                        let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                        op.val.store(bx, scratch);
                        (scratch.llval, scratch.align, true)
                    }
                    _ => {
                        (op.immediate_or_packed_pair(bx), arg.layout.align, false)
                    }
                }
            }
            Ref(llval, _, align) => {
                if arg.is_indirect() && align.abi() < arg.layout.align.abi() {
                    // `foo(packed.large_field)`. We can't pass the (unaligned) field directly. I
                    // think that ATM (Rust 1.16) we only pass temporaries, but we shouldn't
                    // have scary latent bugs around.

                    let scratch = PlaceRef::alloca(bx, arg.layout, "arg");
                    base::memcpy_ty(bx, scratch.llval, scratch.align, llval, align,
                                    op.layout, MemFlags::empty());
                    (scratch.llval, scratch.align, true)
                } else {
                    (llval, align, true)
                }
            }
        };

        if by_ref && !arg.is_indirect() {
            // Have to load the argument, maybe while casting it.
            if let PassMode::Cast(ty) = arg.mode {
                llval = bx.load(bx.pointercast(llval, ty.llvm_type(bx.cx).ptr_to()),
                                align.min(arg.layout.align));
            } else {
                // We can't use `PlaceRef::load` here because the argument
                // may have a type we don't treat as immediate, but the ABI
                // used for this call is passing it by-value. In that case,
                // the load would just produce `OperandValue::Ref` instead
                // of the `OperandValue::Immediate` we need for the call.
                llval = bx.load(llval, align);
                if let layout::Abi::Scalar(ref scalar) = arg.layout.abi {
                    if scalar.is_bool() {
                        bx.range_metadata(llval, 0..2);
                    }
                }
                // We store bools as i8 so we need to truncate to i1.
                llval = base::to_immediate(bx, llval, arg.layout);
            }
        }

        llargs.push(llval);
    }

I
Irina Popa 已提交
824
    fn codegen_arguments_untupled(&mut self,
825 826 827 828
                                  bx: &Builder<'a, 'll, 'tcx>,
                                  operand: &mir::Operand<'tcx>,
                                  llargs: &mut Vec<&'ll Value>,
                                  args: &[ArgType<'tcx, Ty<'tcx>>]) {
I
Irina Popa 已提交
829
        let tuple = self.codegen_operand(bx, operand);
830

831
        // Handle both by-ref and immediate tuples.
832
        if let Ref(llval, None, align) = tuple.val {
833
            let tuple_ptr = PlaceRef::new_sized(llval, tuple.layout, align);
834
            for i in 0..tuple.layout.fields.count() {
835
                let field_ptr = tuple_ptr.project_field(bx, i);
I
Irina Popa 已提交
836
                self.codegen_argument(bx, field_ptr.load(bx), llargs, &args[i]);
837
            }
838
        } else if let Ref(_, Some(_), _) = tuple.val {
839
            bug!("closure arguments must be sized")
840 841 842
        } else {
            // If the tuple is immediate, the elements are as well.
            for i in 0..tuple.layout.fields.count() {
843
                let op = tuple.extract_field(bx, i);
I
Irina Popa 已提交
844
                self.codegen_argument(bx, op, llargs, &args[i]);
845
            }
846 847 848
        }
    }

849 850
    fn get_personality_slot(
        &mut self,
851
        bx: &Builder<'a, 'll, 'tcx>
852
    ) -> PlaceRef<'tcx, &'ll Value> {
853
        let cx = bx.cx;
854
        if let Some(slot) = self.personality_slot {
855 856
            slot
        } else {
857 858 859
            let layout = cx.layout_of(cx.tcx.intern_tup(&[
                cx.tcx.mk_mut_ptr(cx.tcx.types.u8),
                cx.tcx.types.i32
A
Andrew Cann 已提交
860
            ]));
861
            let slot = PlaceRef::alloca(bx, layout, "personalityslot");
862
            self.personality_slot = Some(slot);
863
            slot
864 865 866
        }
    }

867
    /// Return the landingpad wrapper around the given basic block
    ///
    /// No-op in MSVC SEH scheme.
    fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> &'ll BasicBlock {
        // Landing pads are memoized per target block; reuse if present.
        if let Some(block) = self.landing_pads[target_bb] {
            return block;
        }

        // Build the landing pad once and cache it for later requests.
        let block = self.blocks[target_bb];
        let landing_pad = self.landing_pad_uncached(block);
        self.landing_pads[target_bb] = Some(landing_pad);
        landing_pad
    }

881
    /// Emit a fresh "cleanup" block containing an LLVM `landingpad`
    /// instruction, spill its result into the personality slot, and branch
    /// on to `target_bb`.
    fn landing_pad_uncached(&mut self, target_bb: &'ll BasicBlock) -> &'ll BasicBlock {
        // MSVC SEH does not use landing pads; reaching here under that
        // scheme means an earlier pass failed to insert the pad.
        if base::wants_msvc_seh(self.cx.sess()) {
            span_bug!(self.mir.span, "landing pad was not inserted?")
        }

        let bx = self.new_block("cleanup");

        // landingpad needs the personality function and a `{ i8*, i32 }`
        // result type; mark it as a cleanup pad.
        let llpersonality = self.cx.eh_personality();
        let llretty = self.landing_pad_type();
        let lp = bx.landing_pad(llretty, llpersonality, 1);
        bx.set_cleanup(lp);

        // Store both extracted landingpad values into the shared slot.
        let slot = self.get_personality_slot(&bx);
        slot.storage_live(&bx);
        Pair(bx.extract_value(lp, 0), bx.extract_value(lp, 1)).store(&bx, slot);

        bx.br(target_bb);
        bx.llbb()
    }

901
    fn landing_pad_type(&self) -> &'ll Type {
902 903
        let cx = self.cx;
        Type::struct_(cx, &[Type::i8p(cx), Type::i32(cx)], false)
904 905
    }

906
    /// Return this function's shared `unreachable` block, creating it the
    /// first time it is requested.
    fn unreachable_block(&mut self) -> &'ll BasicBlock {
        if let Some(llbb) = self.unreachable_block {
            return llbb;
        }
        // Lazily build a block holding a single `unreachable` terminator
        // and cache it for subsequent calls.
        let block = self.new_block("unreachable");
        block.unreachable();
        let llbb = block.llbb();
        self.unreachable_block = Some(llbb);
        llbb
    }

915
    /// Create a new basic block named `name` in this function (`self.llfn`)
    /// and return a builder for it; delegates to `Builder::new_block`.
    pub fn new_block(&self, name: &str) -> Builder<'a, 'll, 'tcx> {
        Builder::new_block(self.cx, self.llfn, name)
    }

919
    pub fn build_block(&self, bb: mir::BasicBlock) -> Builder<'a, 'll, 'tcx> {
920 921 922
        let bx = Builder::with_cx(self.cx);
        bx.position_at_end(self.blocks[bb]);
        bx
923 924
    }

925
    /// Decide where a call's return value should be written, pushing the
    /// out-pointer onto `llargs` when the ABI returns indirectly.
    fn make_return_dest(&mut self, bx: &Builder<'a, 'll, 'tcx>,
                        dest: &mir::Place<'tcx>, fn_ret: &ArgType<'tcx, Ty<'tcx>>,
                        llargs: &mut Vec<&'ll Value>, is_intrinsic: bool)
                        -> ReturnDest<'tcx, &'ll Value> {
        // If the return is ignored, we can just return a do-nothing ReturnDest
        if fn_ret.is_ignore() {
            return ReturnDest::Nothing;
        }
        let dest = if let mir::Place::Local(index) = *dest {
            match self.locals[index] {
                LocalRef::Place(dest) => dest,
                LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                LocalRef::Operand(None) => {
                    // Handle temporary places, specifically Operand ones, as
                    // they don't have allocas
                    return if fn_ret.is_indirect() {
                        // Odd, but possible, case, we have an operand temporary,
                        // but the calling convention has an indirect return.
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        // The callee writes through this pointer.
                        llargs.push(tmp.llval);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else if is_intrinsic {
                        // Currently, intrinsics always need a location to store
                        // the result. so we create a temporary alloca for the
                        // result
                        let tmp = PlaceRef::alloca(bx, fn_ret.layout, "tmp_ret");
                        tmp.storage_live(bx);
                        ReturnDest::IndirectOperand(tmp, index)
                    } else {
                        ReturnDest::DirectOperand(index)
                    };
                }
                LocalRef::Operand(Some(_)) => {
                    bug!("place local already assigned to");
                }
            }
        } else {
            // Destination is a general place: codegen it directly.
            self.codegen_place(bx, dest)
        };
        if fn_ret.is_indirect() {
            if dest.align.abi() < dest.layout.align.abi() {
                // Currently, MIR code generation does not create calls
                // that store directly to fields of packed structs (in
                // fact, the calls it creates write only to temps),
                //
                // If someone changes that, please update this code path
                // to create a temporary.
                span_bug!(self.mir.span, "can't directly store to unaligned value");
            }
            llargs.push(dest.llval);
            ReturnDest::Nothing
        } else {
            ReturnDest::Store(dest)
        }
    }

982
    /// Codegen `dst = transmute(src)`, handling destinations that are
    /// operand locals without a backing alloca.
    fn codegen_transmute(&mut self, bx: &Builder<'a, 'll, 'tcx>,
                         src: &mir::Operand<'tcx>,
                         dst: &mir::Place<'tcx>) {
        if let mir::Place::Local(index) = *dst {
            match self.locals[index] {
                LocalRef::Place(place) => self.codegen_transmute_into(bx, src, place),
                LocalRef::UnsizedPlace(_) => bug!("transmute must not involve unsized locals"),
                LocalRef::Operand(None) => {
                    // The local has no alloca: route the transmute through a
                    // short-lived stack temporary, then load it back as an
                    // operand and record it in `self.locals`.
                    let dst_layout = bx.cx.layout_of(self.monomorphized_place_ty(dst));
                    assert!(!dst_layout.ty.has_erasable_regions());
                    let place = PlaceRef::alloca(bx, dst_layout, "transmute_temp");
                    place.storage_live(bx);
                    self.codegen_transmute_into(bx, src, place);
                    let op = place.load(bx);
                    place.storage_dead(bx);
                    self.locals[index] = LocalRef::Operand(Some(op));
                }
                LocalRef::Operand(Some(op)) => {
                    // Only zero-sized operands may be "re-assigned".
                    assert!(op.layout.is_zst(),
                            "assigning to initialized SSAtemp");
                }
            }
        } else {
            let dst = self.codegen_place(bx, dst);
            self.codegen_transmute_into(bx, src, dst);
        }
    }

1010
    fn codegen_transmute_into(&mut self, bx: &Builder<'a, 'll, 'tcx>,
1011
                              src: &mir::Operand<'tcx>,
1012
                              dst: PlaceRef<'tcx, &'ll Value>) {
I
Irina Popa 已提交
1013
        let src = self.codegen_operand(bx, src);
1014 1015
        let llty = src.layout.llvm_type(bx.cx);
        let cast_ptr = bx.pointercast(dst.llval, llty.ptr_to());
1016
        let align = src.layout.align.min(dst.layout.align);
1017
        src.val.store(bx, PlaceRef::new_sized(cast_ptr, src.layout, align));
1018 1019
    }

1020

1021 1022
    // Stores the return value of a function call into its final location.
    fn store_return(&mut self,
                    bx: &Builder<'a, 'll, 'tcx>,
                    dest: ReturnDest<'tcx, &'ll Value>,
                    ret_ty: &ArgType<'tcx, Ty<'tcx>>,
                    llval: &'ll Value) {
        use self::ReturnDest::*;

        match dest {
            // Indirect return or ignored result: nothing left to do.
            Nothing => (),
            // Write the value into the destination place per the ABI.
            Store(dst) => ret_ty.store(bx, llval, dst),
            IndirectOperand(tmp, index) => {
                // The callee already wrote through `tmp` (its pointer was
                // passed as an out-argument); load it back and promote the
                // local to an operand.
                let op = tmp.load(bx);
                tmp.storage_dead(bx);
                self.locals[index] = LocalRef::Operand(Some(op));
            }
            DirectOperand(index) => {
                // If there is a cast, we have to store and reload.
                let op = if let PassMode::Cast(_) = ret_ty.mode {
                    let tmp = PlaceRef::alloca(bx, ret_ty.layout, "tmp_ret");
                    tmp.storage_live(bx);
                    ret_ty.store(bx, llval, tmp);
                    let op = tmp.load(bx);
                    tmp.storage_dead(bx);
                    op
                } else {
                    OperandRef::from_immediate_or_packed_pair(bx, llval, ret_ty.layout)
                };
                self.locals[index] = LocalRef::Operand(Some(op));
            }
        }
    }
}

1055
/// Describes where `store_return` should put a call's return value.
enum ReturnDest<'tcx, V> {
    // Do nothing, the return value is indirect or ignored
    Nothing,
    // Store the return value to the pointer
    Store(PlaceRef<'tcx, V>),
    // Stores an indirect return value to an operand local place
    IndirectOperand(PlaceRef<'tcx, V>, mir::Local),
    // Stores a direct return value to an operand local place
    DirectOperand(mir::Local)
}