// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use libc::c_uint;
use llvm::{self, ValueRef, BasicBlockRef};
use llvm::debuginfo::DIScope;
use rustc::ty::{self, TypeFoldable};
use rustc::ty::layout::{LayoutOf, FullLayout};
use rustc::mir::{self, Mir};
use rustc::ty::subst::Substs;
use rustc::infer::TransNormalize;
use rustc::session::config::FullDebugInfo;
use base;
use builder::Builder;
use common::{self, CrateContext, Funclet};
use debuginfo::{self, declare_local, VariableAccess, VariableKind, FunctionDebugContext};
use monomorphize::Instance;
use abi::{ArgAttribute, FnType};
use type_of::{self, LayoutLlvmExt};

use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span};
use syntax::symbol::keywords;

use std::iter;

use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};

pub use self::constant::trans_static_initializer;

use self::analyze::CleanupKind;
use self::lvalue::{Alignment, LvalueRef};
use rustc::mir::traversal;

use self::operand::{OperandRef, OperandValue};

/// Master context for translating MIR.
pub struct MirContext<'a, 'tcx:'a> {
    mir: &'a mir::Mir<'tcx>,

    debug_context: debuginfo::FunctionDebugContext,

    llfn: ValueRef,

    ccx: &'a CrateContext<'a, 'tcx>,

    fn_ty: FnType<'tcx>,

    /// When unwinding is initiated, we have to store this personality
    /// value somewhere so that we can load it and re-use it in the
    /// resume instruction. The personality is (afaik) some kind of
    /// value used for C++ unwinding, which must filter by type: we
    /// don't really care about it very much. Anyway, this value
    /// contains an alloca into which the personality is stored and
    /// then later loaded when generating the DIVERGE_BLOCK.
    personality_slot: Option<LvalueRef<'tcx>>,

    /// A `Block` for each MIR `BasicBlock`
    blocks: IndexVec<mir::BasicBlock, BasicBlockRef>,

    /// The funclet status of each basic block
    cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,

    /// When targeting MSVC, this stores the cleanup info for each funclet
    /// BB. This is initialized as we compute the funclets' head block in RPO.
    funclets: &'a IndexVec<mir::BasicBlock, Option<Funclet>>,

    /// This stores the landing-pad block for a given BB, computed lazily on GNU
    /// and eagerly on MSVC.
    landing_pads: IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,

    /// Cached unreachable block
    unreachable_block: Option<BasicBlockRef>,

    /// The location where each MIR arg/var/tmp/ret is stored. This is
    /// usually an `LvalueRef` representing an alloca, but not always:
    /// sometimes we can skip the alloca and just store the value
    /// directly using an `OperandRef`, which makes for tighter LLVM
    /// IR. The conditions for using an `OperandRef` are as follows:
    ///
    /// - the type of the local must be judged "immediate" by `type_is_immediate`
    /// - the operand must never be referenced indirectly
    ///     - we should not take its address using the `&` operator
    ///     - nor should it appear in an lvalue path like `tmp.a`
    /// - the operand must be defined by an rvalue that can generate immediate
    ///   values
    ///
    /// Avoiding allocs can also be important for certain intrinsics,
    /// notably `expect`.
    locals: IndexVec<mir::Local, LocalRef<'tcx>>,

    /// Debug information for MIR scopes.
    scopes: IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,

    /// If this function is being monomorphized, this contains the type substitutions used.
    param_substs: &'tcx Substs<'tcx>,
}

impl<'a, 'tcx> MirContext<'a, 'tcx> {
    pub fn monomorphize<T>(&self, value: &T) -> T
        where T: TransNormalize<'tcx>
    {
        self.ccx.tcx().trans_apply_param_substs(self.param_substs, value)
    }

    pub fn set_debug_loc(&mut self, bcx: &Builder, source_info: mir::SourceInfo) {
        let (scope, span) = self.debug_loc(source_info);
        debuginfo::set_source_location(&self.debug_context, bcx, scope, span);
    }
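    // `debug_loc` below maps a MIR `SourceInfo` to a debuginfo scope and span.
    // For code produced by macro expansion it reports the span of the outermost
    // expansion site (e.g. the macro invocation itself) rather than the
    // macro-internal spans, unless the crate is built with `-Z debug-macros`.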

    pub fn debug_loc(&mut self, source_info: mir::SourceInfo) -> (DIScope, Span) {
        // Bail out if debug info emission is not enabled.
        match self.debug_context {
            FunctionDebugContext::DebugInfoDisabled |
            FunctionDebugContext::FunctionWithoutDebugInfo => {
                return (self.scopes[source_info.scope].scope_metadata, source_info.span);
            }
            FunctionDebugContext::RegularContext(_) => {}
        }

        // In order to have good line stepping behavior in the debugger, we overwrite debug
        // locations of macro expansions with that of the outermost expansion site
        // (unless the crate is being compiled with `-Z debug-macros`).
        if source_info.span.ctxt() == NO_EXPANSION ||
           self.ccx.sess().opts.debugging_opts.debug_macros {
            let scope = self.scope_metadata_for_loc(source_info.scope, source_info.span.lo());
            (scope, source_info.span)
        } else {
            // Walk up the macro expansion chain until we reach a non-expanded span.
            // We also stop at the function body level because no line stepping can occur
            // at the level above that.
            let mut span = source_info.span;
            while span.ctxt() != NO_EXPANSION && span.ctxt() != self.mir.span.ctxt() {
                if let Some(info) = span.ctxt().outer().expn_info() {
                    span = info.call_site;
                } else {
                    break;
                }
            }
            let scope = self.scope_metadata_for_loc(source_info.scope, span.lo());
            // Use span of the outermost expansion site, while keeping the original lexical scope.
            (scope, span)
        }
    }

    // DILocations inherit source file name from the parent DIScope.  Due to macro expansions
    // it may so happen that the current span belongs to a different file than the DIScope
    // corresponding to span's containing visibility scope.  If so, we need to create a DIScope
    // "extension" into that file.
    fn scope_metadata_for_loc(&self, scope_id: mir::VisibilityScope, pos: BytePos)
                               -> llvm::debuginfo::DIScope {
        let scope_metadata = self.scopes[scope_id].scope_metadata;
        if pos < self.scopes[scope_id].file_start_pos ||
           pos >= self.scopes[scope_id].file_end_pos {
            let cm = self.ccx.sess().codemap();
            let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate;
            debuginfo::extend_scope_to_file(self.ccx,
                                            scope_metadata,
                                            &cm.lookup_char_pos(pos).file,
                                            defining_crate)
        } else {
            scope_metadata
        }
    }
}

enum LocalRef<'tcx> {
    Lvalue(LvalueRef<'tcx>),
    Operand(Option<OperandRef<'tcx>>),
}

impl<'a, 'tcx> LocalRef<'tcx> {
    fn new_operand(ccx: &CrateContext<'a, 'tcx>, layout: FullLayout<'tcx>) -> LocalRef<'tcx> {
        if layout.is_zst() {
            // Zero-size temporaries aren't always initialized, which
            // doesn't matter because they don't contain data, but
            // we need something in the operand.
            LocalRef::Operand(Some(OperandRef::new_zst(ccx, layout)))
        } else {
            LocalRef::Operand(None)
        }
    }
}
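// Note on the two variants above: `Lvalue` locals live in memory (usually an
// alloca) and are accessed through an `LvalueRef`, while `Operand` locals are
// kept as immediate SSA values; `Operand(None)` means the local has not been
// defined yet and the slot is filled in once its defining statement is
// translated.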

///////////////////////////////////////////////////////////////////////////

pub fn trans_mir<'a, 'tcx: 'a>(
    ccx: &'a CrateContext<'a, 'tcx>,
    llfn: ValueRef,
    mir: &'a Mir<'tcx>,
    instance: Instance<'tcx>,
    sig: ty::FnSig<'tcx>,
) {
    let fn_ty = FnType::new(ccx, sig, &[]);
    debug!("fn_ty: {:?}", fn_ty);
    let debug_context =
        debuginfo::create_function_debug_context(ccx, instance, sig, llfn, mir);
    let bcx = Builder::new_block(ccx, llfn, "start");

    if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
        bcx.set_personality_fn(ccx.eh_personality());
    }

    let cleanup_kinds = analyze::cleanup_kinds(&mir);
    // Allocate a `Block` for every basic block, except
    // the start block, if nothing loops back to it.
    let reentrant_start_block = !mir.predecessors_for(mir::START_BLOCK).is_empty();
    let block_bcxs: IndexVec<mir::BasicBlock, BasicBlockRef> =
        mir.basic_blocks().indices().map(|bb| {
            if bb == mir::START_BLOCK && !reentrant_start_block {
                bcx.llbb()
            } else {
                bcx.build_sibling_block(&format!("{:?}", bb)).llbb()
            }
        }).collect();

    // Compute debuginfo scopes from MIR scopes.
    let scopes = debuginfo::create_mir_scopes(ccx, mir, &debug_context);
    let (landing_pads, funclets) = create_funclets(&bcx, &cleanup_kinds, &block_bcxs);

    let mut mircx = MirContext {
        mir,
        llfn,
        fn_ty,
        ccx,
        personality_slot: None,
        blocks: block_bcxs,
        unreachable_block: None,
        cleanup_kinds,
        landing_pads,
        funclets: &funclets,
        scopes,
        locals: IndexVec::new(),
        debug_context,
        param_substs: {
            assert!(!instance.substs.needs_infer());
            instance.substs
        },
    };

    let lvalue_locals = analyze::lvalue_locals(&mircx);
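    // `lvalue_locals` flags the locals that must live in memory (roughly: those
    // whose address is taken or that are accessed through projections); the
    // allocation below consults it to decide between an alloca-backed
    // `LvalueRef` and an immediate `OperandRef`.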

    // Allocate variable and temp allocas
    mircx.locals = {
        let args = arg_local_refs(&bcx, &mircx, &mircx.scopes, &lvalue_locals);

        let mut allocate_local = |local| {
            let decl = &mir.local_decls[local];
            let layout = bcx.ccx.layout_of(mircx.monomorphize(&decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if let Some(name) = decl.name {
                // User variable
                let debug_scope = mircx.scopes[decl.source_info.scope];
                let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo;

                if !lvalue_locals.contains(local.index()) && !dbg {
                    debug!("alloc: {:?} ({}) -> operand", local, name);
                    return LocalRef::new_operand(bcx.ccx, layout);
                }

                debug!("alloc: {:?} ({}) -> lvalue", local, name);
                let lvalue = LvalueRef::alloca(&bcx, layout, &name.as_str());
                if dbg {
                    let (scope, span) = mircx.debug_loc(decl.source_info);
                    declare_local(&bcx, &mircx.debug_context, name, layout.ty, scope,
                        VariableAccess::DirectVariable { alloca: lvalue.llval },
                        VariableKind::LocalVariable, span);
                }
                LocalRef::Lvalue(lvalue)
            } else {
                // Temporary or return pointer
                if local == mir::RETURN_POINTER && mircx.fn_ty.ret.is_indirect() {
                    debug!("alloc: {:?} (return pointer) -> lvalue", local);
                    let llretptr = llvm::get_param(llfn, 0);
                    LocalRef::Lvalue(LvalueRef::new_sized(llretptr,
                                                          layout,
                                                          Alignment::AbiAligned))
                } else if lvalue_locals.contains(local.index()) {
                    debug!("alloc: {:?} -> lvalue", local);
                    LocalRef::Lvalue(LvalueRef::alloca(&bcx, layout, &format!("{:?}", local)))
                } else {
                    // If this is an immediate local, we do not create an
                    // alloca in advance. Instead we wait until we see the
                    // definition and update the operand there.
                    debug!("alloc: {:?} -> operand", local);
                    LocalRef::new_operand(bcx.ccx, layout)
                }
            }
        };

        let retptr = allocate_local(mir::RETURN_POINTER);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };
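    // The resulting vector is indexed like `mir::Local`: the return pointer
    // first, then the arguments, then all remaining vars and temps.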

    // Branch to the START block, if it's not the entry block.
    if reentrant_start_block {
        bcx.br(mircx.blocks[mir::START_BLOCK]);
    }

    // Up until here, IR instructions for this function have explicitly not been annotated with
    // source code location, so we don't step into call setup code. From here on, source location
    // emitting should be enabled.
    debuginfo::start_emitting_source_locations(&mircx.debug_context);

    let rpo = traversal::reverse_postorder(&mir);
    let mut visited = BitVector::new(mir.basic_blocks().len());

    // Translate the body of each block using reverse postorder
    for (bb, _) in rpo {
        visited.insert(bb.index());
        mircx.trans_block(bb);
    }

    // Remove blocks that haven't been visited, or have no
    // predecessors.
    for bb in mir.basic_blocks().indices() {
        // Unreachable block
        if !visited.contains(bb.index()) {
            debug!("trans_mir: block {:?} was not visited", bb);
            unsafe {
                llvm::LLVMDeleteBasicBlock(mircx.blocks[bb]);
            }
        }
    }
}

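/// For MSVC-style exception handling, builds one extra block per `Funclet`
/// cleanup block: it opens with a `cleanuppad` and immediately branches to the
/// funclet's real body, and the pad is recorded for later use by block
/// translation. On other targets nothing is created here and landing pads are
/// instead built lazily.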
fn create_funclets<'a, 'tcx>(
    bcx: &Builder<'a, 'tcx>,
    cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
    block_bcxs: &IndexVec<mir::BasicBlock, BasicBlockRef>)
    -> (IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
        IndexVec<mir::BasicBlock, Option<Funclet>>)
{
    block_bcxs.iter_enumerated().zip(cleanup_kinds).map(|((bb, &llbb), cleanup_kind)| {
        match *cleanup_kind {
            CleanupKind::Funclet if base::wants_msvc_seh(bcx.sess()) => {
                let cleanup_bcx = bcx.build_sibling_block(&format!("funclet_{:?}", bb));
                let cleanup = cleanup_bcx.cleanup_pad(None, &[]);
                cleanup_bcx.br(llbb);
                (Some(cleanup_bcx.llbb()), Some(Funclet::new(cleanup)))
            }
            _ => (None, None)
        }
    }).unzip()
}
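
// For instance, a MIR cleanup block `bb3` gets a companion LLVM block named
// `funclet_bb3`, and its `cleanuppad` value is stored in `funclets[bb3]`.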

/// Produce, for each argument, a `ValueRef` pointing at the
/// argument's value. As arguments are lvalues, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
                            mircx: &MirContext<'a, 'tcx>,
                            scopes: &IndexVec<mir::VisibilityScope, debuginfo::MirDebugScope>,
                            lvalue_locals: &BitVector)
                            -> Vec<LocalRef<'tcx>> {
    let mir = mircx.mir;
    let tcx = bcx.tcx();
    let mut idx = 0;
    let mut llarg_idx = mircx.fn_ty.ret.is_indirect() as usize;

    // Get the argument scope, if it exists and if we need it.
    let arg_scope = scopes[mir::ARGUMENT_VISIBILITY_SCOPE];
    let arg_scope = if arg_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo {
        Some(arg_scope.scope_metadata)
    } else {
        None
    };

    let deref_op = unsafe {
        [llvm::LLVMRustDIBuilderCreateOpDeref()]
    };

    mir.args_iter().enumerate().map(|(arg_index, local)| {
        let arg_decl = &mir.local_decls[local];

        let name = if let Some(name) = arg_decl.name {
            name.as_str().to_string()
        } else {
            format!("arg{}", arg_index)
        };

        if Some(local) == mir.spread_arg {
            // This argument (e.g. the last argument in the "rust-call" ABI)
            // is a tuple that was spread at the ABI level and now we have
            // to reconstruct it into a tuple local variable, from multiple
            // individual LLVM function arguments.

            let arg_ty = mircx.monomorphize(&arg_decl.ty);
            let tupled_arg_tys = match arg_ty.sty {
                ty::TyTuple(ref tys, _) => tys,
                _ => bug!("spread argument isn't a tuple?!")
            };
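
            // For instance, a closure invoked through `Fn::call` receives its
            // call arguments as one spread tuple `(A, B, ...)`; the loop below
            // stores each ABI-level piece back into the corresponding field of
            // a single tuple alloca.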

            let lvalue = LvalueRef::alloca(bcx, bcx.ccx.layout_of(arg_ty), &name);
            for i in 0..tupled_arg_tys.len() {
                let arg = &mircx.fn_ty.args[idx];
                idx += 1;
                arg.store_fn_arg(bcx, &mut llarg_idx, lvalue.project_field(bcx, i));
            }

            // Now that we have one alloca that contains the aggregate value,
            // we can create one debuginfo entry for the argument.
            arg_scope.map(|scope| {
                let variable_access = VariableAccess::DirectVariable {
                    alloca: lvalue.llval
                };
                declare_local(
                    bcx,
                    &mircx.debug_context,
                    arg_decl.name.unwrap_or(keywords::Invalid.name()),
                    arg_ty, scope,
                    variable_access,
                    VariableKind::ArgumentVariable(arg_index + 1),
                    DUMMY_SP
                );
            });

            return LocalRef::Lvalue(lvalue);
        }

        let arg = &mircx.fn_ty.args[idx];
        idx += 1;
        let lvalue = if arg.is_indirect() {
            // Don't copy an indirect argument to an alloca; the caller
            // already put it in a temporary alloca and gave it up.
            // FIXME: lifetimes
            if arg.pad.is_some() {
                llarg_idx += 1;
            }
            let llarg = llvm::get_param(bcx.llfn(), llarg_idx as c_uint);
            bcx.set_value_name(llarg, &name);
            llarg_idx += 1;
            LvalueRef::new_sized(llarg, arg.layout, Alignment::AbiAligned)
        } else if !lvalue_locals.contains(local.index()) &&
                  !arg.nested.is_empty() {
            assert_eq!(arg.nested.len(), 2);
            let (a, b) = (&arg.nested[0], &arg.nested[1]);
            assert!(!a.is_ignore() && a.cast.is_none() && a.pad.is_none());
            assert!(!b.is_ignore() && b.cast.is_none() && b.pad.is_none());

            let mut a = llvm::get_param(bcx.llfn(), llarg_idx as c_uint);
            llarg_idx += 1;

            let mut b = llvm::get_param(bcx.llfn(), llarg_idx as c_uint);
            llarg_idx += 1;

            if common::type_is_fat_ptr(bcx.ccx, arg.layout.ty) {
                // FIXME(eddyb) As we can't perfectly represent the data and/or
                // vtable pointer of a fat pointer in Rust's typesystem, and
                // because we split fat pointers into two ArgTypes, they're
                // not the right type, so we have to cast them for now.
                let pointee = match arg.layout.ty.sty {
                    ty::TyRef(_, ty::TypeAndMut{ty, ..}) |
                    ty::TyRawPtr(ty::TypeAndMut{ty, ..}) => ty,
                    ty::TyAdt(def, _) if def.is_box() => arg.layout.ty.boxed_ty(),
                    _ => bug!()
                };
                let data_llty = bcx.ccx.layout_of(pointee).llvm_type(bcx.ccx);
                let meta_llty = type_of::unsized_info_ty(bcx.ccx, pointee);

                a = bcx.pointercast(a, data_llty.ptr_to());
                bcx.set_value_name(a, &(name.clone() + ".ptr"));
                b = bcx.pointercast(b, meta_llty);
                bcx.set_value_name(b, &(name + ".meta"));
            }
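
            // At this point `a` and `b` hold the two halves of a scalar-pair
            // argument (for fat pointers: the data pointer and the vtable or
            // length), so they can be packaged directly as an
            // `OperandValue::Pair` without going through an alloca.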

            return LocalRef::Operand(Some(OperandRef {
                val: OperandValue::Pair(a, b),
                layout: arg.layout
            }));
        } else if !lvalue_locals.contains(local.index()) &&
                  !arg.is_indirect() && arg.cast.is_none() &&
                  arg_scope.is_none() {
            if arg.is_ignore() {
                return LocalRef::new_operand(bcx.ccx, arg.layout);
            }

            // We don't have to cast or keep the argument in the alloca.
            // FIXME(eddyb): We should figure out how to use llvm.dbg.value instead
            // of putting everything in allocas just so we can use llvm.dbg.declare.
            if arg.pad.is_some() {
                llarg_idx += 1;
            }
            let llarg = llvm::get_param(bcx.llfn(), llarg_idx as c_uint);
            bcx.set_value_name(llarg, &name);
            llarg_idx += 1;
            let operand = OperandRef {
                val: OperandValue::Immediate(llarg),
                layout: arg.layout
            };
            return LocalRef::Operand(Some(operand.unpack_if_pair(bcx)));
        } else {
            let tmp = LvalueRef::alloca(bcx, arg.layout, &name);
            arg.store_fn_arg(bcx, &mut llarg_idx, tmp);
            tmp
        };
        arg_scope.map(|scope| {
            // Is this a regular argument?
            if arg_index > 0 || mir.upvar_decls.is_empty() {
                // The Rust ABI passes indirect variables using a pointer and a manual copy, so we
                // need to insert a deref here, but the C ABI uses a pointer and a copy using the
                // byval attribute, for which LLVM does the deref itself, so we must not add it.
                let variable_access = if arg.is_indirect() &&
                    !arg.attrs.contains(ArgAttribute::ByVal) {
                    VariableAccess::IndirectVariable {
                        alloca: lvalue.llval,
                        address_operations: &deref_op,
                    }
                } else {
                    VariableAccess::DirectVariable { alloca: lvalue.llval }
                };

                declare_local(
                    bcx,
                    &mircx.debug_context,
                    arg_decl.name.unwrap_or(keywords::Invalid.name()),
                    arg.layout.ty,
                    scope,
                    variable_access,
                    VariableKind::ArgumentVariable(arg_index + 1),
                    DUMMY_SP
                );
                return;
            }

            // Or is it the closure environment?
            let (closure_layout, env_ref) = match arg.layout.ty.sty {
                ty::TyRef(_, mt) | ty::TyRawPtr(mt) => (bcx.ccx.layout_of(mt.ty), true),
                _ => (arg.layout, false)
            };

            let upvar_tys = match closure_layout.ty.sty {
                ty::TyClosure(def_id, substs) |
                ty::TyGenerator(def_id, substs, _) => substs.upvar_tys(def_id, tcx),
                _ => bug!("upvar_decls with non-closure arg0 type `{}`", closure_layout.ty)
            };
            };

            // Store the pointer to closure data in an alloca for debuginfo
            // because that's what the llvm.dbg.declare intrinsic expects.

            // FIXME(eddyb) this shouldn't be necessary but SROA seems to
            // mishandle DW_OP_plus not preceded by DW_OP_deref, i.e. it
            // doesn't actually strip the offset when splitting the closure
            // environment into its components so it ends up out of bounds.
            let env_ptr = if !env_ref {
                let alloc = LvalueRef::alloca(bcx,
                    bcx.ccx.layout_of(tcx.mk_mut_ptr(arg.layout.ty)),
                    "__debuginfo_env_ptr");
                bcx.store(lvalue.llval, alloc.llval, None);
                alloc.llval
            } else {
                lvalue.llval
            };

            for (i, (decl, ty)) in mir.upvar_decls.iter().zip(upvar_tys).enumerate() {
                let byte_offset_of_var_in_env = closure_layout.fields.offset(i).bytes();

                let ops = unsafe {
                    [llvm::LLVMRustDIBuilderCreateOpDeref(),
                     llvm::LLVMRustDIBuilderCreateOpPlus(),
                     byte_offset_of_var_in_env as i64,
                     llvm::LLVMRustDIBuilderCreateOpDeref()]
                };

                // The environment and the capture can each be indirect.

                // FIXME(eddyb) see above why we have to keep
                // a pointer in an alloca for debuginfo atm.
                let mut ops = if env_ref || true { &ops[..] } else { &ops[1..] };

                let ty = if let (true, &ty::TyRef(_, mt)) = (decl.by_ref, &ty.sty) {
                    mt.ty
                } else {
                    ops = &ops[..ops.len() - 1];
                    ty
                };

                let variable_access = VariableAccess::IndirectVariable {
                    alloca: env_ptr,
                    address_operations: &ops
                };
                declare_local(
                    bcx,
                    &mircx.debug_context,
                    decl.debug_name,
                    ty,
                    scope,
                    variable_access,
                    VariableKind::CapturedVariable,
                    DUMMY_SP
                );
            }
        });
        LocalRef::Lvalue(lvalue)
    }).collect()
}

mod analyze;
mod block;
mod constant;
pub mod lvalue;
pub mod operand;
mod rvalue;
mod statement;