// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
use llvm::{self, False, OperandBundleDef, BasicBlock};
use common::{self, *};
use context::CodegenCx;
use type_::Type;
use value::Value;
use libc::{c_uint, c_char};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{Align, Size, TyLayout};
use rustc::session::{config, Session};
use rustc_data_structures::small_c_str::SmallCStr;
use interfaces::*;
use syntax;

use std::borrow::Cow;
use std::ops::Range;
use std::ptr;

// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll: 'a, 'tcx: 'll, V: 'll = &'ll Value> {
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    pub cx: &'a CodegenCx<'ll, 'tcx, V>,
}

impl<V> Drop for Builder<'a, 'll, 'tcx, V> {
    fn drop(&mut self) {
        unsafe {
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}

// This is a really awful way to get a zero-length c-string, but better (and a
// lot more efficient) than doing str::as_c_str("", ...) every time.
fn noname() -> *const c_char {
    static CNULL: c_char = 0;
    &CNULL
}

bitflags! {
    pub struct MemFlags: u8 {
        const VOLATILE = 1 << 0;
        const NONTEMPORAL = 1 << 1;
        const UNALIGNED = 1 << 2;
    }
}

impl BackendTypes for Builder<'_, 'll, '_> {
    type Value = &'ll Value;
    type BasicBlock = &'ll BasicBlock;
    type Type = &'ll Type;
    type Context = &'ll llvm::Context;
}

impl ty::layout::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &ty::layout::TargetDataLayout {
        self.cx.data_layout()
    }
}

impl ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
        self.cx.tcx
    }
}

impl ty::layout::LayoutOf for Builder<'_, '_, 'tcx> {
    type Ty = Ty<'tcx>;
    type TyLayout = TyLayout<'tcx>;

    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
        self.cx.layout_of(ty)
    }
}


impl HasCodegen<'tcx> for Builder<'_, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;
}

impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
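    // Creates a new basic block named `name` on `llfn` and returns a builder
    // positioned at the end of that block. Illustrative sketch (the value
    // names below are hypothetical, not from this file):
    //
    //     let bx = Builder::new_block(cx, llfn, "entry");
    //     let sum = bx.add(lhs, rhs);
    //     bx.ret(sum);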
    fn new_block<'b>(
        cx: &'a CodegenCx<'ll, 'tcx>,
        llfn: &'ll Value,
        name: &'b str
    ) -> Self {
        let bx = Builder::with_cx(cx);
        let llbb = unsafe {
            let name = SmallCStr::new(name);
            llvm::LLVMAppendBasicBlockInContext(
                cx.llcx,
                llfn,
                name.as_ptr()
            )
        };
        bx.position_at_end(llbb);
        bx
    }

    fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe {
            llvm::LLVMCreateBuilderInContext(cx.llcx)
        };
        Builder {
            llbuilder,
            cx,
        }
    }

    fn build_sibling_block<'b>(&self, name: &'b str) -> Self {
        Builder::new_block(self.cx, self.llfn(), name)
    }

    fn sess(&self) -> &Session {
        self.cx.sess()
    }

    fn llfn(&self) -> &'ll Value {
        unsafe {
            llvm::LLVMGetBasicBlockParent(self.llbb())
        }
    }

    fn llbb(&self) -> &'ll BasicBlock {
        unsafe {
            llvm::LLVMGetInsertBlock(self.llbuilder)
        }
    }

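    // Bumps the instruction statistics kept on the `CodegenCx` when the
    // session has codegen statistics or LLVM instruction counting enabled.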
    fn count_insn(&self, category: &str) {
        if self.cx().sess().codegen_stats() {
            self.cx().stats.borrow_mut().n_llvm_insns += 1;
        }
        if self.cx().sess().count_llvm_insns() {
            *self.cx().stats
                      .borrow_mut()
                      .llvm_insns
                      .entry(category.to_string())
                      .or_insert(0) += 1;
        }
    }

    fn set_value_name(&self, value: &'ll Value, name: &str) {
        let cname = SmallCStr::new(name);
        unsafe {
            llvm::LLVMSetValueName(value, cname.as_ptr());
        }
    }

    fn position_at_end(&self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    fn position_at_start(&self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }

    fn ret_void(&self) {
        self.count_insn("retvoid");
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    fn ret(&self, v: &'ll Value) {
        self.count_insn("ret");
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    fn br(&self, dest: &'ll BasicBlock) {
        self.count_insn("br");
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    fn cond_br(
        &self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        self.count_insn("condbr");
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

    fn switch(
        &self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        num_cases: usize,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
        }
    }

    fn invoke(&self,
                  llfn: &'ll Value,
                  args: &[&'ll Value],
                  then: &'ll BasicBlock,
                  catch: &'ll BasicBlock,
                  bundle: Option<&common::OperandBundleDef<&'ll Value>>) -> &'ll Value {
        self.count_insn("invoke");

        debug!("Invoke {:?} with args ({:?})",
               llfn,
               args);

        let args = self.check_call("invoke", llfn, args);
        let bundle = bundle.map(OperandBundleDef::from_generic);
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(self.llbuilder,
                                      llfn,
                                      args.as_ptr(),
                                      args.len() as c_uint,
                                      then,
                                      catch,
                                      bundle,
                                      noname())
        }
    }

    fn unreachable(&self) {
        self.count_insn("unreachable");
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    /* Arithmetic */
    fn add(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("add");
        unsafe {
            llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fadd(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fadd");
        unsafe {
            llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fadd_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fadd");
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn sub(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("sub");
        unsafe {
            llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fsub(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fsub");
        unsafe {
            llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fsub_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fsub");
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn mul(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("mul");
        unsafe {
            llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fmul(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fmul");
        unsafe {
            llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fmul_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fmul");
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }


    fn udiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("udiv");
        unsafe {
            llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn exactudiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("exactudiv");
        unsafe {
            llvm::LLVMBuildExactUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn sdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("sdiv");
        unsafe {
            llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn exactsdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("exactsdiv");
        unsafe {
            llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fdiv(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fdiv");
        unsafe {
            llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn fdiv_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fdiv");
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn urem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("urem");
        unsafe {
            llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn srem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("srem");
        unsafe {
            llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn frem(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("frem");
        unsafe {
            llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn frem_fast(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("frem");
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    fn shl(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("shl");
        unsafe {
            llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn lshr(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("lshr");
        unsafe {
            llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn ashr(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("ashr");
        unsafe {
            llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn and(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("and");
        unsafe {
            llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn or(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("or");
        unsafe {
            llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn xor(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("xor");
        unsafe {
            llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
        }
    }

    fn neg(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("neg");
        unsafe {
            llvm::LLVMBuildNeg(self.llbuilder, v, noname())
        }
    }

    fn fneg(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("fneg");
        unsafe {
            llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
        }
    }

    fn not(&self, v: &'ll Value) -> &'ll Value {
        self.count_insn("not");
        unsafe {
            llvm::LLVMBuildNot(self.llbuilder, v, noname())
        }
    }

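    // Emits the alloca in the function's entry block (its first basic block),
    // so it is executed only once; `dynamic_alloca` below emits it at the
    // current insertion point instead.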
    fn alloca(&self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
        let bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe {
            llvm::LLVMGetFirstBasicBlock(self.llfn())
        });
        bx.dynamic_alloca(ty, name, align)
    }

    fn dynamic_alloca(&self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildAlloca(self.llbuilder, ty, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildAlloca(self.llbuilder, ty,
                                      name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }

    fn array_alloca(&self,
                        ty: &'ll Type,
                        len: &'ll Value,
                        name: &str,
                        align: Align) -> &'ll Value {
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len,
                                           name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.abi() as c_uint);
            alloca
        }
    }

    fn load(&self, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.count_insn("load");
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetAlignment(load, align.abi() as c_uint);
            load
        }
    }

    fn volatile_load(&self, ptr: &'ll Value) -> &'ll Value {
        self.count_insn("load.volatile");
        unsafe {
            let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
            llvm::LLVMSetVolatile(insn, llvm::True);
            insn
        }
    }

    fn atomic_load(
        &self,
        ptr: &'ll Value,
        order: common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        self.count_insn("load.atomic");
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                noname(),
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }


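    // Attaches `!range` metadata to `load`, telling LLVM that the loaded
    // value lies within `range` (a half-open `[start, end)` interval).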
    fn range_metadata(&self, load: &'ll Value, range: Range<u128>) {
        if self.sess().target.target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks an i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            let llty = self.cx.val_ty(load);
            let v = [
                self.cx.const_uint_big(llty, range.start),
                self.cx.const_uint_big(llty, range.end)
            ];

            llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx,
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }
    }

    fn nonnull_metadata(&self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }

    fn store(&self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

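    // Emits a store of `val` to `ptr`, honoring the requested alignment and
    // the VOLATILE / NONTEMPORAL / UNALIGNED flags.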
    fn store_with_flags(
        &self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        self.count_insn("store");
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            let align = if flags.contains(MemFlags::UNALIGNED) {
                1
            } else {
                align.abi() as c_uint
            };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1], a nontemporal store must *always*
                // carry `!nontemporal` metadata pointing to the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.const_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

    fn atomic_store(&self, val: &'ll Value, ptr: &'ll Value,
                    order: common::AtomicOrdering, size: Size) {
        debug!("Store {:?} -> {:?}", val, ptr);
        self.count_insn("store.atomic");
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
        }
    }

    fn gep(&self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        self.count_insn("gep");
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, noname())
        }
    }

    fn inbounds_gep(&self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        self.count_insn("inboundsgep");
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
        }
    }

    /* Casts */
    fn trunc(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("trunc");
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn sext(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("sext");
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptoui(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("fptoui");
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptosi(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("fptosi");
        unsafe {
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn uitofp(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("uitofp");
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn sitofp(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("sitofp");
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fptrunc(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("fptrunc");
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn fpext(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("fpext");
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn ptrtoint(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("ptrtoint");
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn inttoptr(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("inttoptr");
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn bitcast(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("bitcast");
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, noname())
        }
    }


    fn intcast(&self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
        self.count_insn("intcast");
        unsafe {
            llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed)
        }
    }

    fn pointercast(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("pointercast");
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, noname())
        }
    }

    /* Comparisons */
    fn icmp(&self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("icmp");
        let op = llvm::IntPredicate::from_generic(op);
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    fn fcmp(&self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("fcmp");
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    /* Miscellaneous instructions */
    fn empty_phi(&self, ty: &'ll Type) -> &'ll Value {
        self.count_insn("emptyphi");
        unsafe {
            llvm::LLVMBuildPhi(self.llbuilder, ty, noname())
        }
    }

    fn phi(&self, ty: &'ll Type, vals: &[&'ll Value], bbs: &[&'ll BasicBlock]) -> &'ll Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = self.empty_phi(ty);
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }

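    // Builds a call to inline assembly, first asking LLVM to verify the
    // constraint string; returns `None` if the constraints are rejected.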
    fn inline_asm_call(&self, asm: *const c_char, cons: *const c_char,
                       inputs: &[&'ll Value], output: &'ll Type,
                       volatile: bool, alignstack: bool,
                       dia: syntax::ast::AsmDialect) -> Option<&'ll Value> {
        self.count_insn("inlineasm");

        let volatile = if volatile { llvm::True }
                       else        { llvm::False };
        let alignstack = if alignstack { llvm::True }
                         else          { llvm::False };

        let argtys = inputs.iter().map(|v| {
            debug!("Asm Input Type: {:?}", *v);
            self.cx.val_ty(*v)
        }).collect::<Vec<_>>();

        debug!("Asm Output Type: {:?}", output);
        let fty = self.cx().type_func(&argtys[..], output);
        unsafe {
            // Ask LLVM to verify that the constraints are well-formed.
            let constraints_ok = llvm::LLVMRustInlineAsmVerify(fty, cons);
            debug!("Constraint verification result: {:?}", constraints_ok);
            if constraints_ok {
                let v = llvm::LLVMRustInlineAsm(
                    fty, asm, cons, volatile, alignstack, AsmDialect::from_generic(dia));
                Some(self.call(v, inputs, None))
            } else {
                // LLVM has detected an issue with our constraints, bail out
                None
            }
        }
    }

    fn memcpy(&self, dst: &'ll Value, dst_align: Align,
                  src: &'ll Value, src_align: Align,
                  size: &'ll Value, flags: MemFlags) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memcpy.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.cx().type_ptr_to(self.cx().val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.cx().type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.cx().type_i8p());
        let src = self.pointercast(src, self.cx().type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemCpy(self.llbuilder, dst, dst_align.abi() as c_uint,
                                      src, src_align.abi() as c_uint, size, is_volatile);
        }
    }

    fn memmove(&self, dst: &'ll Value, dst_align: Align,
                  src: &'ll Value, src_align: Align,
                  size: &'ll Value, flags: MemFlags) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memmove.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.cx().type_ptr_to(self.cx().val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.cx().type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.cx().type_i8p());
        let src = self.pointercast(src, self.cx().type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemMove(self.llbuilder, dst, dst_align.abi() as c_uint,
                                      src, src_align.abi() as c_uint, size, is_volatile);
        }
    }

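    // Lowers to a call of the `llvm.memset.p0i8.i{N}` intrinsic, where `N`
    // is the target pointer width.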
    fn memset(
        &self,
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) {
        let ptr_width = &self.sess().target.target.target_pointer_width;
        let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
        let llintrinsicfn = self.cx().get_intrinsic(&intrinsic_key);
        let ptr = self.pointercast(ptr, self.cx().type_i8p());
        let align = self.cx().const_u32(align.abi() as u32);
        let volatile = self.cx().const_bool(flags.contains(MemFlags::VOLATILE));
        self.call(llintrinsicfn, &[ptr, fill_byte, size, align, volatile], None);
    }

    fn minnum(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("minnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs);
            instr.expect("LLVMRustBuildMinNum is not available in LLVM version < 6.0")
        }
    }
    fn maxnum(&self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        self.count_insn("maxnum");
        unsafe {
            let instr = llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs);
            instr.expect("LLVMRustBuildMaxNum is not available in LLVM version < 6.0")
        }
    }

877
    fn select(
        &self, cond: &'ll Value,
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
        self.count_insn("select");
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
        }
    }

    #[allow(dead_code)]
    fn va_arg(&self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        self.count_insn("vaarg");
        unsafe {
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, noname())
        }
    }

    fn extract_element(&self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
        self.count_insn("extractelement");
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
        }
    }

    fn insert_element(
        &self, vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        self.count_insn("insertelement");
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
        }
    }

    fn shuffle_vector(&self, v1: &'ll Value, v2: &'ll Value, mask: &'ll Value) -> &'ll Value {
        self.count_insn("shufflevector");
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
        }
    }

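    // Builds a vector of `num_elts` copies of `elt`: `elt` is inserted into
    // lane 0 of an undef vector, which is then shuffled with an all-zero mask.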
    fn vector_splat(&self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.cx().type_vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
            let vec_i32_ty = self.cx().type_vector(self.cx().type_i32(), num_elts as u64);
            self.shuffle_vector(vec, undef, self.cx().const_null(vec_i32_ty))
        }
    }

    fn vector_reduce_fadd_fast(&self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fadd_fast");
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    fn vector_reduce_fmul_fast(&self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmul_fast");
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    fn vector_reduce_add(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.add");
        unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
    }
    fn vector_reduce_mul(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.mul");
        unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
    }
    fn vector_reduce_and(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.and");
        unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
    }
    fn vector_reduce_or(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.or");
        unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
    }
    fn vector_reduce_xor(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.xor");
        unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
    }
    fn vector_reduce_fmin(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmin");
        unsafe { llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    fn vector_reduce_fmax(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmax");
        unsafe { llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    fn vector_reduce_fmin_fast(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmin_fast");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    fn vector_reduce_fmax_fast(&self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmax_fast");
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    fn vector_reduce_min(&self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        self.count_insn("vector.reduce.min");
        unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
    }
    fn vector_reduce_max(&self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        self.count_insn("vector.reduce.max");
        unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
    }

    fn extract_value(&self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
        self.count_insn("extractvalue");
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
        }
    }

    fn insert_value(&self, agg_val: &'ll Value, elt: &'ll Value,
                       idx: u64) -> &'ll Value {
        self.count_insn("insertvalue");
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
                                       noname())
        }
    }

    fn landing_pad(&self, ty: &'ll Type, pers_fn: &'ll Value,
                       num_clauses: usize) -> &'ll Value {
        self.count_insn("landingpad");
        unsafe {
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn,
                                      num_clauses as c_uint, noname())
        }
    }

    fn add_clause(&self, landing_pad: &'ll Value, clause: &'ll Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    fn set_cleanup(&self, landing_pad: &'ll Value) {
        self.count_insn("setcleanup");
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }

    fn resume(&self, exn: &'ll Value) -> &'ll Value {
        self.count_insn("resume");
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }

    fn cleanup_pad(&self,
                       parent: Option<&'ll Value>,
                       args: &[&'ll Value]) -> &'ll Value {
        self.count_insn("cleanuppad");
        let name = const_cstr!("cleanuppad");
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(self.llbuilder,
                                          parent,
                                          args.len() as c_uint,
                                          args.as_ptr(),
                                          name.as_ptr())
        };
        ret.expect("LLVM does not have support for cleanuppad")
    }

    fn cleanup_ret(
        &self, cleanup: &'ll Value,
        unwind: Option<&'ll BasicBlock>,
    ) -> &'ll Value {
        self.count_insn("cleanupret");
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupRet(self.llbuilder, cleanup, unwind)
        };
        ret.expect("LLVM does not have support for cleanupret")
    }

    fn catch_pad(&self,
                     parent: &'ll Value,
                     args: &[&'ll Value]) -> &'ll Value {
        self.count_insn("catchpad");
        let name = const_cstr!("catchpad");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
                                        args.len() as c_uint, args.as_ptr(),
                                        name.as_ptr())
        };
        ret.expect("LLVM does not have support for catchpad")
    }

    fn catch_ret(&self, pad: &'ll Value, unwind: &'ll BasicBlock) -> &'ll Value {
        self.count_insn("catchret");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchRet(self.llbuilder, pad, unwind)
        };
        ret.expect("LLVM does not have support for catchret")
    }

    fn catch_switch(
        &self,
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
        num_handlers: usize,
    ) -> &'ll Value {
        self.count_insn("catchswitch");
        let name = const_cstr!("catchswitch");
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
                                           num_handlers as c_uint,
                                           name.as_ptr())
        };
        ret.expect("LLVM does not have support for catchswitch")
    }

    fn add_handler(&self, catch_switch: &'ll Value, handler: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }

    fn set_personality_fn(&self, personality: &'ll Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }

    // Atomic Operations
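    // Emits an LLVM `cmpxchg` instruction; the result is LLVM's
    // `{ loaded value, success flag }` pair.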
    fn atomic_cmpxchg(
        &self,
        dst: &'ll Value,
        cmp: &'ll Value,
        src: &'ll Value,
        order: common::AtomicOrdering,
        failure_order: common::AtomicOrdering,
        weak: bool,
    ) -> &'ll Value {
        let weak = if weak { llvm::True } else { llvm::False };
        unsafe {
            llvm::LLVMRustBuildAtomicCmpXchg(
                self.llbuilder,
                dst,
                cmp,
                src,
                AtomicOrdering::from_generic(order),
                AtomicOrdering::from_generic(failure_order),
                weak
            )
        }
    }
    fn atomic_rmw(
        &self,
        op: common::AtomicRmwBinOp,
        dst: &'ll Value,
        src: &'ll Value,
        order: common::AtomicOrdering,
    ) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
                AtomicOrdering::from_generic(order),
                False)
        }
    }

    fn atomic_fence(&self, order: common::AtomicOrdering, scope: common::SynchronizationScope) {
        unsafe {
            llvm::LLVMRustBuildAtomicFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
                SynchronizationScope::from_generic(scope)
            );
        }
    }

    fn add_case(&self, s: &'ll Value, on_val: &'ll Value, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMAddCase(s, on_val, dest)
        }
    }

    fn add_incoming_to_phi(&self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
        self.count_insn("addincoming");
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }

    fn set_invariant_load(&self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }

    /// Returns the ptr value that should be used for storing `val`.
    fn check_store<'b>(&self,
                       val: &'ll Value,
                       ptr: &'ll Value) -> &'ll Value {
        let dest_ptr_ty = self.cx.val_ty(ptr);
        let stored_ty = self.cx.val_ty(val);
        let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);

        assert_eq!(self.cx.type_kind(dest_ptr_ty), TypeKind::Pointer);

        if dest_ptr_ty == stored_ptr_ty {
            ptr
        } else {
            debug!("Type mismatch in store. \
                    Expected {:?}, got {:?}; inserting bitcast",
                   dest_ptr_ty, stored_ptr_ty);
            self.bitcast(ptr, stored_ptr_ty)
        }
    }

    /// Returns the args that should be used for a call to `llfn`.
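    /// Arguments whose type does not match the corresponding parameter type
    /// are bitcast to the expected type before being returned.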
    fn check_call<'b>(&self,
                      typ: &str,
                      llfn: &'ll Value,
                      args: &'b [&'ll Value]) -> Cow<'b, [&'ll Value]> {
        let mut fn_ty = self.cx.val_ty(llfn);
        // Strip off pointers
        while self.cx.type_kind(fn_ty) == TypeKind::Pointer {
            fn_ty = self.cx.element_type(fn_ty);
        }

        assert!(self.cx.type_kind(fn_ty) == TypeKind::Function,
                "builder::{} not passed a function, but {:?}", typ, fn_ty);

        let param_tys = self.cx.func_params_types(fn_ty);

        let all_args_match = param_tys.iter()
            .zip(args.iter().map(|&v| self.cx().val_ty(v)))
            .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);

        if all_args_match {
            return Cow::Borrowed(args);
        }

        let casted_args: Vec<_> = param_tys.into_iter()
            .zip(args.iter())
            .enumerate()
            .map(|(i, (expected_ty, &actual_val))| {
                let actual_ty = self.cx().val_ty(actual_val);
                if expected_ty != actual_ty {
                    debug!("Type mismatch in function call of {:?}. \
                            Expected {:?} for param {}, got {:?}; injecting bitcast",
                           llfn, expected_ty, i, actual_ty);
                    self.bitcast(actual_val, expected_ty)
                } else {
                    actual_val
                }
            })
            .collect();

        Cow::Owned(casted_args)
    }

    fn lifetime_start(&self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
    }

    fn lifetime_end(&self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
    }

    /// If LLVM lifetime intrinsic support is enabled (i.e. optimizations
    /// are on) and `ptr` is nonzero-sized, emits a call to the given
    /// lifetime intrinsic (`llvm.lifetime.start` or `llvm.lifetime.end`)
    /// for `size` bytes at `ptr`.
    ///
    /// If LLVM lifetime intrinsic support is disabled (i.e. optimizations
    /// are off) or `ptr` is zero-sized, this is a no-op.
    fn call_lifetime_intrinsic(&self, intrinsic: &str, ptr: &'ll Value, size: Size) {
1279
        if self.cx.sess().opts.optimize == config::OptLevel::No {
1280 1281 1282 1283 1284 1285 1286 1287
            return;
        }

        let size = size.bytes();
        if size == 0 {
            return;
        }

        let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);

        let ptr = self.pointercast(ptr, self.cx.type_i8p());
        self.call(lifetime_intrinsic, &[self.cx.const_u64(size), ptr], None);
    }

    fn call(&self, llfn: &'ll Value, args: &[&'ll Value],
                bundle: Option<&common::OperandBundleDef<&'ll Value>>) -> &'ll Value {
        self.count_insn("call");

        debug!("Call {:?} with args ({:?})",
               llfn,
               args);

        let args = self.check_call("call", llfn, args);
        let bundle = bundle.map(OperandBundleDef::from_generic);
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundle, noname()
            )
        }
    }

    fn zext(&self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        self.count_insn("zext");
        unsafe {
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, noname())
        }
    }

    fn struct_gep(&self, ptr: &'ll Value, idx: u64) -> &'ll Value {
        self.count_insn("structgep");
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
        }
    }

    fn cx(&self) -> &'a CodegenCx<'ll, 'tcx> {
        self.cx
    }
}