builder.rs 50.1 KB
Newer Older
1 2 3 4 5 6 7 8 9 10
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

11
use llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
12
use llvm::{self, False, BasicBlock};
13
use rustc_codegen_ssa::common::{IntPredicate, TypeKind, RealPredicate};
14
use rustc_codegen_ssa::{self, MemFlags};
15
use common::Funclet;
16
use context::CodegenCx;
17
use type_::Type;
18
use type_of::LayoutLlvmExt;
19
use value::Value;
20
use libc::{c_uint, c_char};
D
Denis Merigoux 已提交
21
use rustc::ty::{self, Ty, TyCtxt};
22
use rustc::ty::layout::{self, Align, Size, TyLayout};
23
use rustc::session::config;
24
use rustc_data_structures::small_c_str::SmallCStr;
25
use rustc_codegen_ssa::traits::*;
26
use syntax;
27 28 29
use rustc_codegen_ssa::base::to_immediate;
use rustc_codegen_ssa::mir::operand::{OperandValue, OperandRef};
use rustc_codegen_ssa::mir::place::PlaceRef;
30
use std::borrow::Cow;
31
use std::ffi::CStr;
B
bjorn3 已提交
32
use std::ops::{Deref, Range};
33
use std::ptr;
34

35 36
// All Builders must have an llfn associated with them
#[must_use]
37
pub struct Builder<'a, 'll: 'a, 'tcx: 'll> {
38
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
39
    pub cx: &'a CodegenCx<'ll, 'tcx>,
40 41
}

42
impl Drop for Builder<'a, 'll, 'tcx> {
M
Mark-Simulacrum 已提交
43 44
    fn drop(&mut self) {
        unsafe {
45
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
M
Mark-Simulacrum 已提交
46 47 48 49
        }
    }
}

50 51
// This is a really awful way to get a zero-length c-string, but better (and a
// lot more efficient) than doing str::as_c_str("", ...) every time.
M
Mark-Simulacrum 已提交
52
fn noname() -> *const c_char {
53
    static CNULL: c_char = 0;
54
    &CNULL
55 56
}

57 58 59 60
impl BackendTypes for Builder<'_, 'll, 'tcx> {
    type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
    type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
    type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
61
    type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;
62 63

    type DIScope = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIScope;
D
Denis Merigoux 已提交
64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86
}

/// Forwards target data-layout queries to the wrapped `CodegenCx`.
impl ty::layout::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &ty::layout::TargetDataLayout {
        self.cx.data_layout()
    }
}

/// Exposes the type context of the wrapped `CodegenCx` directly on the
/// builder.
impl ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
        self.cx.tcx
    }
}

/// Allows layout queries to be made directly on the builder; they are
/// forwarded to the `CodegenCx`.
impl ty::layout::LayoutOf for Builder<'_, '_, 'tcx> {
    type Ty = Ty<'tcx>;
    type TyLayout = TyLayout<'tcx>;

    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
        self.cx.layout_of(ty)
    }
}

B
bjorn3 已提交
87 88 89 90 91 92 93
/// Deref to the codegen context, so `CodegenCx` methods can be called on a
/// `Builder` without going through `.cx` explicitly.
impl Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.cx
    }
}
D
Denis Merigoux 已提交
94 95

impl HasCodegen<'tcx> for Builder<'_, 'll, 'tcx> {
D
Denis Merigoux 已提交
96
    type CodegenCx = CodegenCx<'ll, 'tcx>;
97
}
98

99
impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
100 101
    fn new_block<'b>(
        cx: &'a CodegenCx<'ll, 'tcx>,
102
        llfn: &'ll Value,
103 104
        name: &'b str
    ) -> Self {
105
        let mut bx = Builder::with_cx(cx);
106
        let llbb = unsafe {
107
            let name = SmallCStr::new(name);
108
            llvm::LLVMAppendBasicBlockInContext(
109
                cx.llcx,
110 111 112 113
                llfn,
                name.as_ptr()
            )
        };
114 115
        bx.position_at_end(llbb);
        bx
116 117
    }

118
    fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
M
Mark-Simulacrum 已提交
119 120
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe {
121
            llvm::LLVMCreateBuilderInContext(cx.llcx)
M
Mark-Simulacrum 已提交
122
        };
123
        Builder {
124
            llbuilder,
125
            cx,
126 127 128
        }
    }

129
    fn build_sibling_block<'b>(&self, name: &'b str) -> Self {
130
        Builder::new_block(self.cx, self.llfn(), name)
131 132
    }

133
    fn llfn(&self) -> &'ll Value {
134 135 136 137 138
        unsafe {
            llvm::LLVMGetBasicBlockParent(self.llbb())
        }
    }

139
    fn llbb(&self) -> &'ll BasicBlock {
140 141 142 143 144
        unsafe {
            llvm::LLVMGetInsertBlock(self.llbuilder)
        }
    }

M
Mark-Simulacrum 已提交
145
    fn count_insn(&self, category: &str) {
146 147
        if self.cx().sess().codegen_stats() {
            self.cx().stats.borrow_mut().n_llvm_insns += 1;
148
        }
149 150 151 152 153 154
        if self.cx().sess().count_llvm_insns() {
            *self.cx().stats
                      .borrow_mut()
                      .llvm_insns
                      .entry(category.to_string())
                      .or_insert(0) += 1;
155 156 157
        }
    }

158
    fn set_value_name(&mut self, value: &'ll Value, name: &str) {
159
        let cname = SmallCStr::new(name);
160 161 162 163 164
        unsafe {
            llvm::LLVMSetValueName(value, cname.as_ptr());
        }
    }

165
    fn position_at_end(&mut self, llbb: &'ll BasicBlock) {
166 167 168 169 170
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

171
    fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
172 173 174 175 176
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }

177
    fn ret_void(&mut self) {
178 179 180 181 182 183
        self.count_insn("retvoid");
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

184
    fn ret(&mut self, v: &'ll Value) {
185 186 187 188 189 190
        self.count_insn("ret");
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

191
    fn br(&mut self, dest: &'ll BasicBlock) {
192 193 194 195 196 197
        self.count_insn("br");
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

198
    fn cond_br(
199
        &mut self,
200 201 202
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
203
    ) {
204 205 206 207 208 209
        self.count_insn("condbr");
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

210
    fn switch(
211
        &mut self,
212 213
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
214
        num_cases: usize,
215
    ) -> &'ll Value {
216 217 218 219 220
        unsafe {
            llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, num_cases as c_uint)
        }
    }

221
    fn invoke(
222
        &mut self,
223 224 225 226 227 228
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
229
        self.count_insn("invoke");
230

231 232 233
        debug!("Invoke {:?} with args ({:?})",
               llfn,
               args);
234

235
        let args = self.check_call("invoke", llfn, args);
236
        let bundle = funclet.map(|funclet| funclet.bundle());
237
        let bundle = bundle.as_ref().map(|b| &*b.raw);
238

239
        unsafe {
240 241 242 243 244 245 246 247
            llvm::LLVMRustBuildInvoke(self.llbuilder,
                                      llfn,
                                      args.as_ptr(),
                                      args.len() as c_uint,
                                      then,
                                      catch,
                                      bundle,
                                      noname())
248
        }
249 250
    }

251
    fn unreachable(&mut self) {
252 253 254 255 256 257 258
        self.count_insn("unreachable");
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

    /* Arithmetic */
259
    fn add(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
260 261 262 263 264 265
        self.count_insn("add");
        unsafe {
            llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

266
    fn fadd(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
267 268 269 270 271 272
        self.count_insn("fadd");
        unsafe {
            llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
        }
    }

273
    fn fadd_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
274 275 276 277 278 279 280 281
        self.count_insn("fadd");
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

282
    fn sub(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
283 284 285 286 287 288
        self.count_insn("sub");
        unsafe {
            llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
        }
    }

289
    fn fsub(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
B
Ben Harris 已提交
290
        self.count_insn("fsub");
291 292 293 294 295
        unsafe {
            llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
        }
    }

296
    fn fsub_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
B
Ben Harris 已提交
297
        self.count_insn("fsub");
298 299 300 301 302 303 304
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

305
    fn mul(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
306 307 308 309 310 311
        self.count_insn("mul");
        unsafe {
            llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
        }
    }

312
    fn fmul(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
313 314 315 316 317 318
        self.count_insn("fmul");
        unsafe {
            llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
        }
    }

319
    fn fmul_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
320 321 322 323 324 325 326 327 328
        self.count_insn("fmul");
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }


329
    fn udiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
330 331 332 333 334 335
        self.count_insn("udiv");
        unsafe {
            llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

336
    fn exactudiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
337 338 339 340 341 342
        self.count_insn("exactudiv");
        unsafe {
            llvm::LLVMBuildExactUDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

343
    fn sdiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
344 345 346 347 348 349
        self.count_insn("sdiv");
        unsafe {
            llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

350
    fn exactsdiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
351 352 353 354 355 356
        self.count_insn("exactsdiv");
        unsafe {
            llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

357
    fn fdiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
358 359 360 361 362 363
        self.count_insn("fdiv");
        unsafe {
            llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
        }
    }

364
    fn fdiv_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
365 366 367 368 369 370 371 372
        self.count_insn("fdiv");
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

373
    fn urem(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
374 375 376 377 378 379
        self.count_insn("urem");
        unsafe {
            llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
        }
    }

380
    fn srem(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
381 382 383 384 385 386
        self.count_insn("srem");
        unsafe {
            llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
        }
    }

387
    fn frem(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
388 389 390 391 392 393
        self.count_insn("frem");
        unsafe {
            llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
        }
    }

394
    fn frem_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
395 396 397 398 399 400 401 402
        self.count_insn("frem");
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

403
    fn shl(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
404 405 406 407 408 409
        self.count_insn("shl");
        unsafe {
            llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
        }
    }

410
    fn lshr(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
411 412 413 414 415 416
        self.count_insn("lshr");
        unsafe {
            llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
        }
    }

417
    fn ashr(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
418 419 420 421 422 423
        self.count_insn("ashr");
        unsafe {
            llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
        }
    }

424
    fn and(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
425 426 427 428 429 430
        self.count_insn("and");
        unsafe {
            llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
        }
    }

431
    fn or(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
432 433 434 435 436 437
        self.count_insn("or");
        unsafe {
            llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
        }
    }

438
    fn xor(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
439 440 441 442 443 444
        self.count_insn("xor");
        unsafe {
            llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
        }
    }

445
    fn neg(&mut self, v: &'ll Value) -> &'ll Value {
446 447
        self.count_insn("neg");
        unsafe {
448
            llvm::LLVMBuildNeg(self.llbuilder, v, noname())
449 450 451
        }
    }

452
    fn fneg(&mut self, v: &'ll Value) -> &'ll Value {
453 454
        self.count_insn("fneg");
        unsafe {
455
            llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
456 457 458
        }
    }

459
    fn not(&mut self, v: &'ll Value) -> &'ll Value {
460 461
        self.count_insn("not");
        unsafe {
462
            llvm::LLVMBuildNot(self.llbuilder, v, noname())
463 464 465
        }
    }

466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498 499 500 501 502 503 504 505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539
    /// Emits an overflow-checked integer binary operation by calling the
    /// matching `llvm.{s,u}{add,sub,mul}.with.overflow.iN` intrinsic.
    ///
    /// Returns `(result, overflowed)` — the wrapped result and an `i1`
    /// overflow flag — extracted from the intrinsic's aggregate return.
    ///
    /// Panics if `ty` is not an integer type.
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        use syntax::ast::IntTy::*;
        use syntax::ast::UintTy::*;
        use rustc::ty::{Int, Uint};

        // Normalize isize/usize to the target's concrete pointer-width
        // integer type so the intrinsic name below is fully determined.
        let new_sty = match ty.sty {
            Int(Isize) => Int(self.cx().tcx.sess.target.isize_ty),
            Uint(Usize) => Uint(self.cx().tcx.sess.target.usize_ty),
            ref t @ Uint(_) | ref t @ Int(_) => t.clone(),
            _ => panic!("tried to get overflow intrinsic for op applied to non-int type")
        };

        // Select the intrinsic by operation, signedness, and bit width.
        let name = match oop {
            OverflowOp::Add => match new_sty {
                Int(I8) => "llvm.sadd.with.overflow.i8",
                Int(I16) => "llvm.sadd.with.overflow.i16",
                Int(I32) => "llvm.sadd.with.overflow.i32",
                Int(I64) => "llvm.sadd.with.overflow.i64",
                Int(I128) => "llvm.sadd.with.overflow.i128",

                Uint(U8) => "llvm.uadd.with.overflow.i8",
                Uint(U16) => "llvm.uadd.with.overflow.i16",
                Uint(U32) => "llvm.uadd.with.overflow.i32",
                Uint(U64) => "llvm.uadd.with.overflow.i64",
                Uint(U128) => "llvm.uadd.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Sub => match new_sty {
                Int(I8) => "llvm.ssub.with.overflow.i8",
                Int(I16) => "llvm.ssub.with.overflow.i16",
                Int(I32) => "llvm.ssub.with.overflow.i32",
                Int(I64) => "llvm.ssub.with.overflow.i64",
                Int(I128) => "llvm.ssub.with.overflow.i128",

                Uint(U8) => "llvm.usub.with.overflow.i8",
                Uint(U16) => "llvm.usub.with.overflow.i16",
                Uint(U32) => "llvm.usub.with.overflow.i32",
                Uint(U64) => "llvm.usub.with.overflow.i64",
                Uint(U128) => "llvm.usub.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Mul => match new_sty {
                Int(I8) => "llvm.smul.with.overflow.i8",
                Int(I16) => "llvm.smul.with.overflow.i16",
                Int(I32) => "llvm.smul.with.overflow.i32",
                Int(I64) => "llvm.smul.with.overflow.i64",
                Int(I128) => "llvm.smul.with.overflow.i128",

                Uint(U8) => "llvm.umul.with.overflow.i8",
                Uint(U16) => "llvm.umul.with.overflow.i16",
                Uint(U32) => "llvm.umul.with.overflow.i32",
                Uint(U64) => "llvm.umul.with.overflow.i64",
                Uint(U128) => "llvm.umul.with.overflow.i128",

                _ => unreachable!(),
            },
        };

        let intrinsic = self.cx().get_intrinsic(&name);
        // The intrinsic returns `{ iN, i1 }`: (wrapped result, overflow bit).
        let res = self.call(intrinsic, &[lhs, rhs], None);
        (
            self.extract_value(res, 0),
            self.extract_value(res, 1),
        )
    }

540
    fn alloca(&mut self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
541
        let mut bx = Builder::with_cx(self.cx);
542
        bx.position_at_start(unsafe {
543 544
            llvm::LLVMGetFirstBasicBlock(self.llfn())
        });
545
        bx.dynamic_alloca(ty, name, align)
546 547
    }

548
    fn dynamic_alloca(&mut self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
549 550
        self.count_insn("alloca");
        unsafe {
551
            let alloca = if name.is_empty() {
552
                llvm::LLVMBuildAlloca(self.llbuilder, ty, noname())
553
            } else {
554
                let name = SmallCStr::new(name);
555
                llvm::LLVMBuildAlloca(self.llbuilder, ty,
A
Alex Crichton 已提交
556
                                      name.as_ptr())
557
            };
558
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
559
            alloca
560 561 562
        }
    }

563
    fn array_alloca(&mut self,
564
                        ty: &'ll Type,
565
                        len: &'ll Value,
M
Masaki Hara 已提交
566
                        name: &str,
567
                        align: Align) -> &'ll Value {
M
Masaki Hara 已提交
568 569 570 571 572 573 574 575 576
        self.count_insn("alloca");
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, noname())
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len,
                                           name.as_ptr())
            };
577
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
M
Masaki Hara 已提交
578 579 580 581
            alloca
        }
    }

582
    fn load(&mut self, ptr: &'ll Value, align: Align) -> &'ll Value {
583 584
        self.count_insn("load");
        unsafe {
585
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
586
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
587
            load
588 589 590
        }
    }

591
    fn volatile_load(&mut self, ptr: &'ll Value) -> &'ll Value {
592 593 594
        self.count_insn("load.volatile");
        unsafe {
            let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
595
            llvm::LLVMSetVolatile(insn, llvm::True);
596 597 598 599
            insn
        }
    }

600
    fn atomic_load(
601
        &mut self,
602
        ptr: &'ll Value,
603
        order: rustc_codegen_ssa::common::AtomicOrdering,
604
        size: Size,
605
    ) -> &'ll Value {
606 607
        self.count_insn("load.atomic");
        unsafe {
608 609 610 611 612 613
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                noname(),
                AtomicOrdering::from_generic(order),
            );
614 615
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
616
            load
617 618 619
        }
    }

620
    fn load_operand(
621
        &mut self,
622 623 624 625 626 627 628 629 630 631
        place: PlaceRef<'tcx, &'ll Value>
    ) -> OperandRef<'tcx, &'ll Value> {
        debug!("PlaceRef::load: {:?}", place);

        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());

        if place.layout.is_zst() {
            return OperandRef::new_zst(self.cx(), place.layout);
        }

632 633 634 635 636
        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: &layout::Scalar
        ) {
637 638 639
            let vr = scalar.valid_range.clone();
            match scalar.value {
                layout::Int(..) => {
640
                    let range = scalar.valid_range_exclusive(bx.cx());
641
                    if range.start != range.end {
642
                        bx.range_metadata(load, range);
643 644 645
                    }
                }
                layout::Pointer if vr.start() < vr.end() && !vr.contains(&0) => {
646
                    bx.nonnull_metadata(load);
647 648 649
                }
                _ => {}
            }
650
        }
651 652 653 654 655 656 657 658 659 660 661 662 663 664 665

        let val = if let Some(llextra) = place.llextra {
            OperandValue::Ref(place.llval, Some(llextra), place.align)
        } else if place.layout.is_llvm_immediate() {
            let mut const_llval = None;
            unsafe {
                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
                    if llvm::LLVMIsGlobalConstant(global) == llvm::True {
                        const_llval = llvm::LLVMGetInitializer(global);
                    }
                }
            }
            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(place.llval, place.align);
                if let layout::Abi::Scalar(ref scalar) = place.layout.abi {
666
                    scalar_load_metadata(self, load, scalar);
667 668 669
                }
                load
            });
670
            OperandValue::Immediate(to_immediate(self, llval, place.layout))
671
        } else if let layout::Abi::ScalarPair(ref a, ref b) = place.layout.abi {
672 673 674
            let b_offset = a.value.size(self).align_to(b.value.align(self).abi);

            let mut load = |i, scalar: &layout::Scalar, align| {
675
                let llptr = self.struct_gep(place.llval, i as u64);
676
                let load = self.load(llptr, align);
677
                scalar_load_metadata(self, load, scalar);
678 679 680 681 682 683
                if scalar.is_bool() {
                    self.trunc(load, self.cx().type_i1())
                } else {
                    load
                }
            };
684 685 686 687 688

            OperandValue::Pair(
                load(0, a, place.align),
                load(1, b, place.align.restrict_for_offset(b_offset)),
            )
689 690 691 692 693 694 695 696
        } else {
            OperandValue::Ref(place.llval, None, place.align)
        };

        OperandRef { val, layout: place.layout }
    }


697

698
    fn range_metadata(&mut self, load: &'ll Value, range: Range<u128>) {
699
        if self.cx().sess().target.target.arch == "amdgpu" {
700 701 702 703 704 705 706
            // amdgpu/LLVM does something weird and thinks a i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

707
        unsafe {
708
            let llty = self.cx.val_ty(load);
709
            let v = [
710 711
                self.cx.const_uint_big(llty, range.start),
                self.cx.const_uint_big(llty, range.end)
712
            ];
713

714
            llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
715
                                  llvm::LLVMMDNodeInContext(self.cx.llcx,
716 717
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
718 719 720
        }
    }

721
    /// Attaches `!nonnull` metadata to `load`, asserting the loaded pointer
    /// is never null.
    fn nonnull_metadata(&mut self, load: &'ll Value) {
        unsafe {
            // `!nonnull` takes an empty metadata node.
            let node = llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0);
            llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint, node);
        }
    }

728
    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
729 730 731
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

732
    fn store_with_flags(
733
        &mut self,
734 735
        val: &'ll Value,
        ptr: &'ll Value,
736
        align: Align,
737
        flags: MemFlags,
738
    ) -> &'ll Value {
739
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
740
        self.count_insn("store");
741
        let ptr = self.check_store(val, ptr);
742
        unsafe {
743
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
744 745 746
            let align = if flags.contains(MemFlags::UNALIGNED) {
                1
            } else {
747
                align.bytes() as c_uint
748 749
            };
            llvm::LLVMSetAlignment(store, align);
750 751 752 753 754 755 756 757
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
758
                let one = self.cx.const_i32(1);
759 760 761
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
762
            store
763 764 765
        }
    }

766
   fn atomic_store(&mut self, val: &'ll Value, ptr: &'ll Value,
767
                   order: rustc_codegen_ssa::common::AtomicOrdering, size: Size) {
768
        debug!("Store {:?} -> {:?}", val, ptr);
769
        self.count_insn("store.atomic");
770
        let ptr = self.check_store(val, ptr);
771
        unsafe {
772 773 774 775 776 777
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
778 779
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
780 781 782
        }
    }

783
    fn gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
784 785
        self.count_insn("gep");
        unsafe {
786
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
787 788 789 790
                               indices.len() as c_uint, noname())
        }
    }

791
    fn inbounds_gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
792 793 794
        self.count_insn("inboundsgep");
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
795
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
796 797 798 799
        }
    }

    /* Casts */
800
    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
801 802
        self.count_insn("trunc");
        unsafe {
803
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, noname())
804 805 806
        }
    }

807
    fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
808 809
        self.count_insn("sext");
        unsafe {
810
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, noname())
811 812 813
        }
    }

814
    fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
815 816
        self.count_insn("fptoui");
        unsafe {
817
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, noname())
818 819 820
        }
    }

821
    fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
822 823
        self.count_insn("fptosi");
        unsafe {
824
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty,noname())
825 826 827
        }
    }

828
    fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
829 830
        self.count_insn("uitofp");
        unsafe {
831
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, noname())
832 833 834
        }
    }

835
    fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
836 837
        self.count_insn("sitofp");
        unsafe {
838
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, noname())
839 840 841
        }
    }

842
    fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
843 844
        self.count_insn("fptrunc");
        unsafe {
845
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, noname())
846 847 848
        }
    }

849
    fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
850 851
        self.count_insn("fpext");
        unsafe {
852
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, noname())
853 854 855
        }
    }

856
    fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
857 858
        self.count_insn("ptrtoint");
        unsafe {
859
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, noname())
860 861 862
        }
    }

863
    fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
864 865
        self.count_insn("inttoptr");
        unsafe {
866
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, noname())
867 868 869
        }
    }

870
    fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
871 872
        self.count_insn("bitcast");
        unsafe {
873
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, noname())
874 875 876
        }
    }

877

878
    fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
879
        self.count_insn("intcast");
880
        unsafe {
881
            llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed)
882 883 884
        }
    }

885
    fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
886
        self.count_insn("pointercast");
887
        unsafe {
888
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, noname())
889 890 891 892
        }
    }

    /* Comparisons */
893
    fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
894
        self.count_insn("icmp");
895
        let op = llvm::IntPredicate::from_generic(op);
896 897 898 899 900
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

901
    fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
902 903 904 905 906 907 908
        self.count_insn("fcmp");
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
        }
    }

    /* Miscellaneous instructions */
909
    fn empty_phi(&mut self, ty: &'ll Type) -> &'ll Value {
910 911
        self.count_insn("emptyphi");
        unsafe {
912
            llvm::LLVMBuildPhi(self.llbuilder, ty, noname())
913 914 915
        }
    }

916
    fn phi(&mut self, ty: &'ll Type, vals: &[&'ll Value], bbs: &[&'ll BasicBlock]) -> &'ll Value {
917 918 919 920
        assert_eq!(vals.len(), bbs.len());
        let phi = self.empty_phi(ty);
        self.count_insn("addincoming");
        unsafe {
921 922
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
923 924 925 926 927
                                  vals.len() as c_uint);
            phi
        }
    }

928
    fn inline_asm_call(&mut self, asm: &CStr, cons: &CStr,
929
                       inputs: &[&'ll Value], output: &'ll Type,
930
                       volatile: bool, alignstack: bool,
931
                       dia: syntax::ast::AsmDialect) -> Option<&'ll Value> {
932 933
        self.count_insn("inlineasm");

934 935 936 937
        let volatile = if volatile { llvm::True }
                       else        { llvm::False };
        let alignstack = if alignstack { llvm::True }
                         else          { llvm::False };
938

939
        let argtys = inputs.iter().map(|v| {
940
            debug!("Asm Input Type: {:?}", *v);
941
            self.cx.val_ty(*v)
942
        }).collect::<Vec<_>>();
943

944
        debug!("Asm Output Type: {:?}", output);
945
        let fty = self.cx().type_func(&argtys[..], output);
946
        unsafe {
947
            // Ask LLVM to verify that the constraints are well-formed.
948
            let constraints_ok = llvm::LLVMRustInlineAsmVerify(fty, cons.as_ptr());
949
            debug!("Constraint verification result: {:?}", constraints_ok);
950
            if constraints_ok {
951
                let v = llvm::LLVMRustInlineAsm(
952 953 954 955 956 957 958
                    fty,
                    asm.as_ptr(),
                    cons.as_ptr(),
                    volatile,
                    alignstack,
                    AsmDialect::from_generic(dia),
                );
959 960
                Some(self.call(v, inputs, None))
            } else {
M
Matthias Krüger 已提交
961
                // LLVM has detected an issue with our constraints, bail out
962 963
                None
            }
964 965 966
        }
    }

967 968
    fn memcpy(&mut self, dst: &'ll Value, dst_align: Align,
                  src: &'ll Value, src_align: Align,
969 970 971 972 973 974 975 976 977 978 979 980
                  size: &'ll Value, flags: MemFlags) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memcpy.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.cx().type_ptr_to(self.cx().val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.cx().type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.cx().type_i8p());
        let src = self.pointercast(src, self.cx().type_i8p());
981
        unsafe {
982 983
            llvm::LLVMRustBuildMemCpy(self.llbuilder, dst, dst_align.bytes() as c_uint,
                                      src, src_align.bytes() as c_uint, size, is_volatile);
984 985 986
        }
    }

987 988
    fn memmove(&mut self, dst: &'ll Value, dst_align: Align,
                  src: &'ll Value, src_align: Align,
989 990 991 992 993 994 995 996 997 998 999 1000
                  size: &'ll Value, flags: MemFlags) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memmove.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.cx().type_ptr_to(self.cx().val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.cx().type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        let dst = self.pointercast(dst, self.cx().type_i8p());
        let src = self.pointercast(src, self.cx().type_i8p());
1001
        unsafe {
1002 1003
            llvm::LLVMRustBuildMemMove(self.llbuilder, dst, dst_align.bytes() as c_uint,
                                      src, src_align.bytes() as c_uint, size, is_volatile);
1004 1005 1006
        }
    }

1007
    fn memset(
1008
        &mut self,
1009 1010 1011
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
1012
        align: Align,
1013 1014
        flags: MemFlags,
    ) {
1015
        let ptr_width = &self.cx().sess().target.target.target_pointer_width;
1016 1017 1018
        let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
        let llintrinsicfn = self.cx().get_intrinsic(&intrinsic_key);
        let ptr = self.pointercast(ptr, self.cx().type_i8p());
1019
        let align = self.cx().const_u32(align.bytes() as u32);
1020 1021 1022 1023
        let volatile = self.cx().const_bool(flags.contains(MemFlags::VOLATILE));
        self.call(llintrinsicfn, &[ptr, fill_byte, size, align, volatile], None);
    }

1024
    fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
G
gnzlbg 已提交
1025 1026
        self.count_insn("minnum");
        unsafe {
G
gnzlbg 已提交
1027
            let instr = llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs);
1028
            instr.expect("LLVMRustBuildMinNum is not available in LLVM version < 6.0")
G
gnzlbg 已提交
1029 1030
        }
    }
1031
    fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
G
gnzlbg 已提交
1032 1033
        self.count_insn("maxnum");
        unsafe {
G
gnzlbg 已提交
1034
            let instr = llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs);
1035
            instr.expect("LLVMRustBuildMaxNum is not available in LLVM version < 6.0")
G
gnzlbg 已提交
1036 1037 1038
        }
    }

1039
    fn select(
1040
        &mut self, cond: &'ll Value,
1041 1042 1043
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
1044 1045 1046 1047 1048 1049
        self.count_insn("select");
        unsafe {
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
        }
    }

1050
    #[allow(dead_code)]
1051
    fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
1052 1053
        self.count_insn("vaarg");
        unsafe {
1054
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, noname())
1055 1056 1057
        }
    }

1058
    fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
1059 1060 1061 1062 1063 1064
        self.count_insn("extractelement");
        unsafe {
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
        }
    }

1065
    fn insert_element(
1066
        &mut self, vec: &'ll Value,
1067 1068 1069
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
1070 1071 1072 1073 1074 1075
        self.count_insn("insertelement");
        unsafe {
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
        }
    }

1076
    fn shuffle_vector(&mut self, v1: &'ll Value, v2: &'ll Value, mask: &'ll Value) -> &'ll Value {
1077 1078 1079 1080 1081 1082
        self.count_insn("shufflevector");
        unsafe {
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
        }
    }

1083
    /// Splats `elt` into every lane of a `num_elts`-wide vector: insert the
    /// element into lane 0 of an undef vector, then shuffle with an all-zero
    /// mask so every result lane reads lane 0.
    fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.cx().type_vector(elt_ty, num_elts as u64));
            let seeded = self.insert_element(undef, elt, self.cx.const_i32(0));
            let mask_ty = self.cx().type_vector(self.cx().type_i32(), num_elts as u64);
            self.shuffle_vector(seeded, undef, self.cx().const_null(mask_ty))
        }
    }

1093
    fn vector_reduce_fadd_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
1094 1095
        self.count_insn("vector.reduce.fadd_fast");
        unsafe {
1096 1097 1098
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
1099
            let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
1100 1101 1102 1103
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
1104
    fn vector_reduce_fmul_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
1105 1106
        self.count_insn("vector.reduce.fmul_fast");
        unsafe {
1107 1108 1109
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
1110
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
1111 1112 1113 1114
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
1115
    /// Builds a horizontal integer add across all lanes of `src`.
    fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.add");
        unsafe {
            llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src)
        }
    }
1119
    /// Builds a horizontal integer multiply across all lanes of `src`.
    fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.mul");
        unsafe {
            llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src)
        }
    }
1123
    /// Builds a horizontal bitwise AND across all lanes of `src`.
    fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.and");
        unsafe {
            llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src)
        }
    }
1127
    /// Builds a horizontal bitwise OR across all lanes of `src`.
    fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.or");
        unsafe {
            llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src)
        }
    }
1131
    /// Builds a horizontal bitwise XOR across all lanes of `src`.
    fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.xor");
        unsafe {
            llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src)
        }
    }
1135
    /// Builds a horizontal float minimum across all lanes of `src`
    /// (NaN-propagating variant).
    fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmin");
        unsafe {
            llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false)
        }
    }
1139
    /// Builds a horizontal float maximum across all lanes of `src`
    /// (NaN-propagating variant).
    fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
        self.count_insn("vector.reduce.fmax");
        unsafe {
            llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false)
        }
    }
1143
    fn vector_reduce_fmin_fast(&mut self, src: &'ll Value) -> &'ll Value {
1144 1145
        self.count_insn("vector.reduce.fmin_fast");
        unsafe {
1146
            let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
1147 1148 1149 1150
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
1151
    fn vector_reduce_fmax_fast(&mut self, src: &'ll Value) -> &'ll Value {
1152 1153
        self.count_insn("vector.reduce.fmax_fast");
        unsafe {
1154
            let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
1155 1156 1157 1158
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
1159
    /// Builds a horizontal integer minimum across all lanes of `src`,
    /// comparing as signed when `is_signed` is true.
    fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        self.count_insn("vector.reduce.min");
        unsafe {
            llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed)
        }
    }
1163
    /// Builds a horizontal integer maximum across all lanes of `src`,
    /// comparing as signed when `is_signed` is true.
    fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        self.count_insn("vector.reduce.max");
        unsafe {
            llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed)
        }
    }

1168
    fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
1169
        self.count_insn("extractvalue");
1170
        assert_eq!(idx as c_uint as u64, idx);
1171 1172 1173 1174 1175
        unsafe {
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
        }
    }

1176
    fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value,
1177
                       idx: u64) -> &'ll Value {
1178
        self.count_insn("insertvalue");
1179
        assert_eq!(idx as c_uint as u64, idx);
1180 1181
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
1182
                                       noname())
1183 1184 1185
        }
    }

1186
    fn landing_pad(&mut self, ty: &'ll Type, pers_fn: &'ll Value,
1187
                       num_clauses: usize) -> &'ll Value {
1188 1189
        self.count_insn("landingpad");
        unsafe {
1190
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn,
1191
                                      num_clauses as c_uint, noname())
1192 1193 1194
        }
    }

1195
    fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
1196 1197 1198 1199 1200
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

1201
    fn set_cleanup(&mut self, landing_pad: &'ll Value) {
1202 1203
        self.count_insn("setcleanup");
        unsafe {
1204
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
1205 1206 1207
        }
    }

1208
    fn resume(&mut self, exn: &'ll Value) -> &'ll Value {
1209 1210 1211 1212 1213 1214
        self.count_insn("resume");
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }

1215
    fn cleanup_pad(&mut self,
1216
                       parent: Option<&'ll Value>,
1217
                       args: &[&'ll Value]) -> Funclet<'ll> {
1218
        self.count_insn("cleanuppad");
1219
        let name = const_cstr!("cleanuppad");
1220 1221 1222 1223 1224 1225 1226
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(self.llbuilder,
                                          parent,
                                          args.len() as c_uint,
                                          args.as_ptr(),
                                          name.as_ptr())
        };
1227
        Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
1228 1229
    }

1230
    fn cleanup_ret(
1231
        &mut self, funclet: &Funclet<'ll>,
1232 1233
        unwind: Option<&'ll BasicBlock>,
    ) -> &'ll Value {
1234 1235
        self.count_insn("cleanupret");
        let ret = unsafe {
1236
            llvm::LLVMRustBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind)
1237
        };
1238
        ret.expect("LLVM does not have support for cleanupret")
1239 1240
    }

1241
    fn catch_pad(&mut self,
1242
                     parent: &'ll Value,
1243
                     args: &[&'ll Value]) -> Funclet<'ll> {
1244
        self.count_insn("catchpad");
1245
        let name = const_cstr!("catchpad");
1246 1247 1248 1249 1250
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
                                        args.len() as c_uint, args.as_ptr(),
                                        name.as_ptr())
        };
1251
        Funclet::new(ret.expect("LLVM does not have support for catchpad"))
1252 1253
    }

1254
    fn catch_ret(&mut self, funclet: &Funclet<'ll>, unwind: &'ll BasicBlock) -> &'ll Value {
1255 1256
        self.count_insn("catchret");
        let ret = unsafe {
1257
            llvm::LLVMRustBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind)
1258
        };
1259
        ret.expect("LLVM does not have support for catchret")
1260 1261
    }

1262
    fn catch_switch(
1263
        &mut self,
1264 1265
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
1266
        num_handlers: usize,
1267
    ) -> &'ll Value {
1268
        self.count_insn("catchswitch");
1269
        let name = const_cstr!("catchswitch");
1270 1271 1272 1273 1274
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
                                           num_handlers as c_uint,
                                           name.as_ptr())
        };
1275
        ret.expect("LLVM does not have support for catchswitch")
1276 1277
    }

1278
    fn add_handler(&mut self, catch_switch: &'ll Value, handler: &'ll BasicBlock) {
1279 1280 1281 1282 1283
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }

1284
    /// Sets `personality` as the personality function of the function
    /// currently being built.
    fn set_personality_fn(&mut self, personality: &'ll Value) {
        unsafe { llvm::LLVMSetPersonalityFn(self.llfn(), personality) }
    }

1290
    // Atomic Operations
1291
    fn atomic_cmpxchg(
1292
        &mut self,
1293 1294 1295
        dst: &'ll Value,
        cmp: &'ll Value,
        src: &'ll Value,
1296 1297
        order: rustc_codegen_ssa::common::AtomicOrdering,
        failure_order: rustc_codegen_ssa::common::AtomicOrdering,
1298
        weak: bool,
1299
    ) -> &'ll Value {
1300
        let weak = if weak { llvm::True } else { llvm::False };
1301
        unsafe {
1302 1303 1304 1305 1306 1307 1308 1309 1310
            llvm::LLVMRustBuildAtomicCmpXchg(
                self.llbuilder,
                dst,
                cmp,
                src,
                AtomicOrdering::from_generic(order),
                AtomicOrdering::from_generic(failure_order),
                weak
            )
1311 1312
        }
    }
1313
    fn atomic_rmw(
1314
        &mut self,
1315
        op: rustc_codegen_ssa::common::AtomicRmwBinOp,
1316 1317
        dst: &'ll Value,
        src: &'ll Value,
1318
        order: rustc_codegen_ssa::common::AtomicOrdering,
1319
    ) -> &'ll Value {
1320
        unsafe {
1321 1322 1323 1324 1325
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
1326
                AtomicOrdering::from_generic(order),
1327
                False)
1328 1329
        }
    }
J
James Miller 已提交
1330

D
Denis Merigoux 已提交
1331
    fn atomic_fence(
1332
        &mut self,
1333 1334
        order: rustc_codegen_ssa::common::AtomicOrdering,
        scope: rustc_codegen_ssa::common::SynchronizationScope
D
Denis Merigoux 已提交
1335
    ) {
J
James Miller 已提交
1336
        unsafe {
1337 1338 1339
            llvm::LLVMRustBuildAtomicFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
1340
                SynchronizationScope::from_generic(scope)
1341
            );
J
James Miller 已提交
1342 1343
        }
    }
1344

1345
    fn add_case(&mut self, s: &'ll Value, on_val: &'ll Value, dest: &'ll BasicBlock) {
1346 1347 1348 1349 1350
        unsafe {
            llvm::LLVMAddCase(s, on_val, dest)
        }
    }

1351
    fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
B
Ben Harris 已提交
1352
        self.count_insn("addincoming");
1353 1354 1355 1356 1357
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }

1358
    fn set_invariant_load(&mut self, load: &'ll Value) {
1359 1360
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
1361
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
1362 1363 1364
        }
    }

1365
    fn check_store<'b>(&mut self,
1366 1367
                       val: &'ll Value,
                       ptr: &'ll Value) -> &'ll Value {
1368 1369
        let dest_ptr_ty = self.cx.val_ty(ptr);
        let stored_ty = self.cx.val_ty(val);
1370
        let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);
1371

D
Denis Merigoux 已提交
1372
        assert_eq!(self.cx.type_kind(dest_ptr_ty), TypeKind::Pointer);
1373 1374 1375 1376 1377 1378 1379 1380 1381 1382 1383

        if dest_ptr_ty == stored_ptr_ty {
            ptr
        } else {
            debug!("Type mismatch in store. \
                    Expected {:?}, got {:?}; inserting bitcast",
                   dest_ptr_ty, stored_ptr_ty);
            self.bitcast(ptr, stored_ptr_ty)
        }
    }

1384
    fn check_call<'b>(&mut self,
1385
                      typ: &str,
1386 1387
                      llfn: &'ll Value,
                      args: &'b [&'ll Value]) -> Cow<'b, [&'ll Value]> {
1388
        let mut fn_ty = self.cx.val_ty(llfn);
1389
        // Strip off pointers
D
Denis Merigoux 已提交
1390
        while self.cx.type_kind(fn_ty) == TypeKind::Pointer {
D
Denis Merigoux 已提交
1391
            fn_ty = self.cx.element_type(fn_ty);
1392 1393
        }

D
Denis Merigoux 已提交
1394
        assert!(self.cx.type_kind(fn_ty) == TypeKind::Function,
1395
                "builder::{} not passed a function, but {:?}", typ, fn_ty);
1396

1397
        let param_tys = self.cx.func_params_types(fn_ty);
1398

1399
        let all_args_match = param_tys.iter()
1400
            .zip(args.iter().map(|&v| self.cx().val_ty(v)))
1401 1402 1403 1404 1405 1406 1407 1408 1409 1410
            .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);

        if all_args_match {
            return Cow::Borrowed(args);
        }

        let casted_args: Vec<_> = param_tys.into_iter()
            .zip(args.iter())
            .enumerate()
            .map(|(i, (expected_ty, &actual_val))| {
1411
                let actual_ty = self.cx().val_ty(actual_val);
1412 1413 1414
                if expected_ty != actual_ty {
                    debug!("Type mismatch in function call of {:?}. \
                            Expected {:?} for param {}, got {:?}; injecting bitcast",
1415
                           llfn, expected_ty, i, actual_ty);
1416 1417 1418 1419 1420 1421
                    self.bitcast(actual_val, expected_ty)
                } else {
                    actual_val
                }
            })
            .collect();
1422

L
ljedrz 已提交
1423
        Cow::Owned(casted_args)
1424
    }
1425

1426
    /// Emits a `llvm.lifetime.start` marker for `size` bytes at `ptr`.
    fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
    }

1430
    /// Emits a `llvm.lifetime.end` marker for `size` bytes at `ptr`.
    fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
    }

1434
    fn call(
1435
        &mut self,
1436 1437 1438 1439
        llfn: &'ll Value,
        args: &[&'ll Value],
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
1440 1441 1442 1443 1444 1445 1446
        self.count_insn("call");

        debug!("Call {:?} with args ({:?})",
               llfn,
               args);

        let args = self.check_call("call", llfn, args);
1447
        let bundle = funclet.map(|funclet| funclet.bundle());
1448
        let bundle = bundle.as_ref().map(|b| &*b.raw);
1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460

        unsafe {
            llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
                bundle, noname()
            )
        }
    }

1461
    fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
1462 1463 1464 1465 1466 1467
        self.count_insn("zext");
        unsafe {
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, noname())
        }
    }

1468
    fn struct_gep(&mut self, ptr: &'ll Value, idx: u64) -> &'ll Value {
1469 1470 1471 1472 1473 1474 1475
        self.count_insn("structgep");
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
        }
    }

1476
    /// Returns the codegen context this builder belongs to.
    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }
1479

1480 1481
    /// Deletes the basic block `bb` from its containing function.
    ///
    /// # Safety
    /// The caller must ensure `bb` is no longer referenced after deletion.
    unsafe fn delete_basic_block(&mut self, bb: &'ll BasicBlock) {
        llvm::LLVMDeleteBasicBlock(bb)
    }

1484
    fn do_not_inline(&mut self, llret: &'ll Value) {
1485 1486
        llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
    }
1487
}
1488 1489 1490 1491 1492 1493 1494 1495 1496 1497 1498 1499 1500 1501 1502 1503 1504 1505

impl Builder<'a, 'll, 'tcx> {
    /// Emits one lifetime-marker intrinsic call (`llvm.lifetime.start` or
    /// `llvm.lifetime.end`) covering `size` bytes at `ptr`.
    ///
    /// Does nothing when optimizations are disabled or the region is
    /// zero-sized.
    fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
        if self.cx.sess().opts.optimize == config::OptLevel::No {
            return;
        }

        let size = size.bytes();
        if size == 0 {
            return;
        }

        let lifetime_fn = self.cx.get_intrinsic(intrinsic);
        let ptr = self.pointercast(ptr, self.cx.type_i8p());
        self.call(lifetime_fn, &[self.cx.const_u64(size), ptr], None);
    }
}