builder.rs 44.6 KB
Newer Older
B
bjorn3 已提交
1
use crate::llvm::{AtomicRmwBinOp, AtomicOrdering, SynchronizationScope};
T
Taiki Endo 已提交
2 3 4 5 6 7
use crate::llvm::{self, False, BasicBlock};
use crate::common::Funclet;
use crate::context::CodegenCx;
use crate::type_::Type;
use crate::type_of::LayoutLlvmExt;
use crate::value::Value;
8
use syntax::symbol::LocalInternedString;
9
use rustc_codegen_ssa::common::{IntPredicate, TypeKind, RealPredicate};
T
Taiki Endo 已提交
10
use rustc_codegen_ssa::MemFlags;
11
use libc::{c_uint, c_char};
D
Denis Merigoux 已提交
12
use rustc::ty::{self, Ty, TyCtxt};
13
use rustc::ty::layout::{self, Align, Size, TyLayout};
B
bjorn3 已提交
14
use rustc::hir::def_id::DefId;
15
use rustc::session::config;
16
use rustc_data_structures::small_c_str::SmallCStr;
17
use rustc_codegen_ssa::traits::*;
18 19 20
use rustc_codegen_ssa::base::to_immediate;
use rustc_codegen_ssa::mir::operand::{OperandValue, OperandRef};
use rustc_codegen_ssa::mir::place::PlaceRef;
S
Saleem Jaffer 已提交
21
use rustc_target::spec::{HasTargetSpec, Target};
22
use std::borrow::Cow;
23
use std::ffi::CStr;
B
bjorn3 已提交
24
use std::ops::{Deref, Range};
25
use std::ptr;
26
use std::iter::TrustedLen;
27

28 29
// All Builders must have an llfn associated with them
#[must_use]
pub struct Builder<'a, 'll: 'a, 'tcx: 'll> {
    // Raw LLVM IR builder handle. Held by exclusive reference and disposed
    // exactly once in `Drop`.
    pub llbuilder: &'ll mut llvm::Builder<'ll>,
    // The codegen context this builder emits IR into.
    pub cx: &'a CodegenCx<'ll, 'tcx>,
}

35
impl Drop for Builder<'a, 'll, 'tcx> {
    fn drop(&mut self) {
        unsafe {
            // SAFETY: `llbuilder` is exclusively owned by this `Builder`, so
            // handing the pointer back to LLVM for disposal happens exactly
            // once, here.
            llvm::LLVMDisposeBuilder(&mut *(self.llbuilder as *mut _));
        }
    }
}

43 44 45 46 47 48 49 50 51
// FIXME(eddyb) use a checked constructor when they become `const fn`.
// SAFETY: the literal is a single NUL byte, i.e. a valid, NUL-terminated
// C string with no interior NULs.
const EMPTY_C_STR: &CStr = unsafe {
    CStr::from_bytes_with_nul_unchecked(b"\0")
};

/// Empty string, to be used where LLVM expects an instruction name, indicating
/// that the instruction is to be left unnamed (i.e. numbered, in textual IR).
// FIXME(eddyb) pass `&CStr` directly to FFI once it's a thin pointer.
const UNNAMED: *const c_char = EMPTY_C_STR.as_ptr();
52

53 54 55 56
// Forward every backend-associated type to the ones defined on `CodegenCx`,
// so the builder and its context always agree on value/type representations.
impl BackendTypes for Builder<'_, 'll, 'tcx> {
    type Value = <CodegenCx<'ll, 'tcx> as BackendTypes>::Value;
    type BasicBlock = <CodegenCx<'ll, 'tcx> as BackendTypes>::BasicBlock;
    type Type = <CodegenCx<'ll, 'tcx> as BackendTypes>::Type;
    type Funclet = <CodegenCx<'ll, 'tcx> as BackendTypes>::Funclet;

    type DIScope = <CodegenCx<'ll, 'tcx> as BackendTypes>::DIScope;
}

// Delegate to the codegen context's target data layout.
impl ty::layout::HasDataLayout for Builder<'_, '_, '_> {
    fn data_layout(&self) -> &ty::layout::TargetDataLayout {
        self.cx.data_layout()
    }
}

// Delegate to the codegen context's type context.
impl ty::layout::HasTyCtxt<'tcx> for Builder<'_, '_, 'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.cx.tcx
    }
}

S
Saleem Jaffer 已提交
74 75 76 77 78 79
// Delegate to the codegen context's parameter environment.
impl ty::layout::HasParamEnv<'tcx> for Builder<'_, '_, 'tcx> {
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.cx.param_env()
    }
}

S
Saleem Jaffer 已提交
80 81 82 83 84 85
// Delegate to the codegen context's target spec.
impl HasTargetSpec for Builder<'_, '_, 'tcx> {
    fn target_spec(&self) -> &Target {
        // `target_spec()` already returns `&Target`; the previous
        // `&self.cx.target_spec()` produced a needless `&&Target` that only
        // compiled via deref coercion (clippy::needless_borrow).
        self.cx.target_spec()
    }
}

D
Denis Merigoux 已提交
86 87 88 89 90 91 92 93 94
// Delegate layout queries to the codegen context.
impl ty::layout::LayoutOf for Builder<'_, '_, 'tcx> {
    type Ty = Ty<'tcx>;
    type TyLayout = TyLayout<'tcx>;

    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
        self.cx.layout_of(ty)
    }
}

B
bjorn3 已提交
95 96 97 98 99 100 101
// Let `Builder` transparently expose `CodegenCx` methods (e.g. `self.llfn()`,
// `self.sess()`), which the methods below rely on heavily.
impl Deref for Builder<'_, 'll, 'tcx> {
    type Target = CodegenCx<'ll, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.cx
    }
}
D
Denis Merigoux 已提交
102 103

// Ties this builder to its concrete codegen context type.
impl HasCodegen<'tcx> for Builder<'_, 'll, 'tcx> {
    type CodegenCx = CodegenCx<'ll, 'tcx>;
}
106

107 108 109 110
macro_rules! builder_methods_for_value_instructions {
    ($($name:ident($($arg:ident),*) => $llvm_capi:ident),+ $(,)?) => {
        $(fn $name(&mut self, $($arg: &'ll Value),*) -> &'ll Value {
            unsafe {
111
                llvm::$llvm_capi(self.llbuilder, $($arg,)* UNNAMED)
112 113 114 115 116
            }
        })*
    }
}

117
impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
118 119
    fn new_block<'b>(
        cx: &'a CodegenCx<'ll, 'tcx>,
120
        llfn: &'ll Value,
121 122
        name: &'b str
    ) -> Self {
123
        let mut bx = Builder::with_cx(cx);
124
        let llbb = unsafe {
125
            let name = SmallCStr::new(name);
126
            llvm::LLVMAppendBasicBlockInContext(
127
                cx.llcx,
128 129 130 131
                llfn,
                name.as_ptr()
            )
        };
132 133
        bx.position_at_end(llbb);
        bx
134 135
    }

136
    /// Creates a new builder from the crate context, not yet positioned at
    /// any basic block.
    fn with_cx(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
        // Create a fresh builder from the crate context.
        let llbuilder = unsafe {
            llvm::LLVMCreateBuilderInContext(cx.llcx)
        };
        Builder {
            llbuilder,
            cx,
        }
    }

147
    /// Creates a new block in the same function as the builder's current
    /// block (via `self.llfn()`).
    fn build_sibling_block<'b>(&self, name: &'b str) -> Self {
        Builder::new_block(self.cx, self.llfn(), name)
    }

151
    /// Returns the basic block the builder is currently positioned at.
    fn llbb(&self) -> &'ll BasicBlock {
        unsafe {
            llvm::LLVMGetInsertBlock(self.llbuilder)
        }
    }

    /// Moves the insertion point to the end of `llbb`.
    fn position_at_end(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMPositionBuilderAtEnd(self.llbuilder, llbb);
        }
    }

    /// Emits `ret void`.
    fn ret_void(&mut self) {
        unsafe {
            llvm::LLVMBuildRetVoid(self.llbuilder);
        }
    }

    /// Emits `ret <v>`.
    fn ret(&mut self, v: &'ll Value) {
        unsafe {
            llvm::LLVMBuildRet(self.llbuilder, v);
        }
    }

    /// Emits an unconditional branch to `dest`.
    fn br(&mut self, dest: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMBuildBr(self.llbuilder, dest);
        }
    }

    /// Emits a conditional branch to `then_llbb` or `else_llbb` on `cond`.
    fn cond_br(
        &mut self,
        cond: &'ll Value,
        then_llbb: &'ll BasicBlock,
        else_llbb: &'ll BasicBlock,
    ) {
        unsafe {
            llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
        }
    }

192
    /// Emits a `switch` on `v` over the given `cases`, falling through to
    /// `else_llbb` when no case matches.
    fn switch(
        &mut self,
        v: &'ll Value,
        else_llbb: &'ll BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, &'ll BasicBlock)> + TrustedLen,
    ) {
        // The case count is passed up front so LLVM can size the instruction.
        let switch = unsafe {
            llvm::LLVMBuildSwitch(self.llbuilder, v, else_llbb, cases.len() as c_uint)
        };
        for (on_val, dest) in cases {
            // Each case value must have the same LLVM type as the scrutinee.
            let on_val = self.const_uint_big(self.val_ty(v), on_val);
            unsafe {
                llvm::LLVMAddCase(switch, on_val, dest)
            }
        }
    }

209
    /// Emits an `invoke` of `llfn`: like a call, but on unwind control
    /// transfers to `catch`, while normal return continues at `then`.
    fn invoke(
        &mut self,
        llfn: &'ll Value,
        args: &[&'ll Value],
        then: &'ll BasicBlock,
        catch: &'ll BasicBlock,
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {

        debug!("Invoke {:?} with args ({:?})",
               llfn,
               args);

        // Normalize the arguments against the callee's signature
        // (see `check_call` below in this impl).
        let args = self.check_call("invoke", llfn, args);
        // Attach the funclet operand bundle, if one was supplied.
        let bundle = funclet.map(|funclet| funclet.bundle());
        let bundle = bundle.as_ref().map(|b| &*b.raw);

        unsafe {
            llvm::LLVMRustBuildInvoke(self.llbuilder,
                                      llfn,
                                      args.as_ptr(),
                                      args.len() as c_uint,
                                      then,
                                      catch,
                                      bundle,
                                      UNNAMED)
        }
    }

238
    /// Emits an `unreachable` terminator.
    fn unreachable(&mut self) {
        unsafe {
            llvm::LLVMBuildUnreachable(self.llbuilder);
        }
    }

244 245 246 247 248 249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267
    // Simple value-producing instructions, generated by the
    // `builder_methods_for_value_instructions!` macro above. The
    // `unchecked_*` variants emit the NSW/NUW (no signed/unsigned wrap)
    // forms of the corresponding arithmetic instructions.
    builder_methods_for_value_instructions! {
        add(a, b) => LLVMBuildAdd,
        fadd(a, b) => LLVMBuildFAdd,
        sub(a, b) => LLVMBuildSub,
        fsub(a, b) => LLVMBuildFSub,
        mul(a, b) => LLVMBuildMul,
        fmul(a, b) => LLVMBuildFMul,
        udiv(a, b) => LLVMBuildUDiv,
        exactudiv(a, b) => LLVMBuildExactUDiv,
        sdiv(a, b) => LLVMBuildSDiv,
        exactsdiv(a, b) => LLVMBuildExactSDiv,
        fdiv(a, b) => LLVMBuildFDiv,
        urem(a, b) => LLVMBuildURem,
        srem(a, b) => LLVMBuildSRem,
        frem(a, b) => LLVMBuildFRem,
        shl(a, b) => LLVMBuildShl,
        lshr(a, b) => LLVMBuildLShr,
        ashr(a, b) => LLVMBuildAShr,
        and(a, b) => LLVMBuildAnd,
        or(a, b) => LLVMBuildOr,
        xor(a, b) => LLVMBuildXor,
        neg(x) => LLVMBuildNeg,
        fneg(x) => LLVMBuildFNeg,
        not(x) => LLVMBuildNot,
        unchecked_sadd(x, y) => LLVMBuildNSWAdd,
        unchecked_uadd(x, y) => LLVMBuildNUWAdd,
        unchecked_ssub(x, y) => LLVMBuildNSWSub,
        unchecked_usub(x, y) => LLVMBuildNUWSub,
        unchecked_smul(x, y) => LLVMBuildNSWMul,
        unchecked_umul(x, y) => LLVMBuildNUWMul,
    }

276
    // The `*_fast` variants below emit the same float instructions as their
    // plain counterparts, then mark them with LLVM's "unsafe algebra"
    // (fast-math) flag via `LLVMRustSetHasUnsafeAlgebra`.

    /// `fadd` with fast-math flags set.
    fn fadd_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    /// `fsub` with fast-math flags set.
    fn fsub_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    /// `fmul` with fast-math flags set.
    fn fmul_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    /// `fdiv` with fast-math flags set.
    fn fdiv_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

    /// `frem` with fast-math flags set.
    fn frem_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, UNNAMED);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }

316 317 318
    /// Emits an overflow-checked integer `oop` (add/sub/mul) on `lhs`/`rhs`
    /// via the matching `llvm.*.with.overflow.*` intrinsic, returning the
    /// result value and the overflow flag as a pair.
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'_>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value) {
        use syntax::ast::IntTy::*;
        use syntax::ast::UintTy::*;
        use rustc::ty::{Int, Uint};

        // Normalize pointer-sized ints to the target's concrete width so the
        // intrinsic name below can be resolved.
        let new_sty = match ty.sty {
            Int(Isize) => Int(self.tcx.sess.target.isize_ty),
            Uint(Usize) => Uint(self.tcx.sess.target.usize_ty),
            ref t @ Uint(_) | ref t @ Int(_) => t.clone(),
            _ => panic!("tried to get overflow intrinsic for op applied to non-int type")
        };

        let name = match oop {
            OverflowOp::Add => match new_sty {
                Int(I8) => "llvm.sadd.with.overflow.i8",
                Int(I16) => "llvm.sadd.with.overflow.i16",
                Int(I32) => "llvm.sadd.with.overflow.i32",
                Int(I64) => "llvm.sadd.with.overflow.i64",
                Int(I128) => "llvm.sadd.with.overflow.i128",

                Uint(U8) => "llvm.uadd.with.overflow.i8",
                Uint(U16) => "llvm.uadd.with.overflow.i16",
                Uint(U32) => "llvm.uadd.with.overflow.i32",
                Uint(U64) => "llvm.uadd.with.overflow.i64",
                Uint(U128) => "llvm.uadd.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Sub => match new_sty {
                Int(I8) => "llvm.ssub.with.overflow.i8",
                Int(I16) => "llvm.ssub.with.overflow.i16",
                Int(I32) => "llvm.ssub.with.overflow.i32",
                Int(I64) => "llvm.ssub.with.overflow.i64",
                Int(I128) => "llvm.ssub.with.overflow.i128",

                Uint(U8) => "llvm.usub.with.overflow.i8",
                Uint(U16) => "llvm.usub.with.overflow.i16",
                Uint(U32) => "llvm.usub.with.overflow.i32",
                Uint(U64) => "llvm.usub.with.overflow.i64",
                Uint(U128) => "llvm.usub.with.overflow.i128",

                _ => unreachable!(),
            },
            OverflowOp::Mul => match new_sty {
                Int(I8) => "llvm.smul.with.overflow.i8",
                Int(I16) => "llvm.smul.with.overflow.i16",
                Int(I32) => "llvm.smul.with.overflow.i32",
                Int(I64) => "llvm.smul.with.overflow.i64",
                Int(I128) => "llvm.smul.with.overflow.i128",

                Uint(U8) => "llvm.umul.with.overflow.i8",
                Uint(U16) => "llvm.umul.with.overflow.i16",
                Uint(U32) => "llvm.umul.with.overflow.i32",
                Uint(U64) => "llvm.umul.with.overflow.i64",
                Uint(U128) => "llvm.umul.with.overflow.i128",

                _ => unreachable!(),
            },
        };

        let intrinsic = self.get_intrinsic(&name);
        // The intrinsic returns a { result, overflowed } aggregate; split it.
        let res = self.call(intrinsic, &[lhs, rhs], None);
        (
            self.extract_value(res, 0),
            self.extract_value(res, 1),
        )
    }

390
    /// Emits an alloca at the start of the function's first (entry) basic
    /// block, regardless of where `self` is currently positioned, so LLVM
    /// treats it as a static stack slot.
    fn alloca(&mut self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
        let mut bx = Builder::with_cx(self.cx);
        bx.position_at_start(unsafe {
            llvm::LLVMGetFirstBasicBlock(self.llfn())
        });
        bx.dynamic_alloca(ty, name, align)
    }

398
    /// Emits an alloca at the current insertion point, with the given
    /// alignment; an empty `name` leaves the slot unnamed.
    fn dynamic_alloca(&mut self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildAlloca(self.llbuilder, ty, UNNAMED)
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildAlloca(self.llbuilder, ty,
                                      name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

412
    /// Emits an array alloca of `len` elements of `ty` with the given
    /// alignment; an empty `name` leaves the slot unnamed.
    fn array_alloca(&mut self,
                        ty: &'ll Type,
                        len: &'ll Value,
                        name: &str,
                        align: Align) -> &'ll Value {
        unsafe {
            let alloca = if name.is_empty() {
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, UNNAMED)
            } else {
                let name = SmallCStr::new(name);
                llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len,
                                           name.as_ptr())
            };
            llvm::LLVMSetAlignment(alloca, align.bytes() as c_uint);
            alloca
        }
    }

430
    /// Emits a load from `ptr` with the given alignment.
    fn load(&mut self, ptr: &'ll Value, align: Align) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
            llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
            load
        }
    }

    /// Emits a volatile load from `ptr` (no explicit alignment is set).
    fn volatile_load(&mut self, ptr: &'ll Value) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, UNNAMED);
            llvm::LLVMSetVolatile(load, llvm::True);
            load
        }
    }

    /// Emits an atomic load from `ptr` with the given memory ordering.
    fn atomic_load(
        &mut self,
        ptr: &'ll Value,
        order: rustc_codegen_ssa::common::AtomicOrdering,
        size: Size,
    ) -> &'ll Value {
        unsafe {
            let load = llvm::LLVMRustBuildAtomicLoad(
                self.llbuilder,
                ptr,
                UNNAMED,
                AtomicOrdering::from_generic(order),
            );
            // LLVM requires the alignment of atomic loads to be at least the size of the type.
            llvm::LLVMSetAlignment(load, size.bytes() as c_uint);
            load
        }
    }

465
    /// Loads the value behind `place` into an operand, choosing the
    /// representation (`Ref`, `Immediate`, or `Pair`) from its layout and
    /// attaching range/nonnull metadata where the layout permits it.
    fn load_operand(
        &mut self,
        place: PlaceRef<'tcx, &'ll Value>
    ) -> OperandRef<'tcx, &'ll Value> {
        debug!("PlaceRef::load: {:?}", place);

        // Unsized places must carry their extra (metadata) part, and only they do.
        assert_eq!(place.llextra.is_some(), place.layout.is_unsized());

        if place.layout.is_zst() {
            // Zero-sized types have no bytes to load.
            return OperandRef::new_zst(self, place.layout);
        }

        // Annotates a scalar load with value-range or nonnull metadata so
        // LLVM can exploit the scalar's known valid range.
        fn scalar_load_metadata<'a, 'll, 'tcx>(
            bx: &mut Builder<'a, 'll, 'tcx>,
            load: &'ll Value,
            scalar: &layout::Scalar
        ) {
            let vr = scalar.valid_range.clone();
            match scalar.value {
                layout::Int(..) => {
                    let range = scalar.valid_range_exclusive(bx);
                    // A full range carries no information; skip the metadata.
                    if range.start != range.end {
                        bx.range_metadata(load, range);
                    }
                }
                layout::Pointer if vr.start() < vr.end() && !vr.contains(&0) => {
                    bx.nonnull_metadata(load);
                }
                _ => {}
            }
        }

        let val = if let Some(llextra) = place.llextra {
            // Unsized: keep it by-reference together with its metadata.
            OperandValue::Ref(place.llval, Some(llextra), place.align)
        } else if place.layout.is_llvm_immediate() {
            // If the place is a constant global, read its initializer
            // directly instead of emitting a load.
            let mut const_llval = None;
            unsafe {
                if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
                    if llvm::LLVMIsGlobalConstant(global) == llvm::True {
                        const_llval = llvm::LLVMGetInitializer(global);
                    }
                }
            }
            let llval = const_llval.unwrap_or_else(|| {
                let load = self.load(place.llval, place.align);
                if let layout::Abi::Scalar(ref scalar) = place.layout.abi {
                    scalar_load_metadata(self, load, scalar);
                }
                load
            });
            OperandValue::Immediate(to_immediate(self, llval, place.layout))
        } else if let layout::Abi::ScalarPair(ref a, ref b) = place.layout.abi {
            // Load each half of the pair separately; the second half's
            // alignment is reduced by its offset within the pair.
            let b_offset = a.value.size(self).align_to(b.value.align(self).abi);

            let mut load = |i, scalar: &layout::Scalar, align| {
                let llptr = self.struct_gep(place.llval, i as u64);
                let load = self.load(llptr, align);
                scalar_load_metadata(self, load, scalar);
                // `bool` is stored as i8 but used as i1; truncate on load.
                if scalar.is_bool() {
                    self.trunc(load, self.type_i1())
                } else {
                    load
                }
            };

            OperandValue::Pair(
                load(0, a, place.align),
                load(1, b, place.align.restrict_for_offset(b_offset)),
            )
        } else {
            // Aggregates stay by-reference.
            OperandValue::Ref(place.llval, None, place.align)
        };

        OperandRef { val, layout: place.layout }
    }

541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560
    /// Stores `cg_elem` into each of the `count` elements of `dest` by
    /// emitting a header/body/next loop over the element pointers, and
    /// returns a builder positioned in the loop-exit block.
    fn write_operand_repeatedly(
        mut self,
        cg_elem: OperandRef<'tcx, &'ll Value>,
        count: u64,
        dest: PlaceRef<'tcx, &'ll Value>,
    ) -> Self {
        let zero = self.const_usize(0);
        let count = self.const_usize(count);
        // Pointers to the first element and one-past-the-end.
        let start = dest.project_index(&mut self, zero).llval;
        let end = dest.project_index(&mut self, count).llval;

        let mut header_bx = self.build_sibling_block("repeat_loop_header");
        let mut body_bx = self.build_sibling_block("repeat_loop_body");
        let next_bx = self.build_sibling_block("repeat_loop_next");

        self.br(header_bx.llbb());
        // `current` is the loop's induction pointer, seeded with `start`.
        let current = header_bx.phi(self.val_ty(start), &[start], &[self.llbb()]);

        let keep_going = header_bx.icmp(IntPredicate::IntNE, current, end);
        header_bx.cond_br(keep_going, body_bx.llbb(), next_bx.llbb());

        let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
        cg_elem.val.store(&mut body_bx,
            PlaceRef::new_sized(current, cg_elem.layout, align));

        // Advance to the next element and close the back-edge into the phi.
        let next = body_bx.inbounds_gep(current, &[self.const_usize(1)]);
        body_bx.br(header_bx.llbb());
        header_bx.add_incoming_to_phi(current, next, body_bx.llbb());

        next_bx
    }
572

573
    /// Attaches `!range` metadata to `load`, telling LLVM the loaded value
    /// lies in `range` (half-open).
    fn range_metadata(&mut self, load: &'ll Value, range: Range<u128>) {
        if self.sess().target.target.arch == "amdgpu" {
            // amdgpu/LLVM does something weird and thinks a i64 value is
            // split into a v2i32, halving the bitwidth LLVM expects,
            // tripping an assertion. So, for now, just disable this
            // optimization.
            return;
        }

        unsafe {
            // The metadata bounds must match the loaded value's type.
            let llty = self.cx.val_ty(load);
            let v = [
                self.cx.const_uint_big(llty, range.start),
                self.cx.const_uint_big(llty, range.end)
            ];

            llvm::LLVMSetMetadata(load, llvm::MD_range as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx,
                                                            v.as_ptr(),
                                                            v.len() as c_uint));
        }
    }

596
    /// Attaches `!nonnull` metadata to `load` (an empty metadata node).
    fn nonnull_metadata(&mut self, load: &'ll Value) {
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_nonnull as c_uint,
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
        }
    }

603
    /// Emits a plain (non-volatile, temporal, aligned) store of `val` to `ptr`.
    fn store(&mut self, val: &'ll Value, ptr: &'ll Value, align: Align) -> &'ll Value {
        self.store_with_flags(val, ptr, align, MemFlags::empty())
    }

607
    /// Emits a store of `val` to `ptr`, honoring the UNALIGNED, VOLATILE and
    /// NONTEMPORAL flags; returns the store instruction.
    fn store_with_flags(
        &mut self,
        val: &'ll Value,
        ptr: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) -> &'ll Value {
        debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
        // Normalize the destination pointer against the stored value's type
        // (see `check_store` below in this impl).
        let ptr = self.check_store(val, ptr);
        unsafe {
            let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
            // UNALIGNED is encoded as an explicit alignment of 1 byte.
            let align = if flags.contains(MemFlags::UNALIGNED) {
                1
            } else {
                align.bytes() as c_uint
            };
            llvm::LLVMSetAlignment(store, align);
            if flags.contains(MemFlags::VOLATILE) {
                llvm::LLVMSetVolatile(store, llvm::True);
            }
            if flags.contains(MemFlags::NONTEMPORAL) {
                // According to LLVM [1] building a nontemporal store must
                // *always* point to a metadata value of the integer 1.
                //
                // [1]: http://llvm.org/docs/LangRef.html#store-instruction
                let one = self.cx.const_i32(1);
                let node = llvm::LLVMMDNodeInContext(self.cx.llcx, &one, 1);
                llvm::LLVMSetMetadata(store, llvm::MD_nontemporal as c_uint, node);
            }
            store
        }
    }

640
   fn atomic_store(&mut self, val: &'ll Value, ptr: &'ll Value,
641
                   order: rustc_codegen_ssa::common::AtomicOrdering, size: Size) {
642
        debug!("Store {:?} -> {:?}", val, ptr);
643
        let ptr = self.check_store(val, ptr);
644
        unsafe {
645 646 647 648 649 650
            let store = llvm::LLVMRustBuildAtomicStore(
                self.llbuilder,
                val,
                ptr,
                AtomicOrdering::from_generic(order),
            );
651 652
            // LLVM requires the alignment of atomic stores to be at least the size of the type.
            llvm::LLVMSetAlignment(store, size.bytes() as c_uint);
653 654 655
        }
    }

656
    /// Emits a `getelementptr` over `ptr` with the given indices.
    fn gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
                               indices.len() as c_uint, UNNAMED)
        }
    }

    /// Emits an `inbounds` GEP: the result must stay within the allocation.
    fn inbounds_gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildInBoundsGEP(
                self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, UNNAMED)
        }
    }

    /// Emits a struct-field GEP for field `idx` of the pointee of `ptr`.
    fn struct_gep(&mut self, ptr: &'ll Value, idx: u64) -> &'ll Value {
        // Guard against silent truncation in the `as c_uint` cast below.
        assert_eq!(idx as c_uint as u64, idx);
        unsafe {
            llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, UNNAMED)
        }
    }

677
    /* Casts */
    /// Integer truncation to a narrower type.
    fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /// Sign-extension to a wider integer type.
    fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /// Float to unsigned integer conversion.
    fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

696
    fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
697
        unsafe {
698
            llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty,UNNAMED)
699 700 701
        }
    }

702
    /// Unsigned integer to float conversion.
    fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /// Signed integer to float conversion.
    fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /// Float truncation to a narrower float type.
    fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /// Float extension to a wider float type.
    fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /// Pointer to integer conversion.
    fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /// Integer to pointer conversion.
    fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /// Bit-preserving cast between same-sized types.
    fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

744

745
    /// Integer cast that widens or narrows as needed; `is_signed` selects
    /// sign- vs zero-extension on widening.
    fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
        unsafe {
            llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed)
        }
    }

    /// Pointer-to-pointer cast.
    fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, UNNAMED)
        }
    }

    /* Comparisons */
    /// Integer comparison with the given predicate.
    fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        // Translate the backend-agnostic predicate into LLVM's enum.
        let op = llvm::IntPredicate::from_generic(op);
        unsafe {
            llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED)
        }
    }

    /// Floating-point comparison with the given predicate.
    fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe {
            llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, UNNAMED)
        }
    }

    /* Miscellaneous instructions */
    /// Emits a `memcpy` of `size` bytes from `src` to `dst`. NONTEMPORAL is
    /// handled by a load + nontemporal store instead.
    fn memcpy(&mut self, dst: &'ll Value, dst_align: Align,
                  src: &'ll Value, src_align: Align,
                  size: &'ll Value, flags: MemFlags) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memcpy.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        // The intrinsic operates on raw `i8*` pointers.
        let dst = self.pointercast(dst, self.type_i8p());
        let src = self.pointercast(src, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemCpy(self.llbuilder, dst, dst_align.bytes() as c_uint,
                                      src, src_align.bytes() as c_uint, size, is_volatile);
        }
    }

792 793
    /// Emits a `memmove` (overlap-safe copy) of `size` bytes from `src` to
    /// `dst`. NONTEMPORAL is handled by a load + nontemporal store instead.
    fn memmove(&mut self, dst: &'ll Value, dst_align: Align,
                  src: &'ll Value, src_align: Align,
                  size: &'ll Value, flags: MemFlags) {
        if flags.contains(MemFlags::NONTEMPORAL) {
            // HACK(nox): This is inefficient but there is no nontemporal memmove.
            let val = self.load(src, src_align);
            let ptr = self.pointercast(dst, self.type_ptr_to(self.val_ty(val)));
            self.store_with_flags(val, ptr, dst_align, flags);
            return;
        }
        let size = self.intcast(size, self.type_isize(), false);
        let is_volatile = flags.contains(MemFlags::VOLATILE);
        // The intrinsic operates on raw `i8*` pointers.
        let dst = self.pointercast(dst, self.type_i8p());
        let src = self.pointercast(src, self.type_i8p());
        unsafe {
            llvm::LLVMRustBuildMemMove(self.llbuilder, dst, dst_align.bytes() as c_uint,
                                      src, src_align.bytes() as c_uint, size, is_volatile);
        }
    }

812
    /// Fills `size` bytes at `ptr` with `fill_byte` by calling the
    /// pointer-width-specific `llvm.memset.p0i8.*` intrinsic.
    fn memset(
        &mut self,
        ptr: &'ll Value,
        fill_byte: &'ll Value,
        size: &'ll Value,
        align: Align,
        flags: MemFlags,
    ) {
        // The intrinsic name is parameterized by the target pointer width.
        let ptr_width = &self.sess().target.target.target_pointer_width;
        let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
        let llintrinsicfn = self.get_intrinsic(&intrinsic_key);
        let ptr = self.pointercast(ptr, self.type_i8p());
        let align = self.const_u32(align.bytes() as u32);
        let volatile = self.const_bool(flags.contains(MemFlags::VOLATILE));
        self.call(llintrinsicfn, &[ptr, fill_byte, size, align, volatile], None);
    }

829
    fn select(
830
        &mut self, cond: &'ll Value,
831 832 833
        then_val: &'ll Value,
        else_val: &'ll Value,
    ) -> &'ll Value {
834
        unsafe {
835
            llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, UNNAMED)
836 837 838
        }
    }

B
bjorn3 已提交
839 840 841
    #[allow(dead_code)]
    fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe {
842
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED)
B
bjorn3 已提交
843 844 845
        }
    }

846
    fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
847
        unsafe {
848
            llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, UNNAMED)
849 850 851
        }
    }

852
    /// Broadcasts the scalar `elt` into every lane of a `num_elts`-wide vector.
    fn vector_splat(&mut self, num_elts: usize, elt: &'ll Value) -> &'ll Value {
        unsafe {
            // Insert `elt` into lane 0 of an undef vector, then shuffle with an
            // all-zero mask so every result lane reads lane 0.
            let elt_ty = self.cx.val_ty(elt);
            let undef = llvm::LLVMGetUndef(self.type_vector(elt_ty, num_elts as u64));
            let vec = self.insert_element(undef, elt, self.cx.const_i32(0));
            let mask_ty = self.type_vector(self.type_i32(), num_elts as u64);
            self.shuffle_vector(vec, undef, self.const_null(mask_ty))
        }
    }

862
    fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
863
        assert_eq!(idx as c_uint as u64, idx);
864
        unsafe {
865
            llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, UNNAMED)
866 867 868
        }
    }

869
    fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value,
870
                       idx: u64) -> &'ll Value {
871
        assert_eq!(idx as c_uint as u64, idx);
872 873
        unsafe {
            llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
874
                                       UNNAMED)
875 876 877
        }
    }

878
    fn landing_pad(&mut self, ty: &'ll Type, pers_fn: &'ll Value,
879
                       num_clauses: usize) -> &'ll Value {
880
        unsafe {
881
            llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn,
882
                                      num_clauses as c_uint, UNNAMED)
883 884 885
        }
    }

886
    /// Marks a landing pad as a cleanup, so it is entered on every unwind.
    fn set_cleanup(&mut self, landing_pad: &'ll Value) {
        unsafe {
            llvm::LLVMSetCleanup(landing_pad, llvm::True);
        }
    }

892
    fn resume(&mut self, exn: &'ll Value) -> &'ll Value {
893 894 895 896 897
        unsafe {
            llvm::LLVMBuildResume(self.llbuilder, exn)
        }
    }

898
    fn cleanup_pad(&mut self,
899
                       parent: Option<&'ll Value>,
900
                       args: &[&'ll Value]) -> Funclet<'ll> {
901
        let name = const_cstr!("cleanuppad");
902 903 904 905 906 907 908
        let ret = unsafe {
            llvm::LLVMRustBuildCleanupPad(self.llbuilder,
                                          parent,
                                          args.len() as c_uint,
                                          args.as_ptr(),
                                          name.as_ptr())
        };
909
        Funclet::new(ret.expect("LLVM does not have support for cleanuppad"))
910 911
    }

912
    fn cleanup_ret(
913
        &mut self, funclet: &Funclet<'ll>,
914 915
        unwind: Option<&'ll BasicBlock>,
    ) -> &'ll Value {
916
        let ret = unsafe {
917
            llvm::LLVMRustBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind)
918
        };
919
        ret.expect("LLVM does not have support for cleanupret")
920 921
    }

922
    fn catch_pad(&mut self,
923
                     parent: &'ll Value,
924
                     args: &[&'ll Value]) -> Funclet<'ll> {
925
        let name = const_cstr!("catchpad");
926 927 928 929 930
        let ret = unsafe {
            llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
                                        args.len() as c_uint, args.as_ptr(),
                                        name.as_ptr())
        };
931
        Funclet::new(ret.expect("LLVM does not have support for catchpad"))
932 933
    }

934
    fn catch_switch(
935
        &mut self,
936 937
        parent: Option<&'ll Value>,
        unwind: Option<&'ll BasicBlock>,
938
        num_handlers: usize,
939
    ) -> &'ll Value {
940
        let name = const_cstr!("catchswitch");
941 942 943 944 945
        let ret = unsafe {
            llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
                                           num_handlers as c_uint,
                                           name.as_ptr())
        };
946
        ret.expect("LLVM does not have support for catchswitch")
947 948
    }

949
    fn add_handler(&mut self, catch_switch: &'ll Value, handler: &'ll BasicBlock) {
950 951 952 953 954
        unsafe {
            llvm::LLVMRustAddHandler(catch_switch, handler);
        }
    }

955
    /// Sets the EH personality function on the function currently being built.
    fn set_personality_fn(&mut self, personality: &'ll Value) {
        unsafe {
            llvm::LLVMSetPersonalityFn(self.llfn(), personality);
        }
    }

961
    // Atomic Operations
962
    fn atomic_cmpxchg(
963
        &mut self,
964 965 966
        dst: &'ll Value,
        cmp: &'ll Value,
        src: &'ll Value,
967 968
        order: rustc_codegen_ssa::common::AtomicOrdering,
        failure_order: rustc_codegen_ssa::common::AtomicOrdering,
969
        weak: bool,
970
    ) -> &'ll Value {
971
        let weak = if weak { llvm::True } else { llvm::False };
972
        unsafe {
973 974 975 976 977 978 979 980 981
            llvm::LLVMRustBuildAtomicCmpXchg(
                self.llbuilder,
                dst,
                cmp,
                src,
                AtomicOrdering::from_generic(order),
                AtomicOrdering::from_generic(failure_order),
                weak
            )
982 983
        }
    }
984
    fn atomic_rmw(
985
        &mut self,
986
        op: rustc_codegen_ssa::common::AtomicRmwBinOp,
987 988
        dst: &'ll Value,
        src: &'ll Value,
989
        order: rustc_codegen_ssa::common::AtomicOrdering,
990
    ) -> &'ll Value {
991
        unsafe {
992 993 994 995 996
            llvm::LLVMBuildAtomicRMW(
                self.llbuilder,
                AtomicRmwBinOp::from_generic(op),
                dst,
                src,
997
                AtomicOrdering::from_generic(order),
998
                False)
999 1000
        }
    }
J
James Miller 已提交
1001

D
Denis Merigoux 已提交
1002
    fn atomic_fence(
1003
        &mut self,
1004 1005
        order: rustc_codegen_ssa::common::AtomicOrdering,
        scope: rustc_codegen_ssa::common::SynchronizationScope
D
Denis Merigoux 已提交
1006
    ) {
J
James Miller 已提交
1007
        unsafe {
1008 1009 1010
            llvm::LLVMRustBuildAtomicFence(
                self.llbuilder,
                AtomicOrdering::from_generic(order),
1011
                SynchronizationScope::from_generic(scope)
1012
            );
J
James Miller 已提交
1013 1014
        }
    }
1015

1016
    fn set_invariant_load(&mut self, load: &'ll Value) {
1017 1018
        unsafe {
            llvm::LLVMSetMetadata(load, llvm::MD_invariant_load as c_uint,
1019
                                  llvm::LLVMMDNodeInContext(self.cx.llcx, ptr::null(), 0));
1020 1021 1022
        }
    }

1023
    /// Marks the start of the live range of the memory behind `ptr`.
    fn lifetime_start(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.start", ptr, size);
    }

1027
    /// Marks the end of the live range of the memory behind `ptr`.
    fn lifetime_end(&mut self, ptr: &'ll Value, size: Size) {
        self.call_lifetime_intrinsic("llvm.lifetime.end", ptr, size);
    }

1031
    fn call(
1032
        &mut self,
1033 1034 1035 1036
        llfn: &'ll Value,
        args: &[&'ll Value],
        funclet: Option<&Funclet<'ll>>,
    ) -> &'ll Value {
1037 1038 1039 1040 1041 1042

        debug!("Call {:?} with args ({:?})",
               llfn,
               args);

        let args = self.check_call("call", llfn, args);
1043
        let bundle = funclet.map(|funclet| funclet.bundle());
1044
        let bundle = bundle.as_ref().map(|b| &*b.raw);
1045 1046 1047 1048 1049 1050 1051

        unsafe {
            llvm::LLVMRustBuildCall(
                self.llbuilder,
                llfn,
                args.as_ptr() as *const &llvm::Value,
                args.len() as c_uint,
1052
                bundle, UNNAMED
1053 1054 1055 1056
            )
        }
    }

1057
    fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
1058
        unsafe {
1059
            llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, UNNAMED)
1060 1061 1062 1063
        }
    }


1064
    /// Returns the codegen context this builder was created from.
    fn cx(&self) -> &CodegenCx<'ll, 'tcx> {
        self.cx
    }
1067

1068 1069
    /// Removes `bb` from its function. Unsafe: the block must have no remaining uses.
    unsafe fn delete_basic_block(&mut self, bb: &'ll BasicBlock) {
        llvm::LLVMDeleteBasicBlock(bb);
    }

1072
    fn do_not_inline(&mut self, llret: &'ll Value) {
1073 1074
        llvm::Attribute::NoInline.apply_callsite(llvm::AttributePlace::Function, llret);
    }
1075
}
1076

1077 1078
impl StaticBuilderMethods for Builder<'a, 'll, 'tcx> {
    fn get_static(&mut self, def_id: DefId) -> &'ll Value {
B
bjorn3 已提交
1079
        // Forward to the `get_static` method of `CodegenCx`
B
bjorn3 已提交
1080 1081
        self.cx().get_static(def_id)
    }
1082 1083 1084 1085 1086 1087 1088 1089 1090 1091 1092 1093 1094 1095 1096 1097 1098 1099 1100 1101 1102 1103 1104 1105 1106 1107 1108 1109 1110 1111

    fn static_panic_msg(
        &mut self,
        msg: Option<LocalInternedString>,
        filename: LocalInternedString,
        line: Self::Value,
        col: Self::Value,
        kind: &str,
    ) -> Self::Value {
        let align = self.tcx.data_layout.aggregate_align.abi
            .max(self.tcx.data_layout.i32_align.abi)
            .max(self.tcx.data_layout.pointer_align.abi);

        let filename = self.const_str_slice(filename);

        let with_msg_components;
        let without_msg_components;

        let components = if let Some(msg) = msg {
            let msg = self.const_str_slice(msg);
            with_msg_components = [msg, filename, line, col];
            &with_msg_components as &[_]
        } else {
            without_msg_components = [filename, line, col];
            &without_msg_components as &[_]
        };

        let struct_ = self.const_struct(&components, false);
        self.static_addr_of(struct_, align, Some(kind))
    }
B
bjorn3 已提交
1112 1113
}

1114
impl Builder<'a, 'll, 'tcx> {
B
bjorn3 已提交
1115 1116 1117 1118 1119 1120
    pub fn llfn(&self) -> &'ll Value {
        unsafe {
            llvm::LLVMGetBasicBlockParent(self.llbb())
        }
    }

1121 1122 1123 1124 1125 1126 1127 1128 1129 1130 1131 1132 1133 1134 1135 1136 1137 1138 1139 1140
    fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
        }
    }

    pub fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs) }
    }

    pub fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs) }
    }

    pub fn insert_element(
        &mut self, vec: &'ll Value,
        elt: &'ll Value,
        idx: &'ll Value,
    ) -> &'ll Value {
        unsafe {
1141
            llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, UNNAMED)
1142 1143 1144 1145 1146 1147 1148 1149 1150 1151
        }
    }

    pub fn shuffle_vector(
        &mut self,
        v1: &'ll Value,
        v2: &'ll Value,
        mask: &'ll Value,
    ) -> &'ll Value {
        unsafe {
1152
            llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, UNNAMED)
1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 1167 1168 1169 1170 1171 1172 1173 1174 1175 1176 1177 1178 1179 1180 1181 1182 1183 1184 1185 1186 1187 1188 1189 1190 1191 1192 1193 1194 1195 1196 1197 1198 1199 1200 1201 1202 1203 1204 1205 1206 1207 1208 1209 1210 1211 1212 1213 1214 1215 1216 1217 1218 1219 1220 1221 1222 1223 1224 1225 1226 1227 1228 1229 1230
        }
    }

    pub fn vector_reduce_fadd_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFAdd(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmul_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
        unsafe {
            // FIXME: add a non-fast math version once
            // https://bugs.llvm.org/show_bug.cgi?id=36732
            // is fixed.
            let instr = llvm::LLVMRustBuildVectorReduceFMul(self.llbuilder, acc, src);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
    }
    pub fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
    }
    pub fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
    }
    pub fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
    }
    pub fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
    }
    pub fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    pub fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false) }
    }
    pub fn vector_reduce_fmin_fast(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_fmax_fast(&mut self, src: &'ll Value) -> &'ll Value {
        unsafe {
            let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
            llvm::LLVMRustSetHasUnsafeAlgebra(instr);
            instr
        }
    }
    pub fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
    }
    pub fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
        unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
    }

    pub fn add_clause(&mut self, landing_pad: &'ll Value, clause: &'ll Value) {
        unsafe {
            llvm::LLVMAddClause(landing_pad, clause);
        }
    }

    pub fn catch_ret(&mut self, funclet: &Funclet<'ll>, unwind: &'ll BasicBlock) -> &'ll Value {
        let ret = unsafe {
            llvm::LLVMRustBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind)
        };
        ret.expect("LLVM does not have support for catchret")
    }

1231
    fn check_store(&mut self, val: &'ll Value, ptr: &'ll Value) -> &'ll Value {
1232 1233 1234 1235 1236 1237 1238 1239 1240 1241 1242 1243 1244 1245 1246 1247 1248 1249 1250 1251 1252 1253 1254 1255 1256 1257 1258 1259 1260 1261 1262 1263 1264 1265 1266 1267 1268 1269 1270 1271 1272 1273 1274 1275 1276 1277 1278 1279 1280 1281 1282 1283 1284 1285 1286 1287 1288 1289 1290 1291
        let dest_ptr_ty = self.cx.val_ty(ptr);
        let stored_ty = self.cx.val_ty(val);
        let stored_ptr_ty = self.cx.type_ptr_to(stored_ty);

        assert_eq!(self.cx.type_kind(dest_ptr_ty), TypeKind::Pointer);

        if dest_ptr_ty == stored_ptr_ty {
            ptr
        } else {
            debug!("Type mismatch in store. \
                    Expected {:?}, got {:?}; inserting bitcast",
                   dest_ptr_ty, stored_ptr_ty);
            self.bitcast(ptr, stored_ptr_ty)
        }
    }

    fn check_call<'b>(&mut self,
                      typ: &str,
                      llfn: &'ll Value,
                      args: &'b [&'ll Value]) -> Cow<'b, [&'ll Value]> {
        let mut fn_ty = self.cx.val_ty(llfn);
        // Strip off pointers
        while self.cx.type_kind(fn_ty) == TypeKind::Pointer {
            fn_ty = self.cx.element_type(fn_ty);
        }

        assert!(self.cx.type_kind(fn_ty) == TypeKind::Function,
                "builder::{} not passed a function, but {:?}", typ, fn_ty);

        let param_tys = self.cx.func_params_types(fn_ty);

        let all_args_match = param_tys.iter()
            .zip(args.iter().map(|&v| self.val_ty(v)))
            .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);

        if all_args_match {
            return Cow::Borrowed(args);
        }

        let casted_args: Vec<_> = param_tys.into_iter()
            .zip(args.iter())
            .enumerate()
            .map(|(i, (expected_ty, &actual_val))| {
                let actual_ty = self.val_ty(actual_val);
                if expected_ty != actual_ty {
                    debug!("Type mismatch in function call of {:?}. \
                            Expected {:?} for param {}, got {:?}; injecting bitcast",
                           llfn, expected_ty, i, actual_ty);
                    self.bitcast(actual_val, expected_ty)
                } else {
                    actual_val
                }
            })
            .collect();

        Cow::Owned(casted_args)
    }

    pub fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
        unsafe {
1292
            llvm::LLVMBuildVAArg(self.llbuilder, list, ty, UNNAMED)
1293 1294 1295
        }
    }

1296 1297 1298 1299 1300 1301 1302 1303 1304 1305 1306 1307 1308 1309 1310
    fn call_lifetime_intrinsic(&mut self, intrinsic: &str, ptr: &'ll Value, size: Size) {
        if self.cx.sess().opts.optimize == config::OptLevel::No {
            return;
        }

        let size = size.bytes();
        if size == 0 {
            return;
        }

        let lifetime_intrinsic = self.cx.get_intrinsic(intrinsic);

        let ptr = self.pointercast(ptr, self.cx.type_i8p());
        self.call(lifetime_intrinsic, &[self.cx.const_u64(size), ptr], None);
    }
1311 1312 1313 1314

    fn phi(&mut self, ty: &'ll Type, vals: &[&'ll Value], bbs: &[&'ll BasicBlock]) -> &'ll Value {
        assert_eq!(vals.len(), bbs.len());
        let phi = unsafe {
1315
            llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED)
1316 1317 1318 1319 1320 1321 1322 1323 1324 1325 1326 1327 1328 1329
        };
        unsafe {
            llvm::LLVMAddIncoming(phi, vals.as_ptr(),
                                  bbs.as_ptr(),
                                  vals.len() as c_uint);
            phi
        }
    }

    fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
        unsafe {
            llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
        }
    }
1330
}