This intrinsic is lowered to the ``val``.
+'``llvm.assume``' Intrinsic
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Syntax:
+"""""""
+
+::
+
+ declare void @llvm.assume(i1 %cond)
+
+Overview:
+"""""""""
+
+The ``llvm.assume`` intrinsic allows the optimizer to assume that the provided
+condition is true. This information can then be used in simplifying other parts
+of the code.
+
+Arguments:
+""""""""""
+
+The condition which the optimizer may assume is always true.
+
+Semantics:
+""""""""""
+
+The intrinsic allows the optimizer to assume that the provided condition is
+always true whenever the control flow reaches the intrinsic call. No code is
+generated for this intrinsic, and instructions that contribute only to the
+provided condition are not used for code generation. If the condition is
+violated during execution, the behavior is undefined.
+
+Note that the optimizer might limit the transformations performed on values
+used by the ``llvm.assume`` intrinsic in order to preserve the instructions
+only used to form the intrinsic's input argument. This might prove undesirable
+if the extra information provided by the ``llvm.assume`` intrinsic does not
+cause sufficient overall improvement in code quality. For this reason,
+``llvm.assume`` should not be used to document basic mathematical invariants
+that the optimizer can otherwise deduce or facts that are of little use to the
+optimizer.
+
'``llvm.donothing``' Intrinsic
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def int_readcyclecounter : Intrinsic<[llvm_i64_ty]>;
+// The assume intrinsic is marked as arbitrarily writing so that proper
+// control dependencies will be maintained.
+def int_assume : Intrinsic<[], [llvm_i1_ty], []>;
+
// Stack Protector Intrinsic - The stackprotector intrinsic writes the stack
// guard to the correct place on the stack frame.
def int_stackprotector : Intrinsic<[], [llvm_ptr_ty, llvm_ptrptr_ty], []>;
const Location &Loc) override;
ModRefResult getModRefInfo(ImmutableCallSite CS1,
- ImmutableCallSite CS2) override {
- // The AliasAnalysis base class has some smarts, lets use them.
- return AliasAnalysis::getModRefInfo(CS1, CS2);
- }
+ ImmutableCallSite CS2) override;
/// pointsToConstantMemory - Chase pointers until we find a (constant
/// global) or not.
return Loc;
}
+static bool isAssumeIntrinsic(ImmutableCallSite CS) {
+ const IntrinsicInst *II = dyn_cast<IntrinsicInst>(CS.getInstruction());
+  return II && II->getIntrinsicID() == Intrinsic::assume;
+}
+
/// getModRefInfo - Check to see if the specified callsite can clobber the
/// specified memory object. Since we only look at local properties of this
/// function, we really can't say much about this query. We do, however, use
return NoModRef;
}
+ // While the assume intrinsic is marked as arbitrarily writing so that
+ // proper control dependencies will be maintained, it never aliases any
+ // particular memory location.
+ if (isAssumeIntrinsic(CS))
+ return NoModRef;
+
// The AliasAnalysis base class has some smarts, lets use them.
return AliasAnalysis::getModRefInfo(CS, Loc);
}
+AliasAnalysis::ModRefResult
+BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS1,
+ ImmutableCallSite CS2) {
+ // While the assume intrinsic is marked as arbitrarily writing so that
+ // proper control dependencies will be maintained, it never aliases any
+ // particular memory location.
+ if (isAssumeIntrinsic(CS1) || isAssumeIntrinsic(CS2))
+ return NoModRef;
+
+  // The AliasAnalysis base class has some smarts, let's use them.
+ return AliasAnalysis::getModRefInfo(CS1, CS2);
+}
+
/// aliasGEP - Provide a bunch of ad-hoc rules to disambiguate a GEP instruction
/// against another pointer. We know that V1 is a GEP, but we don't know
/// anything about V2. UnderlyingV1 is GetUnderlyingObject(GEP1, DL),
// FIXME: This is wrong for libc intrinsics.
return TCC_Basic;
+ case Intrinsic::assume:
case Intrinsic::dbg_declare:
case Intrinsic::dbg_value:
case Intrinsic::invariant_start:
case Intrinsic::pow: ISD = ISD::FPOW; break;
case Intrinsic::fma: ISD = ISD::FMA; break;
case Intrinsic::fmuladd: ISD = ISD::FMA; break;
+ // FIXME: We should return 0 whenever getIntrinsicCost == TCC_Free.
case Intrinsic::lifetime_start:
case Intrinsic::lifetime_end:
return 0;
CI->replaceAllUsesWith(CI->getOperand(0));
break;
+ case Intrinsic::assume:
case Intrinsic::var_annotation:
- break; // Strip out annotate intrinsic
-
+ break; // Strip out these intrinsics
+
case Intrinsic::memcpy: {
Type *IntPtr = DL.getIntPtrType(Context);
Value *Size = Builder.CreateIntCast(CI->getArgOperand(2), IntPtr,
// Drop the intrinsic, but forward the value
setValue(&I, getValue(I.getOperand(0)));
return nullptr;
+ case Intrinsic::assume:
case Intrinsic::var_annotation:
- // Discard annotate attributes
+ // Discard annotate attributes and assumptions
return nullptr;
case Intrinsic::init_trampoline: {
if (II->getIntrinsicID() == Intrinsic::lifetime_start ||
II->getIntrinsicID() == Intrinsic::lifetime_end)
return isa<UndefValue>(II->getArgOperand(1));
+
+ // Assumptions are dead if their condition is trivially true.
+ if (II->getIntrinsicID() == Intrinsic::assume) {
+ if (ConstantInt *Cond = dyn_cast<ConstantInt>(II->getArgOperand(0)))
+ return !Cond->isZero();
+
+ return false;
+ }
}
if (isAllocLikeFn(I, TLI)) return true;
// instructions into LLVM unreachable insts. The instruction combining pass
// canonicalizes unreachable insts into stores to null or undef.
for (BasicBlock::iterator BBI = BB->begin(), E = BB->end(); BBI != E;++BBI){
+ // Assumptions that are known to be false are equivalent to unreachable.
+    // Also, if the condition is undef, we are free to make the choice most
+    // beneficial to the optimizer, so we treat it as unreachable as well.
+ if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(BBI))
+ if (II->getIntrinsicID() == Intrinsic::assume) {
+ bool MakeUnreachable = false;
+ if (isa<UndefValue>(II->getArgOperand(0)))
+ MakeUnreachable = true;
+ else if (ConstantInt *Cond =
+ dyn_cast<ConstantInt>(II->getArgOperand(0)))
+ MakeUnreachable = Cond->isZero();
+
+ if (MakeUnreachable) {
+ // Don't insert a call to llvm.trap right before the unreachable.
+ changeToUnreachable(BBI, false);
+ Changed = true;
+ break;
+ }
+ }
+
if (CallInst *CI = dyn_cast<CallInst>(BBI)) {
if (CI->doesNotReturn()) {
// If we found a call to a no-return function, insert an unreachable
--- /dev/null
+; RUN: opt < %s -basicaa -aa-eval -print-all-alias-modref-info -disable-output 2>&1 | FileCheck %s
+target datalayout = "e-p:32:32:32-i1:8:32-i8:8:32-i16:16:32-i32:32:32-i64:32:32-f32:32:32-f64:32:32-v64:32:64-v128:32:128-a0:0:32-n32"
+
+declare void @llvm.memcpy.p0i8.p0i8.i64(i8* nocapture, i8* nocapture, i64, i32, i1) #0
+declare void @llvm.assume(i1) #0
+
+define void @test1(i8* %P, i8* %Q) nounwind ssp {
+ tail call void @llvm.assume(i1 true)
+ tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %P, i8* %Q, i64 12, i32 1, i1 false)
+ ret void
+
+; CHECK-LABEL: Function: test1:
+
+; CHECK: MayAlias: i8* %P, i8* %Q
+; CHECK: NoModRef: Ptr: i8* %P <-> tail call void @llvm.assume(i1 true)
+; CHECK: NoModRef: Ptr: i8* %Q <-> tail call void @llvm.assume(i1 true)
+; CHECK: Both ModRef: Ptr: i8* %P <-> tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %P, i8* %Q, i64 12, i32 1, i1 false)
+; CHECK: Both ModRef: Ptr: i8* %Q <-> tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %P, i8* %Q, i64 12, i32 1, i1 false)
+; CHECK: NoModRef: tail call void @llvm.assume(i1 true) <-> tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %P, i8* %Q, i64 12, i32 1, i1 false)
+; CHECK: NoModRef: tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %P, i8* %Q, i64 12, i32 1, i1 false) <-> tail call void @llvm.assume(i1 true)
+}
+
+attributes #0 = { nounwind }
--- /dev/null
+; RUN: llc < %s
+
+define void @main() {
+ call void @llvm.assume(i1 1)
+ ret void
+}
+
+declare void @llvm.assume(i1) nounwind
--- /dev/null
+; RUN: opt -instsimplify -S < %s | FileCheck %s
+
+define void @test1() {
+ call void @llvm.assume(i1 1)
+ ret void
+
+; CHECK-LABEL: @test1
+; CHECK-NOT: llvm.assume
+; CHECK: ret void
+}
+
+declare void @llvm.assume(i1) nounwind
--- /dev/null
+; RUN: opt -simplifycfg -S < %s | FileCheck %s
+
+define void @test1() {
+ call void @llvm.assume(i1 0)
+ ret void
+
+; CHECK-LABEL: @test1
+; CHECK-NOT: llvm.assume
+; CHECK: unreachable
+}
+
+define void @test2() {
+ call void @llvm.assume(i1 undef)
+ ret void
+
+; CHECK-LABEL: @test2
+; CHECK-NOT: llvm.assume
+; CHECK: unreachable
+}
+
+declare void @llvm.assume(i1) nounwind