// If we get here we could have other crazy uses that are transitively
// loaded.
assert((isa<PHINode>(GlobalUser) || isa<SelectInst>(GlobalUser) ||
- isa<ConstantExpr>(GlobalUser)) && "Only expect load and stores!");
+ isa<ConstantExpr>(GlobalUser) || isa<CmpInst>(GlobalUser)) &&
+ "Only expect load and stores!");
}
}
GV->getInitializer()->isNullValue()) {
if (Constant *SOVC = dyn_cast<Constant>(StoredOnceVal)) {
if (GV->getInitializer()->getType() != SOVC->getType())
- SOVC =
- ConstantExpr::getBitCast(SOVC, GV->getInitializer()->getType());
+ SOVC = ConstantExpr::getBitCast(SOVC, GV->getInitializer()->getType());
// Optimize away any trapping uses of the loaded value.
if (OptimizeAwayTrappingUsesOfLoads(GV, SOVC))
ret void
}
declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i32, i1) nounwind
+
+
+; PR9856
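+; @g_52 is stored to exactly once (the store of @g_90 below) and is also used
+; directly by an icmp; GlobalOpt must tolerate that compare user of the global
+; rather than asserting.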
+@g_52 = internal global i32** null, align 8
+@g_90 = external global i32*, align 8
+
+define void @icmp_user_of_stored_once() nounwind ssp {
+entry:
+ %tmp4 = load i32*** @g_52, align 8
+ store i32** @g_90, i32*** @g_52
+ %cmp17 = icmp ne i32*** undef, @g_52
+ ret void
+}
+
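Judging from the fragment above, the widened assertion sits in a walk over the
users of a stored-once global: load and store users are handled explicitly, and
any other user is expected to be a PHI, a select, a constant expression or,
after this change, a compare. The plain C++ below is a minimal, LLVM-free
sketch of that classification (every name in it is illustrative, none of it is
LLVM's API); it shows why the icmp user exercised by @icmp_user_of_stored_once
has to be on the allowed list.

#include <cassert>
#include <vector>

// Illustrative stand-in for the instruction kinds that can use the global.
// None of these names are LLVM's; this is a plain C++ sketch of the check.
enum class UserKind { Load, Store, Phi, Select, ConstantExpr, Cmp };

// Mimics the widened assertion: loads and stores are the users the pass deals
// with explicitly; any other user must be one of the tolerated kinds, which
// now include compares (icmp/fcmp) per PR9856.
static void classifyUserOfStoredOnceGlobal(UserKind K) {
  if (K == UserKind::Load || K == UserKind::Store)
    return; // handled explicitly by the pass
  assert((K == UserKind::Phi || K == UserKind::Select ||
          K == UserKind::ConstantExpr || K == UserKind::Cmp) &&
         "Only expect loads and stores!");
}

int main() {
  // The users @g_52 has in the test above: one load, one store, one icmp.
  // Before this change the Cmp case would have failed the assertion.
  std::vector<UserKind> UsersOfG52 = {UserKind::Load, UserKind::Store,
                                      UserKind::Cmp};
  for (UserKind K : UsersOfG52)
    classifyUserOfStoredOnceGlobal(K);
  return 0;
}

A compare only reads the pointer value and cannot fault the way a load through
a null pointer would, which is presumably why it is enough to tolerate it here
rather than treat it as a trapping use.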