if (Value *SimplifiedFortifiedCI = FortifiedSimplifier.optimizeCall(CI)) {
  // Try to further simplify the result.
  CallInst *SimplifiedCI = dyn_cast<CallInst>(SimplifiedFortifiedCI);
-  if (SimplifiedCI && SimplifiedCI->getCalledFunction())
-    if (Value *V = optimizeStringMemoryLibCall(SimplifiedCI, Builder)) {
+  if (SimplifiedCI && SimplifiedCI->getCalledFunction()) {
+    // Use an IR Builder from SimplifiedCI if available instead of CI
+    // to guarantee we reach all uses we might replace later on.
+    IRBuilder<> TmpBuilder(SimplifiedCI);
+    if (Value *V = optimizeStringMemoryLibCall(SimplifiedCI, TmpBuilder)) {
      // If we were able to further simplify, remove the now redundant call.
      SimplifiedCI->replaceAllUsesWith(V);
      SimplifiedCI->eraseFromParent();
      return V;
    }
+  }
  return SimplifiedFortifiedCI;
}
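
The new TmpBuilder relies on a basic IRBuilder property: a builder constructed from an existing instruction inserts everything it creates immediately before that instruction, so a replacement value built at SimplifiedCI is defined no later than the call it replaces and dominates every use that replaceAllUsesWith rewrites. Below is a standalone sketch of that behavior against the LLVM C++ API; it is not part of the patch, and the function @f and the value names are invented purely for illustration.

// Standalone sketch (not part of the patch): an IRBuilder constructed from
// an existing instruction inserts new instructions immediately before it,
// so a replacement value built there dominates every existing use of that
// instruction and replaceAllUsesWith leaves the function valid.
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Verifier.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

int main() {
  LLVMContext Ctx;
  Module M("builder-position-sketch", Ctx);
  IRBuilder<> B(Ctx);

  // Build: define i32 @f(i32 %x) { %old = add %x, 1; %use = mul %old, 2; ret %use }
  Function *F = Function::Create(
      FunctionType::get(B.getInt32Ty(), {B.getInt32Ty()}, false),
      Function::ExternalLinkage, "f", &M);
  BasicBlock *BB = BasicBlock::Create(Ctx, "entry", F);
  B.SetInsertPoint(BB);
  Value *X = &*F->arg_begin();
  Instruction *Old = cast<Instruction>(B.CreateAdd(X, B.getInt32(1), "old"));
  Value *Use = B.CreateMul(Old, B.getInt32(2), "use");
  B.CreateRet(Use);

  // Position a fresh builder at %old (mirroring TmpBuilder(SimplifiedCI)),
  // create the replacement there, then RAUW and erase the old instruction.
  IRBuilder<> TmpB(Old);
  Value *Repl = TmpB.CreateAdd(X, TmpB.getInt32(1), "repl"); // inserted before %old
  Old->replaceAllUsesWith(Repl);
  Old->eraseFromParent();

  // %repl now dominates its use in %use, so the module verifies cleanly.
  if (verifyModule(M, &errs()))
    return 1;
  M.print(outs(), nullptr);
  return 0;
}

Because the replacement is created at the old instruction's position, verifyModule reports no use-before-def after the RAUW; a builder positioned later in the block would not give that guarantee. The regression test added with the fix follows; it exercises the __memset_chk path and checks that the rewritten IR is valid: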
ret i8* %ret
}
+; Test that RAUW in SimplifyLibCalls for __memset_chk generates valid IR
+define i32 @test_rauw(i8* %a, i8* %b, i8** %c) {
+; CHECK-LABEL: test_rauw
+entry:
+ %call49 = call i64 @strlen(i8* %a)
+ %add180 = add i64 %call49, 1
+ %yo107 = call i64 @llvm.objectsize.i64.p0i8(i8* %b, i1 false)
+ %call50 = call i8* @__memmove_chk(i8* %b, i8* %a, i64 %add180, i64 %yo107)
+; CHECK: %strlen = call i64 @strlen(i8* %b)
+; CHECK-NEXT: %strchr2 = getelementptr i8, i8* %b, i64 %strlen
+ %call51i = call i8* @strrchr(i8* %b, i32 0)
+ %d = load i8*, i8** %c, align 8
+ %sub182 = ptrtoint i8* %d to i64
+ %sub183 = ptrtoint i8* %b to i64
+ %sub184 = sub i64 %sub182, %sub183
+ %add52.i.i = add nsw i64 %sub184, 1
+; CHECK: call void @llvm.memset.p0i8.i64(i8* %strchr2
+ %call185 = call i8* @__memset_chk(i8* %call51i, i32 0, i64 %add52.i.i, i64 -1)
+ ret i32 4
+}
+
+declare i8* @__memmove_chk(i8*, i8*, i64, i64)
+declare i8* @strrchr(i8*, i32)
+declare i64 @strlen(i8* nocapture)
+declare i64 @llvm.objectsize.i64.p0i8(i8*, i1)
+
declare i8* @__memset_chk(i8*, i32, i64, i64)
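
This excerpt omits the test file's header. A test of this shape is normally driven by a lit RUN line at the top of the file and checked with FileCheck; a sketch of such a line, with the exact pass spelling assumed rather than taken from this hunk:

; RUN: opt < %s -instcombine -S | FileCheck %s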