// If -tailcallopt is specified, make fastcc functions tail-callable.
const Function *CallerF = DAG.getMachineFunction().getFunction();
- if (PerformTailCallOpt &&
- CalleeCC == CallingConv::Fast &&
- CallerF->getCallingConv() == CalleeCC)
- return true;
+ if (PerformTailCallOpt) {
+ if (CalleeCC == CallingConv::Fast &&
+ CallerF->getCallingConv() == CalleeCC)
+ return true;
+ return false;
+ }
+
+ // Do not tail call optimize vararg calls for now.
+ if (isVarArg)
+ return false;
+
+ // Don't tail call optimize recursive calls.
+ GlobalAddressSDNode *G = dyn_cast<GlobalAddressSDNode>(Callee);
+ const Function *CalleeF = G ? cast<Function>(G->getGlobal()) : 0;
+ if (CallerF == CalleeF)
+ return false;
+ // If it's an indirect call, conservatively return false if the caller's
+ // address is taken.
+ if (!isa<ExternalSymbolSDNode>(Callee) && CallerF->hasAddressTaken())
+ return false;
// Look for obvious safe cases to perform tail call optimization.
// If the callee takes no arguments then go on to check the results of the call.
return true;
// If the return types match, then it's safe.
- GlobalAddressSDNode *G = dyn_cast<GlobalAddressSDNode>(Callee);
if (!G) return false; // FIXME: common external symbols?
- Function *CalleeF = cast<Function>(G->getGlobal());
const Type *CalleeRetTy = CalleeF->getReturnType();
return CallerRetTy == CalleeRetTy;
}
tail call void %x() nounwind
ret void
}
+
+define i32 @t6(i32 %x) nounwind ssp {
+entry:
+; 32: t6:
+; 32: call {{_?}}t6
+; 32: call {{_?}}bar
+
+; 64: t6:
+; 64: callq {{_?}}t6
+; 64: jmp {{_?}}bar
+ %0 = icmp slt i32 %x, 10
+ br i1 %0, label %bb, label %bb1
+
+bb:
+ %1 = add nsw i32 %x, -1
+ %2 = tail call i32 @t6(i32 %1) nounwind ssp
+ ret i32 %2
+
+bb1:
+ %3 = tail call i32 @bar(i32 %x) nounwind
+ ret i32 %3
+}
+
+declare i32 @bar(i32)