#include <algorithm>
using namespace llvm;
-static cl::opt<bool> VerifyFastRegalloc("verify-fast-regalloc", cl::Hidden,
- cl::desc("Verify machine code before fast regalloc"));
-
STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");
LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
unsigned VirtReg, unsigned Hint);
void spillAll(MachineInstr *MI);
- bool setPhysReg(MachineOperand &MO, unsigned PhysReg);
+ bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
};
char RAFast::ID = 0;
}
void RAFast::addKillFlag(const LiveReg &LR) {
if (!LR.LastUse) return;
MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
- if (MO.isDef())
- MO.setIsDead();
- else if (!LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum))
- MO.setIsKill();
+ if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
+ if (MO.getReg() == LR.PhysReg)
+ MO.setIsKill();
+ else
+ LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
+ }
}
/// killVirtReg - Mark virtreg as no longer available.
/// spillAll - Spill all dirty virtregs without killing them, inserting the
/// spill code before MI. No-op when nothing is live.
void RAFast::spillAll(MachineInstr *MI) {
  if (LiveVirtRegs.empty()) return;
  // Flag bulk spilling so spillVirtReg can batch-process; map is cleared
  // wholesale below instead of erasing entries one at a time.
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
+ if (UsedInInstr.test(PhysReg))
+ return spillImpossible;
switch (unsigned VirtReg = PhysRegState[PhysReg]) {
case regDisabled:
break;
unsigned Cost = 0;
for (const unsigned *AS = TRI->getAliasSet(PhysReg);
unsigned Alias = *AS; ++AS) {
+ if (UsedInInstr.test(Alias))
+ return spillImpossible;
switch (unsigned VirtReg = PhysRegState[Alias]) {
case regDisabled:
break;
// Ignore invalid hints.
if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
- !RC->contains(Hint) || UsedInInstr.test(Hint) ||
- !Allocatable.test(Hint)))
+ !RC->contains(Hint) || !Allocatable.test(Hint)))
Hint = 0;
// Take hint when possible.
if (Hint) {
- assert(RC->contains(Hint) && !UsedInInstr.test(Hint) &&
- Allocatable.test(Hint) && "Invalid hint should have been cleared");
switch(calcSpillCost(Hint)) {
default:
definePhysReg(MI, Hint, regFree);
unsigned BestReg = 0, BestCost = spillImpossible;
for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
- if (UsedInInstr.test(*I)) continue;
unsigned Cost = calcSpillCost(*I);
// Cost is 0 when all aliases are already disabled.
if (Cost == 0)
bool New;
tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
LiveReg &LR = LRI->second;
+ bool PartialRedef = MI->getOperand(OpNum).getSubReg();
if (New) {
// If there is no hint, peek at the only use of this register.
if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
Hint = DstReg;
}
allocVirtReg(MI, *LRI, Hint);
- } else
- addKillFlag(LR); // Kill before redefine.
+ // If this is only a partial redefinition, we must reload the other parts.
+ if (PartialRedef && MI->readsVirtualRegister(VirtReg)) {
+ const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
+ int FI = getStackSpaceFor(VirtReg, RC);
+ DEBUG(dbgs() << "Reloading for partial redef: %reg" << VirtReg << "\n");
+ TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FI, RC, TRI);
+ ++NumLoads;
+ }
+ } else if (LR.LastUse && !PartialRedef) {
+ // Redefining a live register - kill at the last use, unless it is this
+ // instruction defining VirtReg multiple times.
+ if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())
+ addKillFlag(LR);
+ }
assert(LR.PhysReg && "Register not assigned");
LR.LastUse = MI;
LR.LastOpNum = OpNum;
return LRI;
}
-// setPhysReg - Change MO the refer the PhysReg, considering subregs.
-// This may invalidate MO if it is necessary to add implicit kills for a
-// superregister.
-// Return tru if MO kills its register.
-bool RAFast::setPhysReg(MachineOperand &MO, unsigned PhysReg) {
+// setPhysReg - Change operand OpNum in MI the refer the PhysReg, considering
+// subregs. This may invalidate any operand pointers.
+// Return true if the operand kills its register.
+bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
+ MachineOperand &MO = MI->getOperand(OpNum);
if (!MO.getSubReg()) {
MO.setReg(PhysReg);
return MO.isKill() || MO.isDead();
// Handle subregister index.
MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
MO.setSubReg(0);
- if (MO.isUse()) {
- if (MO.isKill()) {
- MO.getParent()->addRegisterKilled(PhysReg, TRI, true);
- return true;
- }
- return false;
- }
- // A subregister def implicitly defines the whole physreg.
- if (MO.isDead()) {
- MO.getParent()->addRegisterDead(PhysReg, TRI, true);
+
+ // A kill flag implies killing the full register. Add corresponding super
+ // register kill.
+ if (MO.isKill()) {
+ MI->addRegisterKilled(PhysReg, TRI, true);
return true;
}
- MO.getParent()->addRegisterDefined(PhysReg, TRI);
- return false;
+ return MO.isDead();
}
void RAFast::AllocateBasicBlock() {
E = MBB->livein_end(); I != E; ++I)
definePhysReg(MII, *I, regReserved);
- SmallVector<unsigned, 8> PhysECs;
+ SmallVector<unsigned, 8> PhysECs, VirtDead;
SmallVector<MachineInstr*, 32> Coalesced;
// Otherwise, sequentially allocate each instruction in the MBB.
if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg);
if (LRI != LiveVirtRegs.end())
- setPhysReg(MO, LRI->second.PhysReg);
+ setPhysReg(MI, i, LRI->second.PhysReg);
else
MO.setReg(0); // We can't allocate a physreg for a DebugValue, sorry!
}
LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
unsigned PhysReg = LRI->second.PhysReg;
CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
- if (setPhysReg(MO, PhysReg))
+ if (setPhysReg(MI, i, PhysReg))
killVirtReg(LRI);
} else if (MO.isEarlyClobber()) {
+ // Note: defineVirtReg may invalidate MO.
LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
unsigned PhysReg = LRI->second.PhysReg;
- setPhysReg(MO, PhysReg);
+ setPhysReg(MI, i, PhysReg);
PhysECs.push_back(PhysReg);
}
}
}
LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
unsigned PhysReg = LRI->second.PhysReg;
- if (setPhysReg(MO, PhysReg)) {
- killVirtReg(LRI);
+ if (setPhysReg(MI, i, PhysReg)) {
+ VirtDead.push_back(Reg);
CopyDst = 0; // cancel coalescing;
} else
CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
}
+ // Kill dead defs after the scan to ensure that multiple defs of the same
+ // register are allocated identically. We didn't need to do this for uses
+ // because we are crerating our own kill flags, and they are always at the
+ // last use.
+ for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
+ killVirtReg(VirtDead[i]);
+ VirtDead.clear();
+
MRI->addPhysRegsUsed(UsedInInstr);
if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
<< "********** Function: "
<< ((Value*)Fn.getFunction())->getName() << '\n');
- if (VerifyFastRegalloc)
- Fn.verify(this, true);
MF = &Fn;
MRI = &MF->getRegInfo();
TM = &Fn.getTarget();