STATISTIC(numPeep , "Number of identity moves eliminated after coalescing");
STATISTIC(numFolded , "Number of loads/stores folded into instructions");
STATISTIC(numAborts , "Number of times interval joining aborted");
-static cl::opt<bool> ReduceJoinPhys("reduce-joining-phy-regs", cl::Hidden);
namespace {
RegisterPass<LiveIntervals> X("liveintervals", "Live Interval Analysis");
RemoveMachineInstrFromMaps(mii);
mii = mbbi->erase(mii);
++numPeep;
- }
- else {
+ } else {
for (unsigned i = 0, e = mii->getNumOperands(); i != e; ++i) {
const MachineOperand &mop = mii->getOperand(i);
if (mop.isRegister() && mop.getReg() &&
mii->getOperand(i).setReg(reg);
LiveInterval &RegInt = getInterval(reg);
- RegInt.weight +=
- (mop.isUse() + mop.isDef()) * pow(10.0F, (int)loopDepth);
+ float w = (mop.isUse()+mop.isDef()) * powf(10.0F, (float)loopDepth);
+ // If the definition instruction is re-materializable, its spill
+ // weight is half of what it would have been normally unless it's
+ // a load from fixed stack slot.
+ int Dummy;
+ if (RegInt.remat && !tii_->isLoadFromStackSlot(RegInt.remat, Dummy))
+ w /= 2;
+ RegInt.weight += w;
}
}
++mii;
for (unsigned i = 0; i != MI->getNumOperands(); ++i) {
MachineOperand& mop = MI->getOperand(i);
if (mop.isRegister() && mop.getReg() == li.reg) {
- if (MachineInstr *fmi = mri_->foldMemoryOperand(MI, i, slot)) {
+ MachineInstr *fmi = li.remat ? NULL
+ : mri_->foldMemoryOperand(MI, i, slot);
+ if (fmi) {
// Attempt to fold the memory reference into the instruction. If we
// can do this, we don't need to insert spill code.
if (lv_)
// create a new register for this spill
vrm.grow();
+ if (li.remat)
+ vrm.setVirtIsReMaterialized(NewVReg, li.remat);
vrm.assignVirt2StackSlot(NewVReg, slot);
LiveInterval &nI = getOrCreateInterval(NewVReg);
+ nI.remat = li.remat;
assert(nI.empty());
// the spill weight is now infinity as it
// done once for the vreg. We use an empty interval to detect the first
// time we see a vreg.
if (interval.empty()) {
+    // Remember if the definition can be rematerialized. All loads from fixed
+ // stack slots are re-materializable.
+ int FrameIdx = 0;
+ if (vi.DefInst &&
+ (tii_->isReMaterializable(vi.DefInst->getOpcode()) ||
+ (tii_->isLoadFromStackSlot(vi.DefInst, FrameIdx) &&
+ mf_->getFrameInfo()->isFixedObjectIndex(FrameIdx))))
+ interval.remat = vi.DefInst;
+
// Get the Idx of the defining instructions.
unsigned defIndex = getDefIndex(MIIdx);
}
} else {
+ // Can no longer safely assume definition is rematerializable.
+ interval.remat = NULL;
+
// If this is the second time we see a virtual register definition, it
// must be due to phi elimination or two addr elimination. If this is
// the result of two address elimination, then the vreg is one of the
// If the source instruction was killing the source register before the
// merge, unset the isKill marker given the live range has been extended.
- MachineOperand *MOK = ValLREndInst->findRegisterUseOperand(IntB.reg, true);
- if (MOK)
- MOK->unsetIsKill();
+ int UIdx = ValLREndInst->findRegisterUseOperand(IntB.reg, true);
+ if (UIdx != -1)
+ ValLREndInst->getOperand(UIdx).unsetIsKill();
// Finally, delete the copy instruction.
RemoveMachineInstrFromMaps(CopyMI);
// Check if it is necessary to propagate "isDead" property before intervals
// are joined.
+ MachineBasicBlock *CopyBB = CopyMI->getParent();
MachineOperand *mopd = CopyMI->findRegisterDefOperand(DstReg);
bool isDead = mopd->isDead();
bool isShorten = false;
- unsigned SrcStart = 0;
- unsigned SrcEnd = 0;
+ unsigned SrcStart = 0, RemoveStart = 0;
+ unsigned SrcEnd = 0, RemoveEnd = 0;
if (isDead) {
unsigned CopyIdx = getInstructionIndex(CopyMI);
LiveInterval::iterator SrcLR =
SrcInt.FindLiveRangeContaining(getUseIndex(CopyIdx));
- SrcStart = SrcLR->start;
- SrcEnd = SrcLR->end;
+ RemoveStart = SrcStart = SrcLR->start;
+ RemoveEnd = SrcEnd = SrcLR->end;
// The instruction which defines the src is only truly dead if there are
// no intermediate uses and there isn't a use beyond the copy.
// FIXME: find the last use, mark is kill and shorten the live range.
- if (SrcEnd > getDefIndex(CopyIdx))
+ if (SrcEnd > getDefIndex(CopyIdx)) {
isDead = false;
- else {
+ } else {
MachineOperand *MOU;
- MachineInstr *LastUse =
- lastRegisterUse(repSrcReg, SrcStart, CopyIdx, MOU);
+ MachineInstr *LastUse= lastRegisterUse(repSrcReg, SrcStart, CopyIdx, MOU);
if (LastUse) {
// Shorten the liveinterval to the end of last use.
MOU->setIsKill();
isDead = false;
isShorten = true;
- SrcEnd = getUseIndex(getInstructionIndex(LastUse));
+ RemoveStart = getDefIndex(getInstructionIndex(LastUse));
+ RemoveEnd = SrcEnd;
+ } else {
+ MachineInstr *SrcMI = getInstructionFromIndex(SrcStart);
+ if (SrcMI) {
+ MachineOperand *mops = findDefOperand(SrcMI, repSrcReg);
+ if (mops)
+ // A dead def should have a single cycle interval.
+ ++RemoveStart;
+ }
}
}
- if (isDead)
- isShorten = true;
}
// We need to be careful about coalescing a source physical register with a
// virtual register. Once the coalescing is done, it cannot be broken and
// these are not spillable! If the destination interval uses are far away,
// think twice about coalescing them!
- if (ReduceJoinPhys && !isDead &&
- MRegisterInfo::isPhysicalRegister(repSrcReg)) {
+ if (!mopd->isDead() && MRegisterInfo::isPhysicalRegister(repSrcReg)) {
// Small function. No need to worry!
- if (r2iMap_.size() <= allocatableRegs_.size() * 2)
+ unsigned Threshold = allocatableRegs_.count() * 2;
+ if (r2iMap_.size() <= Threshold)
goto TryJoin;
LiveVariables::VarInfo& dvi = lv_->getVarInfo(repDstReg);
// Is the value used in the current BB or any immediate successroe BB?
- MachineBasicBlock *SrcBB = CopyMI->getParent();
- if (!dvi.UsedBlocks[SrcBB->getNumber()]) {
- for (MachineBasicBlock::succ_iterator SI = SrcBB->succ_begin(),
- SE = SrcBB->succ_end(); SI != SE; ++SI) {
- MachineBasicBlock *SuccMBB = *SI;
- if (dvi.UsedBlocks[SuccMBB->getNumber()])
+ if (dvi.UsedBlocks[CopyBB->getNumber()])
+ goto TryJoin;
+ for (MachineBasicBlock::succ_iterator SI = CopyBB->succ_begin(),
+ SE = CopyBB->succ_end(); SI != SE; ++SI) {
+ MachineBasicBlock *SuccMBB = *SI;
+ if (dvi.UsedBlocks[SuccMBB->getNumber()])
goto TryJoin;
- }
}
// Ok, no use in this BB and no use in immediate successor BB's. Be really
// careful now!
// It's only used in one BB, forget about it!
- if (dvi.UsedBlocks.count() <= 1) {
+ if (dvi.UsedBlocks.count() < 2) {
++numAborts;
return false;
}
- // Examine all the blocks where the value is used. If any is in the same
- // loop, then it's ok. Or if the current BB is a preheader of any of the
- // loop that uses this value, that's ok as well.
- const LoopInfo &LI = getAnalysis<LoopInfo>();
- const Loop *L = LI.getLoopFor(SrcBB->getBasicBlock());
+ // Determine whether to allow coalescing based on how far the closest
+ // use is.
+ unsigned CopyIdx = getInstructionIndex(CopyMI);
+ unsigned MinDist = i2miMap_.size() * InstrSlots::NUM;
int UseBBNum = dvi.UsedBlocks.find_first();
while (UseBBNum != -1) {
MachineBasicBlock *UseBB = mf_->getBlockNumbered(UseBBNum);
- const Loop *UL = LI.getLoopFor(UseBB->getBasicBlock());
- if ((UL && UL == L) || // A use in the same loop
- (UL && L && // A use in a loop and this BB is the preheader
- UL->getLoopPreheader() == SrcBB->getBasicBlock()))
- goto TryJoin;
+ unsigned UseIdx = getMBBStartIdx(UseBB);
+ if (UseIdx > CopyIdx) {
+ MinDist = std::min(MinDist, UseIdx - CopyIdx);
+ if (MinDist <= Threshold)
+ break;
+ }
UseBBNum = dvi.UsedBlocks.find_next(UseBBNum);
}
-
- // Don't do it!
- ++numAborts;
- return false;
+ if (MinDist > Threshold) {
+ // Don't do it!
+ ++numAborts;
+ return false;
+ }
}
TryJoin:
} else {
MachineInstr *SrcMI = getInstructionFromIndex(SrcStart);
if (SrcMI) {
- MachineOperand *mops = SrcMI->findRegisterDefOperand(SrcReg);
+ MachineOperand *mops = findDefOperand(SrcMI, repSrcReg);
if (mops)
- // FIXME: mops == NULL means SrcMI defines a subregister?
mops->setIsDead();
}
}
}
- if (isShorten) {
+ if (isShorten || isDead) {
// Shorten the live interval.
LiveInterval &LiveInInt = (repSrcReg == DestInt.reg) ? DestInt : SrcInt;
- LiveInInt.removeRange(SrcStart, SrcEnd);
+ LiveInInt.removeRange(RemoveStart, RemoveEnd);
}
} else {
// Coallescing failed.
if (MRegisterInfo::isPhysicalRegister(repDstReg)) {
for (const unsigned *AS = mri_->getAliasSet(repDstReg); *AS; ++AS)
getInterval(*AS).MergeInClobberRanges(SrcInt);
+ } else {
+ // Merge UsedBlocks info if the destination is a virtual register.
+ LiveVariables::VarInfo& dVI = lv_->getVarInfo(repDstReg);
+ LiveVariables::VarInfo& sVI = lv_->getVarInfo(repSrcReg);
+ dVI.UsedBlocks |= sVI.UsedBlocks;
}
DOUT << "\n\t\tJoined. Result = "; DestInt.print(DOUT, mri_);
return NULL;
}
+
+/// findDefOperand - Returns the MachineOperand of MI that is a def of the
+/// given register. Each operand's register is first mapped through rep() and
+/// compared with regsOverlap(), so a def of an aliasing physical register or
+/// of a coalesced representative register also matches. Returns NULL if MI
+/// has no such def operand.
+MachineOperand *LiveIntervals::findDefOperand(MachineInstr *MI, unsigned Reg) {
+  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
+    MachineOperand &MO = MI->getOperand(i);
+    if (MO.isReg() && MO.isDef() &&
+        mri_->regsOverlap(rep(MO.getReg()), Reg))
+      return &MO;
+  }
+  return NULL;
+}
+
/// unsetRegisterKill - Unset IsKill property of all uses of specific register
/// of the specific instruction.
void LiveIntervals::unsetRegisterKill(MachineInstr *MI, unsigned Reg) {