X-Git-Url: http://demsky.eecs.uci.edu/git/?a=blobdiff_plain;f=lib%2FCodeGen%2FSimpleRegisterCoalescing.cpp;h=a60939c301057c2d0f0498c316c866cb949eec6c;hb=a29c13086a3add78a3a79f744573fe09eaa9dc88;hp=29e634b477d16db1fa211ebfe2199cfc79c1da97;hpb=a1566f2e12ce87a5bca30bc0189a0cdbb40136a4;p=oota-llvm.git diff --git a/lib/CodeGen/SimpleRegisterCoalescing.cpp b/lib/CodeGen/SimpleRegisterCoalescing.cpp index 29e634b477d..a60939c3010 100644 --- a/lib/CodeGen/SimpleRegisterCoalescing.cpp +++ b/lib/CodeGen/SimpleRegisterCoalescing.cpp @@ -25,6 +25,7 @@ #include "llvm/CodeGen/RegisterCoalescer.h" #include "llvm/Target/TargetInstrInfo.h" #include "llvm/Target/TargetMachine.h" +#include "llvm/Target/TargetOptions.h" #include "llvm/Support/CommandLine.h" #include "llvm/Support/Debug.h" #include "llvm/ADT/SmallSet.h" @@ -38,6 +39,7 @@ STATISTIC(numJoins , "Number of interval joins performed"); STATISTIC(numSubJoins , "Number of subclass joins performed"); STATISTIC(numCommutes , "Number of instruction commuting performed"); STATISTIC(numExtends , "Number of copies extended"); +STATISTIC(NumReMats , "Number of instructions re-materialized"); STATISTIC(numPeep , "Number of identity moves eliminated after coalescing"); STATISTIC(numAborts , "Number of times interval joining aborted"); @@ -66,13 +68,16 @@ static RegisterAnalysisGroup V(X); const PassInfo *const llvm::SimpleRegisterCoalescingID = &X; void SimpleRegisterCoalescing::getAnalysisUsage(AnalysisUsage &AU) const { + AU.addRequired(); AU.addPreserved(); + AU.addRequired(); AU.addPreserved(); AU.addPreservedID(MachineDominatorsID); - AU.addPreservedID(PHIEliminationID); + if (StrongPHIElim) + AU.addPreservedID(StrongPHIEliminationID); + else + AU.addPreservedID(PHIEliminationID); AU.addPreservedID(TwoAddressInstructionPassID); - AU.addRequired(); - AU.addRequired(); MachineFunctionPass::getAnalysisUsage(AU); } @@ -181,16 +186,20 @@ bool SimpleRegisterCoalescing::AdjustCopiesBackFrom(LiveInterval &IntA, } // Okay, merge "B1" into the same value number as "B0". - if (BValNo != ValLR->valno) + if (BValNo != ValLR->valno) { + IntB.addKills(ValLR->valno, BValNo->kills); IntB.MergeValueNumberInto(BValNo, ValLR->valno); + } DOUT << " result = "; IntB.print(DOUT, tri_); DOUT << "\n"; // If the source instruction was killing the source register before the // merge, unset the isKill marker given the live range has been extended. int UIdx = ValLREndInst->findRegisterUseOperandIdx(IntB.reg, true); - if (UIdx != -1) + if (UIdx != -1) { ValLREndInst->getOperand(UIdx).setIsKill(false); + IntB.removeKill(ValLR->valno, FillerStart); + } ++numExtends; return true; @@ -426,6 +435,70 @@ bool SimpleRegisterCoalescing::RemoveCopyByCommutingDef(LiveInterval &IntA, return true; } +/// ReMaterializeTrivialDef - If the source of a copy is defined by a trivial +/// computation, replace the copy by rematerialize the definition. +bool SimpleRegisterCoalescing::ReMaterializeTrivialDef(LiveInterval &SrcInt, + unsigned DstReg, + MachineInstr *CopyMI) { + unsigned CopyIdx = li_->getUseIndex(li_->getInstructionIndex(CopyMI)); + LiveInterval::iterator SrcLR = SrcInt.FindLiveRangeContaining(CopyIdx); + if (SrcLR == SrcInt.end()) // Should never happen! + return false; + VNInfo *ValNo = SrcLR->valno; + // If other defs can reach uses of this def, then it's not safe to perform + // the optimization. 
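+  // (A def index of ~0U or ~1U is a sentinel for a value with no ordinary
+  // defining instruction, and hasPHIKill means the value flows into a PHI
+  // join, so other definitions may reach its uses.)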
+ if (ValNo->def == ~0U || ValNo->def == ~1U || ValNo->hasPHIKill) + return false; + MachineInstr *DefMI = li_->getInstructionFromIndex(ValNo->def); + const TargetInstrDesc &TID = DefMI->getDesc(); + if (!TID.isAsCheapAsAMove()) + return false; + bool SawStore = false; + if (!DefMI->isSafeToMove(tii_, SawStore)) + return false; + + unsigned DefIdx = li_->getDefIndex(CopyIdx); + const LiveRange *DLR= li_->getInterval(DstReg).getLiveRangeContaining(DefIdx); + DLR->valno->copy = NULL; + // Don't forget to update sub-register intervals. + if (TargetRegisterInfo::isPhysicalRegister(DstReg)) { + for (const unsigned* SR = tri_->getSubRegisters(DstReg); *SR; ++SR) { + if (!li_->hasInterval(*SR)) + continue; + DLR = li_->getInterval(*SR).getLiveRangeContaining(DefIdx); + if (DLR && DLR->valno->copy == CopyMI) + DLR->valno->copy = NULL; + } + } + + MachineBasicBlock *MBB = CopyMI->getParent(); + MachineBasicBlock::iterator MII = next(MachineBasicBlock::iterator(CopyMI)); + CopyMI->removeFromParent(); + tii_->reMaterialize(*MBB, MII, DstReg, DefMI); + MachineInstr *NewMI = prior(MII); + // CopyMI may have implicit operands, transfer them over to the newly + // rematerialized instruction. And update implicit def interval valnos. + for (unsigned i = CopyMI->getDesc().getNumOperands(), + e = CopyMI->getNumOperands(); i != e; ++i) { + MachineOperand &MO = CopyMI->getOperand(i); + if (MO.isReg() && MO.isImplicit()) + NewMI->addOperand(MO); + if (MO.isDef() && li_->hasInterval(MO.getReg())) { + unsigned Reg = MO.getReg(); + DLR = li_->getInterval(Reg).getLiveRangeContaining(DefIdx); + if (DLR && DLR->valno->copy == CopyMI) + DLR->valno->copy = NULL; + } + } + + li_->ReplaceMachineInstrInMaps(CopyMI, NewMI); + MBB->getParent()->DeleteMachineInstr(CopyMI); + ReMatCopies.insert(CopyMI); + ReMatDefs.insert(DefMI); + ++NumReMats; + return true; +} + /// isBackEdgeCopy - Returns true if CopyMI is a back edge copy. /// bool SimpleRegisterCoalescing::isBackEdgeCopy(MachineInstr *CopyMI, @@ -475,22 +548,50 @@ SimpleRegisterCoalescing::UpdateRegDefsUses(unsigned SrcReg, unsigned DstReg, unsigned UseDstReg = DstReg; if (OldSubIdx) UseDstReg = tri_->getSubReg(DstReg, OldSubIdx); + + unsigned CopySrcReg, CopyDstReg; + if (tii_->isMoveInstr(*UseMI, CopySrcReg, CopyDstReg) && + CopySrcReg != CopyDstReg && + CopySrcReg == SrcReg && CopyDstReg != UseDstReg) { + // If the use is a copy and it won't be coalesced away, and its source + // is defined by a trivial computation, try to rematerialize it instead. + if (ReMaterializeTrivialDef(li_->getInterval(SrcReg), CopyDstReg,UseMI)) + continue; + } + O.setReg(UseDstReg); O.setSubReg(0); - } else { - // Sub-register indexes goes from small to large. e.g. - // RAX: 1 -> AL, 2 -> AX, 3 -> EAX - // EAX: 1 -> AL, 2 -> AX - // So RAX's sub-register 2 is AX, RAX's sub-regsiter 3 is EAX, whose - // sub-register 2 is also AX. - if (SubIdx && OldSubIdx && SubIdx != OldSubIdx) - assert(OldSubIdx < SubIdx && "Conflicting sub-register index!"); - else if (SubIdx) - O.setSubReg(SubIdx); - // Remove would-be duplicated kill marker. - if (O.isKill() && UseMI->killsRegister(DstReg)) - O.setIsKill(false); - O.setReg(DstReg); + continue; + } + + // Sub-register indexes goes from small to large. e.g. + // RAX: 1 -> AL, 2 -> AX, 3 -> EAX + // EAX: 1 -> AL, 2 -> AX + // So RAX's sub-register 2 is AX, RAX's sub-regsiter 3 is EAX, whose + // sub-register 2 is also AX. 
+ if (SubIdx && OldSubIdx && SubIdx != OldSubIdx) + assert(OldSubIdx < SubIdx && "Conflicting sub-register index!"); + else if (SubIdx) + O.setSubReg(SubIdx); + // Remove would-be duplicated kill marker. + if (O.isKill() && UseMI->killsRegister(DstReg)) + O.setIsKill(false); + O.setReg(DstReg); + + // After updating the operand, check if the machine instruction has + // become a copy. If so, update its val# information. + const TargetInstrDesc &TID = UseMI->getDesc(); + unsigned CopySrcReg, CopyDstReg; + if (TID.getNumDefs() == 1 && TID.getNumOperands() > 2 && + tii_->isMoveInstr(*UseMI, CopySrcReg, CopyDstReg) && + CopySrcReg != CopyDstReg && + (TargetRegisterInfo::isVirtualRegister(CopyDstReg) || + allocatableRegs_[CopyDstReg])) { + LiveInterval &LI = li_->getInterval(CopyDstReg); + unsigned DefIdx = li_->getDefIndex(li_->getInstructionIndex(UseMI)); + const LiveRange *DLR = LI.getLiveRangeContaining(DefIdx); + if (DLR->valno->def == DefIdx) + DLR->valno->copy = UseMI; } } } @@ -533,14 +634,11 @@ void SimpleRegisterCoalescing::RemoveUnnecessaryKills(unsigned Reg, MachineOperand &UseMO = UI.getOperand(); if (UseMO.isKill()) { MachineInstr *UseMI = UseMO.getParent(); - unsigned SReg, DReg; - if (!tii_->isMoveInstr(*UseMI, SReg, DReg)) - continue; unsigned UseIdx = li_->getUseIndex(li_->getInstructionIndex(UseMI)); if (JoinedCopies.count(UseMI)) continue; const LiveRange *UI = LI.getLiveRangeContaining(UseIdx); - if (!LI.isKill(UI->valno, UseIdx+1)) + if (!UI || !LI.isKill(UI->valno, UseIdx+1)) UseMO.setIsKill(false); } } @@ -609,6 +707,18 @@ bool SimpleRegisterCoalescing::ShortenDeadCopyLiveRange(LiveInterval &li, return false; } +/// RemoveDeadDef - If a def of a live interval is now determined dead, remove +/// the val# it defines. If the live interval becomes empty, remove it as well. +bool SimpleRegisterCoalescing::RemoveDeadDef(LiveInterval &li, + MachineInstr *DefMI) { + unsigned DefIdx = li_->getDefIndex(li_->getInstructionIndex(DefMI)); + LiveInterval::iterator MLR = li.FindLiveRangeContaining(DefIdx); + if (DefIdx != MLR->valno->def) + return false; + li.removeValNo(MLR->valno); + return removeIntervalIfEmpty(li, li_, tri_); +} + /// PropagateDeadness - Propagate the dead marker to the instruction which /// defines the val#. static void PropagateDeadness(LiveInterval &li, MachineInstr *CopyMI, @@ -634,7 +744,7 @@ static bool isSameOrFallThroughBB(MachineBasicBlock *MBB, if (MBB == SuccMBB) return true; MachineBasicBlock *TBB = 0, *FBB = 0; - std::vector Cond; + SmallVector Cond; return !tii_->AnalyzeBranch(*MBB, TBB, FBB, Cond) && !TBB && !FBB && MBB->isSuccessor(SuccMBB); } @@ -814,6 +924,8 @@ void SimpleRegisterCoalescing::RemoveCopiesFromValNo(LiveInterval &li, } } +/// getMatchingSuperReg - Return a super-register of the specified register +/// Reg so its sub-register of index SubIdx is Reg. static unsigned getMatchingSuperReg(unsigned Reg, unsigned SubIdx, const TargetRegisterClass *RC, const TargetRegisterInfo* TRI) { @@ -858,6 +970,61 @@ SimpleRegisterCoalescing::isProfitableToCoalesceToSubRC(unsigned SrcReg, return (SrcSize + DstSize) <= Threshold; } +/// HasIncompatibleSubRegDefUse - If we are trying to coalesce a virtual +/// register with a physical register, check if any of the virtual register +/// operand is a sub-register use or def. If so, make sure it won't result +/// in an illegal extract_subreg or insert_subreg instruction. e.g. +/// vr1024 = extract_subreg vr1025, 1 +/// ... 
+/// vr1024 = mov8rr AH +/// If vr1024 is coalesced with AH, the extract_subreg is now illegal since +/// AH does not have a super-reg whose sub-register 1 is AH. +bool +SimpleRegisterCoalescing::HasIncompatibleSubRegDefUse(MachineInstr *CopyMI, + unsigned VirtReg, + unsigned PhysReg) { + for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(VirtReg), + E = mri_->reg_end(); I != E; ++I) { + MachineOperand &O = I.getOperand(); + MachineInstr *MI = &*I; + if (MI == CopyMI || JoinedCopies.count(MI)) + continue; + unsigned SubIdx = O.getSubReg(); + if (SubIdx && !tri_->getSubReg(PhysReg, SubIdx)) + return true; + if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG) { + SubIdx = MI->getOperand(2).getImm(); + if (O.isUse() && !tri_->getSubReg(PhysReg, SubIdx)) + return true; + if (O.isDef()) { + unsigned SrcReg = MI->getOperand(1).getReg(); + const TargetRegisterClass *RC = + TargetRegisterInfo::isPhysicalRegister(SrcReg) + ? tri_->getPhysicalRegisterRegClass(SrcReg) + : mri_->getRegClass(SrcReg); + if (!getMatchingSuperReg(PhysReg, SubIdx, RC, tri_)) + return true; + } + } + if (MI->getOpcode() == TargetInstrInfo::INSERT_SUBREG) { + SubIdx = MI->getOperand(3).getImm(); + if (VirtReg == MI->getOperand(0).getReg()) { + if (!tri_->getSubReg(PhysReg, SubIdx)) + return true; + } else { + unsigned DstReg = MI->getOperand(0).getReg(); + const TargetRegisterClass *RC = + TargetRegisterInfo::isPhysicalRegister(DstReg) + ? tri_->getPhysicalRegisterRegClass(DstReg) + : mri_->getRegClass(DstReg); + if (!getMatchingSuperReg(PhysReg, SubIdx, RC, tri_)) + return true; + } + } + } + return false; +} + /// JoinCopy - Attempt to join intervals corresponding to SrcReg/DstReg, /// which are the src/dst of the copy instruction CopyMI. This returns true @@ -868,7 +1035,7 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { MachineInstr *CopyMI = TheCopy.MI; Again = false; - if (JoinedCopies.count(CopyMI)) + if (JoinedCopies.count(CopyMI) || ReMatCopies.count(CopyMI)) return false; // Already done. DOUT << li_->getInstructionIndex(CopyMI) << '\t' << *CopyMI; @@ -968,10 +1135,10 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { mri_->getRegClass(isExtSubReg ? SrcReg : DstReg); if (isExtSubReg) { RealDstReg = getMatchingSuperReg(DstReg, SubIdx, RC, tri_); - assert(RealDstReg && "Invalid extra_subreg instruction!"); + assert(RealDstReg && "Invalid extract_subreg instruction!"); } else { RealSrcReg = getMatchingSuperReg(SrcReg, SubIdx, RC, tri_); - assert(RealSrcReg && "Invalid extra_subreg instruction!"); + assert(RealSrcReg && "Invalid extract_subreg instruction!"); } // For this type of EXTRACT_SUBREG, conservatively @@ -1050,6 +1217,12 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { return false; } } + + // Will it create illegal extract_subreg / insert_subreg? + if (SrcIsPhys && HasIncompatibleSubRegDefUse(CopyMI, DstReg, SrcReg)) + return false; + if (DstIsPhys && HasIncompatibleSubRegDefUse(CopyMI, SrcReg, DstReg)) + return false; LiveInterval &SrcInt = li_->getInterval(SrcReg); LiveInterval &DstInt = li_->getInterval(DstReg); @@ -1111,6 +1284,12 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { if (!isEmpty && !JoinIntervals(DstInt, SrcInt, Swapped)) { // Coalescing failed. + + // If definition of source is defined by trivial computation, try + // rematerializing it. 
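+    // Only plain register copies are tried here; extract_subreg and
+    // insert_subreg do not simply copy the whole source value.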
+ if (!isExtSubReg && !isInsSubReg && + ReMaterializeTrivialDef(SrcInt, DstInt.reg, CopyMI)) + return true; // If we can eliminate the copy without merging the live ranges, do so now. if (!isExtSubReg && !isInsSubReg && @@ -1214,9 +1393,6 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { if (TargetRegisterInfo::isVirtualRegister(DstReg)) RemoveUnnecessaryKills(DstReg, *ResDstInt); - // SrcReg is guarateed to be the register whose live interval that is - // being merged. - li_->removeInterval(SrcReg); if (isInsSubReg) // Avoid: // r1024 = op @@ -1226,6 +1402,10 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { RemoveDeadImpDef(DstReg, *ResDstInt); UpdateRegDefsUses(SrcReg, DstReg, SubIdx); + // SrcReg is guarateed to be the register whose live interval that is + // being merged. + li_->removeInterval(SrcReg); + if (isEmpty) { // Now the copy is being coalesced away, the val# previously defined // by the copy is being defined by an IMPLICIT_DEF which defines a zero @@ -1249,6 +1429,15 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) { } } + // If resulting interval has a preference that no longer fits because of subreg + // coalescing, just clear the preference. + if (ResDstInt->preference && (isExtSubReg || isInsSubReg) && + TargetRegisterInfo::isVirtualRegister(ResDstInt->reg)) { + const TargetRegisterClass *RC = mri_->getRegClass(ResDstInt->reg); + if (!RC->contains(ResDstInt->preference)) + ResDstInt->preference = 0; + } + DOUT << "\n\t\tJoined. Result = "; ResDstInt->print(DOUT, tri_); DOUT << "\n"; @@ -1855,7 +2044,7 @@ void SimpleRegisterCoalescing::joinIntervals() { JoinQueue = new JoinPriorityQueue(this); std::vector TryAgainList; - if (loopInfo->begin() == loopInfo->end()) { + if (loopInfo->empty()) { // If there are no loops in the function, join intervals in function order. 
for (MachineFunction::iterator I = mf_->begin(), E = mf_->end(); I != E; ++I) @@ -1998,7 +2187,7 @@ SimpleRegisterCoalescing::lastRegisterUse(unsigned Start, unsigned End, if (!(tii_->isMoveInstr(*MI, SrcReg, DstReg) && SrcReg == DstReg)) for (unsigned i = 0, NumOps = MI->getNumOperands(); i != NumOps; ++i) { MachineOperand &Use = MI->getOperand(i); - if (Use.isRegister() && Use.isUse() && Use.getReg() && + if (Use.isReg() && Use.isUse() && Use.getReg() && tri_->regsOverlap(Use.getReg(), Reg)) { UseIdx = e; return &Use; @@ -2021,6 +2210,8 @@ void SimpleRegisterCoalescing::printRegName(unsigned reg) const { void SimpleRegisterCoalescing::releaseMemory() { JoinedCopies.clear(); + ReMatCopies.clear(); + ReMatDefs.clear(); } static bool isZeroLengthInterval(LiveInterval *li) { @@ -2052,7 +2243,7 @@ SimpleRegisterCoalescing::TurnCopyIntoImpDef(MachineBasicBlock::iterator &I, CopyMI->setDesc(tii_->get(TargetInstrInfo::IMPLICIT_DEF)); for (int i = CopyMI->getNumOperands() - 1, e = 0; i > e; --i) CopyMI->RemoveOperand(i); - bool NoUse = mri_->use_begin(SrcReg) == mri_->use_end(); + bool NoUse = mri_->use_empty(SrcReg); if (NoUse) { for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(SrcReg), E = mri_->reg_end(); I != E; ) { @@ -2094,13 +2285,14 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) { joinIntervals(); DOUT << "********** INTERVALS POST JOINING **********\n"; for (LiveIntervals::iterator I = li_->begin(), E = li_->end(); I != E; ++I){ - I->second.print(DOUT, tri_); + I->second->print(DOUT, tri_); DOUT << "\n"; } } // Perform a final pass over the instructions and compute spill weights // and remove identity moves. + SmallVector DeadDefs; for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end(); mbbi != mbbe; ++mbbi) { MachineBasicBlock* mbb = mbbi; @@ -2129,26 +2321,57 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) { continue; } + // Now check if this is a remat'ed def instruction which is now dead. + if (ReMatDefs.count(MI)) { + bool isDead = true; + for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { + const MachineOperand &MO = MI->getOperand(i); + if (!MO.isReg()) + continue; + unsigned Reg = MO.getReg(); + if (TargetRegisterInfo::isVirtualRegister(Reg)) + DeadDefs.push_back(Reg); + if (MO.isDead()) + continue; + if (TargetRegisterInfo::isPhysicalRegister(Reg) || + !mri_->use_empty(Reg)) { + isDead = false; + break; + } + } + if (isDead) { + while (!DeadDefs.empty()) { + unsigned DeadDef = DeadDefs.back(); + DeadDefs.pop_back(); + RemoveDeadDef(li_->getInterval(DeadDef), MI); + } + li_->RemoveMachineInstrFromMaps(mii); + mii = mbbi->erase(mii); + continue; + } else + DeadDefs.clear(); + } + // If the move will be an identity move delete it - bool isMove = tii_->isMoveInstr(*mii, SrcReg, DstReg); + bool isMove = tii_->isMoveInstr(*MI, SrcReg, DstReg); if (isMove && SrcReg == DstReg) { if (li_->hasInterval(SrcReg)) { LiveInterval &RegInt = li_->getInterval(SrcReg); // If def of this move instruction is dead, remove its live range // from the dstination register's live interval. 
- if (mii->registerDefIsDead(DstReg)) { - if (!ShortenDeadCopySrcLiveRange(RegInt, mii)) - ShortenDeadCopyLiveRange(RegInt, mii); + if (MI->registerDefIsDead(DstReg)) { + if (!ShortenDeadCopySrcLiveRange(RegInt, MI)) + ShortenDeadCopyLiveRange(RegInt, MI); } } - li_->RemoveMachineInstrFromMaps(mii); + li_->RemoveMachineInstrFromMaps(MI); mii = mbbi->erase(mii); ++numPeep; } else if (!isMove || !TurnCopyIntoImpDef(mii, mbb, DstReg, SrcReg)) { SmallSet UniqueUses; - for (unsigned i = 0, e = mii->getNumOperands(); i != e; ++i) { - const MachineOperand &mop = mii->getOperand(i); - if (mop.isRegister() && mop.getReg() && + for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { + const MachineOperand &mop = MI->getOperand(i); + if (mop.isReg() && mop.getReg() && TargetRegisterInfo::isVirtualRegister(mop.getReg())) { unsigned reg = mop.getReg(); // Multiple uses of reg by the same instruction. It should not @@ -2167,7 +2390,7 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) { } for (LiveIntervals::iterator I = li_->begin(), E = li_->end(); I != E; ++I) { - LiveInterval &LI = I->second; + LiveInterval &LI = *I->second; if (TargetRegisterInfo::isVirtualRegister(LI.reg)) { // If the live interval length is essentially zero, i.e. in every live // range the use follows def immediately, it doesn't make sense to spill @@ -2176,7 +2399,8 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) { LI.weight = HUGE_VALF; else { bool isLoad = false; - if (li_->isReMaterializable(LI, isLoad)) { + SmallVector SpillIs; + if (li_->isReMaterializable(LI, SpillIs, isLoad)) { // If all of the definitions of the interval are re-materializable, // it is a preferred candidate for spilling. If non of the defs are // loads, then it's potentially very cheap to re-materialize. @@ -2196,7 +2420,7 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) { // Divide the weight of the interval by its size. This encourages // spilling of intervals that are large and have few uses, and // discourages spilling of small intervals with many uses. - LI.weight /= li_->getApproximateInstructionCount(LI); + LI.weight /= li_->getApproximateInstructionCount(LI) * InstrSlots::NUM; } }
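
The core addition in this patch is ReMaterializeTrivialDef: when a copy cannot be coalesced away (either because interval joining failed in JoinCopy, or because a rewritten use in UpdateRegDefsUses remains a copy), the pass checks whether the copy's source value has exactly one ordinary defining instruction that is as cheap as a move and safe to move, and if so deletes the copy and re-emits that definition at the copy's location. The following is a minimal, self-contained sketch of that decision only; the ValueDef struct and the canRematerialize helper are illustrative names, not LLVM's API.

    #include <iostream>

    // Simplified stand-ins for the state ReMaterializeTrivialDef inspects.
    struct ValueDef {
      bool hasSingleDefiningInstr; // false for the ~0U / ~1U sentinel def slots
      bool hasPHIKill;             // the value is live into a PHI join
      bool isAsCheapAsAMove;       // TID.isAsCheapAsAMove() in the patch
      bool isSafeToMove;           // DefMI->isSafeToMove(...) in the patch
    };

    // Mirror of the patch's bail-out conditions: rematerialize only when the
    // copied value comes from one trivial, movable definition.
    bool canRematerialize(const ValueDef &D) {
      if (!D.hasSingleDefiningInstr || D.hasPHIKill)
        return false;              // other defs may reach the copy's uses
      return D.isAsCheapAsAMove && D.isSafeToMove;
    }

    int main() {
      ValueDef ConstantDef{true, false, true, true};
      ValueDef ValueIntoPHI{true, true, false, true};
      std::cout << canRematerialize(ConstantDef) << '\n'; // 1: replace the copy
      std::cout << canRematerialize(ValueIntoPHI) << '\n'; // 0: keep the copy
      return 0;
    }

When the real check succeeds, the patch re-emits the defining instruction via tii_->reMaterialize, transfers the copy's implicit operands and value-number bookkeeping to the new instruction, and records the instructions in ReMatCopies and ReMatDefs so that a definition left with no remaining uses can be removed by RemoveDeadDef during the final pass in runOnMachineFunction.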