//===-- LiveRangeEdit.cpp - Basic tools for editing a register live range -===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// The LiveRangeEdit class represents changes done to a virtual register when it
// is spilled or split.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/CodeGen/LiveRangeEdit.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/VirtRegMap.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"

using namespace llvm;

STATISTIC(NumDCEDeleted, "Number of instructions deleted by DCE");
STATISTIC(NumDCEFoldedLoads, "Number of single use loads folded after DCE");
STATISTIC(NumFracRanges, "Number of live ranges fractured by DCE");

void LiveRangeEdit::Delegate::anchor() { }

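/// createFrom - Create a new virtual register with the same register class as
/// OldReg, record it as a register created by this edit, and return its
/// (initially empty) live interval.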
LiveInterval &LiveRangeEdit::createFrom(unsigned OldReg) {
  unsigned VReg = MRI.createVirtualRegister(MRI.getRegClass(OldReg));
  if (VRM) {
    VRM->setIsSplitFromReg(VReg, VRM->getOriginal(OldReg));
  }
  LiveInterval &LI = LIS.getOrCreateInterval(VReg);
  NewRegs.push_back(&LI);
  return LI;
}

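/// checkRematerializable - Remember VNI as rematerializable if its defining
/// instruction DefMI is trivially rematerializable according to the target.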
bool LiveRangeEdit::checkRematerializable(VNInfo *VNI,
                                          const MachineInstr *DefMI,
                                          AliasAnalysis *aa) {
  assert(DefMI && "Missing instruction");
  ScannedRemattable = true;
  if (!TII.isTriviallyReMaterializable(DefMI, aa))
    return false;
  Remattable.insert(VNI);
  return true;
}

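/// scanRemattable - Visit every value in the parent register and record the
/// ones whose defining instruction is trivially rematerializable.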
void LiveRangeEdit::scanRemattable(AliasAnalysis *aa) {
  for (LiveInterval::vni_iterator I = getParent().vni_begin(),
       E = getParent().vni_end(); I != E; ++I) {
    VNInfo *VNI = *I;
    if (VNI->isUnused())
      continue;
    MachineInstr *DefMI = LIS.getInstructionFromIndex(VNI->def);
    if (!DefMI)
      continue;
    checkRematerializable(VNI, DefMI, aa);
  }
  ScannedRemattable = true;
}

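/// anyRematerializable - Lazily scan the parent register and report whether
/// any of its values can be rematerialized.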
bool LiveRangeEdit::anyRematerializable(AliasAnalysis *aa) {
  if (!ScannedRemattable)
    scanRemattable(aa);
  return !Remattable.empty();
}

/// allUsesAvailableAt - Return true if all registers used by OrigMI at
/// OrigIdx are also available with the same value at UseIdx.
bool LiveRangeEdit::allUsesAvailableAt(const MachineInstr *OrigMI,
                                       SlotIndex OrigIdx,
                                       SlotIndex UseIdx) const {
  OrigIdx = OrigIdx.getRegSlot(true);
  UseIdx = UseIdx.getRegSlot(true);
  for (unsigned i = 0, e = OrigMI->getNumOperands(); i != e; ++i) {
    const MachineOperand &MO = OrigMI->getOperand(i);
    if (!MO.isReg() || !MO.getReg() || !MO.readsReg())
      continue;

    // We can't remat physreg uses, unless it is a constant.
    if (TargetRegisterInfo::isPhysicalRegister(MO.getReg())) {
      if (MRI.isConstantPhysReg(MO.getReg(), *OrigMI->getParent()->getParent()))
        continue;
      return false;
    }

    LiveInterval &li = LIS.getInterval(MO.getReg());
    const VNInfo *OVNI = li.getVNInfoAt(OrigIdx);
    if (!OVNI)
      continue;

    // Don't allow rematerialization immediately after the original def.
    // It would be incorrect if OrigMI redefines the register.
    if (SlotIndex::isSameInstr(OrigIdx, UseIdx))
      return false;

    if (OVNI != li.getVNInfoAt(UseIdx))
      return false;
  }
  return true;
}

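/// canRematerializeAt - Determine whether RM.ParentVNI can be rematerialized
/// at UseIdx, filling in RM.OrigMI with the defining instruction when none was
/// provided. A rough usage sketch (Edit, AA, DestReg, and the insertion point
/// names are illustrative only):
///
///   if (Edit.anyRematerializable(AA)) {
///     LiveRangeEdit::Remat RM(ParentVNI);
///     if (Edit.canRematerializeAt(RM, UseIdx, /*cheapAsAMove=*/false))
///       Edit.rematerializeAt(*MBB, InsertPt, DestReg, RM, TRI);
///   }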
bool LiveRangeEdit::canRematerializeAt(Remat &RM,
                                       SlotIndex UseIdx,
                                       bool cheapAsAMove) {
  assert(ScannedRemattable && "Call anyRematerializable first");

  // Use scanRemattable info.
  if (!Remattable.count(RM.ParentVNI))
    return false;

  // No defining instruction provided.
  SlotIndex DefIdx;
  if (RM.OrigMI)
    DefIdx = LIS.getInstructionIndex(RM.OrigMI);
  else {
    DefIdx = RM.ParentVNI->def;
    RM.OrigMI = LIS.getInstructionFromIndex(DefIdx);
    assert(RM.OrigMI && "No defining instruction for remattable value");
  }

  // If only cheap remats were requested, bail out early.
  if (cheapAsAMove && !RM.OrigMI->isAsCheapAsAMove())
    return false;

  // Verify that all used registers are available with the same values.
  if (!allUsesAvailableAt(RM.OrigMI, DefIdx, UseIdx))
    return false;

  return true;
}

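/// rematerializeAt - Ask the target to rematerialize RM.OrigMI in front of MI,
/// defining DestReg, record the remat, and return the SlotIndex assigned to
/// the new instruction.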
SlotIndex LiveRangeEdit::rematerializeAt(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MI,
                                         unsigned DestReg,
                                         const Remat &RM,
                                         const TargetRegisterInfo &tri,
                                         bool Late) {
  assert(RM.OrigMI && "Invalid remat");
  TII.reMaterialize(MBB, MI, DestReg, 0, RM.OrigMI, tri);
  Rematted.insert(RM.ParentVNI);
  return LIS.getSlotIndexes()->insertMachineInstrInMaps(--MI, Late)
           .getRegSlot();
}

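/// eraseVirtReg - Ask the delegate whether Reg may be erased, and remove its
/// live interval if so.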
void LiveRangeEdit::eraseVirtReg(unsigned Reg) {
  if (TheDelegate && TheDelegate->LRE_CanEraseVirtReg(Reg))
    LIS.removeInterval(Reg);
}

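/// foldAsLoad - If LI has a single def that can be folded as a load and a
/// single use, try to fold the load into the using instruction. On success the
/// now-dead load is added to Dead and true is returned.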
bool LiveRangeEdit::foldAsLoad(LiveInterval *LI,
                               SmallVectorImpl<MachineInstr*> &Dead) {
  MachineInstr *DefMI = 0, *UseMI = 0;

  // Check that there is a single def and a single use.
  for (MachineRegisterInfo::reg_nodbg_iterator I = MRI.reg_nodbg_begin(LI->reg),
       E = MRI.reg_nodbg_end(); I != E; ++I) {
    MachineOperand &MO = I.getOperand();
    MachineInstr *MI = MO.getParent();
    if (MO.isDef()) {
      if (DefMI && DefMI != MI)
        return false;
      if (!MI->canFoldAsLoad())
        return false;
      DefMI = MI;
    } else if (!MO.isUndef()) {
      if (UseMI && UseMI != MI)
        return false;
      // FIXME: Targets don't know how to fold subreg uses.
      if (MO.getSubReg())
        return false;
      UseMI = MI;
    }
  }
  if (!DefMI || !UseMI)
    return false;

  // Since we're moving the DefMI load, make sure we're not extending any live
  // ranges.
  if (!allUsesAvailableAt(DefMI,
                          LIS.getInstructionIndex(DefMI),
                          LIS.getInstructionIndex(UseMI)))
    return false;

  // We also need to make sure it is safe to move the load.
  // Assume there are stores between DefMI and UseMI.
  bool SawStore = true;
  if (!DefMI->isSafeToMove(&TII, 0, SawStore))
    return false;

  DEBUG(dbgs() << "Try to fold single def: " << *DefMI
               << " into single use: " << *UseMI);

  SmallVector<unsigned, 8> Ops;
  if (UseMI->readsWritesVirtualRegister(LI->reg, &Ops).second)
    return false;

  MachineInstr *FoldMI = TII.foldMemoryOperand(UseMI, Ops, DefMI);
  if (!FoldMI)
    return false;
  DEBUG(dbgs() << " folded: " << *FoldMI);
  LIS.ReplaceMachineInstrInMaps(UseMI, FoldMI);
  UseMI->eraseFromParent();
  DefMI->addRegisterDead(LI->reg, 0);
  Dead.push_back(DefMI);
  ++NumDCEFoldedLoads;
  return true;
}

/// Find all live intervals that need to shrink, then remove the instruction.
void LiveRangeEdit::eliminateDeadDef(MachineInstr *MI, ToShrinkSet &ToShrink) {
  assert(MI->allDefsAreDead() && "Def isn't really dead");
  SlotIndex Idx = LIS.getInstructionIndex(MI).getRegSlot();

  // Never delete a bundled instruction.
  if (MI->isBundled()) {
    return;
  }
  // Never delete inline asm.
  if (MI->isInlineAsm()) {
    DEBUG(dbgs() << "Won't delete: " << Idx << '\t' << *MI);
    return;
  }

  // Use the same criteria as DeadMachineInstructionElim.
  bool SawStore = false;
  if (!MI->isSafeToMove(&TII, 0, SawStore)) {
    DEBUG(dbgs() << "Can't delete: " << Idx << '\t' << *MI);
    return;
  }

  DEBUG(dbgs() << "Deleting dead def " << Idx << '\t' << *MI);

  // Collect virtual registers to be erased after MI is gone.
  SmallVector<unsigned, 8> RegsToErase;
  bool ReadsPhysRegs = false;

  // Check for live intervals that may shrink
  for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
       MOE = MI->operands_end(); MOI != MOE; ++MOI) {
    if (!MOI->isReg())
      continue;
    unsigned Reg = MOI->getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) {
      // Check if MI reads any unreserved physregs.
      if (Reg && MOI->readsReg() && !MRI.isReserved(Reg))
        ReadsPhysRegs = true;
      else if (MOI->isDef()) {
        for (MCRegUnitIterator Units(Reg, MRI.getTargetRegisterInfo());
             Units.isValid(); ++Units) {
          if (LiveInterval *LI = LIS.getCachedRegUnit(*Units)) {
            if (VNInfo *VNI = LI->getVNInfoAt(Idx))
              LI->removeValNo(VNI);
          }
        }
      }
      continue;
    }
    LiveInterval &LI = LIS.getInterval(Reg);

    // Shrink read registers, unless it is likely to be expensive and
    // unlikely to change anything. We typically don't want to shrink the
    // PIC base register that has lots of uses everywhere.
    // Always shrink COPY uses that probably come from live range splitting.
    if (MI->readsVirtualRegister(Reg) &&
        (MI->isCopy() || MOI->isDef() || MRI.hasOneNonDBGUse(Reg) ||
         LI.Query(Idx).isKill()))
      ToShrink.insert(&LI);

    // Remove defined value.
    if (MOI->isDef()) {
      if (VNInfo *VNI = LI.getVNInfoAt(Idx)) {
        if (TheDelegate)
          TheDelegate->LRE_WillShrinkVirtReg(LI.reg);
        LI.removeValNo(VNI);
        if (LI.empty())
          RegsToErase.push_back(Reg);
      }
    }
  }

  // Currently, we don't support DCE of physreg live ranges. If MI reads
  // any unreserved physregs, don't erase the instruction, but turn it into
  // a KILL instead. This way, the physreg live ranges don't end up
  // dangling.
  // FIXME: It would be better to have something like shrinkToUses() for
  // physregs. That could potentially enable more DCE and it would free up
  // the physreg. It would not happen often, though.
  if (ReadsPhysRegs) {
    MI->setDesc(TII.get(TargetOpcode::KILL));
    // Remove all operands that aren't physregs.
    for (unsigned i = MI->getNumOperands(); i; --i) {
      const MachineOperand &MO = MI->getOperand(i-1);
      if (MO.isReg() && TargetRegisterInfo::isPhysicalRegister(MO.getReg()))
        continue;
      MI->RemoveOperand(i-1);
    }
    DEBUG(dbgs() << "Converted physregs to:\t" << *MI);
  } else {
    if (TheDelegate)
      TheDelegate->LRE_WillEraseInstruction(MI);
    LIS.RemoveMachineInstrFromMaps(MI);
    MI->eraseFromParent();
    ++NumDCEDeleted;
  }

  // Erase any virtregs that are now empty and unused. There may be <undef>
  // uses around. Keep the empty live range in that case.
  for (unsigned i = 0, e = RegsToErase.size(); i != e; ++i) {
    unsigned Reg = RegsToErase[i];
    if (LIS.hasInterval(Reg) && MRI.reg_nodbg_empty(Reg)) {
      ToShrink.remove(&LIS.getInterval(Reg));
      eraseVirtReg(Reg);
    }
  }
}

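/// eliminateDeadDefs - Repeatedly delete the dead instructions in Dead and
/// shrink the live intervals they touched, splitting any interval that falls
/// apart into connected components. Registers listed in RegsBeingSpilled are
/// never split into new intervals.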
void LiveRangeEdit::eliminateDeadDefs(SmallVectorImpl<MachineInstr*> &Dead,
                                      ArrayRef<unsigned> RegsBeingSpilled) {
  ToShrinkSet ToShrink;

  for (;;) {
    // Erase all dead defs.
    while (!Dead.empty())
      eliminateDeadDef(Dead.pop_back_val(), ToShrink);

    if (ToShrink.empty())
      break;

    // Shrink just one live interval. Then delete new dead defs.
    LiveInterval *LI = ToShrink.back();
    ToShrink.pop_back();
    if (foldAsLoad(LI, Dead))
      continue;
    if (TheDelegate)
      TheDelegate->LRE_WillShrinkVirtReg(LI->reg);
    if (!LIS.shrinkToUses(LI, &Dead))
      continue;

    // Don't create new intervals for a register being spilled.
    // The new intervals would have to be spilled anyway so its not worth it.
    // Also they currently aren't spilled so creating them and not spilling
    // them results in incorrect code.
    bool BeingSpilled = false;
    for (unsigned i = 0, e = RegsBeingSpilled.size(); i != e; ++i) {
      if (LI->reg == RegsBeingSpilled[i]) {
        BeingSpilled = true;
        break;
      }
    }

    if (BeingSpilled) continue;

    // LI may have been separated, create new intervals.
    LI->RenumberValues(LIS);
    ConnectedVNInfoEqClasses ConEQ(LIS);
    unsigned NumComp = ConEQ.Classify(LI);
    if (NumComp <= 1)
      continue;
    ++NumFracRanges;
    bool IsOriginal = VRM && VRM->getOriginal(LI->reg) == LI->reg;
    DEBUG(dbgs() << NumComp << " components: " << *LI << '\n');
    SmallVector<LiveInterval*, 8> Dups(1, LI);
    for (unsigned i = 1; i != NumComp; ++i) {
      Dups.push_back(&createFrom(LI->reg));
      // If LI is an original interval that hasn't been split yet, make the new
      // intervals their own originals instead of referring to LI. The original
      // interval must contain all the split products, and LI doesn't.
      if (IsOriginal)
        VRM->setIsSplitFromReg(Dups.back()->reg, 0);
      if (TheDelegate)
        TheDelegate->LRE_DidCloneVirtReg(Dups.back()->reg, LI->reg);
    }
    ConEQ.Distribute(&Dups[0], MRI);
    DEBUG({
      for (unsigned i = 0; i != NumComp; ++i)
        dbgs() << '\t' << *Dups[i] << '\n';
    });
  }
}

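/// calculateRegClassAndHint - Recompute (and possibly inflate) the register
/// class of every new virtual register, then recompute its spill weight and
/// allocation hint.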
void
LiveRangeEdit::calculateRegClassAndHint(MachineFunction &MF,
                                        const MachineLoopInfo &Loops,
                                        const MachineBlockFrequencyInfo &MBFI) {
  VirtRegAuxInfo VRAI(MF, LIS, Loops, MBFI);
  for (iterator I = begin(), E = end(); I != E; ++I) {
    LiveInterval &LI = **I;
    if (MRI.recomputeRegClass(LI.reg, MF.getTarget()))
      DEBUG(dbgs() << "Inflated " << PrintReg(LI.reg) << " to "
                   << MRI.getRegClass(LI.reg)->getName() << '\n');
    VRAI.CalculateWeightAndHint(LI);
  }
}