1 //===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
3 // The LLVM Compiler Infrastructure
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
8 //===----------------------------------------------------------------------===//
10 // This file implements the AggressiveAntiDepBreaker class, which
11 // implements register anti-dependence breaking during post-RA
12 // scheduling. It attempts to break all anti-dependencies within a
15 //===----------------------------------------------------------------------===//
17 #define DEBUG_TYPE "post-RA-sched"
18 #include "AggressiveAntiDepBreaker.h"
19 #include "llvm/CodeGen/MachineBasicBlock.h"
20 #include "llvm/CodeGen/MachineFrameInfo.h"
21 #include "llvm/CodeGen/MachineInstr.h"
22 #include "llvm/CodeGen/RegisterClassInfo.h"
23 #include "llvm/Support/CommandLine.h"
24 #include "llvm/Support/Debug.h"
25 #include "llvm/Support/ErrorHandling.h"
26 #include "llvm/Support/raw_ostream.h"
27 #include "llvm/Target/TargetInstrInfo.h"
28 #include "llvm/Target/TargetMachine.h"
29 #include "llvm/Target/TargetRegisterInfo.h"
32 // If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
// Hidden command-line knobs for bisecting anti-dependency-breaking bugs:
// with -agg-antidep-debugdiv=N, only every Nth candidate rename (selected by
// -agg-antidep-debugmod) is actually performed (see FindSuitableFreeRegisters).
// NOTE(review): the declaring "static cl::opt<int>" lines appear truncated in
// this copy of the file — confirm against the original.
34 DebugDiv("agg-antidep-debugdiv",
35 cl::desc("Debug control for aggressive anti-dep breaker"),
36 cl::init(0), cl::Hidden);
38 DebugMod("agg-antidep-debugmod",
39 cl::desc("Debug control for aggressive anti-dep breaker"),
40 cl::init(0), cl::Hidden);
// Construct per-block anti-dependency state sized to the target's register
// count. Every register starts in its own singleton union-find group, and the
// def/kill index vectors are initialized so that no register is live.
42 AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
43 MachineBasicBlock *BB) :
44 NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
45 GroupNodeIndices(TargetRegs, 0),
46 KillIndices(TargetRegs, 0),
47 DefIndices(TargetRegs, 0)
49 const unsigned BBSize = BB->size();
50 for (unsigned i = 0; i < NumTargetRegs; ++i) {
51 // Initialize all registers to be in their own group. Initially we
52 // assign the register to the same-indexed GroupNode.
53 GroupNodeIndices[i] = i;
54 // Initialize the indices to indicate that no registers are live.
// NOTE(review): the companion "KillIndices[i] = ~0u;" statement appears
// truncated from this copy (IsLive() below tests KillIndices against ~0u) —
// confirm against the original.
56 DefIndices[i] = BBSize;
// Union-find "find": follow the parent chain in GroupNodes from Reg's node
// until reaching a self-parented root. No path compression is performed.
60 unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
61 unsigned Node = GroupNodeIndices[Reg];
62 while (GroupNodes[Node] != Node)
63 Node = GroupNodes[Node];
// NOTE(review): the trailing "return Node;" appears truncated in this copy.
// Collect into Regs every register that (a) belongs to the given group and
// (b) has at least one recorded reference in RegRefs. Registers without
// references need not be renamed, so they are skipped.
68 void AggressiveAntiDepState::GetGroupRegs(
70 std::vector<unsigned> &Regs,
71 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
73 for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
74 if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
// NOTE(review): the push into Regs and closing braces appear truncated here.
// Union-find "union": merge the groups of Reg1 and Reg2. Group 0 is special
// (registers that must not be renamed), so if either register is in group 0
// the merged group's root must remain 0.
79 unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
81 assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
82 assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");
84 // find group for each register
85 unsigned Group1 = GetGroup(Reg1);
86 unsigned Group2 = GetGroup(Reg2);
88 // if either group is 0, then that must become the parent
89 unsigned Parent = (Group1 == 0) ? Group1 : Group2;
90 unsigned Other = (Parent == Group1) ? Group2 : Group1;
// Link the non-parent root under the parent (bounds-checked via .at()).
91 GroupNodes.at(Other) = Parent;
// NOTE(review): the trailing "return Parent;" appears truncated in this copy.
// Detach Reg from its current group by giving it a brand-new self-parented
// GroupNode. The old node is intentionally left untouched.
95 unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
97 // Create a new GroupNode for Reg. Reg's existing GroupNode must
98 // stay as is because there could be other GroupNodes referring to
100 unsigned idx = GroupNodes.size();
101 GroupNodes.push_back(idx);
102 GroupNodeIndices[Reg] = idx;
// NOTE(review): the trailing "return idx;" appears truncated in this copy.
// A register is considered live when its kill index is set (!= ~0u sentinel)
// and its def index is unset (== ~0u) — i.e. it has been used but not yet
// (re)defined in the bottom-up scan.
106 bool AggressiveAntiDepState::IsLive(unsigned Reg)
108 // KillIndex must be defined and DefIndex not defined for a register
110 return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
// Construct the breaker, caching target hooks (TII/TRI/MRI) and building
// CriticalPathSet: the union of allocatable registers from every register
// class whose anti-dependencies should only be broken on the critical path.
115 AggressiveAntiDepBreaker::
116 AggressiveAntiDepBreaker(MachineFunction& MFi,
117 const RegisterClassInfo &RCI,
118 TargetSubtargetInfo::RegClassVector& CriticalPathRCs) :
119 AntiDepBreaker(), MF(MFi),
120 MRI(MF.getRegInfo()),
121 TII(MF.getTarget().getInstrInfo()),
122 TRI(MF.getTarget().getRegisterInfo()),
125 /* Collect a bitset of all registers that are only broken if they
126 are on the critical path. */
127 for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
128 BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
// First class initializes the set; subsequent classes are OR-ed in.
129 if (CriticalPathSet.none())
130 CriticalPathSet = CPSet;
// NOTE(review): the "else" introducing this branch appears truncated here.
132 CriticalPathSet |= CPSet;
135 DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
136 DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
137 r = CriticalPathSet.find_next(r))
138 dbgs() << " " << TRI->getName(r));
139 DEBUG(dbgs() << '\n');
// Destructor. NOTE(review): the body (presumably releasing State) is
// truncated from this copy — confirm against the original.
142 AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
// Begin processing a basic block: allocate fresh per-block state and seed
// liveness for everything live-out of the block, placing those registers in
// group 0 so they can never be renamed.
146 void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
147 assert(State == NULL);
148 State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);
150 bool IsReturnBlock = (!BB->empty() && BB->back().isReturn());
151 std::vector<unsigned> &KillIndices = State->GetKillIndices();
152 std::vector<unsigned> &DefIndices = State->GetDefIndices();
154 // Determine the live-out physregs for this block.
156 // In a return block, examine the function live-out regs.
157 for (MachineRegisterInfo::liveout_iterator I = MRI.liveout_begin(),
158 E = MRI.liveout_end(); I != E; ++I) {
// Mark the liveout register and every alias as live-out (kill at block end).
159 for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
161 State->UnionGroups(Reg, 0);
162 KillIndices[Reg] = BB->size();
163 DefIndices[Reg] = ~0u;
168 // In a non-return block, examine the live-in regs of all successors.
169 // Note a return block can have successors if the return instruction is
171 for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
172 SE = BB->succ_end(); SI != SE; ++SI)
173 for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
174 E = (*SI)->livein_end(); I != E; ++I) {
175 for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
177 State->UnionGroups(Reg, 0);
178 KillIndices[Reg] = BB->size();
179 DefIndices[Reg] = ~0u;
183 // Mark live-out callee-saved registers. In a return block this is
184 // all callee-saved registers. In non-return this is any
185 // callee-saved register that is not saved in the prolog.
186 const MachineFrameInfo *MFI = MF.getFrameInfo();
187 BitVector Pristine = MFI->getPristineRegs(BB);
188 for (const uint16_t *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
190 if (!IsReturnBlock && !Pristine.test(Reg)) continue;
191 for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
192 unsigned AliasReg = *AI;
193 State->UnionGroups(AliasReg, 0);
194 KillIndices[AliasReg] = BB->size();
195 DefIndices[AliasReg] = ~0u;
// Finish processing a block. NOTE(review): the body (presumably deleting and
// nulling State) is truncated from this copy — confirm against the original.
200 void AggressiveAntiDepBreaker::FinishBlock() {
// Observe an already-scheduled instruction (one we are not allowed to touch):
// update liveness/groups as if scanning it, then conservatively pin every
// currently-live register into group 0 since its extent is no longer known.
205 void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
206 unsigned InsertPosIndex) {
207 assert(Count < InsertPosIndex && "Instruction index out of expected range!");
209 std::set<unsigned> PassthruRegs;
210 GetPassthruRegs(MI, PassthruRegs);
211 PrescanInstruction(MI, Count, PassthruRegs);
212 ScanInstruction(MI, Count);
214 DEBUG(dbgs() << "Observe: ");
216 DEBUG(dbgs() << "\tRegs:");
218 std::vector<unsigned> &DefIndices = State->GetDefIndices();
219 for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
220 // If Reg is current live, then mark that it can't be renamed as
221 // we don't know the extent of its live-range anymore (now that it
222 // has been scheduled). If it is not live but was defined in the
223 // previous schedule region, then set its def index to the most
224 // conservative location (i.e. the beginning of the previous
226 if (State->IsLive(Reg)) {
227 DEBUG(if (State->GetGroup(Reg) != 0)
228 dbgs() << " " << TRI->getName(Reg) << "=g" <<
229 State->GetGroup(Reg) << "->g0(region live-out)");
230 State->UnionGroups(Reg, 0);
231 } else if ((DefIndices[Reg] < InsertPosIndex)
232 && (DefIndices[Reg] >= Count)) {
// Clamp defs from the previous region to the region boundary.
233 DefIndices[Reg] = Count;
236 DEBUG(dbgs() << '\n');
// Return true if operand MO is an implicit def (use) that is paired with an
// implicit use (def) of the same register on MI — i.e. the register value
// passes through the instruction unchanged.
239 bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
242 if (!MO.isReg() || !MO.isImplicit())
245 unsigned Reg = MO.getReg();
// Look up the matching operand of the opposite kind for the same register.
249 MachineOperand *Op = NULL;
251 Op = MI->findRegisterUseOperand(Reg, true);
253 Op = MI->findRegisterDefOperand(Reg);
255 return((Op != NULL) && Op->isImplicit());
// Collect into PassthruRegs every register whose liveness "passes through"
// MI: defs tied to a use operand, and implicit def/use pairs. Subregisters
// of each passthru register are included as well.
258 void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
259 std::set<unsigned>& PassthruRegs) {
260 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
261 MachineOperand &MO = MI->getOperand(i);
262 if (!MO.isReg()) continue;
263 if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
264 IsImplicitDefUse(MI, MO)) {
265 const unsigned Reg = MO.getReg();
266 PassthruRegs.insert(Reg);
267 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
268 PassthruRegs.insert(*SubRegs);
273 /// AntiDepEdges - Return in Edges the anti- and output- dependencies
274 /// in SU that we want to consider for breaking.
// Only the first edge per register is kept (RegSet de-duplicates).
275 static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
276 SmallSet<unsigned, 4> RegSet;
277 for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
279 if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
280 unsigned Reg = P->getReg();
281 if (RegSet.count(Reg) == 0) {
282 Edges.push_back(&*P);
// NOTE(review): the RegSet.insert(Reg) and closing braces appear truncated.
289 /// CriticalPathStep - Return the next SUnit after SU on the bottom-up
// critical path: the predecessor whose depth + edge latency is greatest,
// breaking latency ties in favor of anti-dependency edges. Returns null
// when SU has no predecessors.
291 static const SUnit *CriticalPathStep(const SUnit *SU) {
292 const SDep *Next = 0;
293 unsigned NextDepth = 0;
294 // Find the predecessor edge with the greatest depth.
296 for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
298 const SUnit *PredSU = P->getSUnit();
299 unsigned PredLatency = P->getLatency();
300 unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
301 // In the case of a latency tie, prefer an anti-dependency edge over
302 // other types of edges.
303 if (NextDepth < PredTotalLatency ||
304 (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
305 NextDepth = PredTotalLatency;
// NOTE(review): the "Next = &*P;" assignment appears truncated in this copy.
311 return (Next) ? Next->getSUnit() : 0;
// Record that Reg (and, independently, each of its subregisters) sees its
// last use at KillIdx: start a fresh live range (kill set, def cleared),
// drop stale references, and split the register out of its current group.
// header/tag/footer are optional strings framing the debug output.
314 void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
317 const char *footer) {
318 std::vector<unsigned> &KillIndices = State->GetKillIndices();
319 std::vector<unsigned> &DefIndices = State->GetDefIndices();
320 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
321 RegRefs = State->GetRegRefs();
// Only reset registers that are not currently live.
323 if (!State->IsLive(Reg)) {
324 KillIndices[Reg] = KillIdx;
325 DefIndices[Reg] = ~0u;
327 State->LeaveGroup(Reg);
328 DEBUG(if (header != NULL) {
329 dbgs() << header << TRI->getName(Reg); header = NULL; });
330 DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
332 // Repeat for subregisters.
333 for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
334 unsigned SubregReg = *SubRegs;
335 if (!State->IsLive(SubregReg)) {
336 KillIndices[SubregReg] = KillIdx;
337 DefIndices[SubregReg] = ~0u;
338 RegRefs.erase(SubregReg);
339 State->LeaveGroup(SubregReg);
340 DEBUG(if (header != NULL) {
341 dbgs() << header << TRI->getName(Reg); header = NULL; });
342 DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
343 State->GetGroup(SubregReg) << tag);
347 DEBUG(if ((header == NULL) && (footer != NULL)) dbgs() << footer);
// Process the DEFS of MI during the bottom-up scan: simulate last-uses for
// dead defs, merge groups for defs that must not be renamed (calls, special
// alloc requirements, predicated instrs) or that overlap live aliases,
// record each def's operand/register-class in RegRefs, and finally update
// DefIndices for the defs (skipping KILLs and passthru registers).
350 void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
352 std::set<unsigned>& PassthruRegs) {
353 std::vector<unsigned> &DefIndices = State->GetDefIndices();
354 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
355 RegRefs = State->GetRegRefs();
357 // Handle dead defs by simulating a last-use of the register just
358 // after the def. A dead def can occur because the def is truly
359 // dead, or because only a subregister is live at the def. If we
360 // don't do this the dead def will be incorrectly merged into the
362 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
363 MachineOperand &MO = MI->getOperand(i);
364 if (!MO.isReg() || !MO.isDef()) continue;
365 unsigned Reg = MO.getReg();
366 if (Reg == 0) continue;
// Count + 1 places the simulated last-use just after the def.
368 HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
371 DEBUG(dbgs() << "\tDef Groups:");
372 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
373 MachineOperand &MO = MI->getOperand(i);
374 if (!MO.isReg() || !MO.isDef()) continue;
375 unsigned Reg = MO.getReg();
376 if (Reg == 0) continue;
378 DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));
380 // If MI's defs have a special allocation requirement, don't allow
381 // any def registers to be changed. Also assume all registers
382 // defined in a call must not be changed (ABI).
383 if (MI->isCall() || MI->hasExtraDefRegAllocReq() ||
384 TII->isPredicated(MI)) {
385 DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
// Group 0 = "never rename".
386 State->UnionGroups(Reg, 0);
389 // Any aliased that are live at this point are completely or
390 // partially defined here, so group those aliases with Reg.
391 for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
392 unsigned AliasReg = *AI;
393 if (State->IsLive(AliasReg)) {
394 State->UnionGroups(Reg, AliasReg);
395 DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
396 TRI->getName(AliasReg) << ")");
400 // Note register reference...
// RC is only known for fixed (non-variadic) operands of the MCInstrDesc.
401 const TargetRegisterClass *RC = NULL;
402 if (i < MI->getDesc().getNumOperands())
403 RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
404 AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
405 RegRefs.insert(std::make_pair(Reg, RR));
408 DEBUG(dbgs() << '\n');
410 // Scan the register defs for this instruction and update
412 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
413 MachineOperand &MO = MI->getOperand(i);
414 if (!MO.isReg() || !MO.isDef()) continue;
415 unsigned Reg = MO.getReg();
416 if (Reg == 0) continue;
417 // Ignore KILLs and passthru registers for liveness...
418 if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
421 // Update def for Reg and aliases.
422 for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI)
423 DefIndices[*AI] = Count;
// Process the USES of MI during the bottom-up scan: start new live ranges at
// last uses, pin uses of calls/special-alloc/predicated instructions into
// group 0, record each use in RegRefs, and union all operands of a KILL
// instruction into one group so they are renamed together.
427 void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
429 DEBUG(dbgs() << "\tUse Groups:");
430 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
431 RegRefs = State->GetRegRefs();
433 // If MI's uses have special allocation requirement, don't allow
434 // any use registers to be changed. Also assume all registers
435 // used in a call must not be changed (ABI).
436 // FIXME: The issue with predicated instruction is more complex. We are being
437 // conservatively here because the kill markers cannot be trusted after
439 // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
441 // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
442 // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
443 // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
445 // The first R6 kill is not really a kill since it's killed by a predicated
446 // instruction which may not be executed. The second R6 def may or may not
447 // re-define R6 so it's not safe to change it since the last R6 use cannot be
449 bool Special = MI->isCall() ||
450 MI->hasExtraSrcRegAllocReq() ||
451 TII->isPredicated(MI);
453 // Scan the register uses for this instruction and update
454 // live-ranges, groups and RegRefs.
455 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
456 MachineOperand &MO = MI->getOperand(i);
457 if (!MO.isReg() || !MO.isUse()) continue;
458 unsigned Reg = MO.getReg();
459 if (Reg == 0) continue;
461 DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
462 State->GetGroup(Reg));
464 // It wasn't previously live but now it is, this is a kill. Forget
465 // the previous live-range information and start a new live-range
467 HandleLastUse(Reg, Count, "(last-use)");
// Special instructions pin this use into the unrenamable group 0.
470 DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
471 State->UnionGroups(Reg, 0);
474 // Note register reference...
475 const TargetRegisterClass *RC = NULL;
476 if (i < MI->getDesc().getNumOperands())
477 RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
478 AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
479 RegRefs.insert(std::make_pair(Reg, RR));
482 DEBUG(dbgs() << '\n');
484 // Form a group of all defs and uses of a KILL instruction to ensure
485 // that all registers are renamed as a group.
487 DEBUG(dbgs() << "\tKill Group:");
489 unsigned FirstReg = 0;
490 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
491 MachineOperand &MO = MI->getOperand(i);
492 if (!MO.isReg()) continue;
493 unsigned Reg = MO.getReg();
494 if (Reg == 0) continue;
// Union every subsequent operand with the first register seen.
497 DEBUG(dbgs() << "=" << TRI->getName(Reg));
498 State->UnionGroups(FirstReg, Reg);
500 DEBUG(dbgs() << " " << TRI->getName(Reg));
// NOTE(review): the "FirstReg = Reg;" branch appears truncated in this copy.
505 DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
// Compute the set of registers that Reg may legally be renamed to: the
// intersection of the allocatable sets of every register class recorded for
// Reg's references in RegRefs. References with an unknown class are skipped.
509 BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
510 BitVector BV(TRI->getNumRegs(), false);
513 // Check all references that need rewriting for Reg. For each, use
514 // the corresponding register class to narrow the set of registers
515 // that are appropriate for renaming.
516 std::pair<std::multimap<unsigned,
517 AggressiveAntiDepState::RegisterReference>::iterator,
518 std::multimap<unsigned,
519 AggressiveAntiDepState::RegisterReference>::iterator>
520 Range = State->GetRegRefs().equal_range(Reg);
521 for (std::multimap<unsigned,
522 AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
523 QE = Range.second; Q != QE; ++Q) {
524 const TargetRegisterClass *RC = Q->second.RC;
525 if (RC == NULL) continue;
527 BitVector RCBV = TRI->getAllocatableSet(MF, RC);
// NOTE(review): the first-iteration/else AND-merge of RCBV into BV and the
// final "return BV;" appear truncated in this copy.
535 DEBUG(dbgs() << " " << RC->getName());
// Try to find a consistent set of free registers to rename every register in
// group AntiDepGroupIndex. On success, fills RenameMap (old reg -> new reg)
// and returns true. RenameOrder persists across calls to round-robin the
// starting point within each register class.
541 bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
542 unsigned AntiDepGroupIndex,
543 RenameOrderType& RenameOrder,
544 std::map<unsigned, unsigned> &RenameMap) {
545 std::vector<unsigned> &KillIndices = State->GetKillIndices();
546 std::vector<unsigned> &DefIndices = State->GetDefIndices();
547 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
548 RegRefs = State->GetRegRefs();
550 // Collect all referenced registers in the same group as
551 // AntiDepReg. These all need to be renamed together if we are to
552 // break the anti-dependence.
553 std::vector<unsigned> Regs;
554 State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
555 assert(Regs.size() > 0 && "Empty register group!");
556 if (Regs.size() == 0)
559 // Find the "superest" register in the group. At the same time,
560 // collect the BitVector of registers that can be used to rename
562 DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
564 std::map<unsigned, BitVector> RenameRegisterMap;
565 unsigned SuperReg = 0;
566 for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
567 unsigned Reg = Regs[i];
// Track the widest register in the group as the canonical SuperReg.
568 if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
571 // If Reg has any references, then collect possible rename regs
572 if (RegRefs.count(Reg) > 0) {
573 DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");
575 BitVector BV = GetRenameRegisters(Reg);
576 RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));
578 DEBUG(dbgs() << " ::");
579 DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
580 dbgs() << " " << TRI->getName(r));
581 DEBUG(dbgs() << "\n");
585 // All group registers should be a subreg of SuperReg.
586 for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
587 unsigned Reg = Regs[i];
588 if (Reg == SuperReg) continue;
589 bool IsSub = TRI->isSubRegister(SuperReg, Reg);
590 assert(IsSub && "Expecting group subregister");
596 // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
// Debug-only bisection hook driven by the cl::opt knobs above.
598 static int renamecnt = 0;
599 if (renamecnt++ % DebugDiv != DebugMod)
602 dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
607 // Check each possible rename register for SuperReg in round-robin
608 // order. If that register is available, and the corresponding
609 // registers are available for the other group subregisters, then we
610 // can use those registers to rename.
612 // FIXME: Using getMinimalPhysRegClass is very conservative. We should
613 // check every use of the register and find the largest register class
614 // that can be used in all of them.
615 const TargetRegisterClass *SuperRC =
616 TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);
618 ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
620 DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
624 DEBUG(dbgs() << "\tFind Registers:");
626 if (RenameOrder.count(SuperRC) == 0)
627 RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));
629 unsigned OrigR = RenameOrder[SuperRC];
630 unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
// NOTE(review): the loop header declaring R (decrementing round-robin scan)
// appears truncated in this copy.
633 if (R == 0) R = Order.size();
635 const unsigned NewSuperReg = Order[R];
636 // Don't consider non-allocatable registers
637 if (!MRI.isAllocatable(NewSuperReg)) continue;
638 // Don't replace a register with itself.
639 if (NewSuperReg == SuperReg) continue;
641 DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
644 // For each referenced group register (which must be a SuperReg or
645 // a subregister of SuperReg), find the corresponding subregister
646 // of NewSuperReg and make sure it is free to be renamed.
647 for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
648 unsigned Reg = Regs[i];
650 if (Reg == SuperReg) {
651 NewReg = NewSuperReg;
// Map subregisters of SuperReg onto the same subreg of NewSuperReg.
653 unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
654 if (NewSubRegIdx != 0)
655 NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
658 DEBUG(dbgs() << " " << TRI->getName(NewReg));
660 // Check if Reg can be renamed to NewReg.
661 BitVector BV = RenameRegisterMap[Reg];
662 if (!BV.test(NewReg)) {
663 DEBUG(dbgs() << "(no rename)");
667 // If NewReg is dead and NewReg's most recent def is not before
668 // Regs's kill, it's safe to replace Reg with NewReg. We
669 // must also check all aliases of NewReg, because we can't define a
670 // register when any sub or super is already live.
671 if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
672 DEBUG(dbgs() << "(live)");
676 for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
677 unsigned AliasReg = *AI;
678 if (State->IsLive(AliasReg) ||
679 (KillIndices[Reg] > DefIndices[AliasReg])) {
680 DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
689 // Record that 'Reg' can be renamed to 'NewReg'.
690 RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
693 // If we fall-out here, then every register in the group can be
694 // renamed, as recorded in RenameMap.
// Remember where the round-robin stopped so the next query resumes here.
695 RenameOrder.erase(SuperRC);
696 RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
697 DEBUG(dbgs() << "]\n");
701 DEBUG(dbgs() << ']');
704 DEBUG(dbgs() << '\n');
706 // No registers are free and available!
710 /// BreakAntiDependencies - Identifiy anti-dependencies within the
711 /// ScheduleDAG and break them by renaming registers.
// Walks the region bottom-up (End -> Begin), maintaining liveness state via
// PrescanInstruction/ScanInstruction, and for each breakable anti-/output-
// dependency edge attempts to rename the whole register group using
// FindSuitableFreeRegisters. Returns the number of dependencies broken
// (the counter increment itself appears truncated from this copy).
713 unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
714 const std::vector<SUnit>& SUnits,
715 MachineBasicBlock::iterator Begin,
716 MachineBasicBlock::iterator End,
717 unsigned InsertPosIndex,
718 DbgValueVector &DbgValues) {
720 std::vector<unsigned> &KillIndices = State->GetKillIndices();
721 std::vector<unsigned> &DefIndices = State->GetDefIndices();
722 std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
723 RegRefs = State->GetRegRefs();
725 // The code below assumes that there is at least one instruction,
726 // so just duck out immediately if the block is empty.
727 if (SUnits.empty()) return 0;
729 // For each regclass the next register to use for renaming.
730 RenameOrderType RenameOrder;
732 // ...need a map from MI to SUnit.
733 std::map<MachineInstr *, const SUnit *> MISUnitMap;
734 for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
735 const SUnit *SU = &SUnits[i];
736 MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
740 // Track progress along the critical path through the SUnit graph as
741 // we walk the instructions. This is needed for regclasses that only
742 // break critical-path anti-dependencies.
743 const SUnit *CriticalPathSU = 0;
744 MachineInstr *CriticalPathMI = 0;
745 if (CriticalPathSet.any()) {
// Seed with the deepest (depth + latency) SUnit as the path's bottom.
746 for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
747 const SUnit *SU = &SUnits[i];
748 if (!CriticalPathSU ||
749 ((SU->getDepth() + SU->Latency) >
750 (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
755 CriticalPathMI = CriticalPathSU->getInstr();
759 DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
760 DEBUG(dbgs() << "Available regs:");
761 for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
762 if (!State->IsLive(Reg))
763 DEBUG(dbgs() << " " << TRI->getName(Reg));
765 DEBUG(dbgs() << '\n');
768 // Attempt to break anti-dependence edges. Walk the instructions
769 // from the bottom up, tracking information about liveness as we go
770 // to help determine which registers are available.
772 unsigned Count = InsertPosIndex - 1;
773 for (MachineBasicBlock::iterator I = End, E = Begin;
775 MachineInstr *MI = --I;
// Debug values carry no scheduling constraints; skip them entirely.
777 if (MI->isDebugValue())
780 DEBUG(dbgs() << "Anti: ");
783 std::set<unsigned> PassthruRegs;
784 GetPassthruRegs(MI, PassthruRegs);
786 // Process the defs in MI...
787 PrescanInstruction(MI, Count, PassthruRegs);
789 // The dependence edges that represent anti- and output-
790 // dependencies that are candidates for breaking.
791 std::vector<const SDep *> Edges;
792 const SUnit *PathSU = MISUnitMap[MI];
793 AntiDepEdges(PathSU, Edges);
795 // If MI is not on the critical path, then we don't rename
796 // registers in the CriticalPathSet.
797 BitVector *ExcludeRegs = NULL;
798 if (MI == CriticalPathMI) {
// Advance the tracked critical path one step up the DAG.
799 CriticalPathSU = CriticalPathStep(CriticalPathSU);
800 CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : 0;
// NOTE(review): the "else" arm guarding this exclusion appears truncated.
802 ExcludeRegs = &CriticalPathSet;
805 // Ignore KILL instructions (they form a group in ScanInstruction
806 // but don't cause any anti-dependence breaking themselves)
808 // Attempt to break each anti-dependency...
809 for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
810 const SDep *Edge = Edges[i];
811 SUnit *NextSU = Edge->getSUnit();
813 if ((Edge->getKind() != SDep::Anti) &&
814 (Edge->getKind() != SDep::Output)) continue;
816 unsigned AntiDepReg = Edge->getReg();
817 DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
818 assert(AntiDepReg != 0 && "Anti-dependence on reg0?");
820 if (!MRI.isAllocatable(AntiDepReg)) {
821 // Don't break anti-dependencies on non-allocatable registers.
822 DEBUG(dbgs() << " (non-allocatable)\n");
824 } else if ((ExcludeRegs != NULL) && ExcludeRegs->test(AntiDepReg)) {
825 // Don't break anti-dependencies for critical path registers
826 // if not on the critical path
827 DEBUG(dbgs() << " (not critical-path)\n");
829 } else if (PassthruRegs.count(AntiDepReg) != 0) {
830 // If the anti-dep register liveness "passes-thru", then
831 // don't try to change it. It will be changed along with
832 // the use if required to break an earlier antidep.
833 DEBUG(dbgs() << " (passthru)\n");
836 // No anti-dep breaking for implicit deps
837 MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
838 assert(AntiDepOp != NULL &&
839 "Can't find index for defined register operand");
840 if ((AntiDepOp == NULL) || AntiDepOp->isImplicit()) {
841 DEBUG(dbgs() << " (implicit)\n");
845 // If the SUnit has other dependencies on the SUnit that
846 // it anti-depends on, don't bother breaking the
847 // anti-dependency since those edges would prevent such
848 // units from being scheduled past each other
851 // Also, if there are dependencies on other SUnits with the
852 // same register as the anti-dependency, don't attempt to
854 for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
855 PE = PathSU->Preds.end(); P != PE; ++P) {
856 if (P->getSUnit() == NextSU ?
857 (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
858 (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
// NOTE(review): the body clearing AntiDepReg and the loop close appear
// truncated here; the second loop below re-checks the same conditions
// in expanded form.
863 for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
864 PE = PathSU->Preds.end(); P != PE; ++P) {
865 if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
866 (P->getKind() != SDep::Output)) {
867 DEBUG(dbgs() << " (real dependency)\n");
870 } else if ((P->getSUnit() != NextSU) &&
871 (P->getKind() == SDep::Data) &&
872 (P->getReg() == AntiDepReg)) {
873 DEBUG(dbgs() << " (other dependency)\n");
879 if (AntiDepReg == 0) continue;
882 assert(AntiDepReg != 0);
883 if (AntiDepReg == 0) continue;
885 // Determine AntiDepReg's register group.
886 const unsigned GroupIndex = State->GetGroup(AntiDepReg);
887 if (GroupIndex == 0) {
888 DEBUG(dbgs() << " (zero group)\n");
892 DEBUG(dbgs() << '\n');
894 // Look for a suitable register to use to break the anti-dependence.
895 std::map<unsigned, unsigned> RenameMap;
896 if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
897 DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
898 << TRI->getName(AntiDepReg) << ":");
900 // Handle each group register...
901 for (std::map<unsigned, unsigned>::iterator
902 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
903 unsigned CurrReg = S->first;
904 unsigned NewReg = S->second;
906 DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
907 TRI->getName(NewReg) << "(" <<
908 RegRefs.count(CurrReg) << " refs)");
910 // Update the references to the old register CurrReg to
911 // refer to the new register NewReg.
912 std::pair<std::multimap<unsigned,
913 AggressiveAntiDepState::RegisterReference>::iterator,
914 std::multimap<unsigned,
915 AggressiveAntiDepState::RegisterReference>::iterator>
916 Range = RegRefs.equal_range(CurrReg);
917 for (std::multimap<unsigned,
918 AggressiveAntiDepState::RegisterReference>::iterator
919 Q = Range.first, QE = Range.second; Q != QE; ++Q) {
920 Q->second.Operand->setReg(NewReg);
921 // If the SU for the instruction being updated has debug
922 // information related to the anti-dependency register, make
923 // sure to update that as well.
924 const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
926 for (DbgValueVector::iterator DVI = DbgValues.begin(),
927 DVE = DbgValues.end(); DVI != DVE; ++DVI)
928 if (DVI->second == Q->second.Operand->getParent())
929 UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
932 // We just went back in time and modified history; the
933 // liveness information for CurrReg is now inconsistent. Set
934 // the state as if it were dead.
// NewReg inherits CurrReg's live range; CurrReg becomes dead.
935 State->UnionGroups(NewReg, 0);
936 RegRefs.erase(NewReg);
937 DefIndices[NewReg] = DefIndices[CurrReg];
938 KillIndices[NewReg] = KillIndices[CurrReg];
940 State->UnionGroups(CurrReg, 0);
941 RegRefs.erase(CurrReg);
942 DefIndices[CurrReg] = KillIndices[CurrReg];
943 KillIndices[CurrReg] = ~0u;
944 assert(((KillIndices[CurrReg] == ~0u) !=
945 (DefIndices[CurrReg] == ~0u)) &&
946 "Kill and Def maps aren't consistent for AntiDepReg!");
950 DEBUG(dbgs() << '\n');
// Fold this instruction's uses into the tracked liveness state before
// moving up to the previous instruction.
955 ScanInstruction(MI, Count);