assert(MRegisterInfo::isVirtualRegister(virtReg));
assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
"attempt to assign stack slot to already spilled register");
+ assert((frameIndex >= 0 ||
+ (frameIndex >= MF.getFrameInfo()->getObjectIndexBegin())) &&
+ "illegal fixed frame index");
Virt2StackSlotMap[virtReg] = frameIndex;
}
assert(MRegisterInfo::isVirtualRegister(virtReg));
assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
"attempt to assign re-mat id to already spilled register");
+ const MachineInstr *DefMI = getReMaterializedMI(virtReg);
+ int FrameIdx;
+ if (TII.isLoadFromStackSlot((MachineInstr*)DefMI, FrameIdx)) {
+ // A load from a stack slot is re-materialized as a reload from that stack slot!
+ Virt2StackSlotMap[virtReg] = FrameIdx;
+ return FrameIdx;
+ }
Virt2StackSlotMap[virtReg] = ReMatId;
- ++NumReMats;
return ReMatId++;
}
assert(II != SpillSlotsAvailable.end() && "Slot not available!");
unsigned Val = II->second.first;
assert((Val >> 1) == PhysReg && "Bidirectional map mismatch!");
- II->second.second.push_back(Use);
+ // This can be true if there are multiple uses of the same register.
+ if (II->second.second.back() != Use)
+ II->second.second.push_back(Use);
}
}
/// register allocator is done with them. If possible, avoid reloading vregs.
void LocalSpiller::RewriteMBB(MachineBasicBlock &MBB, VirtRegMap &VRM,
std::vector<MachineInstr*> &ReMatedMIs) {
-
DOUT << MBB.getBasicBlock()->getName() << ":\n";
// Spills - Keep track of which spilled values are available in physregs so
const TargetInstrDescriptor *TID = MI.getInstrDescriptor();
// If this instruction is being rematerialized, just remove it!
- if (TID->Flags & M_REMATERIALIZIBLE) {
+ int FrameIdx;
+ if ((TID->Flags & M_REMATERIALIZIBLE) ||
+ TII->isLoadFromStackSlot(&MI, FrameIdx)) {
bool Remove = true;
for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
MachineOperand &MO = MI.getOperand(i);
// necessary.
bool WasKill = false;
if (SSMI) {
- MachineOperand *MOK = SSMI->findRegisterUseOperand(PhysReg, true);
- if (MOK) {
- WasKill = MOK->isKill();
- MOK->unsetIsKill();
+ int UIdx = SSMI->findRegisterUseOperand(PhysReg, true);
+ if (UIdx != -1) {
+ MachineOperand &MOK = SSMI->getOperand(UIdx);
+ WasKill = MOK.isKill();
+ MOK.unsetIsKill();
}
}
if (ti == -1) {
// necessary.
bool WasKill = false;
if (SSMI) {
- MachineOperand *MOK = SSMI->findRegisterUseOperand(PhysReg, true);
- if (MOK) {
- WasKill = MOK->isKill();
- MOK->unsetIsKill();
+ int UIdx = SSMI->findRegisterUseOperand(PhysReg, true);
+ if (UIdx != -1) {
+ MachineOperand &MOK = SSMI->getOperand(UIdx);
+ WasKill = MOK.isKill();
+ MOK.unsetIsKill();
}
}
MachineInstr *CopyMI = prior(MII);
if (WasKill) {
// Transfer kill to the next use.
- MachineOperand *MOU = CopyMI->findRegisterUseOperand(PhysReg);
- MOU->setIsKill();
+ int UIdx = CopyMI->findRegisterUseOperand(PhysReg);
+ assert(UIdx != -1);
+ MachineOperand &MOU = CopyMI->getOperand(UIdx);
+ MOU.setIsKill();
}
Spills.addLastUse(PhysReg, CopyMI);
PhysRegsUsed[PhysReg] = true;
ReusedOperands.markClobbered(PhysReg);
- if (doReMat)
+ if (doReMat) {
MRI->reMaterialize(MBB, &MI, PhysReg, VRM.getReMaterializedMI(VirtReg));
- else
+ ++NumReMats;
+ } else {
MRI->loadRegFromStackSlot(MBB, &MI, PhysReg, StackSlot, RC);
+ ++NumLoads;
+ }
// This invalidates PhysReg.
Spills.ClobberPhysReg(PhysReg);
// unless it's a two-address operand.
if (TID->getOperandConstraint(i, TOI::TIED_TO) == -1)
MI.getOperand(i).setIsKill();
- ++NumLoads;
MI.getOperand(i).setReg(PhysReg);
DOUT << '\t' << *prior(MII);
}
// extended. Remove its kill.
bool WasKill = false;
if (SSMI) {
- MachineOperand *MOK = SSMI->findRegisterUseOperand(InReg, true);
- if (MOK) {
- WasKill = MOK->isKill();
- MOK->unsetIsKill();
+ int UIdx = SSMI->findRegisterUseOperand(InReg, true);
+ if (UIdx != -1) {
+ MachineOperand &MOK = SSMI->getOperand(UIdx);
+ WasKill = MOK.isKill();
+ MOK.unsetIsKill();
}
}
if (NextMII != MBB.end()) {
- // If NextMII uses InReg (must be the copy?), mark it killed.
- MachineOperand *MOU = NextMII->findRegisterUseOperand(InReg);
- if (MOU) {
- if (WasKill)
- MOU->setIsKill();
+ // If NextMII uses InReg and the use is not a two address
+ // operand, mark it killed.
+ int UIdx = NextMII->findRegisterUseOperand(InReg);
+ if (UIdx != -1) {
+ MachineOperand &MOU = NextMII->getOperand(UIdx);
+ if (WasKill) {
+ const TargetInstrDescriptor *NTID =
+ NextMII->getInstrDescriptor();
+ if (UIdx >= NTID->numOperands ||
+ NTID->getOperandConstraint(UIdx, TOI::TIED_TO) == -1)
+ MOU.setIsKill();
+ }
Spills.addLastUse(InReg, &(*NextMII));
}
}
if (TII->isMoveInstr(MI, Src, Dst) && Src == Dst) {
++NumDCE;
DOUT << "Removing now-noop copy: " << MI;
+ Spills.removeLastUse(Src, &MI);
MBB.erase(&MI);
VRM.RemoveFromFoldedVirtMap(&MI);
goto ProcessNextInst;