return r != end() && r->start <= index;
}
- /// killedAt - Return true if a live range ends at index. Note that the kill
- /// point is not contained in the half-open live range. It is usually the
- /// getDefIndex() slot following its last use.
- bool killedAt(SlotIndex index) const {
- const_iterator r = find(index.getRegSlot(true));
- return r != end() && r->end == index;
+ /// Return true if a live range ends at the instruction at this index. Note
+ /// that the kill point is not contained in the half-open live range. It is
+ /// usually the EarlyClobber or Register slot following its last use.
+ /// Unlike the old killedAt(), which matched one exact slot, this compares
+ /// base indexes, so a range ending at any slot of the instruction at this
+ /// index counts as a kill — presumably the intended widening; confirm at
+ /// the call sites that rely on it.
+ bool isKilledAtInstr(SlotIndex index) const {
+ SlotIndex BaseIdx = index.getBaseIndex();
+ const_iterator r = find(BaseIdx);
+ return r != end() && r->end.getBaseIndex() == BaseIdx;
}
/// getLiveRangeContaining - Return the live range that contains the
// Always shrink COPY uses that probably come from live range splitting.
if (MI->readsVirtualRegister(Reg) &&
(MI->isCopy() || MOI->isDef() || MRI.hasOneNonDBGUse(Reg) ||
- LI.killedAt(Idx)))
+ LI.isKilledAtInstr(Idx)))
ToShrink.insert(&LI);
// Remove defined value.
// Adjust liveouts if LiveIntervals are available.
if (RequireIntervals) {
const LiveInterval *LI = getInterval(Reg);
- if (LI && !LI->killedAt(SlotIdx))
+ if (LI && !LI->isKilledAtInstr(SlotIdx))
discoverLiveOut(Reg);
}
increaseRegPressure(Reg);
bool lastUse = false;
if (RequireIntervals) {
const LiveInterval *LI = getInterval(Reg);
- lastUse = LI && LI->killedAt(SlotIdx);
+ lastUse = LI && LI->isKilledAtInstr(SlotIdx);
}
else {
// Allocatable physregs are always single-use before register rewriting.
// to be bottom-scheduled to avoid searching uses at each query.
SlotIndex CurrIdx = getCurrSlot();
const LiveInterval *LI = getInterval(Reg);
- if (LI && LI->killedAt(SlotIdx)
+ if (LI && LI->isKilledAtInstr(SlotIdx)
&& !findUseBetween(Reg, CurrIdx, SlotIdx, MRI, LIS)) {
decreaseRegPressure(Reg);
}