- Remove the infamous r2rMap_ and rep() method. The coalescer now updates
  register defs and uses after each successful coalescing (see the before/after
  sketch below).
- Also remove a number of hacks and fix some subtle kill-information bugs.


git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@47167 91177308-0d34-0410-b5e6-96231b3b80d8
Evan Cheng 2008-02-15 18:24:29 +00:00
parent f20db15954
commit c8d044e4f7
6 changed files with 328 additions and 403 deletions
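A rough sketch of the before/after (simplified from the hunks below; the
sub-register fix-ups, iterator details and error paths of the real code are
elided):

// Before: every register query had to chase the representative map.
unsigned SimpleRegisterCoalescing::rep(unsigned Reg) {
  unsigned Rep = r2rMap_[Reg];
  if (Rep)
    return r2rMap_[Reg] = rep(Rep);  // follow and path-compress the r2r chain
  return Reg;
}

// After: a successful join immediately rewrites every def and use of SrcReg
// to DstReg, so register numbers can be compared directly from then on.
void SimpleRegisterCoalescing::UpdateRegDefsUses(unsigned SrcReg,
                                                 unsigned DstReg,
                                                 unsigned SubIdx) {
  for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(SrcReg),
         E = mri_->reg_end(); I != E; ) {
    MachineOperand &O = I.getOperand();
    ++I;                    // advance first; setReg() re-links O to DstReg's list
    if (SubIdx)
      O.setSubReg(SubIdx);  // the real version also maps physical sub-registers
    O.setReg(DstReg);
  }
}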

View File

@ -37,19 +37,18 @@ namespace llvm {
/// merge point), it contains ~0u,x. If the value number is not in use, it
/// contains ~1u,x to indicate that the value # is not used.
/// def - Instruction # of the definition.
/// reg - Source reg iff val# is defined by a copy; zero otherwise.
/// copy - Copy iff val# is defined by a copy; zero otherwise.
/// hasPHIKill - One or more of the kills are PHI nodes.
/// kills - Instruction # of the kills. If a kill is an odd #, it means
/// the kill is a phi join point.
/// kills - Instruction # of the kills.
struct VNInfo {
unsigned id;
unsigned def;
unsigned reg;
MachineInstr *copy;
bool hasPHIKill;
SmallVector<unsigned, 4> kills;
VNInfo() : id(~1U), def(~1U), reg(0), hasPHIKill(false) {}
VNInfo(unsigned i, unsigned d, unsigned r)
: id(i), def(d), reg(r), hasPHIKill(false) {}
VNInfo() : id(~1U), def(~1U), copy(0), hasPHIKill(false) {}
VNInfo(unsigned i, unsigned d, MachineInstr *c)
: id(i), def(d), copy(c), hasPHIKill(false) {}
};
/// LiveRange structure - This represents a simple register range in the
@ -159,14 +158,14 @@ namespace llvm {
/// another.
void copyValNumInfo(VNInfo *DstValNo, const VNInfo *SrcValNo) {
DstValNo->def = SrcValNo->def;
DstValNo->reg = SrcValNo->reg;
DstValNo->copy = SrcValNo->copy;
DstValNo->hasPHIKill = SrcValNo->hasPHIKill;
DstValNo->kills = SrcValNo->kills;
}
/// getNextValue - Create a new value number and return it. MIIdx specifies
/// the instruction that defines the value number.
VNInfo *getNextValue(unsigned MIIdx, unsigned SrcReg,
VNInfo *getNextValue(unsigned MIIdx, MachineInstr *CopyMI,
BumpPtrAllocator &VNInfoAllocator) {
#ifdef __GNUC__
unsigned Alignment = __alignof__(VNInfo);
@ -176,7 +175,7 @@ namespace llvm {
#endif
VNInfo *VNI= static_cast<VNInfo*>(VNInfoAllocator.Allocate(sizeof(VNInfo),
Alignment));
new (VNI) VNInfo(valnos.size(), MIIdx, SrcReg);
new (VNI) VNInfo(valnos.size(), MIIdx, CopyMI);
valnos.push_back(VNI);
return VNI;
}
@ -199,7 +198,7 @@ namespace llvm {
void addKills(VNInfo *VNI, const SmallVector<unsigned, 4> &kills) {
for (unsigned i = 0, e = kills.size(); i != e; ++i) {
unsigned KillIdx = kills[i];
if (!liveAt(KillIdx)) {
if (!liveBeforeAndAt(KillIdx)) {
SmallVector<unsigned, 4>::iterator
I = std::lower_bound(VNI->kills.begin(), VNI->kills.end(), KillIdx);
VNI->kills.insert(I, KillIdx);
@ -231,6 +230,15 @@ namespace llvm {
kills.erase(I, E);
}
/// isKill - Return true if the specified index is a kill of the
/// specified val#.
bool isKill(const VNInfo *VNI, unsigned KillIdx) const {
const SmallVector<unsigned, 4> &kills = VNI->kills;
SmallVector<unsigned, 4>::const_iterator
I = std::lower_bound(kills.begin(), kills.end(), KillIdx);
return I != kills.end() && *I == KillIdx;
}
/// MergeValueNumberInto - This method is called when two value numbers
/// are found to be equivalent. This eliminates V1, replacing all
/// LiveRanges with the V1 value number with the V2 value number. This can
@ -284,6 +292,11 @@ namespace llvm {
bool liveAt(unsigned index) const;
// liveBeforeAndAt - Check if the interval is live at the index and the
// index just before it. If index is liveAt, check if it starts a new live
// range. If it does, then check if the previous live range ends at index-1.
bool liveBeforeAndAt(unsigned index) const;
/// getLiveRangeContaining - Return the live range that contains the
/// specified index, or null if there is none.
const LiveRange *getLiveRangeContaining(unsigned Idx) const {

View File

@ -229,6 +229,10 @@ namespace llvm {
BumpPtrAllocator& getVNInfoAllocator() { return VNInfoAllocator; }
/// getVNInfoSourceReg - Helper function that parses the specified VNInfo
/// copy field and returns the source register that defines it.
unsigned getVNInfoSourceReg(const VNInfo *VNI) const;
virtual void getAnalysisUsage(AnalysisUsage &AU) const;
virtual void releaseMemory();
@ -276,7 +280,7 @@ namespace llvm {
MachineBasicBlock::iterator mi,
unsigned MIIdx,
LiveInterval &interval,
unsigned SrcReg);
MachineInstr *CopyMI);
/// handleLiveInRegister - Create interval for a livein register.
void handleLiveInRegister(MachineBasicBlock* mbb,

View File

@ -44,6 +44,27 @@ bool LiveInterval::liveAt(unsigned I) const {
return r->contains(I);
}
// liveBeforeAndAt - Check if the interval is live at the index and the index
// just before it. If index is liveAt, check if it starts a new live range.
// If it does, then check if the previous live range ends at index-1.
bool LiveInterval::liveBeforeAndAt(unsigned I) const {
Ranges::const_iterator r = std::upper_bound(ranges.begin(), ranges.end(), I);
if (r == ranges.begin())
return false;
--r;
if (!r->contains(I))
return false;
if (I != r->start)
return true;
// I is the start of a live range. Check if the previous live range ends
// at I-1.
if (r == ranges.begin())
return false;
return (--r)->end == I;
}
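A quick worked example of what this checks (hypothetical indices, half-open
[start,end) ranges as elsewhere in this file):

// ranges = { [4,8), [8,12) }:  liveBeforeAndAt(8) -> true  (8 starts a range, 7 is live in the previous one)
// ranges = { [4,6), [8,12) }:  liveBeforeAndAt(8) -> false (7 is not live)
// ranges = { [4,12) }:         liveBeforeAndAt(8) -> true  (8 is live and not a range start)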
// overlaps - Return true if the intersection of the two live intervals is
// not empty.
//

View File

@ -224,14 +224,12 @@ void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
// Get the Idx of the defining instructions.
unsigned defIndex = getDefIndex(MIIdx);
VNInfo *ValNo;
MachineInstr *CopyMI = NULL;
unsigned SrcReg, DstReg;
if (tii_->isMoveInstr(*mi, SrcReg, DstReg))
ValNo = interval.getNextValue(defIndex, SrcReg, VNInfoAllocator);
else if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG)
ValNo = interval.getNextValue(defIndex, mi->getOperand(1).getReg(),
VNInfoAllocator);
else
ValNo = interval.getNextValue(defIndex, 0, VNInfoAllocator);
if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
tii_->isMoveInstr(*mi, SrcReg, DstReg))
CopyMI = mi;
ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);
assert(ValNo->id == 0 && "First value in interval is not 0?");
@ -326,13 +324,12 @@ void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
// The new value number (#1) is defined by the instruction we claimed
// defined value #0.
VNInfo *ValNo = interval.getNextValue(0, 0, VNInfoAllocator);
ValNo->def = OldValNo->def;
ValNo->reg = OldValNo->reg;
VNInfo *ValNo = interval.getNextValue(OldValNo->def, OldValNo->copy,
VNInfoAllocator);
// Value#0 is now defined by the 2-addr instruction.
OldValNo->def = RedefIndex;
OldValNo->reg = 0;
OldValNo->copy = 0;
// Add the new live interval which replaces the range for the input copy.
LiveRange LR(DefIndex, RedefIndex, ValNo);
@ -364,7 +361,6 @@ void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
DOUT << " Removing [" << Start << "," << End << "] from: ";
interval.print(DOUT, tri_); DOUT << "\n";
interval.removeRange(Start, End);
interval.addKill(VNI, Start);
VNI->hasPHIKill = true;
DOUT << " RESULT: "; interval.print(DOUT, tri_);
@ -383,14 +379,12 @@ void LiveIntervals::handleVirtualRegisterDef(MachineBasicBlock *mbb,
unsigned defIndex = getDefIndex(MIIdx);
VNInfo *ValNo;
MachineInstr *CopyMI = NULL;
unsigned SrcReg, DstReg;
if (tii_->isMoveInstr(*mi, SrcReg, DstReg))
ValNo = interval.getNextValue(defIndex, SrcReg, VNInfoAllocator);
else if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG)
ValNo = interval.getNextValue(defIndex, mi->getOperand(1).getReg(),
VNInfoAllocator);
else
ValNo = interval.getNextValue(defIndex, 0, VNInfoAllocator);
if (mi->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
tii_->isMoveInstr(*mi, SrcReg, DstReg))
CopyMI = mi;
ValNo = interval.getNextValue(defIndex, CopyMI, VNInfoAllocator);
unsigned killIndex = getInstructionIndex(&mbb->back()) + InstrSlots::NUM;
LiveRange LR(defIndex, killIndex, ValNo);
@ -408,7 +402,7 @@ void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
MachineBasicBlock::iterator mi,
unsigned MIIdx,
LiveInterval &interval,
unsigned SrcReg) {
MachineInstr *CopyMI) {
// A physical register cannot be live across basic blocks, so its
// lifetime must end somewhere in its defining basic block.
DOUT << "\t\tregister: "; DEBUG(printRegName(interval.reg));
@ -449,7 +443,7 @@ void LiveIntervals::handlePhysicalRegisterDef(MachineBasicBlock *MBB,
// The only case we should have a dead physreg here without a killing or
// instruction where we know it's dead is if it is live-in to the function
// and never used.
assert(!SrcReg && "physreg was not killed in defining block!");
assert(!CopyMI && "physreg was not killed in defining block!");
end = getDefIndex(start) + 1; // It's dead.
exit:
@ -458,7 +452,7 @@ exit:
// Already exists? Extend old live interval.
LiveInterval::iterator OldLR = interval.FindLiveRangeContaining(start);
VNInfo *ValNo = (OldLR != interval.end())
? OldLR->valno : interval.getNextValue(start, SrcReg, VNInfoAllocator);
? OldLR->valno : interval.getNextValue(start, CopyMI, VNInfoAllocator);
LiveRange LR(start, end, ValNo);
interval.addRange(LR);
interval.addKill(LR.valno, end);
@ -472,12 +466,12 @@ void LiveIntervals::handleRegisterDef(MachineBasicBlock *MBB,
if (TargetRegisterInfo::isVirtualRegister(reg))
handleVirtualRegisterDef(MBB, MI, MIIdx, getOrCreateInterval(reg));
else if (allocatableRegs_[reg]) {
MachineInstr *CopyMI = NULL;
unsigned SrcReg, DstReg;
if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG)
SrcReg = MI->getOperand(1).getReg();
else if (!tii_->isMoveInstr(*MI, SrcReg, DstReg))
SrcReg = 0;
handlePhysicalRegisterDef(MBB, MI, MIIdx, getOrCreateInterval(reg), SrcReg);
if (MI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG ||
tii_->isMoveInstr(*MI, SrcReg, DstReg))
CopyMI = MI;
handlePhysicalRegisterDef(MBB, MI, MIIdx, getOrCreateInterval(reg), CopyMI);
// Def of a register also defines its sub-registers.
for (const unsigned* AS = tri_->getSubRegisters(reg); *AS; ++AS)
// Avoid processing some defs more than once.
@ -601,6 +595,20 @@ LiveInterval LiveIntervals::createInterval(unsigned reg) {
return LiveInterval(reg, Weight);
}
/// getVNInfoSourceReg - Helper function that parses the specified VNInfo
/// copy field and returns the source register that defines it.
unsigned LiveIntervals::getVNInfoSourceReg(const VNInfo *VNI) const {
if (!VNI->copy)
return 0;
if (VNI->copy->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG)
return VNI->copy->getOperand(1).getReg();
unsigned SrcReg, DstReg;
if (tii_->isMoveInstr(*VNI->copy, SrcReg, DstReg))
return SrcReg;
assert(0 && "Unrecognized copy instruction!");
return 0;
}
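This query takes over from the removed VNInfo::reg field; a typical call-site
change later in this patch looks roughly like:

// Before (cached source reg, filtered through the representative map):
unsigned SrcReg = AValNo->reg;
if (rep(SrcReg) != IntB.reg) return false;

// After (re-derive the source reg from the recorded copy instruction):
unsigned SrcReg = li_->getVNInfoSourceReg(AValNo);
if (SrcReg != IntB.reg) return false;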
//===----------------------------------------------------------------------===//
// Register allocator hooks.

View File

@ -106,9 +106,8 @@ bool SimpleRegisterCoalescing::AdjustCopiesBackFrom(LiveInterval &IntA,
// Get the location that B is defined at. Two options: either this value has
// an unknown definition point or it is defined at CopyIdx. If unknown, we
// can't process it.
if (!BValNo->reg) return false;
assert(BValNo->def == CopyIdx &&
"Copy doesn't define the value?");
if (!BValNo->copy) return false;
assert(BValNo->def == CopyIdx && "Copy doesn't define the value?");
// AValNo is the value number in A that defines the copy, A3 in the example.
LiveInterval::iterator ALR = IntA.FindLiveRangeContaining(CopyIdx-1);
@ -116,14 +115,14 @@ bool SimpleRegisterCoalescing::AdjustCopiesBackFrom(LiveInterval &IntA,
// If AValNo is defined as a copy from IntB, we can potentially process this.
// Get the instruction that defines this value number.
unsigned SrcReg = AValNo->reg;
unsigned SrcReg = li_->getVNInfoSourceReg(AValNo);
if (!SrcReg) return false; // Not defined by a copy.
// If the value number is not defined by a copy instruction, ignore it.
// If the source register comes from an interval other than IntB, we can't
// handle this.
if (rep(SrcReg) != IntB.reg) return false;
if (SrcReg != IntB.reg) return false;
// Get the LiveRange in IntB that this value number starts with.
LiveInterval::iterator ValLR = IntB.FindLiveRangeContaining(AValNo->def-1);
@ -159,7 +158,7 @@ bool SimpleRegisterCoalescing::AdjustCopiesBackFrom(LiveInterval &IntA,
// that defines this value #'. Update the valnum with the new defining
// instruction #.
BValNo->def = FillerStart;
BValNo->reg = 0;
BValNo->copy = NULL;
// Okay, we can merge them. We need to insert a new liverange:
// [ValLR.end, BLR.begin) of either value number, then we merge the
@ -231,7 +230,7 @@ bool SimpleRegisterCoalescing::RemoveCopyByCommutingDef(LiveInterval &IntA,
// Get the location that B is defined at. Two options: either this value has
// an unknown definition point or it is defined at CopyIdx. If unknown, we
// can't process it.
if (!BValNo->reg) return false;
if (!BValNo->copy) return false;
assert(BValNo->def == CopyIdx && "Copy doesn't define the value?");
// AValNo is the value number in A that defines the copy, A3 in the example.
@ -243,35 +242,14 @@ bool SimpleRegisterCoalescing::RemoveCopyByCommutingDef(LiveInterval &IntA,
return false;
MachineInstr *DefMI = li_->getInstructionFromIndex(AValNo->def);
const TargetInstrDesc &TID = DefMI->getDesc();
if (!TID.isCommutable())
return false;
int Idx = -1;
for (unsigned i = 0, e = DefMI->getNumOperands(); i != e; ++i) {
MachineOperand &MO = DefMI->getOperand(i);
if (!MO.isRegister()) continue;
unsigned Reg = MO.getReg();
if (Reg && TargetRegisterInfo::isVirtualRegister(Reg)) {
if (rep(Reg) == IntA.reg) {
// If the dest register comes from an interval other than IntA, we
// can't handle this.
if (Reg != IntA.reg)
return false;
continue;
}
if (Idx != -1)
// FIXME: Being overly careful here. We just need to figure out
// which register operand will become the new def.
return false;
Idx = i;
}
}
if (Idx == -1)
// Something like %reg1024 = add %reg1024, %reg1024
unsigned NewDstIdx;
if (!TID.isCommutable() ||
!tii_->CommuteChangesDestination(DefMI, NewDstIdx))
return false;
MachineOperand &MO = DefMI->getOperand(Idx);
unsigned NewReg = MO.getReg();
if (rep(NewReg) != IntB.reg || !MO.isKill())
MachineOperand &NewDstMO = DefMI->getOperand(NewDstIdx);
unsigned NewReg = NewDstMO.getReg();
if (NewReg != IntB.reg || !NewDstMO.isKill())
return false;
// Make sure there are no other definitions of IntB that would reach the
@ -329,16 +307,7 @@ bool SimpleRegisterCoalescing::RemoveCopyByCommutingDef(LiveInterval &IntA,
unsigned SrcReg, DstReg;
if (!tii_->isMoveInstr(*UseMI, SrcReg, DstReg))
continue;
unsigned repDstReg = rep(DstReg);
if (repDstReg != IntB.reg) {
// Update dst register interval val# since its source register has
// changed.
LiveInterval &DLI = li_->getInterval(repDstReg);
LiveInterval::iterator DLR =
DLI.FindLiveRangeContaining(li_->getDefIndex(UseIdx));
DLR->valno->reg = NewReg;
ChangedCopies.insert(UseMI);
} else {
if (DstReg == IntB.reg) {
// This copy will become a noop. If it's defining a new val#,
// remove that val# as well. However this live range is being
// extended to the end of the existing live range defined by the copy.
@ -389,14 +358,23 @@ bool SimpleRegisterCoalescing::RemoveCopyByCommutingDef(LiveInterval &IntA,
return true;
}
/// AddSubRegIdxPairs - Recursively mark all the registers represented by the
/// specified register as sub-registers. The recursion level is expected to be
/// shallow.
void SimpleRegisterCoalescing::AddSubRegIdxPairs(unsigned Reg, unsigned SubIdx) {
std::vector<unsigned> &JoinedRegs = r2rRevMap_[Reg];
for (unsigned i = 0, e = JoinedRegs.size(); i != e; ++i) {
SubRegIdxes.push_back(std::make_pair(JoinedRegs[i], SubIdx));
AddSubRegIdxPairs(JoinedRegs[i], SubIdx);
/// RemoveUnnecessaryKills - Remove kill markers that are no longer accurate
/// due to live range lengthening as the result of coalescing.
void SimpleRegisterCoalescing::RemoveUnnecessaryKills(unsigned Reg,
LiveInterval &LI) {
for (MachineRegisterInfo::use_iterator UI = mri_->use_begin(Reg),
UE = mri_->use_end(); UI != UE; ++UI) {
MachineOperand &UseMO = UI.getOperand();
if (UseMO.isKill()) {
MachineInstr *UseMI = UseMO.getParent();
unsigned UseIdx = li_->getUseIndex(li_->getInstructionIndex(UseMI));
if (JoinedCopies.count(UseMI))
continue;
LiveInterval::const_iterator UI = LI.FindLiveRangeContaining(UseIdx);
assert(UI != LI.end());
if (!LI.isKill(UI->valno, UseIdx+1))
UseMO.setIsKill(false);
}
}
}
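JoinCopy (further down in this file) calls this for both sides after a join,
roughly:

// Coalescing lengthens live ranges, so kill markers recorded on the shorter,
// pre-join ranges may now fall in the middle of the merged interval.
RemoveUnnecessaryKills(SrcReg, *ResDstInt);
if (TargetRegisterInfo::isVirtualRegister(DstReg))
  RemoveUnnecessaryKills(DstReg, *ResDstInt);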
@ -411,7 +389,6 @@ bool SimpleRegisterCoalescing::isBackEdgeCopy(MachineInstr *CopyMI,
if (MBB != L->getLoopLatch())
return false;
DstReg = rep(DstReg);
LiveInterval &LI = li_->getInterval(DstReg);
unsigned DefIdx = li_->getInstructionIndex(CopyMI);
LiveInterval::const_iterator DstLR =
@ -425,6 +402,42 @@ bool SimpleRegisterCoalescing::isBackEdgeCopy(MachineInstr *CopyMI,
return false;
}
/// UpdateRegDefsUses - Replace all defs and uses of SrcReg to DstReg and
/// update the subregister number if it is not zero. If DstReg is a
/// physical register and the existing subregister number of the def / use
/// being updated is not zero, make sure to set it to the correct physical
/// subregister.
void
SimpleRegisterCoalescing::UpdateRegDefsUses(unsigned SrcReg, unsigned DstReg,
unsigned SubIdx) {
bool DstIsPhys = TargetRegisterInfo::isPhysicalRegister(DstReg);
if (DstIsPhys && SubIdx) {
// Figure out the real physical register we are updating with.
DstReg = tri_->getSubReg(DstReg, SubIdx);
SubIdx = 0;
}
for (MachineRegisterInfo::reg_iterator I = mri_->reg_begin(SrcReg),
E = mri_->reg_end(); I != E; ) {
MachineOperand &O = I.getOperand();
++I;
if (DstIsPhys) {
unsigned UseSubIdx = O.getSubReg();
unsigned UseDstReg = DstReg;
if (UseSubIdx)
UseDstReg = tri_->getSubReg(DstReg, UseSubIdx);
O.setReg(UseDstReg);
O.setSubReg(0);
} else {
unsigned OldSubIdx = O.getSubReg();
assert((!SubIdx || !OldSubIdx) && "Conflicting sub-register index!");
if (SubIdx)
O.setSubReg(SubIdx);
O.setReg(DstReg);
}
}
}
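For reference, the last steps of a successful JoinCopy (below) are, roughly:

// The merged interval now lives under DstReg; drop SrcReg's interval and
// rewrite every remaining SrcReg operand in place.
li_->removeInterval(SrcReg);
UpdateRegDefsUses(SrcReg, DstReg, SubIdx);

This eager rewrite is what makes the old r2rMap_/rep() indirection unnecessary.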
/// JoinCopy - Attempt to join intervals corresponding to SrcReg/DstReg,
/// which are the src/dst of the copy instruction CopyMI. This returns true
/// if the copy was successfully coalesced away. If it is not currently
@ -439,35 +452,26 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
DOUT << li_->getInstructionIndex(CopyMI) << '\t' << *CopyMI;
// Get representative registers.
unsigned SrcReg = TheCopy.SrcReg;
unsigned DstReg = TheCopy.DstReg;
// CopyMI has been modified due to commuting.
if (ChangedCopies.count(CopyMI)) {
if (tii_->isMoveInstr(*CopyMI, SrcReg, DstReg))
;
else if (CopyMI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG) {
unsigned SrcReg;
unsigned DstReg;
bool isExtSubReg = CopyMI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG;
unsigned SubIdx = 0;
if (isExtSubReg) {
DstReg = CopyMI->getOperand(0).getReg();
SrcReg = CopyMI->getOperand(1).getReg();
} else
assert(0 && "Unrecognized move instruction!");
TheCopy.SrcReg = SrcReg;
TheCopy.DstReg = DstReg;
ChangedCopies.erase(CopyMI);
} else if (!tii_->isMoveInstr(*CopyMI, SrcReg, DstReg)) {
assert(0 && "Unrecognized copy instruction!");
return false;
}
unsigned repSrcReg = rep(SrcReg);
unsigned repDstReg = rep(DstReg);
// If they are already joined we continue.
if (repSrcReg == repDstReg) {
if (SrcReg == DstReg) {
DOUT << "\tCopy already coalesced.\n";
return false; // Not coalescable.
}
bool SrcIsPhys = TargetRegisterInfo::isPhysicalRegister(repSrcReg);
bool DstIsPhys = TargetRegisterInfo::isPhysicalRegister(repDstReg);
bool SrcIsPhys = TargetRegisterInfo::isPhysicalRegister(SrcReg);
bool DstIsPhys = TargetRegisterInfo::isPhysicalRegister(DstReg);
// If they are both physical registers, we cannot join them.
if (SrcIsPhys && DstIsPhys) {
@ -476,31 +480,31 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
}
// We only join virtual registers with allocatable physical registers.
if (SrcIsPhys && !allocatableRegs_[repSrcReg]) {
if (SrcIsPhys && !allocatableRegs_[SrcReg]) {
DOUT << "\tSrc reg is unallocatable physreg.\n";
return false; // Not coalescable.
}
if (DstIsPhys && !allocatableRegs_[repDstReg]) {
if (DstIsPhys && !allocatableRegs_[DstReg]) {
DOUT << "\tDst reg is unallocatable physreg.\n";
return false; // Not coalescable.
}
bool isExtSubReg = CopyMI->getOpcode() == TargetInstrInfo::EXTRACT_SUBREG;
unsigned RealDstReg = 0;
if (isExtSubReg) {
unsigned SubIdx = CopyMI->getOperand(2).getImm();
if (SrcIsPhys)
SubIdx = CopyMI->getOperand(2).getImm();
if (SrcIsPhys) {
// r1024 = EXTRACT_SUBREG EAX, 0 then r1024 is really going to be
// coalesced with AX.
repSrcReg = tri_->getSubReg(repSrcReg, SubIdx);
else if (DstIsPhys) {
SrcReg = tri_->getSubReg(SrcReg, SubIdx);
SubIdx = 0;
} else if (DstIsPhys) {
// If this is a extract_subreg where dst is a physical register, e.g.
// cl = EXTRACT_SUBREG reg1024, 1
// then create and update the actual physical register allocated to RHS.
const TargetRegisterClass *RC = mri_->getRegClass(repSrcReg);
for (const unsigned *SRs = tri_->getSuperRegisters(repDstReg);
const TargetRegisterClass *RC = mri_->getRegClass(SrcReg);
for (const unsigned *SRs = tri_->getSuperRegisters(DstReg);
unsigned SR = *SRs; ++SRs) {
if (repDstReg == tri_->getSubReg(SR, SubIdx) &&
if (DstReg == tri_->getSubReg(SR, SubIdx) &&
RC->contains(SR)) {
RealDstReg = SR;
break;
@ -511,7 +515,7 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
// For this type of EXTRACT_SUBREG, conservatively
// check if the live interval of the source register interferes with the
// actual super physical register we are trying to coalesce with.
LiveInterval &RHS = li_->getInterval(repSrcReg);
LiveInterval &RHS = li_->getInterval(SrcReg);
if (li_->hasInterval(RealDstReg) &&
RHS.overlaps(li_->getInterval(RealDstReg))) {
DOUT << "Interfere with register ";
@ -524,24 +528,32 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
DEBUG(li_->getInterval(*SR).print(DOUT, tri_));
return false; // Not coalescable
}
SubIdx = 0;
} else {
unsigned SrcSize= li_->getInterval(repSrcReg).getSize() / InstrSlots::NUM;
unsigned DstSize= li_->getInterval(repDstReg).getSize() / InstrSlots::NUM;
const TargetRegisterClass *RC=mf_->getRegInfo().getRegClass(repDstReg);
unsigned SrcSize= li_->getInterval(SrcReg).getSize() / InstrSlots::NUM;
unsigned DstSize= li_->getInterval(DstReg).getSize() / InstrSlots::NUM;
const TargetRegisterClass *RC = mri_->getRegClass(DstReg);
unsigned Threshold = allocatableRCRegs_[RC].count();
// Be conservative. If both sides are virtual registers, do not coalesce
// if this will cause a high use density interval to target a smaller set
// of registers.
if (DstSize > Threshold || SrcSize > Threshold) {
LiveVariables::VarInfo &svi = lv_->getVarInfo(repSrcReg);
LiveVariables::VarInfo &dvi = lv_->getVarInfo(repDstReg);
LiveVariables::VarInfo &svi = lv_->getVarInfo(SrcReg);
LiveVariables::VarInfo &dvi = lv_->getVarInfo(DstReg);
if ((float)dvi.NumUses / DstSize < (float)svi.NumUses / SrcSize) {
Again = true; // May be possible to coalesce later.
return false;
}
}
}
} else if (differingRegisterClasses(repSrcReg, repDstReg)) {
} else if (differingRegisterClasses(SrcReg, DstReg)) {
// FIXME: What if the result of an EXTRACT_SUBREG is then coalesced
// with another? If it's the resulting destination register, then
// the subidx must be propagated to uses (but only those defined
// by the EXTRACT_SUBREG). If it's being coalesced into another
// register, it should be safe because the register is assumed to have
// the register class of the super-register.
// If they are not of the same register class, we cannot join them.
DOUT << "\tSrc/Dest are different register classes.\n";
// Allow the coalescer to try again in case either side gets coalesced to
@ -552,9 +564,9 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
return false;
}
LiveInterval &SrcInt = li_->getInterval(repSrcReg);
LiveInterval &DstInt = li_->getInterval(repDstReg);
assert(SrcInt.reg == repSrcReg && DstInt.reg == repDstReg &&
LiveInterval &SrcInt = li_->getInterval(SrcReg);
LiveInterval &DstInt = li_->getInterval(DstReg);
assert(SrcInt.reg == SrcReg && DstInt.reg == DstReg &&
"Register mapping is horribly broken!");
DOUT << "\t\tInspecting "; SrcInt.print(DOUT, tri_);
@ -580,19 +592,20 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
if (SrcEnd > li_->getDefIndex(CopyIdx)) {
isDead = false;
} else {
MachineOperand *MOU;
MachineInstr *LastUse= lastRegisterUse(SrcStart, CopyIdx, repSrcReg, MOU);
unsigned LastUseIdx;
MachineOperand *LastUse =
lastRegisterUse(SrcStart, CopyIdx, SrcReg, LastUseIdx);
if (LastUse) {
// Shorten the liveinterval to the end of last use.
MOU->setIsKill();
LastUse->setIsKill();
isDead = false;
isShorten = true;
RemoveStart = li_->getDefIndex(li_->getInstructionIndex(LastUse));
RemoveStart = li_->getDefIndex(LastUseIdx);
RemoveEnd = SrcEnd;
} else {
MachineInstr *SrcMI = li_->getInstructionFromIndex(SrcStart);
if (SrcMI) {
MachineOperand *mops = findDefOperand(SrcMI, repSrcReg);
MachineOperand *mops = findDefOperand(SrcMI, SrcReg);
if (mops)
// A dead def should have a single cycle interval.
++RemoveStart;
@ -607,9 +620,9 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
// think twice about coalescing them!
if (!mopd->isDead() && (SrcIsPhys || DstIsPhys) && !isExtSubReg) {
LiveInterval &JoinVInt = SrcIsPhys ? DstInt : SrcInt;
unsigned JoinVReg = SrcIsPhys ? repDstReg : repSrcReg;
unsigned JoinPReg = SrcIsPhys ? repSrcReg : repDstReg;
const TargetRegisterClass *RC = mf_->getRegInfo().getRegClass(JoinVReg);
unsigned JoinVReg = SrcIsPhys ? DstReg : SrcReg;
unsigned JoinPReg = SrcIsPhys ? SrcReg : DstReg;
const TargetRegisterClass *RC = mri_->getRegClass(JoinVReg);
unsigned Threshold = allocatableRCRegs_[RC].count() * 2;
if (TheCopy.isBackEdge)
Threshold *= 2; // Favors back edge copies.
@ -638,15 +651,15 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
if (isDead) {
// Result of the copy is dead. Propagate this property.
if (SrcStart == 0) {
assert(TargetRegisterInfo::isPhysicalRegister(repSrcReg) &&
assert(TargetRegisterInfo::isPhysicalRegister(SrcReg) &&
"Live-in must be a physical register!");
// Live-in to the function but dead. Remove it from entry live-in set.
// JoinIntervals may end up swapping the two intervals.
mf_->begin()->removeLiveIn(repSrcReg);
mf_->begin()->removeLiveIn(SrcReg);
} else {
MachineInstr *SrcMI = li_->getInstructionFromIndex(SrcStart);
if (SrcMI) {
MachineOperand *mops = findDefOperand(SrcMI, repSrcReg);
MachineOperand *mops = findDefOperand(SrcMI, SrcReg);
if (mops)
mops->setIsDead();
}
@ -679,21 +692,21 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
LiveInterval *ResSrcInt = &SrcInt;
LiveInterval *ResDstInt = &DstInt;
if (Swapped) {
std::swap(repSrcReg, repDstReg);
std::swap(SrcReg, DstReg);
std::swap(ResSrcInt, ResDstInt);
}
assert(TargetRegisterInfo::isVirtualRegister(repSrcReg) &&
assert(TargetRegisterInfo::isVirtualRegister(SrcReg) &&
"LiveInterval::join didn't work right!");
// If we're about to merge live ranges into a physical register live range,
// we have to update any aliased register's live ranges to indicate that they
// have clobbered values for this range.
if (TargetRegisterInfo::isPhysicalRegister(repDstReg)) {
if (TargetRegisterInfo::isPhysicalRegister(DstReg)) {
// Unset unnecessary kills.
if (!ResDstInt->containsOneValue()) {
for (LiveInterval::Ranges::const_iterator I = ResSrcInt->begin(),
E = ResSrcInt->end(); I != E; ++I)
unsetRegisterKills(I->start, I->end, repDstReg);
unsetRegisterKills(I->start, I->end, DstReg);
}
// If this is a extract_subreg where dst is a physical register, e.g.
@ -709,73 +722,73 @@ bool SimpleRegisterCoalescing::JoinCopy(CopyRec &TheCopy, bool &Again) {
assert(DstLR != ResDstInt->end() && "Invalid joined interval!");
const VNInfo *DstValNo = DstLR->valno;
if (CopiedValNos.insert(DstValNo)) {
VNInfo *ValNo = RealDstInt.getNextValue(DstValNo->def, DstValNo->reg,
VNInfo *ValNo = RealDstInt.getNextValue(DstValNo->def, DstValNo->copy,
li_->getVNInfoAllocator());
ValNo->hasPHIKill = DstValNo->hasPHIKill;
RealDstInt.addKills(ValNo, DstValNo->kills);
RealDstInt.MergeValueInAsValue(*ResDstInt, DstValNo, ValNo);
}
}
repDstReg = RealDstReg;
DstReg = RealDstReg;
}
// Update the liveintervals of sub-registers.
for (const unsigned *AS = tri_->getSubRegisters(repDstReg); *AS; ++AS)
for (const unsigned *AS = tri_->getSubRegisters(DstReg); *AS; ++AS)
li_->getOrCreateInterval(*AS).MergeInClobberRanges(*ResSrcInt,
li_->getVNInfoAllocator());
} else {
// Merge use info if the destination is a virtual register.
LiveVariables::VarInfo& dVI = lv_->getVarInfo(repDstReg);
LiveVariables::VarInfo& sVI = lv_->getVarInfo(repSrcReg);
LiveVariables::VarInfo& dVI = lv_->getVarInfo(DstReg);
LiveVariables::VarInfo& sVI = lv_->getVarInfo(SrcReg);
dVI.NumUses += sVI.NumUses;
}
// Remember these liveintervals have been joined.
JoinedLIs.set(repSrcReg - TargetRegisterInfo::FirstVirtualRegister);
if (TargetRegisterInfo::isVirtualRegister(repDstReg))
JoinedLIs.set(repDstReg - TargetRegisterInfo::FirstVirtualRegister);
// If this is a EXTRACT_SUBREG, make sure the result of coalescing is the
// larger super-register.
if (isExtSubReg && !SrcIsPhys && !DstIsPhys) {
if (!Swapped) {
// Make sure we allocate the larger super-register.
ResSrcInt->Copy(*ResDstInt, li_->getVNInfoAllocator());
std::swap(repSrcReg, repDstReg);
std::swap(SrcReg, DstReg);
std::swap(ResSrcInt, ResDstInt);
}
unsigned SubIdx = CopyMI->getOperand(2).getImm();
SubRegIdxes.push_back(std::make_pair(repSrcReg, SubIdx));
AddSubRegIdxPairs(repSrcReg, SubIdx);
}
if (NewHeuristic) {
// Add all copies that define val# in the source interval into the queue.
for (LiveInterval::const_vni_iterator i = ResSrcInt->vni_begin(),
e = ResSrcInt->vni_end(); i != e; ++i) {
const VNInfo *vni = *i;
if (vni->def && vni->def != ~1U && vni->def != ~0U) {
if (!vni->def || vni->def == ~1U || vni->def == ~0U)
continue;
MachineInstr *CopyMI = li_->getInstructionFromIndex(vni->def);
unsigned SrcReg, DstReg;
unsigned NewSrcReg, NewDstReg;
if (CopyMI &&
JoinedCopies.count(CopyMI) == 0 &&
tii_->isMoveInstr(*CopyMI, SrcReg, DstReg)) {
tii_->isMoveInstr(*CopyMI, NewSrcReg, NewDstReg)) {
unsigned LoopDepth = loopInfo->getLoopDepth(CopyMI->getParent());
JoinQueue->push(CopyRec(CopyMI, SrcReg, DstReg, LoopDepth,
JoinQueue->push(CopyRec(CopyMI, LoopDepth,
isBackEdgeCopy(CopyMI, DstReg)));
}
}
}
}
DOUT << "\n\t\tJoined. Result = "; ResDstInt->print(DOUT, tri_);
DOUT << "\n";
// repSrcReg is guaranteed to be the register whose live interval is
// being merged.
li_->removeInterval(repSrcReg);
r2rMap_[repSrcReg] = repDstReg;
r2rRevMap_[repDstReg].push_back(repSrcReg);
// Finally, delete the copy instruction.
// Remember to delete the copy instruction.
JoinedCopies.insert(CopyMI);
// Some live range has been lengthened due to coalescing, eliminate the
// unnecessary kills.
RemoveUnnecessaryKills(SrcReg, *ResDstInt);
if (TargetRegisterInfo::isVirtualRegister(DstReg))
RemoveUnnecessaryKills(DstReg, *ResDstInt);
// SrcReg is guaranteed to be the register whose live interval is
// being merged.
li_->removeInterval(SrcReg);
UpdateRegDefsUses(SrcReg, DstReg, SubIdx);
++numJoins;
return true;
}
@ -878,8 +891,8 @@ bool SimpleRegisterCoalescing::SimpleJoin(LiveInterval &LHS, LiveInterval &RHS){
// If we haven't already recorded that this value # is safe, check it.
if (!InVector(LHSIt->valno, EliminatedLHSVals)) {
// Copy from the RHS?
unsigned SrcReg = LHSIt->valno->reg;
if (rep(SrcReg) != RHS.reg)
unsigned SrcReg = li_->getVNInfoSourceReg(LHSIt->valno);
if (SrcReg != RHS.reg)
return false; // Nope, bail out.
EliminatedLHSVals.push_back(LHSIt->valno);
@ -906,7 +919,7 @@ bool SimpleRegisterCoalescing::SimpleJoin(LiveInterval &LHS, LiveInterval &RHS){
} else {
// Otherwise, if this is a copy from the RHS, mark it as being merged
// in.
if (rep(LHSIt->valno->reg) == RHS.reg) {
if (li_->getVNInfoSourceReg(LHSIt->valno) == RHS.reg) {
EliminatedLHSVals.push_back(LHSIt->valno);
// We know this entire LHS live range is okay, so skip it now.
@ -951,7 +964,7 @@ bool SimpleRegisterCoalescing::SimpleJoin(LiveInterval &LHS, LiveInterval &RHS){
// value number is defined where the RHS value number was.
const VNInfo *VNI = RHS.getValNumInfo(0);
LHSValNo->def = VNI->def;
LHSValNo->reg = VNI->reg;
LHSValNo->copy = VNI->copy;
// Okay, the final step is to loop over the RHS live intervals, adding them to
// the LHS.
@ -1012,8 +1025,8 @@ bool SimpleRegisterCoalescing::JoinIntervals(LiveInterval &LHS,
int RHSValID = -1;
VNInfo *RHSValNoInfo = NULL;
VNInfo *RHSValNoInfo0 = RHS.getValNumInfo(0);
unsigned RHSSrcReg = RHSValNoInfo0->reg;
if ((RHSSrcReg == 0 || rep(RHSSrcReg) != LHS.reg)) {
unsigned RHSSrcReg = li_->getVNInfoSourceReg(RHSValNoInfo0);
if ((RHSSrcReg == 0 || RHSSrcReg != LHS.reg)) {
// If RHS is not defined as a copy from the LHS, we can use simpler and
// faster checks to see if the live ranges are coalescable. This joiner
// can't swap the LHS/RHS intervals though.
@ -1039,8 +1052,8 @@ bool SimpleRegisterCoalescing::JoinIntervals(LiveInterval &LHS,
i != e; ++i) {
VNInfo *VNI = *i;
unsigned VN = VNI->id;
if (unsigned LHSSrcReg = VNI->reg) {
if (rep(LHSSrcReg) != RHS.reg) {
if (unsigned LHSSrcReg = li_->getVNInfoSourceReg(VNI)) {
if (LHSSrcReg != RHS.reg) {
// If this is not a copy from the RHS, its value number will be
// unmodified by the coalescing.
NewVNInfo[VN] = VNI;
@ -1078,13 +1091,12 @@ bool SimpleRegisterCoalescing::JoinIntervals(LiveInterval &LHS,
for (LiveInterval::vni_iterator i = LHS.vni_begin(), e = LHS.vni_end();
i != e; ++i) {
VNInfo *VNI = *i;
unsigned ValSrcReg = VNI->reg;
if (VNI->def == ~1U ||ValSrcReg == 0) // Src not defined by a copy?
if (VNI->def == ~1U || VNI->copy == 0) // Src not defined by a copy?
continue;
// DstReg is known to be a register in the LHS interval. If the src is
// from the RHS interval, we can use its value #.
if (rep(ValSrcReg) != RHS.reg)
if (li_->getVNInfoSourceReg(VNI) != RHS.reg)
continue;
// Figure out the value # from the RHS.
@ -1096,13 +1108,12 @@ bool SimpleRegisterCoalescing::JoinIntervals(LiveInterval &LHS,
for (LiveInterval::vni_iterator i = RHS.vni_begin(), e = RHS.vni_end();
i != e; ++i) {
VNInfo *VNI = *i;
unsigned ValSrcReg = VNI->reg;
if (VNI->def == ~1U || ValSrcReg == 0) // Src not defined by a copy?
if (VNI->def == ~1U || VNI->copy == 0) // Src not defined by a copy?
continue;
// DstReg is known to be a register in the RHS interval. If the src is
// from the LHS interval, we can use its value #.
if (rep(ValSrcReg) != LHS.reg)
if (li_->getVNInfoSourceReg(VNI) != LHS.reg)
continue;
// Figure out the value # from the LHS.
@ -1245,39 +1256,9 @@ bool CopyRecSort::operator()(CopyRec left, CopyRec right) const {
// Inner loops first.
if (left.LoopDepth > right.LoopDepth)
return false;
else if (left.LoopDepth == right.LoopDepth) {
else if (left.LoopDepth == right.LoopDepth)
if (left.isBackEdge && !right.isBackEdge)
return false;
else if (left.isBackEdge == right.isBackEdge) {
// Join virtuals to physical registers first.
bool LDstIsPhys = TargetRegisterInfo::isPhysicalRegister(left.DstReg);
bool LSrcIsPhys = TargetRegisterInfo::isPhysicalRegister(left.SrcReg);
bool LIsPhys = LDstIsPhys || LSrcIsPhys;
bool RDstIsPhys = TargetRegisterInfo::isPhysicalRegister(right.DstReg);
bool RSrcIsPhys = TargetRegisterInfo::isPhysicalRegister(right.SrcReg);
bool RIsPhys = RDstIsPhys || RSrcIsPhys;
if (LIsPhys && !RIsPhys)
return false;
else if (LIsPhys == RIsPhys) {
// Join shorter intervals first.
unsigned LSize = 0;
unsigned RSize = 0;
if (LIsPhys) {
LSize = LDstIsPhys ? 0 : JPQ->getRepIntervalSize(left.DstReg);
LSize += LSrcIsPhys ? 0 : JPQ->getRepIntervalSize(left.SrcReg);
RSize = RDstIsPhys ? 0 : JPQ->getRepIntervalSize(right.DstReg);
RSize += RSrcIsPhys ? 0 : JPQ->getRepIntervalSize(right.SrcReg);
} else {
LSize = std::min(JPQ->getRepIntervalSize(left.DstReg),
JPQ->getRepIntervalSize(left.SrcReg));
RSize = std::min(JPQ->getRepIntervalSize(right.DstReg),
JPQ->getRepIntervalSize(right.SrcReg));
}
if (LSize < RSize)
return false;
}
}
}
return true;
}
@ -1300,18 +1281,15 @@ void SimpleRegisterCoalescing::CopyCoalesceInMBB(MachineBasicBlock *MBB,
} else if (!tii_->isMoveInstr(*Inst, SrcReg, DstReg))
continue;
unsigned repSrcReg = rep(SrcReg);
unsigned repDstReg = rep(DstReg);
bool SrcIsPhys = TargetRegisterInfo::isPhysicalRegister(repSrcReg);
bool DstIsPhys = TargetRegisterInfo::isPhysicalRegister(repDstReg);
bool SrcIsPhys = TargetRegisterInfo::isPhysicalRegister(SrcReg);
bool DstIsPhys = TargetRegisterInfo::isPhysicalRegister(DstReg);
if (NewHeuristic) {
JoinQueue->push(CopyRec(Inst, SrcReg, DstReg, LoopDepth,
isBackEdgeCopy(Inst, DstReg)));
JoinQueue->push(CopyRec(Inst, LoopDepth, isBackEdgeCopy(Inst, DstReg)));
} else {
if (SrcIsPhys || DstIsPhys)
PhysCopies.push_back(CopyRec(Inst, SrcReg, DstReg, 0, false));
PhysCopies.push_back(CopyRec(Inst, 0, false));
else
VirtCopies.push_back(CopyRec(Inst, SrcReg, DstReg, 0, false));
VirtCopies.push_back(CopyRec(Inst, 0, false));
}
}
@ -1341,9 +1319,6 @@ void SimpleRegisterCoalescing::joinIntervals() {
if (NewHeuristic)
JoinQueue = new JoinPriorityQueue<CopyRecSort>(this);
JoinedLIs.resize(li_->getNumIntervals());
JoinedLIs.reset();
std::vector<CopyRec> TryAgainList;
if (loopInfo->begin() == loopInfo->end()) {
// If there are no loops in the function, join intervals in function order.
@ -1414,37 +1389,8 @@ void SimpleRegisterCoalescing::joinIntervals() {
}
}
// Some live range has been lengthened due to coalescing, eliminate the
// unnecessary kills.
int RegNum = JoinedLIs.find_first();
while (RegNum != -1) {
unsigned Reg = RegNum + TargetRegisterInfo::FirstVirtualRegister;
unsigned repReg = rep(Reg);
LiveInterval &LI = li_->getInterval(repReg);
LiveVariables::VarInfo& svi = lv_->getVarInfo(Reg);
for (unsigned i = 0, e = svi.Kills.size(); i != e; ++i) {
MachineInstr *Kill = svi.Kills[i];
// Suppose vr1 = op vr2, x
// and vr1 and vr2 are coalesced. vr2 should still be marked kill
// unless it is a two-address operand.
if (li_->isRemoved(Kill) || hasRegisterDef(Kill, repReg))
continue;
if (LI.liveAt(li_->getInstructionIndex(Kill) + InstrSlots::NUM))
unsetRegisterKill(Kill, repReg);
}
RegNum = JoinedLIs.find_next(RegNum);
}
if (NewHeuristic)
delete JoinQueue;
DOUT << "*** Register mapping ***\n";
for (unsigned i = 0, e = r2rMap_.size(); i != e; ++i)
if (r2rMap_[i]) {
DOUT << " reg " << i << " -> ";
DEBUG(printRegName(r2rMap_[i]));
DOUT << "\n";
}
}
/// Return true if the two specified registers belong to different register
@ -1456,24 +1402,38 @@ bool SimpleRegisterCoalescing::differingRegisterClasses(unsigned RegA,
if (TargetRegisterInfo::isPhysicalRegister(RegA)) {
assert(TargetRegisterInfo::isVirtualRegister(RegB) &&
"Shouldn't consider two physregs!");
return !mf_->getRegInfo().getRegClass(RegB)->contains(RegA);
return !mri_->getRegClass(RegB)->contains(RegA);
}
// Compare against the regclass for the second reg.
const TargetRegisterClass *RegClass = mf_->getRegInfo().getRegClass(RegA);
const TargetRegisterClass *RegClass = mri_->getRegClass(RegA);
if (TargetRegisterInfo::isVirtualRegister(RegB))
return RegClass != mf_->getRegInfo().getRegClass(RegB);
return RegClass != mri_->getRegClass(RegB);
else
return !RegClass->contains(RegB);
}
/// FIXME: Make use of MachineRegisterInfo use information for virtual registers.
/// lastRegisterUse - Returns the last use of the specific register between
/// cycles Start and End. It also returns the use operand by reference. It
/// returns NULL if there are no uses.
MachineInstr *
/// cycles Start and End or NULL if there are no uses.
MachineOperand *
SimpleRegisterCoalescing::lastRegisterUse(unsigned Start, unsigned End,
unsigned Reg, MachineOperand *&MOU) {
unsigned Reg, unsigned &UseIdx) const {
UseIdx = 0;
if (TargetRegisterInfo::isVirtualRegister(Reg)) {
MachineOperand *LastUse = NULL;
for (MachineRegisterInfo::use_iterator I = mri_->use_begin(Reg),
E = mri_->use_end(); I != E; ++I) {
MachineOperand &Use = I.getOperand();
MachineInstr *UseMI = Use.getParent();
unsigned Idx = li_->getInstructionIndex(UseMI);
if (Idx >= Start && Idx < End && Idx >= UseIdx) {
LastUse = &Use;
UseIdx = Idx;
}
}
return LastUse;
}
int e = (End-1) / InstrSlots::NUM * InstrSlots::NUM;
int s = Start;
while (e >= s) {
@ -1487,11 +1447,11 @@ SimpleRegisterCoalescing::lastRegisterUse(unsigned Start, unsigned End,
return NULL;
for (unsigned i = 0, NumOps = MI->getNumOperands(); i != NumOps; ++i) {
MachineOperand &MO = MI->getOperand(i);
if (MO.isRegister() && MO.isUse() && MO.getReg() &&
tri_->regsOverlap(rep(MO.getReg()), Reg)) {
MOU = &MO;
return MI;
MachineOperand &Use = MI->getOperand(i);
if (Use.isRegister() && Use.isUse() && Use.getReg() &&
tri_->regsOverlap(Use.getReg(), Reg)) {
UseIdx = e;
return &Use;
}
}
@ -1505,28 +1465,16 @@ SimpleRegisterCoalescing::lastRegisterUse(unsigned Start, unsigned End,
/// findDefOperand - Returns the MachineOperand that is a def of the specific
/// register. It returns NULL if the def is not found.
MachineOperand *SimpleRegisterCoalescing::findDefOperand(MachineInstr *MI,
unsigned Reg) {
unsigned Reg) const {
for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
MachineOperand &MO = MI->getOperand(i);
if (MO.isRegister() && MO.isDef() &&
tri_->regsOverlap(rep(MO.getReg()), Reg))
tri_->regsOverlap(MO.getReg(), Reg))
return &MO;
}
return NULL;
}
/// unsetRegisterKill - Unset IsKill property of all uses of specific register
/// of the specific instruction.
void SimpleRegisterCoalescing::unsetRegisterKill(MachineInstr *MI,
unsigned Reg) {
for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
MachineOperand &MO = MI->getOperand(i);
if (MO.isRegister() && MO.isKill() && MO.getReg() &&
tri_->regsOverlap(rep(MO.getReg()), Reg))
MO.setIsKill(false);
}
}
/// unsetRegisterKills - Unset IsKill property of all uses of specific register
/// between cycles Start and End.
void SimpleRegisterCoalescing::unsetRegisterKills(unsigned Start, unsigned End,
@ -1546,7 +1494,7 @@ void SimpleRegisterCoalescing::unsetRegisterKills(unsigned Start, unsigned End,
for (unsigned i = 0, NumOps = MI->getNumOperands(); i != NumOps; ++i) {
MachineOperand &MO = MI->getOperand(i);
if (MO.isRegister() && MO.isKill() && MO.getReg() &&
tri_->regsOverlap(rep(MO.getReg()), Reg)) {
tri_->regsOverlap(MO.getReg(), Reg)) {
MO.setIsKill(false);
}
}
@ -1555,18 +1503,6 @@ void SimpleRegisterCoalescing::unsetRegisterKills(unsigned Start, unsigned End,
}
}
/// hasRegisterDef - True if the instruction defines the specific register.
///
bool SimpleRegisterCoalescing::hasRegisterDef(MachineInstr *MI, unsigned Reg) {
for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
MachineOperand &MO = MI->getOperand(i);
if (MO.isRegister() && MO.isDef() &&
tri_->regsOverlap(rep(MO.getReg()), Reg))
return true;
}
return false;
}
void SimpleRegisterCoalescing::printRegName(unsigned reg) const {
if (TargetRegisterInfo::isPhysicalRegister(reg))
cerr << tri_->getName(reg);
@ -1575,14 +1511,7 @@ void SimpleRegisterCoalescing::printRegName(unsigned reg) const {
}
void SimpleRegisterCoalescing::releaseMemory() {
for (unsigned i = 0, e = r2rMap_.size(); i != e; ++i)
r2rRevMap_[i].clear();
r2rRevMap_.clear();
r2rMap_.clear();
JoinedLIs.clear();
SubRegIdxes.clear();
JoinedCopies.clear();
ChangedCopies.clear();
}
static bool isZeroLengthInterval(LiveInterval *li) {
@ -1613,12 +1542,7 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) {
allocatableRCRegs_.insert(std::make_pair(*I,
tri_->getAllocatableSet(fn, *I)));
MachineRegisterInfo &RegInfo = mf_->getRegInfo();
r2rMap_.grow(RegInfo.getLastVirtReg());
r2rRevMap_.grow(RegInfo.getLastVirtReg());
// Join (coalesce) intervals if requested.
IndexedMap<unsigned, VirtReg2IndexFunctor> RegSubIdxMap;
if (EnableJoining) {
joinIntervals();
DOUT << "********** INTERVALS POST JOINING **********\n";
@ -1634,19 +1558,10 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) {
(*I)->eraseFromParent();
++numPeep;
}
// Transfer sub-registers info to MachineRegisterInfo now that coalescing
// information is complete.
RegSubIdxMap.grow(RegInfo.getLastVirtReg()+1);
while (!SubRegIdxes.empty()) {
std::pair<unsigned, unsigned> RI = SubRegIdxes.back();
SubRegIdxes.pop_back();
RegSubIdxMap[RI.first] = RI.second;
}
}
// perform a final pass over the instructions and compute spill
// weights, coalesce virtual registers and remove identity moves.
// Perform a final pass over the instructions and compute spill weights
// and remove identity moves.
for (MachineFunction::iterator mbbi = mf_->begin(), mbbe = mf_->end();
mbbi != mbbe; ++mbbi) {
MachineBasicBlock* mbb = mbbi;
@ -1655,11 +1570,10 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) {
for (MachineBasicBlock::iterator mii = mbb->begin(), mie = mbb->end();
mii != mie; ) {
// if the move will be an identity move delete it
unsigned srcReg, dstReg, RegRep;
if (tii_->isMoveInstr(*mii, srcReg, dstReg) &&
(RegRep = rep(srcReg)) == rep(dstReg)) {
unsigned srcReg, dstReg;
if (tii_->isMoveInstr(*mii, srcReg, dstReg) && srcReg == dstReg) {
// remove from def list
LiveInterval &RegInt = li_->getOrCreateInterval(RegRep);
LiveInterval &RegInt = li_->getOrCreateInterval(srcReg);
MachineOperand *MO = mii->findRegisterDefOperand(dstReg);
// If def of this move instruction is dead, remove its live range from
// the destination register's live interval.
@ -1668,7 +1582,7 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) {
LiveInterval::iterator MLR = RegInt.FindLiveRangeContaining(MoveIdx);
RegInt.removeRange(MLR->start, MoveIdx+1, true);
if (RegInt.empty())
li_->removeInterval(RegRep);
li_->removeInterval(srcReg);
}
li_->RemoveMachineInstrFromMaps(mii);
mii = mbbi->erase(mii);
@ -1679,17 +1593,7 @@ bool SimpleRegisterCoalescing::runOnMachineFunction(MachineFunction &fn) {
const MachineOperand &mop = mii->getOperand(i);
if (mop.isRegister() && mop.getReg() &&
TargetRegisterInfo::isVirtualRegister(mop.getReg())) {
// replace register with representative register
unsigned OrigReg = mop.getReg();
unsigned reg = rep(OrigReg);
unsigned SubIdx = RegSubIdxMap[OrigReg];
if (SubIdx && TargetRegisterInfo::isPhysicalRegister(reg))
mii->getOperand(i).setReg(tri_->getSubReg(reg, SubIdx));
else {
mii->getOperand(i).setReg(reg);
mii->getOperand(i).setSubReg(SubIdx);
}
unsigned reg = mop.getReg();
// Multiple uses of reg by the same instruction. It should not
// contribute to spill weight again.
if (UniqueUses.count(reg) != 0)

View File

@ -34,12 +34,10 @@ namespace llvm {
///
struct CopyRec {
MachineInstr *MI;
unsigned SrcReg, DstReg;
unsigned LoopDepth;
bool isBackEdge;
CopyRec(MachineInstr *mi, unsigned src, unsigned dst, unsigned depth,
bool be)
: MI(mi), SrcReg(src), DstReg(dst), LoopDepth(depth), isBackEdge(be) {};
CopyRec(MachineInstr *mi, unsigned depth, bool be)
: MI(mi), LoopDepth(depth), isBackEdge(be) {};
};
template<class SF> class JoinPriorityQueue;
@ -67,7 +65,7 @@ namespace llvm {
bool empty() const { return Queue.empty(); }
void push(CopyRec R) { Queue.push(R); }
CopyRec pop() {
if (empty()) return CopyRec(0, 0, 0, 0, false);
if (empty()) return CopyRec(0, 0, false);
CopyRec R = Queue.top();
Queue.pop();
return R;
@ -80,7 +78,7 @@ namespace llvm {
class SimpleRegisterCoalescing : public MachineFunctionPass,
public RegisterCoalescer {
MachineFunction* mf_;
const MachineRegisterInfo* mri_;
MachineRegisterInfo* mri_;
const TargetMachine* tm_;
const TargetRegisterInfo* tri_;
const TargetInstrInfo* tii_;
@ -91,33 +89,14 @@ namespace llvm {
BitVector allocatableRegs_;
DenseMap<const TargetRegisterClass*, BitVector> allocatableRCRegs_;
/// r2rMap_ - Map from register to its representative register.
///
IndexedMap<unsigned> r2rMap_;
/// r2rRevMap_ - Reverse of r2rRevMap_, i.e. Map from register to all
/// the registers it represent.
IndexedMap<std::vector<unsigned> > r2rRevMap_;
/// JoinQueue - A priority queue of copy instructions the coalescer is
/// going to process.
JoinPriorityQueue<CopyRecSort> *JoinQueue;
/// JoinedLIs - Keep track which register intervals have been coalesced
/// with other intervals.
BitVector JoinedLIs;
/// SubRegIdxes - Keep track of sub-register and indexes.
///
SmallVector<std::pair<unsigned, unsigned>, 32> SubRegIdxes;
/// JoinedCopies - Keep track of copies eliminated due to coalescing.
///
SmallPtrSet<MachineInstr*, 32> JoinedCopies;
/// ChangedCopies - Keep track of copies modified due to commuting.
SmallPtrSet<MachineInstr*, 32> ChangedCopies;
public:
static char ID; // Pass identification, replacement for typeid
SimpleRegisterCoalescing() : MachineFunctionPass((intptr_t)&ID) {}
@ -146,7 +125,6 @@ namespace llvm {
/// getRepIntervalSize - Called from join priority queue sorting function.
/// It returns the size of the interval that represent the given register.
unsigned getRepIntervalSize(unsigned Reg) {
Reg = rep(Reg);
if (!li_->hasInterval(Reg))
return 0;
return li_->getInterval(Reg).getSize();
@ -193,51 +171,48 @@ namespace llvm {
bool differingRegisterClasses(unsigned RegA, unsigned RegB) const;
/// AdjustCopiesBackFrom - We found a non-trivially-coalescable copy. If
/// the source value number is defined by a copy from the destination reg
/// see if we can merge these two destination reg valno# into a single
/// value number, eliminating a copy.
bool AdjustCopiesBackFrom(LiveInterval &IntA, LiveInterval &IntB,
MachineInstr *CopyMI);
/// RemoveCopyByCommutingDef - We found a non-trivially-coalescable copy.
/// If the source value number is defined by a commutable instruction and
/// its other operand is coalesced to the copy dest register, see if we
/// can transform the copy into a noop by commuting the definition.
bool RemoveCopyByCommutingDef(LiveInterval &IntA, LiveInterval &IntB,
MachineInstr *CopyMI);
/// AddSubRegIdxPairs - Recursively mark all the registers represented by the
/// specified register as sub-registers. The recursion level is expected to be
/// shallow.
void AddSubRegIdxPairs(unsigned Reg, unsigned SubIdx);
/// RemoveUnnecessaryKills - Remove kill markers that are no longer accurate
/// due to live range lengthening as the result of coalescing.
void RemoveUnnecessaryKills(unsigned Reg, LiveInterval &LI);
/// isBackEdgeCopy - Returns true if CopyMI is a back edge copy.
///
bool isBackEdgeCopy(MachineInstr *CopyMI, unsigned DstReg);
/// UpdateRegDefsUses - Replace all defs and uses of SrcReg to DstReg and
/// update the subregister number if it is not zero. If DstReg is a
/// physical register and the existing subregister number of the def / use
/// being updated is not zero, make sure to set it to the correct physical
/// subregister.
void UpdateRegDefsUses(unsigned SrcReg, unsigned DstReg, unsigned SubIdx);
/// lastRegisterUse - Returns the last use of the specific register between
/// cycles Start and End. It also returns the use operand by reference. It
/// returns NULL if there are no uses.
MachineInstr *lastRegisterUse(unsigned Start, unsigned End, unsigned Reg,
MachineOperand *&MOU);
/// cycles Start and End or NULL if there are no uses.
MachineOperand *lastRegisterUse(unsigned Start, unsigned End, unsigned Reg,
unsigned &LastUseIdx) const;
/// findDefOperand - Returns the MachineOperand that is a def of the specific
/// register. It returns NULL if the def is not found.
MachineOperand *findDefOperand(MachineInstr *MI, unsigned Reg);
/// unsetRegisterKill - Unset IsKill property of all uses of the specific
/// register of the specific instruction.
void unsetRegisterKill(MachineInstr *MI, unsigned Reg);
MachineOperand *findDefOperand(MachineInstr *MI, unsigned Reg) const;
/// unsetRegisterKills - Unset IsKill property of all uses of specific register
/// between cycles Start and End.
void unsetRegisterKills(unsigned Start, unsigned End, unsigned Reg);
/// hasRegisterDef - True if the instruction defines the specific register.
///
bool hasRegisterDef(MachineInstr *MI, unsigned Reg);
/// rep - returns the representative of this register
unsigned rep(unsigned Reg) {
unsigned Rep = r2rMap_[Reg];
if (Rep)
return r2rMap_[Reg] = rep(Rep);
return Reg;
}
void printRegName(unsigned reg) const;
};