RegisterCoalescer: Cleanup comment style
- Consistently put comments above the function declaration, not the
  definition. To achieve this some duplicate comments got merged and some
  comment parts describing implementation details got moved into their
  functions.
- Consistently use doxygen comments above functions.
- Do not use doxygen comments inside functions.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@226351 91177308-0d34-0410-b5e6-96231b3b80d8
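To illustrate the convention this change enforces, here is a minimal, hypothetical C++ sketch (the class and member names are invented for illustration and do not appear in the patch): the descriptive comment becomes a doxygen '///' block above the declaration, while notes about implementation details stay inside the function body as plain '//' comments.

namespace {
  class ExampleCoalescer {
    /// Attempt to merge the live intervals of SrcReg and DstReg.
    /// Returns true if the copy was coalesced away.
    bool joinRegs(unsigned SrcReg, unsigned DstReg);
  };
} // end anonymous namespace

bool ExampleCoalescer::joinRegs(unsigned SrcReg, unsigned DstReg) {
  // Implementation details use plain '//' comments, not doxygen '///'.
  if (SrcReg == DstReg)
    return false;
  // ... the actual joining logic would go here ...
  return true;
}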
@@ -58,12 +58,12 @@ EnableJoining("join-liveintervals",
               cl::desc("Coalesce copies (default=true)"),
               cl::init(true));
 
-// Temporary flag to test critical edge unsplitting.
+/// Temporary flag to test critical edge unsplitting.
 static cl::opt<bool>
 EnableJoinSplits("join-splitedges",
   cl::desc("Coalesce copies on split edges (default=subtarget)"), cl::Hidden);
 
-// Temporary flag to test global copy optimization.
+/// Temporary flag to test global copy optimization.
 static cl::opt<cl::boolOrDefault>
 EnableGlobalCopies("join-globalcopies",
   cl::desc("Coalesce copies that span blocks (default=subtarget)"),
@@ -120,7 +120,7 @@ namespace {
     /// Recursively eliminate dead defs in DeadDefs.
     void eliminateDeadDefs();
 
-    /// LiveRangeEdit callback.
+    /// LiveRangeEdit callback for eliminateDeadDefs().
     void LRE_WillEraseInstruction(MachineInstr *MI) override;
 
     /// Coalesce the LocalWorkList.
@@ -133,16 +133,15 @@ namespace {
     /// copies that cannot yet be coalesced into WorkList.
     void copyCoalesceInMBB(MachineBasicBlock *MBB);
 
-    /// Try to coalesce all copies in CurrList. Return
-    /// true if any progress was made.
+    /// Tries to coalesce all copies in CurrList. Returns true if any progress
+    /// was made.
     bool copyCoalesceWorkList(MutableArrayRef<MachineInstr*> CurrList);
 
-    /// Attempt to join intervals corresponding to SrcReg/DstReg,
-    /// which are the src/dst of the copy instruction CopyMI. This returns
-    /// true if the copy was successfully coalesced away. If it is not
-    /// currently possible to coalesce this interval, but it may be possible if
-    /// other things get coalesced, then it returns true by reference in
-    /// 'Again'.
+    /// Attempt to join intervals corresponding to SrcReg/DstReg, which are the
+    /// src/dst of the copy instruction CopyMI. This returns true if the copy
+    /// was successfully coalesced away. If it is not currently possible to
+    /// coalesce this interval, but it may be possible if other things get
+    /// coalesced, then it returns true by reference in 'Again'.
     bool joinCopy(MachineInstr *TheCopy, bool &Again);
 
     /// Attempt to join these two intervals. On failure, this
@@ -169,10 +168,10 @@ namespace {
     void joinSubRegRanges(LiveRange &LRange, LiveRange &RRange,
                           unsigned LaneMask, const CoalescerPair &CP);
 
-    /// We found a non-trivially-coalescable copy. If
-    /// the source value number is defined by a copy from the destination reg
-    /// see if we can merge these two destination reg valno# into a single
-    /// value number, eliminating a copy.
+    /// We found a non-trivially-coalescable copy. If the source value number is
+    /// defined by a copy from the destination reg see if we can merge these two
+    /// destination reg valno# into a single value number, eliminating a copy.
+    /// This returns true if an interval was modified.
     bool adjustCopiesBackFrom(const CoalescerPair &CP, MachineInstr *CopyMI);
 
     /// Return true if there are definitions of IntB
@@ -184,6 +183,7 @@ namespace {
     /// If the source value number is defined by a commutable instruction and
     /// its other operand is coalesced to the copy dest register, see if we
     /// can transform the copy into a noop by commuting the definition.
+    /// This returns true if an interval was modified.
     bool removeCopyByCommutingDef(const CoalescerPair &CP,MachineInstr *CopyMI);
 
     /// If the source of a copy is defined by a
@@ -191,21 +191,21 @@ namespace {
     bool reMaterializeTrivialDef(CoalescerPair &CP, MachineInstr *CopyMI,
                                  bool &IsDefCopy);
 
-    /// Return true if a physreg copy should be joined.
+    /// Return true if a copy involving a physreg should be joined.
     bool canJoinPhys(const CoalescerPair &CP);
 
-    /// Replace all defs and uses of SrcReg to DstReg and
-    /// update the subregister number if it is not zero. If DstReg is a
-    /// physical register and the existing subregister number of the def / use
-    /// being updated is not zero, make sure to set it to the correct physical
-    /// subregister.
+    /// Replace all defs and uses of SrcReg to DstReg and update the subregister
+    /// number if it is not zero. If DstReg is a physical register and the
+    /// existing subregister number of the def / use being updated is not zero,
+    /// make sure to set it to the correct physical subregister.
     void updateRegDefsUses(unsigned SrcReg, unsigned DstReg, unsigned SubIdx);
 
     /// Handle copies of undef values.
+    /// Returns true if @p CopyMI was a copy of an undef value and eliminated.
     bool eliminateUndefCopy(MachineInstr *CopyMI);
 
   public:
-    static char ID; // Class identification, replacement for typeinfo
+    static char ID; ///< Class identification, replacement for typeinfo
     RegisterCoalescer() : MachineFunctionPass(ID) {
       initializeRegisterCoalescerPass(*PassRegistry::getPassRegistry());
     }
@@ -220,7 +220,7 @@ namespace {
     /// Implement the dump method.
     void print(raw_ostream &O, const Module* = nullptr) const override;
   };
-} /// end anonymous namespace
+} // end anonymous namespace
 
 char &llvm::RegisterCoalescerID = RegisterCoalescer::ID;
 
@@ -254,11 +254,11 @@ static bool isMoveInstr(const TargetRegisterInfo &tri, const MachineInstr *MI,
   return true;
 }
 
-// Return true if this block should be vacated by the coalescer to eliminate
-// branches. The important cases to handle in the coalescer are critical edges
-// split during phi elimination which contain only copies. Simple blocks that
-// contain non-branches should also be vacated, but this can be handled by an
-// earlier pass similar to early if-conversion.
+/// Return true if this block should be vacated by the coalescer to eliminate
+/// branches. The important cases to handle in the coalescer are critical edges
+/// split during phi elimination which contain only copies. Simple blocks that
+/// contain non-branches should also be vacated, but this can be handled by an
+/// earlier pass similar to early if-conversion.
 static bool isSplitEdge(const MachineBasicBlock *MBB) {
   if (MBB->pred_size() != 1 || MBB->succ_size() != 1)
     return false;
@@ -423,27 +423,11 @@ void RegisterCoalescer::eliminateDeadDefs() {
                 nullptr, this).eliminateDeadDefs(DeadDefs);
 }
 
-// Callback from eliminateDeadDefs().
 void RegisterCoalescer::LRE_WillEraseInstruction(MachineInstr *MI) {
   // MI may be in WorkList. Make sure we don't visit it.
   ErasedInstrs.insert(MI);
 }
 
-/// We found a non-trivially-coalescable copy with IntA
-/// being the source and IntB being the dest, thus this defines a value number
-/// in IntB. If the source value number (in IntA) is defined by a copy from B,
-/// see if we can merge these two pieces of B into a single value number,
-/// eliminating a copy. For example:
-///
-/// A3 = B0
-///  ...
-/// B1 = A3 <- this copy
-///
-/// In this case, B0 can be extended to where the B1 copy lives, allowing the B1
-/// value number to be replaced with B0 (which simplifies the B liveinterval).
-///
-/// This returns true if an interval was modified.
-///
 bool RegisterCoalescer::adjustCopiesBackFrom(const CoalescerPair &CP,
                                              MachineInstr *CopyMI) {
   assert(!CP.isPartial() && "This doesn't work for partial copies.");
@@ -455,6 +439,20 @@ bool RegisterCoalescer::adjustCopiesBackFrom(const CoalescerPair &CP,
     LIS->getInterval(CP.isFlipped() ? CP.getSrcReg() : CP.getDstReg());
   SlotIndex CopyIdx = LIS->getInstructionIndex(CopyMI).getRegSlot();
 
+  // We have a non-trivially-coalescable copy with IntA being the source and
+  // IntB being the dest, thus this defines a value number in IntB. If the
+  // source value number (in IntA) is defined by a copy from B, see if we can
+  // merge these two pieces of B into a single value number, eliminating a copy.
+  // For example:
+  //
+  // A3 = B0
+  //  ...
+  // B1 = A3 <- this copy
+  //
+  // In this case, B0 can be extended to where the B1 copy lives, allowing the
+  // B1 value number to be replaced with B0 (which simplifies the B
+  // liveinterval).
+
   // BValNo is a value number in B that is defined by a copy from A. 'B1' in
   // the example above.
   LiveInterval::iterator BS = IntB.FindSegmentContaining(CopyIdx);
@@ -544,8 +542,6 @@ bool RegisterCoalescer::adjustCopiesBackFrom(const CoalescerPair &CP,
   return true;
 }
 
-/// Return true if there are definitions of IntB
-/// other than BValNo val# that can reach uses of AValno val# of IntA.
 bool RegisterCoalescer::hasOtherReachingDefs(LiveInterval &IntA,
                                              LiveInterval &IntB,
                                              VNInfo *AValNo,
@@ -585,29 +581,6 @@ static void addSegmentsWithValNo(LiveRange &Dst, VNInfo *DstValNo,
   }
 }
 
-/// We found a non-trivially-coalescable copy with
-/// IntA being the source and IntB being the dest, thus this defines a value
-/// number in IntB. If the source value number (in IntA) is defined by a
-/// commutable instruction and its other operand is coalesced to the copy dest
-/// register, see if we can transform the copy into a noop by commuting the
-/// definition. For example,
-///
-/// A3 = op A2 B0<kill>
-/// ...
-/// B1 = A3 <- this copy
-/// ...
-///    = op A3 <- more uses
-///
-/// ==>
-///
-/// B2 = op B0 A2<kill>
-/// ...
-/// B1 = B2 <- now an identity copy
-/// ...
-///    = op B2 <- more uses
-///
-/// This returns true if an interval was modified.
-///
 bool RegisterCoalescer::removeCopyByCommutingDef(const CoalescerPair &CP,
                                                  MachineInstr *CopyMI) {
   assert(!CP.isPhys());
@@ -617,6 +590,26 @@ bool RegisterCoalescer::removeCopyByCommutingDef(const CoalescerPair &CP,
   LiveInterval &IntB =
     LIS->getInterval(CP.isFlipped() ? CP.getSrcReg() : CP.getDstReg());
 
+  // We found a non-trivially-coalescable copy with IntA being the source and
+  // IntB being the dest, thus this defines a value number in IntB. If the
+  // source value number (in IntA) is defined by a commutable instruction and
+  // its other operand is coalesced to the copy dest register, see if we can
+  // transform the copy into a noop by commuting the definition. For example,
+  //
+  // A3 = op A2 B0<kill>
+  // ...
+  // B1 = A3 <- this copy
+  // ...
+  //    = op A3 <- more uses
+  //
+  // ==>
+  //
+  // B2 = op B0 A2<kill>
+  // ...
+  // B1 = B2 <- now an identity copy
+  // ...
+  //    = op B2 <- more uses
+
   // BValNo is a value number in B that is defined by a copy from A. 'B1' in
   // the example above.
   SlotIndex CopyIdx = LIS->getInstructionIndex(CopyMI).getRegSlot();
@@ -833,8 +826,6 @@ bool RegisterCoalescer::removeCopyByCommutingDef(const CoalescerPair &CP,
   return true;
 }
 
-/// If the source of a copy is defined by a trivial
-/// computation, replace the copy by rematerialize the definition.
 bool RegisterCoalescer::reMaterializeTrivialDef(CoalescerPair &CP,
                                                 MachineInstr *CopyMI,
                                                 bool &IsDefCopy) {
@@ -1029,14 +1020,15 @@ static void removeUndefValue(LiveRange &LR, SlotIndex At)
   LR.removeValNo(VNInfo);
 }
 
-/// ProcessImpicitDefs may leave some copies of <undef>
-/// values, it only removes local variables. When we have a copy like:
-///
-///   %vreg1 = COPY %vreg2<undef>
-///
-/// We delete the copy and remove the corresponding value number from %vreg1.
-/// Any uses of that value number are marked as <undef>.
 bool RegisterCoalescer::eliminateUndefCopy(MachineInstr *CopyMI) {
+  // ProcessImpicitDefs may leave some copies of <undef> values, it only removes
+  // local variables. When we have a copy like:
+  //
+  //   %vreg1 = COPY %vreg2<undef>
+  //
+  // We delete the copy and remove the corresponding value number from %vreg1.
+  // Any uses of that value number are marked as <undef>.
+
   // Note that we do not query CoalescerPair here but redo isMoveInstr as the
   // CoalescerPair may have a new register class with adjusted subreg indices
   // at this point.
@@ -1106,10 +1098,6 @@ bool RegisterCoalescer::eliminateUndefCopy(MachineInstr *CopyMI) {
   return true;
 }
 
-/// Replace all defs and uses of SrcReg to DstReg and update the subregister
-/// number if it is not zero. If DstReg is a physical register and the existing
-/// subregister number of the def / use being updated is not zero, make sure to
-/// set it to the correct physical subregister.
 void RegisterCoalescer::updateRegDefsUses(unsigned SrcReg,
                                           unsigned DstReg,
                                           unsigned SubIdx) {
@@ -1198,11 +1186,10 @@ void RegisterCoalescer::updateRegDefsUses(unsigned SrcReg,
   }
 }
 
-/// Return true if a copy involving a physreg should be joined.
 bool RegisterCoalescer::canJoinPhys(const CoalescerPair &CP) {
-  /// Always join simple intervals that are defined by a single copy from a
-  /// reserved register. This doesn't increase register pressure, so it is
-  /// always beneficial.
+  // Always join simple intervals that are defined by a single copy from a
+  // reserved register. This doesn't increase register pressure, so it is
+  // always beneficial.
   if (!MRI->isReserved(CP.getDstReg())) {
     DEBUG(dbgs() << "\tCan only merge into reserved registers.\n");
     return false;
@@ -1216,11 +1203,6 @@ bool RegisterCoalescer::canJoinPhys(const CoalescerPair &CP) {
   return false;
 }
 
-/// Attempt to join intervals corresponding to SrcReg/DstReg,
-/// which are the src/dst of the copy instruction CopyMI. This returns true
-/// if the copy was successfully coalesced away. If it is not currently
-/// possible to coalesce this interval, but it may be possible if other
-/// things get coalesced, then it returns true by reference in 'Again'.
 bool RegisterCoalescer::joinCopy(MachineInstr *CopyMI, bool &Again) {
 
   Again = false;
@@ -1424,7 +1406,6 @@ bool RegisterCoalescer::joinCopy(MachineInstr *CopyMI, bool &Again) {
   return true;
 }
 
-/// Attempt joining with a reserved physreg.
 bool RegisterCoalescer::joinReservedPhysReg(CoalescerPair &CP) {
   assert(CP.isPhys() && "Must be a physreg copy");
   assert(MRI->isReserved(CP.getDstReg()) && "Not a reserved register");
@@ -1580,11 +1561,12 @@ class JoinVals {
   /// (Main) register we work on.
   const unsigned Reg;
 
-  // Reg (and therefore the values in this liverange) will end up as subregister
-  // SubIdx in the coalesced register. Either CP.DstIdx or CP.SrcIdx.
+  /// Reg (and therefore the values in this liverange) will end up as
+  /// subregister SubIdx in the coalesced register. Either CP.DstIdx or
+  /// CP.SrcIdx.
   const unsigned SubIdx;
-  // The LaneMask that this liverange will occupy the coalesced register. May be
-  // smaller than the lanemask produced by SubIdx when merging subranges.
+  /// The LaneMask that this liverange will occupy the coalesced register. May
+  /// be smaller than the lanemask produced by SubIdx when merging subranges.
   const unsigned LaneMask;
 
   /// This is true when joining sub register ranges, false when joining main
@@ -1593,7 +1575,7 @@ class JoinVals {
   /// Whether the current LiveInterval tracks subregister liveness.
   const bool TrackSubRegLiveness;
 
-  // Values that will be present in the final live range.
+  /// Values that will be present in the final live range.
   SmallVectorImpl<VNInfo*> &NewVNInfo;
 
   const CoalescerPair &CP;
@@ -1601,75 +1583,75 @@ class JoinVals {
   SlotIndexes *Indexes;
   const TargetRegisterInfo *TRI;
 
-  // Value number assignments. Maps value numbers in LI to entries in NewVNInfo.
-  // This is suitable for passing to LiveInterval::join().
+  /// Value number assignments. Maps value numbers in LI to entries in
+  /// NewVNInfo. This is suitable for passing to LiveInterval::join().
   SmallVector<int, 8> Assignments;
 
-  // Conflict resolution for overlapping values.
+  /// Conflict resolution for overlapping values.
   enum ConflictResolution {
-    // No overlap, simply keep this value.
+    /// No overlap, simply keep this value.
    CR_Keep,
 
-    // Merge this value into OtherVNI and erase the defining instruction.
-    // Used for IMPLICIT_DEF, coalescable copies, and copies from external
-    // values.
+    /// Merge this value into OtherVNI and erase the defining instruction.
+    /// Used for IMPLICIT_DEF, coalescable copies, and copies from external
+    /// values.
    CR_Erase,
 
-    // Merge this value into OtherVNI but keep the defining instruction.
-    // This is for the special case where OtherVNI is defined by the same
-    // instruction.
+    /// Merge this value into OtherVNI but keep the defining instruction.
+    /// This is for the special case where OtherVNI is defined by the same
+    /// instruction.
    CR_Merge,
 
-    // Keep this value, and have it replace OtherVNI where possible. This
-    // complicates value mapping since OtherVNI maps to two different values
-    // before and after this def.
-    // Used when clobbering undefined or dead lanes.
+    /// Keep this value, and have it replace OtherVNI where possible. This
+    /// complicates value mapping since OtherVNI maps to two different values
+    /// before and after this def.
+    /// Used when clobbering undefined or dead lanes.
    CR_Replace,
 
-    // Unresolved conflict. Visit later when all values have been mapped.
+    /// Unresolved conflict. Visit later when all values have been mapped.
    CR_Unresolved,
 
-    // Unresolvable conflict. Abort the join.
+    /// Unresolvable conflict. Abort the join.
    CR_Impossible
   };
 
-  // Per-value info for LI. The lane bit masks are all relative to the final
-  // joined register, so they can be compared directly between SrcReg and
-  // DstReg.
+  /// Per-value info for LI. The lane bit masks are all relative to the final
+  /// joined register, so they can be compared directly between SrcReg and
+  /// DstReg.
   struct Val {
     ConflictResolution Resolution;
 
-    // Lanes written by this def, 0 for unanalyzed values.
+    /// Lanes written by this def, 0 for unanalyzed values.
     unsigned WriteLanes;
 
-    // Lanes with defined values in this register. Other lanes are undef and
-    // safe to clobber.
+    /// Lanes with defined values in this register. Other lanes are undef and
+    /// safe to clobber.
    unsigned ValidLanes;
 
-    // Value in LI being redefined by this def.
+    /// Value in LI being redefined by this def.
    VNInfo *RedefVNI;
 
-    // Value in the other live range that overlaps this def, if any.
+    /// Value in the other live range that overlaps this def, if any.
    VNInfo *OtherVNI;
 
-    // Is this value an IMPLICIT_DEF that can be erased?
-    //
-    // IMPLICIT_DEF values should only exist at the end of a basic block that
-    // is a predecessor to a phi-value. These IMPLICIT_DEF instructions can be
-    // safely erased if they are overlapping a live value in the other live
-    // interval.
-    //
-    // Weird control flow graphs and incomplete PHI handling in
-    // ProcessImplicitDefs can very rarely create IMPLICIT_DEF values with
-    // longer live ranges. Such IMPLICIT_DEF values should be treated like
-    // normal values.
+    /// Is this value an IMPLICIT_DEF that can be erased?
+    ///
+    /// IMPLICIT_DEF values should only exist at the end of a basic block that
+    /// is a predecessor to a phi-value. These IMPLICIT_DEF instructions can be
+    /// safely erased if they are overlapping a live value in the other live
+    /// interval.
+    ///
+    /// Weird control flow graphs and incomplete PHI handling in
+    /// ProcessImplicitDefs can very rarely create IMPLICIT_DEF values with
+    /// longer live ranges. Such IMPLICIT_DEF values should be treated like
+    /// normal values.
    bool ErasableImplicitDef;
 
-    // True when the live range of this value will be pruned because of an
-    // overlapping CR_Replace value in the other live range.
+    /// True when the live range of this value will be pruned because of an
+    /// overlapping CR_Replace value in the other live range.
    bool Pruned;
 
-    // True once Pruned above has been computed.
+    /// True once Pruned above has been computed.
    bool PrunedComputed;
 
    Val() : Resolution(CR_Keep), WriteLanes(0), ValidLanes(0),
@@ -1679,17 +1661,61 @@ class JoinVals {
     bool isAnalyzed() const { return WriteLanes != 0; }
   };
 
-  // One entry per value number in LI.
+  /// One entry per value number in LI.
   SmallVector<Val, 8> Vals;
 
+  /// Compute the bitmask of lanes actually written by DefMI.
+  /// Set Redef if there are any partial register definitions that depend on the
+  /// previous value of the register.
   unsigned computeWriteLanes(const MachineInstr *DefMI, bool &Redef) const;
+
+  /// Find the ultimate value that VNI was copied from.
   std::pair<const VNInfo*,unsigned> followCopyChain(const VNInfo *VNI) const;
+
   bool valuesIdentical(VNInfo *Val0, VNInfo *Val1, const JoinVals &Other) const;
+
+  /// Analyze ValNo in this live range, and set all fields of Vals[ValNo].
+  /// Return a conflict resolution when possible, but leave the hard cases as
+  /// CR_Unresolved.
+  /// Recursively calls computeAssignment() on this and Other, guaranteeing that
+  /// both OtherVNI and RedefVNI have been analyzed and mapped before returning.
+  /// The recursion always goes upwards in the dominator tree, making loops
+  /// impossible.
   ConflictResolution analyzeValue(unsigned ValNo, JoinVals &Other);
+
+  /// Compute the value assignment for ValNo in RI.
+  /// This may be called recursively by analyzeValue(), but never for a ValNo on
+  /// the stack.
   void computeAssignment(unsigned ValNo, JoinVals &Other);
+
+  /// Assuming ValNo is going to clobber some valid lanes in Other.LR, compute
+  /// the extent of the tainted lanes in the block.
+  ///
+  /// Multiple values in Other.LR can be affected since partial redefinitions
+  /// can preserve previously tainted lanes.
+  ///
+  ///   1 %dst = VLOAD <-- Define all lanes in %dst
+  ///   2 %src = FOO <-- ValNo to be joined with %dst:ssub0
+  ///   3 %dst:ssub1 = BAR <-- Partial redef doesn't clear taint in ssub0
+  ///   4 %dst:ssub0 = COPY %src <-- Conflict resolved, ssub0 wasn't read
+  ///
+  /// For each ValNo in Other that is affected, add an (EndIndex, TaintedLanes)
+  /// entry to TaintedVals.
+  ///
+  /// Returns false if the tainted lanes extend beyond the basic block.
   bool taintExtent(unsigned, unsigned, JoinVals&,
                    SmallVectorImpl<std::pair<SlotIndex, unsigned> >&);
+
+  /// Return true if MI uses any of the given Lanes from Reg.
+  /// This does not include partial redefinitions of Reg.
   bool usesLanes(const MachineInstr *MI, unsigned, unsigned, unsigned) const;
+
+  /// Determine if ValNo is a copy of a value number in LR or Other.LR that will
+  /// be pruned:
+  ///
+  ///   %dst = COPY %src
+  ///   %src = COPY %dst <-- This value to be pruned.
+  ///   %dst = COPY %src <-- This value is a copy of a pruned value.
   bool isPrunedValue(unsigned ValNo, JoinVals &Other);
 
 public:
@@ -1717,9 +1743,9 @@ public:
   void pruneValues(JoinVals &Other, SmallVectorImpl<SlotIndex> &EndPoints,
                    bool changeInstrs);
 
-  // Removes subranges starting at copies that get removed. This sometimes
-  // happens when undefined subranges are copied around. These ranges contain
-  // no usefull information and can be removed.
+  /// Removes subranges starting at copies that get removed. This sometimes
+  /// happens when undefined subranges are copied around. These ranges contain
+  /// no usefull information and can be removed.
   void pruneSubRegValues(LiveInterval &LI, unsigned &ShrinkMask);
 
   /// Erase any machine instructions that have been coalesced away.
@@ -1734,9 +1760,6 @@ public:
 };
 } // end anonymous namespace
 
-/// Compute the bitmask of lanes actually written by DefMI.
-/// Set Redef if there are any partial register definitions that depend on the
-/// previous value of the register.
 unsigned JoinVals::computeWriteLanes(const MachineInstr *DefMI, bool &Redef)
   const {
   unsigned L = 0;
@@ -1751,7 +1774,6 @@ unsigned JoinVals::computeWriteLanes(const MachineInstr *DefMI, bool &Redef)
   return L;
 }
 
-/// Find the ultimate value that VNI was copied from.
 std::pair<const VNInfo*, unsigned> JoinVals::followCopyChain(
     const VNInfo *VNI) const {
   unsigned Reg = this->Reg;
@@ -1812,13 +1834,6 @@ bool JoinVals::valuesIdentical(VNInfo *Value0, VNInfo *Value1,
   return Orig0->def == Orig1->def && Reg0 == Reg1;
 }
 
-/// Analyze ValNo in this live range, and set all fields of Vals[ValNo].
-/// Return a conflict resolution when possible, but leave the hard cases as
-/// CR_Unresolved.
-/// Recursively calls computeAssignment() on this and Other, guaranteeing that
-/// both OtherVNI and RedefVNI have been analyzed and mapped before returning.
-/// The recursion always goes upwards in the dominator tree, making loops
-/// impossible.
 JoinVals::ConflictResolution
 JoinVals::analyzeValue(unsigned ValNo, JoinVals &Other) {
   Val &V = Vals[ValNo];
@@ -2037,9 +2052,6 @@ JoinVals::analyzeValue(unsigned ValNo, JoinVals &Other) {
   return CR_Unresolved;
 }
 
-/// Compute the value assignment for ValNo in RI.
-/// This may be called recursively by analyzeValue(), but never for a ValNo on
-/// the stack.
 void JoinVals::computeAssignment(unsigned ValNo, JoinVals &Other) {
   Val &V = Vals[ValNo];
   if (V.isAnalyzed()) {
@@ -2093,21 +2105,6 @@ bool JoinVals::mapValues(JoinVals &Other) {
   return true;
 }
 
-/// Assuming ValNo is going to clobber some valid lanes in Other.LR, compute
-/// the extent of the tainted lanes in the block.
-///
-/// Multiple values in Other.LR can be affected since partial redefinitions can
-/// preserve previously tainted lanes.
-///
-///   1 %dst = VLOAD <-- Define all lanes in %dst
-///   2 %src = FOO <-- ValNo to be joined with %dst:ssub0
-///   3 %dst:ssub1 = BAR <-- Partial redef doesn't clear taint in ssub0
-///   4 %dst:ssub0 = COPY %src <-- Conflict resolved, ssub0 wasn't read
-///
-/// For each ValNo in Other that is affected, add an (EndIndex, TaintedLanes)
-/// entry to TaintedVals.
-///
-/// Returns false if the tainted lanes extend beyond the basic block.
 bool JoinVals::
 taintExtent(unsigned ValNo, unsigned TaintedLanes, JoinVals &Other,
             SmallVectorImpl<std::pair<SlotIndex, unsigned> > &TaintExtent) {
@@ -2148,8 +2145,6 @@ taintExtent(unsigned ValNo, unsigned TaintedLanes, JoinVals &Other,
   return true;
 }
 
-/// Return true if MI uses any of the given Lanes from Reg.
-/// This does not include partial redefinitions of Reg.
 bool JoinVals::usesLanes(const MachineInstr *MI, unsigned Reg, unsigned SubIdx,
                          unsigned Lanes) const {
   if (MI->isDebugValue())
@@ -2231,13 +2226,6 @@ bool JoinVals::resolveConflicts(JoinVals &Other) {
   return true;
 }
 
-// Determine if ValNo is a copy of a value number in LR or Other.LR that will
-// be pruned:
-//
-//   %dst = COPY %src
-//   %src = COPY %dst <-- This value to be pruned.
-//   %dst = COPY %src <-- This value is a copy of a pruned value.
-//
 bool JoinVals::isPrunedValue(unsigned ValNo, JoinVals &Other) {
   Val &V = Vals[ValNo];
   if (V.Pruned || V.PrunedComputed)
@@ -2400,9 +2388,9 @@ void RegisterCoalescer::joinSubRegRanges(LiveRange &LRange, LiveRange &RRange,
   JoinVals LHSVals(LRange, CP.getDstReg(), CP.getDstIdx(), LaneMask,
                    NewVNInfo, CP, LIS, TRI, true, true);
 
-  /// Compute NewVNInfo and resolve conflicts (see also joinVirtRegs())
-  /// Conflicts should already be resolved so the mapping/resolution should
-  /// always succeed.
+  // Compute NewVNInfo and resolve conflicts (see also joinVirtRegs())
+  // Conflicts should already be resolved so the mapping/resolution should
+  // always succeed.
   if (!LHSVals.mapValues(RHSVals) || !RHSVals.mapValues(LHSVals))
     llvm_unreachable("Can't join subrange although main ranges are compatible");
   if (!LHSVals.resolveConflicts(RHSVals) || !RHSVals.resolveConflicts(LHSVals))
@@ -2574,13 +2562,12 @@ bool RegisterCoalescer::joinVirtRegs(CoalescerPair &CP) {
   return true;
 }
 
-/// Attempt to join these two intervals. On failure, this returns false.
 bool RegisterCoalescer::joinIntervals(CoalescerPair &CP) {
   return CP.isPhys() ? joinReservedPhysReg(CP) : joinVirtRegs(CP);
 }
 
 namespace {
-// Information concerning MBB coalescing priority.
+/// Information concerning MBB coalescing priority.
 struct MBBPriorityInfo {
   MachineBasicBlock *MBB;
   unsigned Depth;
@@ -2591,10 +2578,10 @@ struct MBBPriorityInfo {
 };
 }
 
-// C-style comparator that sorts first based on the loop depth of the basic
-// block (the unsigned), and then on the MBB number.
-//
-// EnableGlobalCopies assumes that the primary sort key is loop depth.
+/// C-style comparator that sorts first based on the loop depth of the basic
+/// block (the unsigned), and then on the MBB number.
+///
+/// EnableGlobalCopies assumes that the primary sort key is loop depth.
 static int compareMBBPriority(const MBBPriorityInfo *LHS,
                               const MBBPriorityInfo *RHS) {
   // Deeper loops first
@@ -2634,8 +2621,6 @@ static bool isLocalCopy(MachineInstr *Copy, const LiveIntervals *LIS) {
     || LIS->intervalIsInOneMBB(LIS->getInterval(DstReg));
 }
 
-// Try joining WorkList copies starting from index From.
-// Null out any successful joins.
 bool RegisterCoalescer::
 copyCoalesceWorkList(MutableArrayRef<MachineInstr*> CurrList) {
   bool Progress = false;
@@ -2796,9 +2781,9 @@ bool RegisterCoalescer::runOnMachineFunction(MachineFunction &fn) {
         // remove the subranges.
         LI.clearSubRanges();
       } else {
+#ifndef NDEBUG
         // If subranges are still supported, then the same subregs should still
         // be supported.
-#ifndef NDEBUG
         for (LiveInterval::SubRange &S : LI.subranges()) {
           assert ((S.LaneMask & ~MaxMask) == 0);
         }
@@ -2814,7 +2799,6 @@ bool RegisterCoalescer::runOnMachineFunction(MachineFunction &fn) {
   return true;
 }
 
-/// Implement the dump method.
 void RegisterCoalescer::print(raw_ostream &O, const Module* m) const {
   LIS->print(O, m);
 }