Coalesce stack slot accesses that arise when spilling both sides of a COPY.

This helps avoid silly code:

    %R0<def> = LOAD <fi#5>
    STORE <fi#5>, %R0<kill>
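
Both instructions only shuttle the slot's own value: the reload brings it into %R0, and the store writes the same value straight back to fi#5. As a free-standing sketch (the helper and its wrapper are illustrative, not part of this patch), the test that makes such an access removable boils down to the TargetInstrInfo queries the new code uses:

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/Target/TargetInstrInfo.h"  // header location in the 2010 tree

    // True if MI merely moves Reg between stack slot Slot and itself; once
    // Reg's live range is being spilled, such an access can simply be erased.
    static bool isRedundantStackAccess(const llvm::TargetInstrInfo &TII,
                                       const llvm::MachineInstr *MI,
                                       unsigned Reg, int Slot) {
      int FI = 0;
      unsigned R = TII.isLoadFromStackSlot(MI, FI);
      if (!R)
        R = TII.isStoreToStackSlot(MI, FI);
      return R == Reg && FI == Slot;  // same register and same slot
    }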

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@110266 91177308-0d34-0410-b5e6-96231b3b80d8
Jakob Stoklund Olesen 2010-08-04 22:35:11 +00:00
parent 87c0175cce
commit 1a0f91b7c5


@@ -85,6 +85,7 @@ private:
  bool reMaterializeFor(MachineBasicBlock::iterator MI);
  void reMaterializeAll();
  bool coalesceStackAccess(MachineInstr *MI);
  bool foldMemoryOperand(MachineBasicBlock::iterator MI,
                         const SmallVectorImpl<unsigned> &Ops);
  void insertReload(LiveInterval &NewLI, MachineBasicBlock::iterator MI);
@@ -291,6 +292,24 @@ void InlineSpiller::reMaterializeAll() {
  }
}

/// coalesceStackAccess - If MI is a load or store of stackSlot_, it can be
/// removed.
bool InlineSpiller::coalesceStackAccess(MachineInstr *MI) {
  int FI = 0;
  unsigned reg;
  if (!(reg = tii_.isLoadFromStackSlot(MI, FI)) &&
      !(reg = tii_.isStoreToStackSlot(MI, FI)))
    return false;

  // We have a stack access. Is it the right register and slot?
  if (reg != li_->reg || FI != stackSlot_)
    return false;

  DEBUG(dbgs() << "Coalescing stack access: " << *MI);
  lis_.RemoveMachineInstrFromMaps(MI);
  MI->eraseFromParent();
  return true;
}

/// foldMemoryOperand - Try folding stack slot references in Ops into MI.
/// Return true on success, and MI will be erased.
bool InlineSpiller::foldMemoryOperand(MachineBasicBlock::iterator MI,
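
Whether an instruction counts as a simple stack access is entirely the target's decision; isLoadFromStackSlot and isStoreToStackSlot are TargetInstrInfo hooks. A sketch of such a hook, with hypothetical target and opcode names (real targets, e.g. X86InstrInfo, match their own opcodes and operand layouts):

    // Hypothetical target hook: recognize a plain reload from a stack slot.
    // Returns the destination register and sets FrameIndex, or 0 if MI is
    // anything more complicated than a whole-slot, zero-offset load.
    unsigned MyTargetInstrInfo::isLoadFromStackSlot(const MachineInstr *MI,
                                                    int &FrameIndex) const {
      if (MI->getOpcode() == MyTarget::LOAD_ri &&
          MI->getOperand(1).isFI() &&          // base address is a frame index
          MI->getOperand(2).isImm() &&
          MI->getOperand(2).getImm() == 0) {   // no offset into the slot
        FrameIndex = MI->getOperand(1).getIndex();
        return MI->getOperand(0).getReg();
      }
      return 0;
    }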
@@ -399,6 +418,10 @@ void InlineSpiller::spill(LiveInterval *li,
      continue;
    }

    // Stack slot accesses may coalesce away.
    if (coalesceStackAccess(MI))
      continue;

    // Analyze instruction.
    bool Reads, Writes;
    SmallVector<unsigned, 8> Ops;
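
The hunk is truncated here. For orientation, the loop next classifies how MI touches the spilled register; a sketch of that step, assuming the 2010-era MachineInstr API (not the verbatim continuation):

    // Fill Reads/Writes and collect in Ops the indices of MI's operands that
    // mention li->reg, so later code folds or rewrites exactly those operands.
    // llvm::tie (from "llvm/ADT/STLExtras.h") unpacks the returned std::pair.
    tie(Reads, Writes) = MI->readsWritesVirtualRegister(li->reg, &Ops);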