Hoist the rest of the logic for fully promoting allocas with all uses in

a single block into the helper routine. This takes advantage of the fact
that we can directly replace uses prior to any store with undef to
simplify matters and unconditionally promote allocas only used within
one block.

I've removed the special handling for the case of no stores existing.
This has no semantic effect but might slow things down. I'll fix that in
a later patch when I refactor this entire thing to make the different
cases easier to manage.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@186783 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
Chandler Carruth 2013-07-21 01:44:07 +00:00
parent f61d7e8afa
commit 5dc22183f9

View File

@ -426,7 +426,7 @@ struct StoreIndexSearchPredicate {
/// for (...) { if (c) { A = undef; undef = B; } } /// for (...) { if (c) { A = undef; undef = B; } }
/// ///
/// ... so long as A is not used before undef is set. /// ... so long as A is not used before undef is set.
static void promoteSingleBlockAlloca(AllocaInst *AI, AllocaInfo &Info, static void promoteSingleBlockAlloca(AllocaInst *AI, const AllocaInfo &Info,
LargeBlockInfo &LBI, LargeBlockInfo &LBI,
AliasSetTracker *AST) { AliasSetTracker *AST) {
// The trickiest case to handle is when we have large blocks. Because of this, // The trickiest case to handle is when we have large blocks. Because of this,
@ -434,9 +434,6 @@ static void promoteSingleBlockAlloca(AllocaInst *AI, AllocaInfo &Info,
// significantly pessimize the small block case. This uses LargeBlockInfo to // significantly pessimize the small block case. This uses LargeBlockInfo to
// make it efficient to get the index of various operations in the block. // make it efficient to get the index of various operations in the block.
// Clear out UsingBlocks. We will reconstruct it here if needed.
Info.UsingBlocks.clear();
// Walk the use-def list of the alloca, getting the locations of all stores. // Walk the use-def list of the alloca, getting the locations of all stores.
typedef SmallVector<std::pair<unsigned, StoreInst *>, 64> StoresByIndexTy; typedef SmallVector<std::pair<unsigned, StoreInst *>, 64> StoresByIndexTy;
StoresByIndexTy StoresByIndex; StoresByIndexTy StoresByIndex;
@ -446,19 +443,6 @@ static void promoteSingleBlockAlloca(AllocaInst *AI, AllocaInfo &Info,
if (StoreInst *SI = dyn_cast<StoreInst>(*UI)) if (StoreInst *SI = dyn_cast<StoreInst>(*UI))
StoresByIndex.push_back(std::make_pair(LBI.getInstructionIndex(SI), SI)); StoresByIndex.push_back(std::make_pair(LBI.getInstructionIndex(SI), SI));
// If there are no stores to the alloca, just replace any loads with undef.
if (StoresByIndex.empty()) {
for (Value::use_iterator UI = AI->use_begin(), E = AI->use_end(); UI != E;)
if (LoadInst *LI = dyn_cast<LoadInst>(*UI++)) {
LI->replaceAllUsesWith(UndefValue::get(LI->getType()));
if (AST && LI->getType()->isPointerTy())
AST->deleteValue(LI);
LBI.deleteValue(LI);
LI->eraseFromParent();
}
return;
}
// Sort the stores by their index, making it efficient to do a lookup with a // Sort the stores by their index, making it efficient to do a lookup with a
// binary search. // binary search.
std::sort(StoresByIndex.begin(), StoresByIndex.end()); std::sort(StoresByIndex.begin(), StoresByIndex.end());
@ -478,21 +462,41 @@ static void promoteSingleBlockAlloca(AllocaInst *AI, AllocaInfo &Info,
std::pair<unsigned, StoreInst *>(LoadIdx, static_cast<StoreInst *>(0)), std::pair<unsigned, StoreInst *>(LoadIdx, static_cast<StoreInst *>(0)),
StoreIndexSearchPredicate()); StoreIndexSearchPredicate());
// If there is no store before this load, then we can't promote this load. if (I == StoresByIndex.begin())
if (I == StoresByIndex.begin()) { // If there is no store before this load, the load takes the undef value.
// Can't handle this load, bail out. LI->replaceAllUsesWith(UndefValue::get(LI->getType()));
Info.UsingBlocks.push_back(LI->getParent()); else
continue; // Otherwise, there was a store before this load, the load takes its value.
} LI->replaceAllUsesWith(llvm::prior(I)->second->getOperand(0));
// Otherwise, there was a store before this load, the load takes its value.
--I;
LI->replaceAllUsesWith(I->second->getOperand(0));
if (AST && LI->getType()->isPointerTy()) if (AST && LI->getType()->isPointerTy())
AST->deleteValue(LI); AST->deleteValue(LI);
LI->eraseFromParent(); LI->eraseFromParent();
LBI.deleteValue(LI); LBI.deleteValue(LI);
} }
// Remove the (now dead) stores and alloca.
while (!AI->use_empty()) {
StoreInst *SI = cast<StoreInst>(AI->use_back());
// Record debuginfo for the store before removing it.
if (DbgDeclareInst *DDI = Info.DbgDeclare) {
DIBuilder DIB(*AI->getParent()->getParent()->getParent());
ConvertDebugDeclareToDebugValue(DDI, SI, DIB);
}
SI->eraseFromParent();
LBI.deleteValue(SI);
}
if (AST)
AST->deleteValue(AI);
AI->eraseFromParent();
LBI.deleteValue(AI);
// The alloca's debuginfo can be removed as well.
if (DbgDeclareInst *DDI = Info.DbgDeclare)
DDI->eraseFromParent();
++NumLocalPromoted;
} }
void PromoteMem2Reg::run() { void PromoteMem2Reg::run() {
@ -565,35 +569,9 @@ void PromoteMem2Reg::run() {
if (Info.OnlyUsedInOneBlock) { if (Info.OnlyUsedInOneBlock) {
promoteSingleBlockAlloca(AI, Info, LBI, AST); promoteSingleBlockAlloca(AI, Info, LBI, AST);
// Finally, after the scan, check to see if the stores are all that is // The alloca has been processed, move on.
// left. RemoveFromAllocasList(AllocaNum);
if (Info.UsingBlocks.empty()) { continue;
// Remove the (now dead) stores and alloca.
while (!AI->use_empty()) {
StoreInst *SI = cast<StoreInst>(AI->use_back());
// Record debuginfo for the store before removing it.
if (DbgDeclareInst *DDI = Info.DbgDeclare)
ConvertDebugDeclareToDebugValue(DDI, SI, DIB);
SI->eraseFromParent();
LBI.deleteValue(SI);
}
if (AST)
AST->deleteValue(AI);
AI->eraseFromParent();
LBI.deleteValue(AI);
// The alloca has been processed, move on.
RemoveFromAllocasList(AllocaNum);
// The alloca's debuginfo can be removed as well.
if (DbgDeclareInst *DDI = Info.DbgDeclare)
DDI->eraseFromParent();
++NumLocalPromoted;
continue;
}
} }
// If we haven't computed dominator tree levels, do so now. // If we haven't computed dominator tree levels, do so now.