Reimplement the internal abstraction used by MemDep in terms of a
pointer/int pair instead of a manually bitmangled pointer.  This forces
clients to think a little more about checking the appropriate pieces
and will be useful for internal implementation improvements later.

I'm not particularly happy with this.  After going through it, I don't
think the clients of memdep should be exposed to the internal type at
all.  I'll fix this in a subsequent commit.

No functionality change.


git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@60230 91177308-0d34-0410-b5e6-96231b3b80d8
Chris Lattner 2008-11-29 01:43:36 +00:00
parent d63e618212
commit 39f372e23e
5 changed files with 207 additions and 190 deletions
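
For orientation, a minimal before/after sketch of the encoding change described
above.  DepType, DepResultTy, getInt() and getPointer() come from the header
diff below; MD, Query and Dep are illustrative placeholder names, not part of
the patch.

// Old encoding (removed here): sentinel values bitmangled into the
// pointer itself and compared directly by clients, e.g.
//   static Instruction* const NonLocal = (Instruction*)-3;
//   if (dep == MemoryDependenceAnalysis::NonLocal) ...

// New encoding: a pointer/int pair carrying an explicit 2-bit DepType tag.
MemoryDependenceAnalysis::DepResultTy Dep = MD.getDependency(Query);
if (Dep.getInt() == MemoryDependenceAnalysis::NonLocal) {
  // No dependency within this block.
} else if (Dep.getInt() == MemoryDependenceAnalysis::Normal) {
  Instruction *DepInst = Dep.getPointer();  // the real defining instruction
  // ...
}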

View File

@ -17,6 +17,7 @@
#include "llvm/Pass.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/PointerIntPair.h"
namespace llvm {
class Function;
@ -29,38 +30,54 @@ namespace llvm {
/// It builds on alias analysis information, and tries to provide a lazy,
/// caching interface to a common kind of alias information query.
class MemoryDependenceAnalysis : public FunctionPass {
public:
/// DepType - This enum is used to indicate what flavor of dependence this
/// is. If the type is Normal, there is an associated instruction pointer.
enum DepType {
/// Normal - This is a normal instruction dependence. The pointer member
/// of the DepResultTy pair holds the instruction.
Normal = 0,
/// None - This dependence type indicates that the query does not depend
/// on any instructions, either because it scanned to the start of the
/// function or it scanned to the definition of the memory
/// (alloca/malloc).
None,
/// NonLocal - This marker indicates that the query has no dependency in
/// the specified block. To find out more, the client should query other
/// predecessor blocks.
NonLocal,
/// Dirty - This is an internal marker indicating that a cache entry
/// is dirty.
Dirty
};
typedef PointerIntPair<Instruction*, 2, DepType> DepResultTy;
private:
// A map from instructions to their dependency, with a boolean
// flag for whether this mapping is confirmed or not.
typedef DenseMap<Instruction*, std::pair<Instruction*, bool> > depMapType;
depMapType depGraphLocal;
typedef DenseMap<Instruction*,
std::pair<DepResultTy, bool> > LocalDepMapType;
LocalDepMapType LocalDeps;
// A map from instructions to their non-local dependencies.
typedef DenseMap<Instruction*,
DenseMap<BasicBlock*, Value*> > nonLocalDepMapType;
DenseMap<BasicBlock*, DepResultTy> > nonLocalDepMapType;
nonLocalDepMapType depGraphNonLocal;
// A reverse mapping from dependencies to the dependees. This is
// used when removing instructions to keep the cache coherent.
typedef DenseMap<Value*, SmallPtrSet<Instruction*, 4> > reverseDepMapType;
typedef DenseMap<DepResultTy,
SmallPtrSet<Instruction*, 4> > reverseDepMapType;
reverseDepMapType reverseDep;
// A reverse mapping from dependencies to the non-local dependees.
reverseDepMapType reverseDepNonLocal;
public:
// Special marker indicating that the query has no dependency
// in the specified block.
static Instruction* const NonLocal;
// Special marker indicating that the query has no dependency at all
static Instruction* const None;
// Special marker indicating a dirty cache entry
static Instruction* const Dirty;
static char ID; // Class identification, replacement for typeinfo
MemoryDependenceAnalysis() : FunctionPass(&ID) {}
static char ID;
/// Pass Implementation stuff. This doesn't do any analysis.
///
@ -68,7 +85,7 @@ namespace llvm {
/// Clean up memory in between runs
void releaseMemory() {
depGraphLocal.clear();
LocalDeps.clear();
depGraphNonLocal.clear();
reverseDep.clear();
reverseDepNonLocal.clear();
@ -81,33 +98,33 @@ namespace llvm {
/// getDependency - Return the instruction on which a memory operation
/// depends, starting with start.
Instruction* getDependency(Instruction* query, Instruction* start = 0,
BasicBlock* block = 0);
DepResultTy getDependency(Instruction *query, Instruction *start = 0,
BasicBlock *block = 0);
/// getNonLocalDependency - Fills the passed-in map with the non-local
/// dependencies of the queries. The map will contain NonLocal for
/// blocks between the query and its dependencies.
void getNonLocalDependency(Instruction* query,
DenseMap<BasicBlock*, Value*>& resp);
DenseMap<BasicBlock*, DepResultTy> &resp);
/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
void removeInstruction(Instruction* rem);
void removeInstruction(Instruction *InstToRemove);
/// dropInstruction - Remove an instruction from the analysis, making
/// absolutely conservative assumptions when updating the cache. This is
/// useful, for example when an instruction is changed rather than removed.
void dropInstruction(Instruction* drop);
void dropInstruction(Instruction *InstToDrop);
private:
/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void verifyRemoved(Instruction *Inst) const;
Instruction* getCallSiteDependency(CallSite C, Instruction* start,
BasicBlock* block);
DepResultTy getCallSiteDependency(CallSite C, Instruction* start,
BasicBlock* block);
void nonLocalHelper(Instruction* query, BasicBlock* block,
DenseMap<BasicBlock*, Value*>& resp);
DenseMap<BasicBlock*, DepResultTy>& resp);
};
} // End llvm namespace
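
A rough client-side usage sketch against the interface declared above,
mirroring the updates to DSE, GVN and MemCpyOpt in the .cpp diffs that follow;
the surrounding pass boilerplate and the Inst variable are assumed, not part
of the patch.

MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
MemoryDependenceAnalysis::DepResultTy Dep = MD.getDependency(Inst);

switch (Dep.getInt()) {
case MemoryDependenceAnalysis::Normal:
  // Dep.getPointer() is the defining instruction within this block.
  break;
case MemoryDependenceAnalysis::NonLocal:
  // No dependency in this block; call getNonLocalDependency() to fill a
  // DenseMap<BasicBlock*, DepResultTy> of per-block results.
  break;
case MemoryDependenceAnalysis::None:
  // The scan reached the start of the function or the definition of the
  // memory (alloca/malloc); there is no depending instruction at all.
  break;
case MemoryDependenceAnalysis::Dirty:
  // Internal marker for dirty cache entries; clients should not see this.
  break;
}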

View File

@ -41,10 +41,6 @@ STATISTIC(NumUncacheNonlocal, "Number of uncached non-local responses");
char MemoryDependenceAnalysis::ID = 0;
Instruction* const MemoryDependenceAnalysis::NonLocal = (Instruction*)-3;
Instruction* const MemoryDependenceAnalysis::None = (Instruction*)-4;
Instruction* const MemoryDependenceAnalysis::Dirty = (Instruction*)-5;
// Register this pass...
static RegisterPass<MemoryDependenceAnalysis> X("memdep",
"Memory Dependence Analysis", false, true);
@ -52,18 +48,19 @@ static RegisterPass<MemoryDependenceAnalysis> X("memdep",
/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void MemoryDependenceAnalysis::verifyRemoved(Instruction *D) const {
for (depMapType::const_iterator I = depGraphLocal.begin(),
E = depGraphLocal.end(); I != E; ++I) {
for (LocalDepMapType::const_iterator I = LocalDeps.begin(),
E = LocalDeps.end(); I != E; ++I) {
assert(I->first != D && "Inst occurs in data structures");
assert(I->second.first != D && "Inst occurs in data structures");
assert(I->second.first.getPointer() != D &&
"Inst occurs in data structures");
}
for (nonLocalDepMapType::const_iterator I = depGraphNonLocal.begin(),
E = depGraphNonLocal.end(); I != E; ++I) {
assert(I->first != D && "Inst occurs in data structures");
for (DenseMap<BasicBlock*, Value*>::iterator II = I->second.begin(),
for (DenseMap<BasicBlock*, DepResultTy>::iterator II = I->second.begin(),
EE = I->second.end(); II != EE; ++II)
assert(II->second != D && "Inst occurs in data structures");
assert(II->second.getPointer() != D && "Inst occurs in data structures");
}
for (reverseDepMapType::const_iterator I = reverseDep.begin(),
@ -90,12 +87,10 @@ void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
/// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site.
Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
Instruction* start,
BasicBlock* block) {
std::pair<Instruction*, bool>& cachedResult =
depGraphLocal[C.getInstruction()];
MemoryDependenceAnalysis::DepResultTy
MemoryDependenceAnalysis::
getCallSiteDependency(CallSite C, Instruction *start, BasicBlock *block) {
std::pair<DepResultTy, bool> &cachedResult = LocalDeps[C.getInstruction()];
AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
TargetData& TD = getAnalysis<TargetData>();
BasicBlock::iterator blockBegin = C.getInstruction()->getParent()->begin();
@ -141,11 +136,11 @@ Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
AA.getModRefBehavior(CallSite::get(QI));
if (result != AliasAnalysis::DoesNotAccessMemory) {
if (!start && !block) {
cachedResult.first = QI;
cachedResult.first = DepResultTy(QI, Normal);
cachedResult.second = true;
reverseDep[QI].insert(C.getInstruction());
reverseDep[DepResultTy(QI, Normal)].insert(C.getInstruction());
}
return QI;
return DepResultTy(QI, Normal);
} else {
continue;
}
@ -154,33 +149,33 @@ Instruction* MemoryDependenceAnalysis::getCallSiteDependency(CallSite C,
if (AA.getModRefInfo(C, pointer, pointerSize) != AliasAnalysis::NoModRef) {
if (!start && !block) {
cachedResult.first = QI;
cachedResult.first = DepResultTy(QI, Normal);
cachedResult.second = true;
reverseDep[QI].insert(C.getInstruction());
reverseDep[DepResultTy(QI, Normal)].insert(C.getInstruction());
}
return QI;
return DepResultTy(QI, Normal);
}
}
// No dependence found
cachedResult.first = NonLocal;
cachedResult.first = DepResultTy(0, NonLocal);
cachedResult.second = true;
reverseDep[NonLocal].insert(C.getInstruction());
return NonLocal;
reverseDep[DepResultTy(0, NonLocal)].insert(C.getInstruction());
return DepResultTy(0, NonLocal);
}
/// nonLocalHelper - Private helper used to calculate non-local dependencies
/// by doing DFS on the predecessors of a block to find its dependencies
/// by doing DFS on the predecessors of a block to find its dependencies.
void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
BasicBlock* block,
DenseMap<BasicBlock*, Value*>& resp) {
DenseMap<BasicBlock*, DepResultTy> &resp) {
// Set of blocks that we've already visited in our DFS
SmallPtrSet<BasicBlock*, 4> visited;
// If we're updating a dirtied cache entry, we don't need to reprocess
// already computed entries.
for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(),
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = resp.begin(),
E = resp.end(); I != E; ++I)
if (I->second != Dirty)
if (I->second.getInt() != Dirty)
visited.insert(I->first);
// Current stack of the DFS
@ -204,8 +199,8 @@ void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
if (BB != block) {
visited.insert(BB);
Instruction* localDep = getDependency(query, 0, BB);
if (localDep != NonLocal) {
DepResultTy localDep = getDependency(query, 0, BB);
if (localDep.getInt() != NonLocal) {
resp.insert(std::make_pair(BB, localDep));
stack.pop_back();
@ -217,8 +212,8 @@ void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
} else if (BB == block) {
visited.insert(BB);
Instruction* localDep = getDependency(query, 0, BB);
if (localDep != query)
DepResultTy localDep = getDependency(query, 0, BB);
if (localDep != DepResultTy(query, Normal))
resp.insert(std::make_pair(BB, localDep));
stack.pop_back();
@ -246,12 +241,12 @@ void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
// If we didn't insert because we have no predecessors, then this
// query has no dependency at all.
else if (!inserted && !predOnStack) {
resp.insert(std::make_pair(BB, None));
resp.insert(std::make_pair(BB, DepResultTy(0, None)));
// If we didn't insert because our predecessors are already on the stack,
// then we might still have a dependency, but it will be discovered during
// backtracking.
} else if (!inserted && predOnStack){
resp.insert(std::make_pair(BB, NonLocal));
resp.insert(std::make_pair(BB, DepResultTy(0, NonLocal)));
}
stack.pop_back();
@ -262,21 +257,21 @@ void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
/// dependencies of the queries. The map will contain NonLocal for
/// blocks between the query and its dependencies.
void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
DenseMap<BasicBlock*, Value*>& resp) {
DenseMap<BasicBlock*, DepResultTy> &resp) {
if (depGraphNonLocal.count(query)) {
DenseMap<BasicBlock*, Value*>& cached = depGraphNonLocal[query];
DenseMap<BasicBlock*, DepResultTy> &cached = depGraphNonLocal[query];
NumCacheNonlocal++;
SmallVector<BasicBlock*, 4> dirtied;
for (DenseMap<BasicBlock*, Value*>::iterator I = cached.begin(),
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = cached.begin(),
E = cached.end(); I != E; ++I)
if (I->second == Dirty)
if (I->second.getInt() == Dirty)
dirtied.push_back(I->first);
for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(),
E = dirtied.end(); I != E; ++I) {
Instruction* localDep = getDependency(query, 0, *I);
if (localDep != NonLocal)
DepResultTy localDep = getDependency(query, 0, *I);
if (localDep.getInt() != NonLocal)
cached[*I] = localDep;
else {
cached.erase(*I);
@ -287,8 +282,8 @@ void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
resp = cached;
// Update the reverse non-local dependency cache
for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
I != E; ++I)
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = resp.begin(),
E = resp.end(); I != E; ++I)
reverseDepNonLocal[I->second].insert(query);
return;
@ -299,8 +294,8 @@ void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
nonLocalHelper(query, query->getParent(), resp);
// Update the non-local dependency cache
for (DenseMap<BasicBlock*, Value*>::iterator I = resp.begin(), E = resp.end();
I != E; ++I) {
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = resp.begin(),
E = resp.end(); I != E; ++I) {
depGraphNonLocal[query].insert(*I);
reverseDepNonLocal[I->second].insert(query);
}
@ -309,21 +304,24 @@ void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
/// getDependency - Return the instruction on which a memory operation
/// depends. The local parameter indicates if the query should only
/// evaluate dependencies within the same basic block.
Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
Instruction* start,
BasicBlock* block) {
MemoryDependenceAnalysis::DepResultTy
MemoryDependenceAnalysis::getDependency(Instruction *query,
Instruction *start,
BasicBlock *block) {
// Start looking for dependencies with the queried inst
BasicBlock::iterator QI = query;
// Check for a cached result
std::pair<Instruction*, bool>& cachedResult = depGraphLocal[query];
std::pair<DepResultTy, bool>& cachedResult = LocalDeps[query];
// If we have a _confirmed_ cached entry, return it
if (!block && !start) {
if (cachedResult.second)
return cachedResult.first;
else if (cachedResult.first && cachedResult.first != NonLocal)
// If we have an unconfirmed cached entry, we can start our search from there
QI = cachedResult.first;
else if (cachedResult.first.getInt() == Normal &&
cachedResult.first.getPointer())
// If we have an unconfirmed cached entry, we can start our search from
// it.
QI = cachedResult.first.getPointer();
}
if (start)
@ -357,9 +355,9 @@ Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
} else if (CallSite::get(query).getInstruction() != 0)
return getCallSiteDependency(CallSite::get(query), start, block);
else if (isa<AllocationInst>(query))
return None;
return DepResultTy(0, None);
else
return None;
return DepResultTy(0, None);
BasicBlock::iterator blockBegin = block ? block->begin()
: query->getParent()->begin();
@ -375,12 +373,12 @@ Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
// All volatile loads/stores depend on each other
if (queryIsVolatile && S->isVolatile()) {
if (!start && !block) {
cachedResult.first = S;
cachedResult.first = DepResultTy(S, Normal);
cachedResult.second = true;
reverseDep[S].insert(query);
reverseDep[DepResultTy(S, Normal)].insert(query);
}
return S;
return DepResultTy(S, Normal);
}
pointer = S->getPointerOperand();
@ -389,12 +387,12 @@ Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
// All volatile loads/stores depend on each other
if (queryIsVolatile && L->isVolatile()) {
if (!start && !block) {
cachedResult.first = L;
cachedResult.first = DepResultTy(L, Normal);
cachedResult.second = true;
reverseDep[L].insert(query);
reverseDep[DepResultTy(L, Normal)].insert(query);
}
return L;
return DepResultTy(L, Normal);
}
pointer = L->getPointerOperand();
@ -417,7 +415,7 @@ Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
} else if (CallSite::get(QI).getInstruction() != 0) {
// Call insts need special handling. Check if they can modify our pointer
AliasAnalysis::ModRefResult MR = AA.getModRefInfo(CallSite::get(QI),
dependee, dependeeSize);
dependee, dependeeSize);
if (MR != AliasAnalysis::NoModRef) {
// Loads don't depend on read-only calls
@ -425,12 +423,11 @@ Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
continue;
if (!start && !block) {
cachedResult.first = QI;
cachedResult.first = DepResultTy(QI, Normal);
cachedResult.second = true;
reverseDep[QI].insert(query);
reverseDep[DepResultTy(QI, Normal)].insert(query);
}
return QI;
return DepResultTy(QI, Normal);
} else {
continue;
}
@ -448,64 +445,63 @@ Instruction* MemoryDependenceAnalysis::getDependency(Instruction* query,
continue;
if (!start && !block) {
cachedResult.first = QI;
cachedResult.first = DepResultTy(QI, Normal);
cachedResult.second = true;
reverseDep[QI].insert(query);
reverseDep[DepResultTy(QI, Normal)].insert(query);
}
return QI;
return DepResultTy(QI, Normal);
}
}
}
// If we found nothing, return the non-local flag
if (!start && !block) {
cachedResult.first = NonLocal;
cachedResult.first = DepResultTy(0, NonLocal);
cachedResult.second = true;
reverseDep[NonLocal].insert(query);
reverseDep[DepResultTy(0, NonLocal)].insert(query);
}
return NonLocal;
return DepResultTy(0, NonLocal);
}
/// dropInstruction - Remove an instruction from the analysis, making
/// absolutely conservative assumptions when updating the cache. This is
/// useful, for example when an instruction is changed rather than removed.
void MemoryDependenceAnalysis::dropInstruction(Instruction* drop) {
depMapType::iterator depGraphEntry = depGraphLocal.find(drop);
if (depGraphEntry != depGraphLocal.end())
LocalDepMapType::iterator depGraphEntry = LocalDeps.find(drop);
if (depGraphEntry != LocalDeps.end())
reverseDep[depGraphEntry->second.first].erase(drop);
// Drop dependency information for things that depended on this instr
SmallPtrSet<Instruction*, 4>& set = reverseDep[drop];
SmallPtrSet<Instruction*, 4>& set = reverseDep[DepResultTy(drop, Normal)];
for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
I != E; ++I)
depGraphLocal.erase(*I);
LocalDeps.erase(*I);
depGraphLocal.erase(drop);
reverseDep.erase(drop);
LocalDeps.erase(drop);
reverseDep.erase(DepResultTy(drop, Normal));
for (DenseMap<BasicBlock*, Value*>::iterator DI =
depGraphNonLocal[drop].begin(), DE = depGraphNonLocal[drop].end();
for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
depGraphNonLocal[drop].begin(), DE = depGraphNonLocal[drop].end();
DI != DE; ++DI)
if (DI->second != None)
if (DI->second.getInt() != None)
reverseDepNonLocal[DI->second].erase(drop);
if (reverseDepNonLocal.count(drop)) {
SmallPtrSet<Instruction*, 4>& set = reverseDepNonLocal[drop];
if (reverseDepNonLocal.count(DepResultTy(drop, Normal))) {
SmallPtrSet<Instruction*, 4>& set =
reverseDepNonLocal[DepResultTy(drop, Normal)];
for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
I != E; ++I)
for (DenseMap<BasicBlock*, Value*>::iterator DI =
for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
DI != DE; ++DI)
if (DI->second == drop)
DI->second = Dirty;
if (DI->second == DepResultTy(drop, Normal))
DI->second = DepResultTy(0, Dirty);
}
reverseDepNonLocal.erase(drop);
nonLocalDepMapType::iterator I = depGraphNonLocal.find(drop);
if (I != depGraphNonLocal.end())
depGraphNonLocal.erase(I);
reverseDepNonLocal.erase(DepResultTy(drop, Normal));
depGraphNonLocal.erase(drop);
}
/// removeInstruction - Remove an instruction from the dependence analysis,
@ -514,10 +510,10 @@ void MemoryDependenceAnalysis::dropInstruction(Instruction* drop) {
void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
// Walk through the Non-local dependencies, removing this one as the value
// for any cached queries.
for (DenseMap<BasicBlock*, Value*>::iterator DI =
for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
depGraphNonLocal[RemInst].begin(), DE = depGraphNonLocal[RemInst].end();
DI != DE; ++DI)
if (DI->second != None)
if (DI->second.getInt() != None)
reverseDepNonLocal[DI->second].erase(RemInst);
// Shortly after this, we will look for things that depend on RemInst. In
@ -525,36 +521,34 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
// could completely delete any entries that depend on this, but it is better
// to make a more accurate approximation where possible. Compute that better
// approximation if we can.
Instruction *NewDependency = 0;
DepResultTy NewDependency;
bool NewDependencyConfirmed = false;
// If we have a cached local dependence query for this instruction, remove it.
//
depMapType::iterator LocalDepEntry = depGraphLocal.find(RemInst);
if (LocalDepEntry != depGraphLocal.end()) {
Instruction *LocalDepInst = LocalDepEntry->second.first;
LocalDepMapType::iterator LocalDepEntry = LocalDeps.find(RemInst);
if (LocalDepEntry != LocalDeps.end()) {
DepResultTy LocalDep = LocalDepEntry->second.first;
bool IsConfirmed = LocalDepEntry->second.second;
// Remove this local dependency info.
depGraphLocal.erase(LocalDepEntry);
LocalDeps.erase(LocalDepEntry);
// Remove us from DepInst's reverse set now that the local dep info is gone.
reverseDep[LocalDepInst].erase(RemInst);
reverseDep[LocalDep].erase(RemInst);
// If we have unconfirmed info, don't trust it.
if (IsConfirmed) {
// If we have a confirmed non-local flag, use it.
if (LocalDepInst == NonLocal || LocalDepInst == None) {
if (LocalDep.getInt() == NonLocal || LocalDep.getInt() == None) {
// The only time this dependency is confirmed is if it is non-local.
NewDependency = LocalDepInst;
NewDependency = LocalDep;
NewDependencyConfirmed = true;
} else {
// If we have dep info for RemInst, set them to it.
NewDependency = next(BasicBlock::iterator(LocalDepInst));
// Don't use RI for the new dependency!
if (NewDependency == RemInst)
NewDependency = 0;
Instruction *NDI = next(BasicBlock::iterator(LocalDep.getPointer()));
if (NDI != RemInst) // Don't use RemInst for the new dependency!
NewDependency = DepResultTy(NDI, Normal);
}
}
}
@ -563,12 +557,13 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
// use the immediate successor of RemInst. We use the successor because
getDependency starts by checking the immediate predecessor of what is in
// the cache.
if (NewDependency == 0)
NewDependency = next(BasicBlock::iterator(RemInst));
if (NewDependency == DepResultTy(0, Normal))
NewDependency = DepResultTy(next(BasicBlock::iterator(RemInst)), Normal);
// Loop over all of the things that depend on the instruction we're removing.
//
reverseDepMapType::iterator ReverseDepIt = reverseDep.find(RemInst);
reverseDepMapType::iterator ReverseDepIt =
reverseDep.find(DepResultTy(RemInst, Normal));
if (ReverseDepIt != reverseDep.end()) {
SmallPtrSet<Instruction*, 4> &ReverseDeps = ReverseDepIt->second;
for (SmallPtrSet<Instruction*, 4>::iterator I = ReverseDeps.begin(),
@ -580,28 +575,29 @@ void MemoryDependenceAnalysis::removeInstruction(Instruction *RemInst) {
if (InstDependingOnRemInst == RemInst) continue;
// Insert the new dependencies.
depGraphLocal[InstDependingOnRemInst] =
LocalDeps[InstDependingOnRemInst] =
std::make_pair(NewDependency, NewDependencyConfirmed);
// If our NewDependency is an instruction, make sure to remember that new
// things depend on it.
if (NewDependency != NonLocal && NewDependency != None)
// FIXME: Just insert all deps!
if (NewDependency.getInt() != NonLocal && NewDependency.getInt() != None)
reverseDep[NewDependency].insert(InstDependingOnRemInst);
}
reverseDep.erase(RemInst);
reverseDep.erase(DepResultTy(RemInst, Normal));
}
ReverseDepIt = reverseDepNonLocal.find(RemInst);
ReverseDepIt = reverseDepNonLocal.find(DepResultTy(RemInst, Normal));
if (ReverseDepIt != reverseDepNonLocal.end()) {
SmallPtrSet<Instruction*, 4>& set = ReverseDepIt->second;
for (SmallPtrSet<Instruction*, 4>::iterator I = set.begin(), E = set.end();
I != E; ++I)
for (DenseMap<BasicBlock*, Value*>::iterator DI =
for (DenseMap<BasicBlock*, DepResultTy>::iterator DI =
depGraphNonLocal[*I].begin(), DE = depGraphNonLocal[*I].end();
DI != DE; ++DI)
if (DI->second == RemInst)
DI->second = Dirty;
reverseDepNonLocal.erase(RemInst);
if (DI->second == DepResultTy(RemInst, Normal))
DI->second = DepResultTy(0, Dirty);
reverseDepNonLocal.erase(ReverseDepIt);
}
depGraphNonLocal.erase(RemInst);

View File

@ -46,9 +46,11 @@ namespace {
Changed |= runOnBasicBlock(*I);
return Changed;
}
typedef MemoryDependenceAnalysis::DepResultTy DepResultTy;
bool runOnBasicBlock(BasicBlock &BB);
bool handleFreeWithNonTrivialDependency(FreeInst *F, Instruction *Dep);
bool handleFreeWithNonTrivialDependency(FreeInst *F, DepResultTy Dep);
bool handleEndBlock(BasicBlock &BB);
bool RemoveUndeadPointers(Value* pointer, uint64_t killPointerSize,
BasicBlock::iterator& BBI,
@ -108,17 +110,16 @@ bool DSE::runOnBasicBlock(BasicBlock &BB) {
// ... to a pointer that has been stored to before...
if (last) {
Instruction* dep = MD.getDependency(Inst);
DepResultTy dep = MD.getDependency(Inst);
bool deletedStore = false;
// ... and no other memory dependencies are between them....
while (dep != MemoryDependenceAnalysis::None &&
dep != MemoryDependenceAnalysis::NonLocal &&
isa<StoreInst>(dep)) {
if (dep != last ||
while (dep.getInt() == MemoryDependenceAnalysis::Normal &&
isa<StoreInst>(dep.getPointer())) {
if (dep.getPointer() != last ||
TD.getTypeStoreSize(last->getOperand(0)->getType()) >
TD.getTypeStoreSize(Inst->getOperand(0)->getType())) {
dep = MD.getDependency(Inst, dep);
dep = MD.getDependency(Inst, dep.getPointer());
continue;
}
@ -151,14 +152,14 @@ bool DSE::runOnBasicBlock(BasicBlock &BB) {
// loaded from, then the store can be removed;
if (LoadInst* L = dyn_cast<LoadInst>(S->getOperand(0))) {
// FIXME: Don't do dep query if Parents don't match and other stuff!
Instruction* dep = MD.getDependency(S);
DepResultTy dep = MD.getDependency(S);
DominatorTree& DT = getAnalysis<DominatorTree>();
if (!S->isVolatile() && S->getParent() == L->getParent() &&
S->getPointerOperand() == L->getPointerOperand() &&
(dep == MemoryDependenceAnalysis::None ||
dep == MemoryDependenceAnalysis::NonLocal ||
DT.dominates(dep, L))) {
(dep.getInt() == MemoryDependenceAnalysis::None ||
dep.getInt() == MemoryDependenceAnalysis::NonLocal ||
DT.dominates(dep.getPointer(), L))) {
DeleteDeadInstruction(S);
if (!isa<TerminatorInst>(BB.begin()))
@ -184,15 +185,15 @@ bool DSE::runOnBasicBlock(BasicBlock &BB) {
/// handleFreeWithNonTrivialDependency - Handle frees of entire structures whose
/// dependency is a store to a field of that structure.
bool DSE::handleFreeWithNonTrivialDependency(FreeInst* F, Instruction* dep) {
bool DSE::handleFreeWithNonTrivialDependency(FreeInst* F, DepResultTy dep) {
TargetData &TD = getAnalysis<TargetData>();
AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
if (dep == MemoryDependenceAnalysis::None ||
dep == MemoryDependenceAnalysis::NonLocal)
if (dep.getInt() == MemoryDependenceAnalysis::None ||
dep.getInt() == MemoryDependenceAnalysis::NonLocal)
return false;
StoreInst* dependency = dyn_cast<StoreInst>(dep);
StoreInst* dependency = dyn_cast<StoreInst>(dep.getPointer());
if (!dependency)
return false;
else if (dependency->isVolatile())

View File

@ -456,19 +456,21 @@ uint32_t ValueTable::lookup_or_add(Value* V) {
return nextValueNumber++;
}
Instruction* local_dep = MD->getDependency(C);
MemoryDependenceAnalysis::DepResultTy local_dep = MD->getDependency(C);
if (local_dep == MemoryDependenceAnalysis::None) {
if (local_dep.getInt() == MemoryDependenceAnalysis::None) {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
return nextValueNumber++;
} else if (local_dep != MemoryDependenceAnalysis::NonLocal) {
if (!isa<CallInst>(local_dep)) {
} else if (local_dep.getInt() != MemoryDependenceAnalysis::NonLocal) {
// FIXME: INDENT PROPERLY!
if (!isa<CallInst>(local_dep.getPointer())) {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
return nextValueNumber++;
}
CallInst* local_cdep = cast<CallInst>(local_dep);
CallInst* local_cdep = cast<CallInst>(local_dep.getPointer());
// FIXME: INDENT PROPERLY.
if (local_cdep->getCalledFunction() != C->getCalledFunction() ||
local_cdep->getNumOperands() != C->getNumOperands()) {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
@ -493,19 +495,20 @@ uint32_t ValueTable::lookup_or_add(Value* V) {
}
DenseMap<BasicBlock*, Value*> deps;
DenseMap<BasicBlock*, MemoryDependenceAnalysis::DepResultTy> deps;
MD->getNonLocalDependency(C, deps);
CallInst* cdep = 0;
for (DenseMap<BasicBlock*, Value*>::iterator I = deps.begin(),
E = deps.end(); I != E; ++I) {
if (I->second == MemoryDependenceAnalysis::None) {
for (DenseMap<BasicBlock*, MemoryDependenceAnalysis::DepResultTy>
::iterator I = deps.begin(), E = deps.end(); I != E; ++I) {
if (I->second.getInt() == MemoryDependenceAnalysis::None) {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
return nextValueNumber++;
} else if (I->second != MemoryDependenceAnalysis::NonLocal) {
} else if (I->second.getInt() != MemoryDependenceAnalysis::NonLocal) {
// FIXME: INDENT PROPERLY
if (DT->properlyDominates(I->first, C->getParent())) {
if (CallInst* CD = dyn_cast<CallInst>(I->second))
if (CallInst* CD = dyn_cast<CallInst>(I->second.getPointer()))
cdep = CD;
else {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
@ -718,6 +721,8 @@ namespace {
AU.addPreserved<AliasAnalysis>();
}
typedef MemoryDependenceAnalysis::DepResultTy DepResultTy;
// Helper functions
// FIXME: eliminate or document these better
bool processLoad(LoadInst* L,
@ -861,7 +866,7 @@ bool GVN::processNonLocalLoad(LoadInst* L,
MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
// Find the non-local dependencies of the load
DenseMap<BasicBlock*, Value*> deps;
DenseMap<BasicBlock*, DepResultTy> deps;
MD.getNonLocalDependency(L, deps);
// If we had to process more than one hundred blocks to find the
@ -873,19 +878,19 @@ bool GVN::processNonLocalLoad(LoadInst* L,
DenseMap<BasicBlock*, Value*> repl;
// Filter out useless results (non-locals, etc)
for (DenseMap<BasicBlock*, Value*>::iterator I = deps.begin(), E = deps.end();
I != E; ++I) {
if (I->second == MemoryDependenceAnalysis::None)
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = deps.begin(),
E = deps.end(); I != E; ++I) {
if (I->second.getInt() == MemoryDependenceAnalysis::None)
return false;
if (I->second == MemoryDependenceAnalysis::NonLocal)
if (I->second.getInt() == MemoryDependenceAnalysis::NonLocal)
continue;
if (StoreInst* S = dyn_cast<StoreInst>(I->second)) {
if (StoreInst* S = dyn_cast<StoreInst>(I->second.getPointer())) {
if (S->getPointerOperand() != L->getPointerOperand())
return false;
repl[I->first] = S->getOperand(0);
} else if (LoadInst* LD = dyn_cast<LoadInst>(I->second)) {
} else if (LoadInst* LD = dyn_cast<LoadInst>(I->second.getPointer())) {
if (LD->getPointerOperand() != L->getPointerOperand())
return false;
repl[I->first] = LD;
@ -936,8 +941,8 @@ bool GVN::processLoad(LoadInst *L, DenseMap<Value*, LoadInst*> &lastLoad,
// ... to a pointer that has been loaded from before...
MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
bool removedNonLocal = false;
Instruction* dep = MD.getDependency(L);
if (dep == MemoryDependenceAnalysis::NonLocal &&
DepResultTy dep = MD.getDependency(L);
if (dep.getInt() == MemoryDependenceAnalysis::NonLocal &&
L->getParent() != &L->getParent()->getParent()->getEntryBlock()) {
removedNonLocal = processNonLocalLoad(L, toErase);
@ -952,11 +957,10 @@ bool GVN::processLoad(LoadInst *L, DenseMap<Value*, LoadInst*> &lastLoad,
// Walk up the dependency chain until we either find
// a dependency we can use, or we can't walk any further
while (dep != MemoryDependenceAnalysis::None &&
dep != MemoryDependenceAnalysis::NonLocal &&
(isa<LoadInst>(dep) || isa<StoreInst>(dep))) {
while (dep.getInt() == MemoryDependenceAnalysis::Normal &&
(isa<LoadInst>(dep.getPointer()) || isa<StoreInst>(dep.getPointer()))){
// ... that depends on a store ...
if (StoreInst* S = dyn_cast<StoreInst>(dep)) {
if (StoreInst* S = dyn_cast<StoreInst>(dep.getPointer())) {
if (S->getPointerOperand() == pointer) {
// Remove it!
MD.removeInstruction(L);
@ -974,7 +978,7 @@ bool GVN::processLoad(LoadInst *L, DenseMap<Value*, LoadInst*> &lastLoad,
// If we don't depend on a store, and we haven't
// been loaded before, bail.
break;
} else if (dep == last) {
} else if (dep.getPointer() == last) {
// Remove it!
MD.removeInstruction(L);
@ -985,16 +989,15 @@ bool GVN::processLoad(LoadInst *L, DenseMap<Value*, LoadInst*> &lastLoad,
break;
} else {
dep = MD.getDependency(L, dep);
dep = MD.getDependency(L, dep.getPointer());
}
}
if (dep != MemoryDependenceAnalysis::None &&
dep != MemoryDependenceAnalysis::NonLocal &&
isa<AllocationInst>(dep)) {
if (dep.getInt() == MemoryDependenceAnalysis::Normal &&
isa<AllocationInst>(dep.getPointer())) {
// Check that this load is actually from the
// allocation we found
if (L->getOperand(0)->getUnderlyingObject() == dep) {
if (L->getOperand(0)->getUnderlyingObject() == dep.getPointer()) {
// If this load depends directly on an allocation, there isn't
// anything stored there; therefore, we can optimize this load
// to undef.

View File

@ -629,18 +629,18 @@ bool MemCpyOpt::processMemCpy(MemCpyInst* M) {
// There are two possible optimizations we can do for memcpy:
// a) memcpy-memcpy xform which exposes redundance for DSE
// b) call-memcpy xform for return slot optimization
Instruction* dep = MD.getDependency(M);
if (dep == MemoryDependenceAnalysis::None ||
dep == MemoryDependenceAnalysis::NonLocal)
MemoryDependenceAnalysis::DepResultTy dep = MD.getDependency(M);
if (dep.getInt() == MemoryDependenceAnalysis::None ||
dep.getInt() == MemoryDependenceAnalysis::NonLocal)
return false;
else if (!isa<MemCpyInst>(dep)) {
if (CallInst* C = dyn_cast<CallInst>(dep))
else if (!isa<MemCpyInst>(dep.getPointer())) {
if (CallInst* C = dyn_cast<CallInst>(dep.getPointer()))
return performCallSlotOptzn(M, C);
else
return false;
}
MemCpyInst* MDep = cast<MemCpyInst>(dep);
MemCpyInst* MDep = cast<MemCpyInst>(dep.getPointer());
// We can only transform memcpys where the dest of one is the source of the
// other
@ -691,7 +691,7 @@ bool MemCpyOpt::processMemCpy(MemCpyInst* M) {
// If C and M don't interfere, then this is a valid transformation. If they
// did, this would mean that the two sources overlap, which would be bad.
if (MD.getDependency(C) == MDep) {
if (MD.getDependency(C) == dep) {
MD.dropInstruction(M);
M->eraseFromParent();