commit 667ccf231b (parent 47a24ab4d7)
Author: Eli Friedman
Date:   2011-08-15 20:54:19 +00:00

    Misc analysis passes that need to be aware of atomic load/store.

    git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@137650 91177308-0d34-0410-b5e6-96231b3b80d8

4 changed files with 50 additions and 19 deletions
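
For reference, the two predicates this patch moves to have roughly the
following semantics (an illustrative sketch with stand-in types, not the
in-tree LoadInst/StoreInst implementation):

// Stand-in model of the predicates used throughout this patch.
enum Ordering { NotAtomic, Unordered, Monotonic, Acquire, Release, SeqCst };
struct MemAccess { Ordering Ord; bool Volatile; };

// isUnordered(): no ordering constraint and not volatile. These are the
// only accesses the analyses below still reason about precisely.
bool isUnordered(const MemAccess &A) {
  return (A.Ord == NotAtomic || A.Ord == Unordered) && !A.Volatile;
}

// isSimple(): not atomic at all and not volatile; the strictest class,
// required below when widening integer loads.
bool isSimple(const MemAccess &A) {
  return A.Ord == NotAtomic && !A.Volatile;
}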

--- a/lib/Analysis/AliasAnalysis.cpp
+++ b/lib/Analysis/AliasAnalysis.cpp
@@ -268,8 +268,8 @@ AliasAnalysis::getLocationForDest(const MemIntrinsic *MTI) {
 AliasAnalysis::ModRefResult
 AliasAnalysis::getModRefInfo(const LoadInst *L, const Location &Loc) {
-  // Be conservative in the face of volatile.
-  if (L->isVolatile())
+  // Be conservative in the face of volatile/atomic.
+  if (!L->isUnordered())
     return ModRef;
 
   // If the load address doesn't alias the given address, it doesn't read
@@ -283,8 +283,8 @@ AliasAnalysis::getModRefInfo(const LoadInst *L, const Location &Loc) {
 AliasAnalysis::ModRefResult
 AliasAnalysis::getModRefInfo(const StoreInst *S, const Location &Loc) {
-  // Be conservative in the face of volatile.
-  if (S->isVolatile())
+  // Be conservative in the face of volatile/atomic.
+  if (!S->isUnordered())
     return ModRef;
 
   // If the store address cannot alias the pointer in question, then the

--- a/lib/Analysis/LoopDependenceAnalysis.cpp
+++ b/lib/Analysis/LoopDependenceAnalysis.cpp
@@ -76,7 +76,13 @@ static void GetMemRefInstrs(const Loop *L,
 }
 
 static bool IsLoadOrStoreInst(Value *I) {
-  return isa<LoadInst>(I) || isa<StoreInst>(I);
+  // Returns true if the load or store can be analyzed. Atomic and volatile
+  // operations have properties which this analysis does not understand.
+  if (LoadInst *LI = dyn_cast<LoadInst>(I))
+    return LI->isUnordered();
+  else if (StoreInst *SI = dyn_cast<StoreInst>(I))
+    return SI->isUnordered();
+  return false;
 }
 
 static Value *GetPointerOperand(Value *I) {
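
For context, a sketch of how a filter like this is typically used by the
pass (illustrative only; collectMemRefs is not code from this commit):

#include "llvm/BasicBlock.h"
#include "llvm/Instructions.h"
#include "llvm/ADT/SmallVector.h"
using namespace llvm;

// Collect only the memory operations the analysis can still handle;
// atomic and volatile accesses are now filtered out by the predicate.
static void collectMemRefs(BasicBlock *BB,
                           SmallVectorImpl<Instruction *> &MemOps) {
  for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I)
    if (IsLoadOrStoreInst(I))
      MemOps.push_back(I);
}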

--- a/lib/Analysis/MemDepPrinter.cpp
+++ b/lib/Analysis/MemDepPrinter.cpp
@@ -102,12 +102,21 @@ bool MemDepPrinter::runOnFunction(Function &F) {
     } else {
       SmallVector<NonLocalDepResult, 4> NLDI;
       if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
-        // FIXME: Volatile is not handled properly here.
+        if (!LI->isUnordered()) {
+          // FIXME: Handle atomic/volatile loads.
+          Deps[Inst].insert(std::make_pair(InstAndClobberFlag(0, false),
+                                           static_cast<BasicBlock *>(0)));
+          continue;
+        }
         AliasAnalysis::Location Loc = AA.getLocation(LI);
-        MDA.getNonLocalPointerDependency(Loc, !LI->isVolatile(),
-                                         LI->getParent(), NLDI);
+        MDA.getNonLocalPointerDependency(Loc, true, LI->getParent(), NLDI);
       } else if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
-        // FIXME: Volatile is not handled properly here.
+        if (!SI->isUnordered()) {
+          // FIXME: Handle atomic/volatile stores.
+          Deps[Inst].insert(std::make_pair(InstAndClobberFlag(0, false),
+                                           static_cast<BasicBlock *>(0)));
+          continue;
+        }
         AliasAnalysis::Location Loc = AA.getLocation(SI);
         MDA.getNonLocalPointerDependency(Loc, false, SI->getParent(), NLDI);
       } else if (VAArgInst *VI = dyn_cast<VAArgInst>(Inst)) {
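
The null (instruction, block) pair above acts as an "unknown dependence"
sentinel. A minimal stand-in of the resulting control flow (not the
printer's real data structures):

// Unordered accesses still get a real dependency query; anything atomic
// or volatile is recorded as unknown and skipped.
struct Access { bool IsLoadOrStore; bool IsUnordered; };
enum Handling { QueryDeps, RecordUnknown };

Handling classify(const Access &A) {
  if (A.IsLoadOrStore && !A.IsUnordered)
    return RecordUnknown;  // insert the null sentinel, then continue
  return QueryDeps;        // fall through to getNonLocalPointerDependency
}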

--- a/lib/Analysis/MemoryDependenceAnalysis.cpp
+++ b/lib/Analysis/MemoryDependenceAnalysis.cpp
@@ -120,21 +120,27 @@ AliasAnalysis::ModRefResult GetLocation(const Instruction *Inst,
                                         AliasAnalysis::Location &Loc,
                                         AliasAnalysis *AA) {
   if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
-    if (LI->isVolatile()) {
-      Loc = AliasAnalysis::Location();
-      return AliasAnalysis::ModRef;
-    }
-    Loc = AA->getLocation(LI);
-    return AliasAnalysis::Ref;
+    if (LI->isUnordered()) {
+      Loc = AA->getLocation(LI);
+      return AliasAnalysis::Ref;
+    } else if (LI->getOrdering() == Monotonic) {
+      Loc = AA->getLocation(LI);
+      return AliasAnalysis::ModRef;
+    }
+    Loc = AliasAnalysis::Location();
+    return AliasAnalysis::ModRef;
   }
 
   if (const StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
-    if (SI->isVolatile()) {
-      Loc = AliasAnalysis::Location();
-      return AliasAnalysis::ModRef;
-    }
-    Loc = AA->getLocation(SI);
-    return AliasAnalysis::Mod;
+    if (SI->isUnordered()) {
+      Loc = AA->getLocation(SI);
+      return AliasAnalysis::Mod;
+    } else if (SI->getOrdering() == Monotonic) {
+      Loc = AA->getLocation(SI);
+      return AliasAnalysis::ModRef;
+    }
+    Loc = AliasAnalysis::Location();
+    return AliasAnalysis::ModRef;
   }
 
   if (const VAArgInst *V = dyn_cast<VAArgInst>(Inst)) {
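
In summary, GetLocation now implements a three-way policy; a self-contained
model of the load side (stand-in types, not LLVM API; classifyLoad is
illustrative):

enum Ordering { NotAtomic, Unordered, Monotonic, Acquire, Release, SeqCst };
enum ModRefKind { Ref, Mod, ModRef };

// PreciseLoc reports whether an exact memory location can be attached.
ModRefKind classifyLoad(Ordering Ord, bool Volatile, bool &PreciseLoc) {
  if (!Volatile && (Ord == NotAtomic || Ord == Unordered)) {
    PreciseLoc = true;   // ordinary load: exact location, read-only
    return Ref;
  }
  if (Ord == Monotonic) {
    PreciseLoc = true;   // location still known, but be conservative
    return ModRef;
  }
  PreciseLoc = false;    // volatile or acquire/seq_cst: unknown location
  return ModRef;
}

Stores follow the same ladder with Mod in place of Ref.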
@@ -270,8 +276,8 @@ unsigned MemoryDependenceAnalysis::
 getLoadLoadClobberFullWidthSize(const Value *MemLocBase, int64_t MemLocOffs,
                                 unsigned MemLocSize, const LoadInst *LI,
                                 const TargetData &TD) {
-  // We can only extend non-volatile integer loads.
-  if (!isa<IntegerType>(LI->getType()) || LI->isVolatile()) return 0;
+  // We can only extend simple integer loads.
+  if (!isa<IntegerType>(LI->getType()) || !LI->isSimple()) return 0;
 
   // Get the base of this load.
   int64_t LIOffs = 0;
@@ -369,6 +375,11 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
     // Values depend on loads if the pointers are must aliased. This means that
     // a load depends on another must aliased load from the same value.
     if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
+      // Atomic loads have complications involved.
+      // FIXME: This is overly conservative.
+      if (!LI->isUnordered())
+        return MemDepResult::getClobber(LI);
+
       AliasAnalysis::Location LoadLoc = AA->getLocation(LI);
 
       // If we found a pointer, check if it could be the same as our pointer.
@@ -424,6 +435,11 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
     }
 
     if (StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
+      // Atomic stores have complications involved.
+      // FIXME: This is overly conservative.
+      if (!SI->isUnordered())
+        return MemDepResult::getClobber(SI);
+
       // If alias analysis can tell that this store is guaranteed to not modify
       // the query pointer, ignore it. Use getModRefInfo to handle cases where
       // the query pointer points to constant memory etc.
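
A self-contained sketch of the effect of these two bail-outs (stand-in
types, not LLVM API): once the backwards scan meets any atomic or volatile
access, it reports a clobber rather than continuing to search.

#include <vector>

enum DepKind { Def, Clobber, Unknown };
struct Access { bool IsUnordered; int PtrId; };

DepKind findLocalDep(const std::vector<Access> &Preceding, int QueryPtr) {
  // Walk from the nearest preceding access backwards, as
  // getPointerDependencyFrom does.
  for (std::vector<Access>::const_reverse_iterator I = Preceding.rbegin(),
       E = Preceding.rend(); I != E; ++I) {
    if (!I->IsUnordered)
      return Clobber;           // the conservative early exit added above
    if (I->PtrId == QueryPtr)   // stand-in for a MustAlias query
      return Def;
  }
  return Unknown;               // defer to the non-local walk
}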