diff --git a/lib/Analysis/MemoryDependenceAnalysis.cpp b/lib/Analysis/MemoryDependenceAnalysis.cpp
index 33fe425f135..8f22b0ed3de 100644
--- a/lib/Analysis/MemoryDependenceAnalysis.cpp
+++ b/lib/Analysis/MemoryDependenceAnalysis.cpp
@@ -407,21 +407,33 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
     // Values depend on loads if the pointers are must aliased.  This means that
     // a load depends on another must aliased load from the same value.
+    // One exception is atomic loads: a value can depend on an atomic load that it
+    // does not alias with when this atomic load indicates that another thread may
+    // be accessing the location.
     if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
       // Atomic loads have complications involved.
       // A monotonic load is OK if the query inst is itself not atomic.
       // FIXME: This is overly conservative.
       if (!LI->isUnordered()) {
-        if (!QueryInst || LI->getOrdering() != Monotonic)
+        if (!QueryInst)
+          return MemDepResult::getClobber(LI);
+        if (LI->getOrdering() != Monotonic)
           return MemDepResult::getClobber(LI);
         if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst))
-          if (!QueryLI->isUnordered())
+          if (!QueryLI->isSimple())
             return MemDepResult::getClobber(LI);
         if (auto *QuerySI = dyn_cast<StoreInst>(QueryInst))
-          if (!QuerySI->isUnordered())
+          if (!QuerySI->isSimple())
             return MemDepResult::getClobber(LI);
       }
 
+      // FIXME: this is overly conservative.
+      // While volatile access cannot be eliminated, they do not have to clobber
+      // non-aliasing locations, as normal accesses can for example be reordered
+      // with volatile accesses.
+      if (LI->isVolatile())
+        return MemDepResult::getClobber(LI);
+
       AliasAnalysis::Location LoadLoc = AA->getLocation(LI);
 
       // If we found a pointer, check if it could be the same as our pointer.
@@ -481,16 +493,25 @@ getPointerDependencyFrom(const AliasAnalysis::Location &MemLoc, bool isLoad,
       // A monotonic store is OK if the query inst is itself not atomic.
       // FIXME: This is overly conservative.
       if (!SI->isUnordered()) {
-        if (!QueryInst || SI->getOrdering() != Monotonic)
+        if (!QueryInst)
+          return MemDepResult::getClobber(SI);
+        if (SI->getOrdering() != Monotonic)
           return MemDepResult::getClobber(SI);
         if (auto *QueryLI = dyn_cast<LoadInst>(QueryInst))
-          if (!QueryLI->isUnordered())
+          if (!QueryLI->isSimple())
             return MemDepResult::getClobber(SI);
         if (auto *QuerySI = dyn_cast<StoreInst>(QueryInst))
-          if (!QuerySI->isUnordered())
+          if (!QuerySI->isSimple())
             return MemDepResult::getClobber(SI);
       }
 
+      // FIXME: this is overly conservative.
+      // While volatile access cannot be eliminated, they do not have to clobber
+      // non-aliasing locations, as normal accesses can for example be reordered
+      // with volatile accesses.
+      if (SI->isVolatile())
+        return MemDepResult::getClobber(SI);
+
       // If alias analysis can tell that this store is guaranteed to not modify
       // the query pointer, ignore it.  Use getModRefInfo to handle cases where
       // the query pointer points to constant memory etc.
diff --git a/test/Transforms/GVN/atomic.ll b/test/Transforms/GVN/atomic.ll
index 094e22bd07e..16ba94066a8 100644
--- a/test/Transforms/GVN/atomic.ll
+++ b/test/Transforms/GVN/atomic.ll
@@ -78,3 +78,29 @@ entry:
   %x3 = add i32 %x, %x2
   ret i32 %x3
 }
+
+; GVN across monotonic store (allowed)
+define i32 @test7() nounwind uwtable ssp {
+; CHECK: test7
+; CHECK: add i32 %x, %x
+entry:
+  %x = load i32* @y
+  store atomic i32 %x, i32* @x monotonic, align 4
+  %y = load i32* @y
+  %z = add i32 %x, %y
+  ret i32 %z
+}
+
+; GVN of an unordered across monotonic load (not allowed)
+define i32 @test8() nounwind uwtable ssp {
+; CHECK: test8
+; CHECK: add i32 %x, %y
+entry:
+  %x = load atomic i32* @y unordered, align 4
+  %clobber = load atomic i32* @x monotonic, align 4
+  %y = load atomic i32* @y monotonic, align 4
+  %z = add i32 %x, %y
+  ret i32 %z
+}
+
+
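For illustration only, and not part of the patch: the FIXME comments above note that volatile accesses remain conservative, since getPointerDependencyFrom still returns a clobber for any volatile load or store regardless of aliasing. A hypothetical @test9 along the following lines (reusing this test file's @x/@y globals) would therefore still not be transformed by GVN; both loads of @y would remain and the result would stay as add i32 %x, %y.

; Hypothetical sketch, not included in the patch: GVN across a volatile load
; is still blocked after this change, even though @x and @y do not alias.
define i32 @test9() nounwind uwtable ssp {
entry:
  %x = load i32* @y
  %clobber = load volatile i32* @x, align 4
  %y = load i32* @y
  %z = add i32 %x, %y
  ret i32 %z
}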