Make provenance checking conservative in cases when pointers-to-strong-pointers may be in play. These can lead to retains and releases happening in unstructured ways, foiling the optimizer. This fixes rdar://12150909.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@163180 91177308-0d34-0410-b5e6-96231b3b80d8
Parent: be11991208
Commit: 230768bd13
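The failure mode described above is easiest to see when a strong pointer escapes into memory: the retain is applied to one value, while the balancing release is applied to a value loaded back out of the slot. Below is a minimal standalone C++ model of that shape (the Object and Block types and the retain/release helpers are invented for illustration only; they are not LLVM or Objective-C runtime APIs). The new test11 added at the bottom of this change exercises the same pattern at the IR level, with the slot living inside a stack block.

#include <cassert>

// Stand-in for a reference-counted Objective-C object.
struct Object { int refCount = 1; };

static Object *retain(Object *o)  { ++o->refCount; return o; }
static void    release(Object *o) { --o->refCount; }

// A pointer-to-strong-pointer: the strong reference lives in memory,
// not in a local value the optimizer can track directly.
struct Block { Object *strongField = nullptr; };

int main() {
  Object object;
  Object *call = &object;            // e.g. the result of some call like @def()

  Block block;
  block.strongField = retain(call);  // the retained pointer escapes into the slot

  // ... the block may run and read strongField in ways the optimizer can't see ...

  Object *loaded = block.strongField; // a different value with the same referent
  release(loaded);                    // the release is paired only through memory

  // Pairing the retain on 'call' with the release on 'loaded' requires knowing
  // that the two values are related; provenance checking must therefore treat
  // the store into 'strongField' as an escape and stay conservative.
  assert(object.refCount == 1);
  return 0;
}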
@@ -194,6 +194,11 @@ public:
     return isNoAlias(Location(V1, V1Size), Location(V2, V2Size));
   }
 
+  /// isNoAlias - A convenience wrapper.
+  bool isNoAlias(const Value *V1, const Value *V2) {
+    return isNoAlias(Location(V1), Location(V2));
+  }
+
   /// isMustAlias - A convenience wrapper.
   bool isMustAlias(const Location &LocA, const Location &LocB) {
     return alias(LocA, LocB) == MustAlias;
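The added overload lets a caller ask about two values directly instead of constructing Location objects by hand. A hypothetical helper using it might look like this (the function name and its arguments are invented; only the AA.isNoAlias call reflects the API added above):

#include "llvm/Analysis/AliasAnalysis.h"

// Hypothetical helper: true when AA can prove the two values never alias.
static bool provablyDistinct(llvm::AliasAnalysis &AA,
                             const llvm::Value *A, const llvm::Value *B) {
  // Equivalent to AA.isNoAlias(AliasAnalysis::Location(A),
  //                            AliasAnalysis::Location(B)).
  return AA.isNoAlias(A, B);
}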
@@ -1236,16 +1236,19 @@ bool ProvenanceAnalysis::relatedCheck(const Value *A, const Value *B) {
 
   // An ObjC-Identified object can't alias a load if it is never locally stored.
   if (AIsIdentified) {
+    // Check for an obvious escape.
+    if (isa<LoadInst>(B))
+      return isStoredObjCPointer(A);
     if (BIsIdentified) {
-      // If both pointers have provenance, they can be directly compared.
-      if (A != B)
-        return false;
-    } else {
-      if (isa<LoadInst>(B))
-        return isStoredObjCPointer(A);
+      // Check for an obvious escape.
+      if (isa<LoadInst>(A))
+        return isStoredObjCPointer(B);
+      // Both pointers are identified and escapes aren't an evident problem.
+      return false;
     }
-  } else {
-    if (BIsIdentified && isa<LoadInst>(A))
+  } else if (BIsIdentified) {
+    // Check for an obvious escape.
+    if (isa<LoadInst>(A))
       return isStoredObjCPointer(B);
   }
 
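Restated outside the pass, the patched logic asks the escape question before trusting identity. The following is a small compilable model of that control flow (PtrInfo and the isStoredObjCPointer callback are stand-ins invented for illustration; only the branch structure mirrors the hunk above, and anything that would fall through to the pass's remaining checks is treated as conservatively related):

#include <functional>

// Stand-ins for the facts the real pass computes about each value.
struct PtrInfo {
  bool isIdentified; // an ObjC-identified object (result of an allocation, etc.)
  bool isLoad;       // the value is produced by a load instruction
};

// Returns true when the two values must conservatively be treated as related.
static bool relatedCheckModel(const PtrInfo &A, const PtrInfo &B,
                              const std::function<bool(const PtrInfo &)> &isStoredObjCPointer) {
  if (A.isIdentified) {
    // Check for an obvious escape: an identified object can only alias a load
    // if the object was ever stored where that load could observe it.
    if (B.isLoad)
      return isStoredObjCPointer(A);
    if (B.isIdentified) {
      // Check for an obvious escape in the other direction.
      if (A.isLoad)
        return isStoredObjCPointer(B);
      // Both pointers are identified and escapes aren't an evident problem.
      return false;
    }
  } else if (B.isIdentified) {
    // Check for an obvious escape.
    if (A.isLoad)
      return isStoredObjCPointer(B);
  }
  // Anything else falls through to the pass's remaining checks; model that
  // as "related" to stay conservative.
  return true;
}

int main() {
  PtrInfo call{/*isIdentified=*/true, /*isLoad=*/false};
  PtrInfo loaded{/*isIdentified=*/false, /*isLoad=*/true};
  bool escaped = true; // the strong pointer was stored into a slot somewhere
  // With an escape on record, the two values must be treated as related.
  return relatedCheckModel(call, loaded,
                           [&](const PtrInfo &) { return escaped; }) ? 0 : 1;
}

Compared with the old code, two distinct identified values are no longer assumed unrelated on identity alone; an obvious escape through memory is ruled out first.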
@@ -1381,9 +1384,6 @@ namespace {
   /// PtrState - This class summarizes several per-pointer runtime properties
   /// which are propogated through the flow graph.
   class PtrState {
-    /// NestCount - The known minimum level of retain+release nesting.
-    unsigned NestCount;
-
     /// KnownPositiveRefCount - True if the reference count is known to
     /// be incremented.
     bool KnownPositiveRefCount;
@@ -1401,7 +1401,7 @@ namespace {
     /// TODO: Encapsulate this better.
     RRInfo RRI;
 
-    PtrState() : NestCount(0), KnownPositiveRefCount(false), Partial(false),
+    PtrState() : KnownPositiveRefCount(false), Partial(false),
                  Seq(S_None) {}
 
     void SetKnownPositiveRefCount() {
@@ -1416,18 +1416,6 @@ namespace {
       return KnownPositiveRefCount;
     }
 
-    void IncrementNestCount() {
-      if (NestCount != UINT_MAX) ++NestCount;
-    }
-
-    void DecrementNestCount() {
-      if (NestCount != 0) --NestCount;
-    }
-
-    bool IsKnownNested() const {
-      return NestCount > 0;
-    }
-
     void SetSeq(Sequence NewSeq) {
       Seq = NewSeq;
     }
@@ -1454,7 +1442,6 @@ void
 PtrState::Merge(const PtrState &Other, bool TopDown) {
   Seq = MergeSeqs(Seq, Other.Seq, TopDown);
   KnownPositiveRefCount = KnownPositiveRefCount && Other.KnownPositiveRefCount;
-  NestCount = std::min(NestCount, Other.NestCount);
 
   // We can't merge a plain objc_retain with an objc_retainBlock.
   if (RRI.IsRetainBlock != Other.RRI.IsRetainBlock)
@@ -1868,6 +1855,26 @@ Constant *ObjCARCOpt::getAutoreleaseCallee(Module *M) {
   return AutoreleaseCallee;
 }
 
+/// IsPotentialUse - Test whether the given value is possible a
+/// reference-counted pointer, including tests which utilize AliasAnalysis.
+static bool IsPotentialUse(const Value *Op, AliasAnalysis &AA) {
+  // First make the rudimentary check.
+  if (!IsPotentialUse(Op))
+    return false;
+
+  // Objects in constant memory are not reference-counted.
+  if (AA.pointsToConstantMemory(Op))
+    return false;
+
+  // Pointers in constant memory are not pointing to reference-counted objects.
+  if (const LoadInst *LI = dyn_cast<LoadInst>(Op))
+    if (AA.pointsToConstantMemory(LI->getPointerOperand()))
+      return false;
+
+  // Otherwise assume the worst.
+  return true;
+}
+
 /// CanAlterRefCount - Test whether the given instruction can result in a
 /// reference count modification (positive or negative) for the pointer's
 /// object.
@@ -1894,7 +1901,7 @@ CanAlterRefCount(const Instruction *Inst, const Value *Ptr,
     for (ImmutableCallSite::arg_iterator I = CS.arg_begin(), E = CS.arg_end();
          I != E; ++I) {
       const Value *Op = *I;
-      if (IsPotentialUse(Op) && PA.related(Ptr, Op))
+      if (IsPotentialUse(Op, *PA.getAA()) && PA.related(Ptr, Op))
         return true;
     }
     return false;
@@ -1919,14 +1926,14 @@ CanUse(const Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA,
     // Comparing a pointer with null, or any other constant, isn't really a use,
     // because we don't care what the pointer points to, or about the values
     // of any other dynamic reference-counted pointers.
-    if (!IsPotentialUse(ICI->getOperand(1)))
+    if (!IsPotentialUse(ICI->getOperand(1), *PA.getAA()))
       return false;
   } else if (ImmutableCallSite CS = static_cast<const Value *>(Inst)) {
     // For calls, just check the arguments (and not the callee operand).
     for (ImmutableCallSite::arg_iterator OI = CS.arg_begin(),
          OE = CS.arg_end(); OI != OE; ++OI) {
       const Value *Op = *OI;
-      if (IsPotentialUse(Op) && PA.related(Ptr, Op))
+      if (IsPotentialUse(Op, *PA.getAA()) && PA.related(Ptr, Op))
         return true;
     }
     return false;
@@ -1936,14 +1943,14 @@ CanUse(const Instruction *Inst, const Value *Ptr, ProvenanceAnalysis &PA,
     const Value *Op = GetUnderlyingObjCPtr(SI->getPointerOperand());
     // If we can't tell what the underlying object was, assume there is a
     // dependence.
-    return IsPotentialUse(Op) && PA.related(Op, Ptr);
+    return IsPotentialUse(Op, *PA.getAA()) && PA.related(Op, Ptr);
   }
 
   // Check each operand for a match.
   for (User::const_op_iterator OI = Inst->op_begin(), OE = Inst->op_end();
        OI != OE; ++OI) {
     const Value *Op = *OI;
-    if (IsPotentialUse(Op) && PA.related(Ptr, Op))
+    if (IsPotentialUse(Op, *PA.getAA()) && PA.related(Ptr, Op))
       return true;
   }
   return false;
@@ -2612,11 +2619,11 @@ ObjCARCOpt::VisitInstructionBottomUp(Instruction *Inst,
     MDNode *ReleaseMetadata = Inst->getMetadata(ImpreciseReleaseMDKind);
     S.ResetSequenceProgress(ReleaseMetadata ? S_MovableRelease : S_Release);
     S.RRI.ReleaseMetadata = ReleaseMetadata;
-    S.RRI.KnownSafe = S.IsKnownNested() || S.IsKnownIncremented();
+    S.RRI.KnownSafe = S.IsKnownIncremented();
     S.RRI.IsTailCallRelease = cast<CallInst>(Inst)->isTailCall();
     S.RRI.Calls.insert(Inst);
 
-    S.IncrementNestCount();
+    S.SetKnownPositiveRefCount();
     break;
   }
   case IC_RetainBlock:
@@ -2631,7 +2638,6 @@ ObjCARCOpt::VisitInstructionBottomUp(Instruction *Inst,
 
     PtrState &S = MyStates.getPtrBottomUpState(Arg);
     S.SetKnownPositiveRefCount();
-    S.DecrementNestCount();
 
     switch (S.GetSeq()) {
     case S_Stop:
@@ -2823,12 +2829,11 @@ ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
 
       S.ResetSequenceProgress(S_Retain);
       S.RRI.IsRetainBlock = Class == IC_RetainBlock;
-      // Don't check S.IsKnownIncremented() here because it's not sufficient.
-      S.RRI.KnownSafe = S.IsKnownNested();
+      S.RRI.KnownSafe = S.IsKnownIncremented();
       S.RRI.Calls.insert(Inst);
     }
 
-    S.IncrementNestCount();
+    S.SetKnownPositiveRefCount();
 
     // A retain can be a potential use; procede to the generic checking
     // code below.
@@ -2838,7 +2843,7 @@ ObjCARCOpt::VisitInstructionTopDown(Instruction *Inst,
     Arg = GetObjCArg(Inst);
 
     PtrState &S = MyStates.getPtrTopDownState(Arg);
-    S.DecrementNestCount();
+    S.ClearRefCount();
 
     switch (S.GetSeq()) {
     case S_Retain:
@@ -1,4 +1,4 @@
-; RUN: opt -objc-arc -S < %s | FileCheck %s
+; RUN: opt -basicaa -objc-arc -S < %s | FileCheck %s
 
 target datalayout = "e-p:64:64:64"
 
@@ -1498,7 +1498,7 @@ define i8* @test49(i8* %p) nounwind {
 }
 
 ; Do delete retain+release with intervening stores of the
-; address value;
+; address value.
 
 ; CHECK: define void @test50(
 ; CHECK-NOT: @objc_
@@ -16,6 +16,10 @@ declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i32, i1) nounwind
 declare i8* @objc_msgSend(i8*, i8*, ...) nonlazybind
 declare void @use(i8*)
 declare void @objc_release(i8*)
+declare i8* @def()
+declare void @__crasher_block_invoke(i8* nocapture)
+declare i8* @objc_retainBlock(i8*)
+declare void @__crasher_block_invoke1(i8* nocapture)
 
 !0 = metadata !{}
 
@@ -279,11 +283,13 @@ forcoll.empty:
   ret void
 }
 
-; Delete a nested retain+release pair.
+; TODO: Delete a nested retain+release pair.
+; The optimizer currently can't do this, because isn't isn't sophisticated enough in
+; reasnoning about nesting.
 
 ; CHECK: define void @test6(
 ; CHECK: call i8* @objc_retain
-; CHECK-NOT: @objc_retain
+; CHECK: @objc_retain
 ; CHECK: }
 define void @test6() nounwind {
 entry:
@@ -345,11 +351,13 @@ forcoll.empty:
   ret void
 }
 
-; Delete a nested retain+release pair.
+; TODO: Delete a nested retain+release pair.
+; The optimizer currently can't do this, because isn't isn't sophisticated enough in
+; reasnoning about nesting.
 
 ; CHECK: define void @test7(
 ; CHECK: call i8* @objc_retain
-; CHECK-NOT: @objc_retain
+; CHECK: @objc_retain
 ; CHECK: }
 define void @test7() nounwind {
 entry:
@@ -553,12 +561,12 @@ forcoll.empty:
   ret void
 }
 
-; Like test9, but without a split backedge. This we can optimize.
+; Like test9, but without a split backedge. TODO: optimize this.
 
 ; CHECK: define void @test9b(
 ; CHECK: call i8* @objc_retain
 ; CHECK: call i8* @objc_retain
-; CHECK-NOT: @objc_retain
+; CHECK: @objc_retain
 ; CHECK: }
 define void @test9b() nounwind {
 entry:
@@ -687,12 +695,12 @@ forcoll.empty:
   ret void
 }
 
-; Like test10, but without a split backedge. This we can optimize.
+; Like test10, but without a split backedge. TODO: optimize this.
 
 ; CHECK: define void @test10b(
 ; CHECK: call i8* @objc_retain
 ; CHECK: call i8* @objc_retain
-; CHECK-NOT: @objc_retain
+; CHECK: @objc_retain
 ; CHECK: }
 define void @test10b() nounwind {
 entry:
@@ -751,3 +759,64 @@ forcoll.empty:
   call void @objc_release(i8* %0) nounwind, !clang.imprecise_release !0
   ret void
 }
+
+; Pointers to strong pointers can obscure provenance relationships. Be conservative
+; in the face of escaping pointers. rdar://12150909.
+
+%struct.__block_d = type { i64, i64 }
+
+@_NSConcreteStackBlock = external global i8*
+@__block_d_tmp = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }
+@__block_d_tmp5 = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }
+
+; CHECK: define void @test11(
+; CHECK: tail call i8* @objc_retain(i8* %call) nounwind
+; CHECK: tail call i8* @objc_retain(i8* %call) nounwind
+; CHECK: call void @objc_release(i8* %call) nounwind, !clang.imprecise_release !0
+; CHECK: }
+define void @test11() {
+entry:
+  %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, align 8
+  %block9 = alloca <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>, align 8
+  %call = call i8* @def(), !clang.arc.no_objc_arc_exceptions !0
+  %foo = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 5
+  %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 0
+  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
+  %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 1
+  store i32 1107296256, i32* %block.flags, align 8
+  %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 2
+  store i32 0, i32* %block.reserved, align 4
+  %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 3
+  store i8* bitcast (void (i8*)* @__crasher_block_invoke to i8*), i8** %block.invoke, align 8
+  %block.d = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block, i64 0, i32 4
+  store %struct.__block_d* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_d_tmp to %struct.__block_d*), %struct.__block_d** %block.d, align 8
+  %foo2 = tail call i8* @objc_retain(i8* %call) nounwind
+  store i8* %foo2, i8** %foo, align 8
+  %foo4 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block to i8*
+  %foo5 = call i8* @objc_retainBlock(i8* %foo4) nounwind
+  call void @use(i8* %foo5), !clang.arc.no_objc_arc_exceptions !0
+  call void @objc_release(i8* %foo5) nounwind
+  %strongdestroy = load i8** %foo, align 8
+  call void @objc_release(i8* %strongdestroy) nounwind, !clang.imprecise_release !0
+  %foo10 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 5
+  %block.isa11 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 0
+  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa11, align 8
+  %block.flags12 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 1
+  store i32 1107296256, i32* %block.flags12, align 8
+  %block.reserved13 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 2
+  store i32 0, i32* %block.reserved13, align 4
+  %block.invoke14 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 3
+  store i8* bitcast (void (i8*)* @__crasher_block_invoke1 to i8*), i8** %block.invoke14, align 8
+  %block.d15 = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9, i64 0, i32 4
+  store %struct.__block_d* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_d_tmp5 to %struct.__block_d*), %struct.__block_d** %block.d15, align 8
+  %foo18 = call i8* @objc_retain(i8* %call) nounwind
+  store i8* %call, i8** %foo10, align 8
+  %foo20 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_d*, i8* }>* %block9 to i8*
+  %foo21 = call i8* @objc_retainBlock(i8* %foo20) nounwind
+  call void @use(i8* %foo21), !clang.arc.no_objc_arc_exceptions !0
+  call void @objc_release(i8* %foo21) nounwind
+  %strongdestroy25 = load i8** %foo10, align 8
+  call void @objc_release(i8* %strongdestroy25) nounwind, !clang.imprecise_release !0
+  call void @objc_release(i8* %call) nounwind, !clang.imprecise_release !0
+  ret void
+}