Teach the ARC optimizer about the !clang.arc.copy_on_escape metadata
tag on objc_retainBlock calls, which indicates that they may be
optimized away.

rdar://10211286

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@142298 91177308-0d34-0410-b5e6-96231b3b80d8
commit a974beaa1f (parent fa1ee88052)
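Before the diff, a minimal C++ sketch (not part of this commit) of how the clang.arc.copy_on_escape tag is read and written. The getMDKindID, getMetadata, and setMetadata calls mirror the ones the patch adds to the ARC optimizer below; the helper function names and surrounding structure are illustrative assumptions only.

// Sketch only: hypothetical helpers mirroring the metadata handling the
// ARC optimizer gains in this commit.
#include "llvm/ADT/ArrayRef.h"
#include "llvm/Instructions.h"
#include "llvm/Metadata.h"
#include "llvm/Module.h"
using namespace llvm;

// True if an objc_retainBlock call carries !clang.arc.copy_on_escape,
// meaning the optimizer may treat it like a plain objc_retain.
static bool hasCopyOnEscape(CallInst *RetainBlockCall, Module &M) {
  unsigned Kind = M.getContext().getMDKindID("clang.arc.copy_on_escape");
  return RetainBlockCall->getMetadata(Kind) != 0;
}

// Attach the tag to a newly created objc_retainBlock call, the same way
// MoveCalls does in the diff: an empty MDNode under the same kind ID.
static void setCopyOnEscape(CallInst *RetainBlockCall, Module &M) {
  unsigned Kind = M.getContext().getMDKindID("clang.arc.copy_on_escape");
  RetainBlockCall->setMetadata(Kind,
                               MDNode::get(M.getContext(), ArrayRef<Value*>()));
}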
@@ -1154,6 +1154,10 @@ namespace {
     /// opposed to objc_retain calls).
     bool IsRetainBlock;
 
+    /// CopyOnEscape - True if this the Calls are objc_retainBlock calls
+    /// which all have the !clang.arc.copy_on_escape metadata.
+    bool CopyOnEscape;
+
     /// IsTailCallRelease - True of the objc_release calls are all marked
     /// with the "tail" keyword.
     bool IsTailCallRelease;
@@ -1176,8 +1180,8 @@ namespace {
     SmallPtrSet<Instruction *, 2> ReverseInsertPts;
 
     RRInfo() :
-      KnownSafe(false), IsRetainBlock(false), IsTailCallRelease(false),
-      Partial(false),
+      KnownSafe(false), IsRetainBlock(false), CopyOnEscape(false),
+      IsTailCallRelease(false), Partial(false),
       ReleaseMetadata(0) {}
 
     void clear();
@@ -1187,6 +1191,7 @@ namespace {
 void RRInfo::clear() {
   KnownSafe = false;
   IsRetainBlock = false;
+  CopyOnEscape = false;
   IsTailCallRelease = false;
   Partial = false;
   ReleaseMetadata = 0;
@@ -1294,6 +1299,7 @@ PtrState::Merge(const PtrState &Other, bool TopDown) {
     if (RRI.ReleaseMetadata != Other.RRI.ReleaseMetadata)
       RRI.ReleaseMetadata = 0;
 
+    RRI.CopyOnEscape = RRI.CopyOnEscape && Other.RRI.CopyOnEscape;
     RRI.KnownSafe = RRI.KnownSafe && Other.RRI.KnownSafe;
     RRI.IsTailCallRelease = RRI.IsTailCallRelease && Other.RRI.IsTailCallRelease;
     RRI.Calls.insert(Other.RRI.Calls.begin(), Other.RRI.Calls.end());
@@ -1482,6 +1488,10 @@ namespace {
     /// metadata.
     unsigned ImpreciseReleaseMDKind;
 
+    /// CopyOnEscape - The Metadata Kind for clang.arc.copy_on_escape
+    /// metadata.
+    unsigned CopyOnEscapeMDKind;
+
     Constant *getRetainRVCallee(Module *M);
     Constant *getAutoreleaseRVCallee(Module *M);
     Constant *getReleaseCallee(Module *M);
@@ -2360,9 +2370,12 @@ ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
       S.SetAtLeastOneRefCount();
       S.DecrementNestCount();
 
-      // An objc_retainBlock call with just a use still needs to be kept,
-      // because it may be copying a block from the stack to the heap.
-      if (Class == IC_RetainBlock && S.GetSeq() == S_Use)
+      // An non-copy-on-escape objc_retainBlock call with just a use still
+      // needs to be kept, because it may be copying a block from the stack
+      // to the heap.
+      if (Class == IC_RetainBlock &&
+          !Inst->getMetadata(CopyOnEscapeMDKind) &&
+          S.GetSeq() == S_Use)
         S.SetSeq(S_CanRelease);
 
       switch (S.GetSeq()) {
@@ -2377,6 +2390,8 @@ ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
         // better to let it remain as the first instruction after a call.
         if (Class != IC_RetainRV) {
           S.RRI.IsRetainBlock = Class == IC_RetainBlock;
+          if (S.RRI.IsRetainBlock)
+            S.RRI.CopyOnEscape = !!Inst->getMetadata(CopyOnEscapeMDKind);
           Retains[Inst] = S.RRI;
         }
         S.ClearSequenceProgress();
@@ -2527,6 +2542,8 @@ ObjCARCOpt::VisitTopDown(BasicBlock *BB,
       S.SetSeq(S_Retain);
       S.RRI.clear();
       S.RRI.IsRetainBlock = Class == IC_RetainBlock;
+      if (S.RRI.IsRetainBlock)
+        S.RRI.CopyOnEscape = !!Inst->getMetadata(CopyOnEscapeMDKind);
       // Don't check S.IsKnownIncremented() here because it's not
       // sufficient.
       S.RRI.KnownSafe = S.IsKnownNested();
@@ -2618,10 +2635,11 @@ ObjCARCOpt::VisitTopDown(BasicBlock *BB,
           S.SetSeq(S_Use);
           break;
         case S_Retain:
-          // An objc_retainBlock call may be responsible for copying the block
-          // data from the stack to the heap. Model this by moving it straight
-          // from S_Retain to S_Use.
+          // A non-copy-on-scape objc_retainBlock call may be responsible for
+          // copying the block data from the stack to the heap. Model this by
+          // moving it straight from S_Retain to S_Use.
           if (S.RRI.IsRetainBlock &&
+              !S.RRI.CopyOnEscape &&
               CanUse(Inst, Ptr, PA, Class)) {
             assert(S.RRI.ReverseInsertPts.empty());
             S.RRI.ReverseInsertPts.insert(Inst);
@@ -2713,6 +2731,9 @@ void ObjCARCOpt::MoveCalls(Value *Arg,
                                 getRetainBlockCallee(M) : getRetainCallee(M),
                                 MyArg, "", InsertPt);
     Call->setDoesNotThrow();
+    if (RetainsToMove.CopyOnEscape)
+      Call->setMetadata(CopyOnEscapeMDKind,
+                        MDNode::get(M->getContext(), ArrayRef<Value *>()));
     if (!RetainsToMove.IsRetainBlock)
       Call->setTailCall();
   }
@@ -2792,10 +2813,11 @@ ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
     // regardless of what possible decrements or uses lie between them.
     bool KnownSafe = isa<Constant>(Arg);
 
-    // Same for stack storage, unless this is an objc_retainBlock call,
-    // which is responsible for copying the block data from the stack to
-    // the heap.
-    if (!I->second.IsRetainBlock && isa<AllocaInst>(Arg))
+    // Same for stack storage, unless this is a non-copy-on-escape
+    // objc_retainBlock call, which is responsible for copying the block data
+    // from the stack to the heap.
+    if ((!I->second.IsRetainBlock || I->second.CopyOnEscape) &&
+        isa<AllocaInst>(Arg))
       KnownSafe = true;
 
     // A constant pointer can't be pointing to an object on the heap. It may
@@ -2905,6 +2927,7 @@ ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
             // Merge the IsRetainBlock values.
             if (FirstRetain) {
               RetainsToMove.IsRetainBlock = NewReleaseRetainRRI.IsRetainBlock;
+              RetainsToMove.CopyOnEscape = NewReleaseRetainRRI.CopyOnEscape;
               FirstRetain = false;
             } else if (ReleasesToMove.IsRetainBlock !=
                        NewReleaseRetainRRI.IsRetainBlock)
@@ -2912,6 +2935,9 @@ ObjCARCOpt::PerformCodePlacement(DenseMap<const BasicBlock *, BBState>
               // objc_retain and the other uses objc_retainBlock.
               goto next_retain;
 
+            // Merge the CopyOnEscape values.
+            RetainsToMove.CopyOnEscape &= NewReleaseRetainRRI.CopyOnEscape;
+
             // Collect the optimal insertion points.
             if (!KnownSafe)
               for (SmallPtrSet<Instruction *, 2>::const_iterator
@@ -3265,6 +3291,8 @@ bool ObjCARCOpt::doInitialization(Module &M) {
   // Identify the imprecise release metadata kind.
   ImpreciseReleaseMDKind =
     M.getContext().getMDKindID("clang.imprecise_release");
+  CopyOnEscapeMDKind =
+    M.getContext().getMDKindID("clang.arc.copy_on_escape");
 
   // Intuitively, objc_retain and others are nocapture, however in practice
   // they are not, because they return their argument value. And objc_release
@@ -1,11 +1,6 @@
 ; RUN: opt -S -objc-arc < %s | FileCheck %s
 ; rdar://10209613
 
-; CHECK: define void @test
-; CHECK: %3 = call i8* @objc_retainBlock(i8* %2) nounwind
-; CHECK: @objc_msgSend
-; CHECK-NEXT: @objc_release(i8* %3)
-
 %0 = type opaque
 %struct.__block_descriptor = type { i64, i64 }
 
@@ -13,6 +8,10 @@
 @__block_descriptor_tmp = external hidden constant { i64, i64, i8*, i8*, i8*, i8* }
 @"\01L_OBJC_SELECTOR_REFERENCES_" = external hidden global i8*, section "__DATA, __objc_selrefs, literal_pointers, no_dead_strip"
 
+; CHECK: define void @test(
+; CHECK: %3 = call i8* @objc_retainBlock(i8* %2) nounwind
+; CHECK: @objc_msgSend
+; CHECK-NEXT: @objc_release(i8* %3)
 define void @test(%0* %array) uwtable {
 entry:
   %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>, align 8
@@ -41,6 +40,43 @@ entry:
   ret void
 }
 
+; Same as test, but the objc_retainBlock has a clang.arc.copy_on_escape
+; tag so it's safe to delete.
+
+; CHECK: define void @test_with_COE(
+; CHECK-NOT: @objc_retainBlock
+; CHECK: @objc_msgSend
+; CHECK: @objc_release
+; CHECK-NOT: @objc_release
+; CHECK: }
+define void @test_with_COE(%0* %array) uwtable {
+entry:
+  %block = alloca <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>, align 8
+  %0 = bitcast %0* %array to i8*
+  %1 = tail call i8* @objc_retain(i8* %0) nounwind
+  %block.isa = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 0
+  store i8* bitcast (i8** @_NSConcreteStackBlock to i8*), i8** %block.isa, align 8
+  %block.flags = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 1
+  store i32 1107296256, i32* %block.flags, align 8
+  %block.reserved = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 2
+  store i32 0, i32* %block.reserved, align 4
+  %block.invoke = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 3
+  store i8* bitcast (void (i8*)* @__test_block_invoke_0 to i8*), i8** %block.invoke, align 8
+  %block.descriptor = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 4
+  store %struct.__block_descriptor* bitcast ({ i64, i64, i8*, i8*, i8*, i8* }* @__block_descriptor_tmp to %struct.__block_descriptor*), %struct.__block_descriptor** %block.descriptor, align 8
+  %block.captured = getelementptr inbounds <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block, i64 0, i32 5
+  store %0* %array, %0** %block.captured, align 8
+  %2 = bitcast <{ i8*, i32, i32, i8*, %struct.__block_descriptor*, %0* }>* %block to i8*
+  %3 = call i8* @objc_retainBlock(i8* %2) nounwind, !clang.arc.copy_on_escape !0
+  %tmp2 = load i8** @"\01L_OBJC_SELECTOR_REFERENCES_", align 8
+  call void bitcast (i8* (i8*, i8*, ...)* @objc_msgSend to void (i8*, i8*, i8*)*)(i8* %0, i8* %tmp2, i8* %3)
+  call void @objc_release(i8* %3) nounwind
+  %strongdestroy = load %0** %block.captured, align 8
+  %4 = bitcast %0* %strongdestroy to i8*
+  call void @objc_release(i8* %4) nounwind, !clang.imprecise_release !0
+  ret void
+}
+
 declare i8* @objc_retain(i8*)
 
 declare void @__test_block_invoke_0(i8* nocapture) uwtable