Don't try to instrument allocas used by outlined SEH funclets

Summary:
Arguments to llvm.localescape must be static allocas. They must be at
some statically known offset from the frame or stack pointer so that
other functions can access them with localrecover.

If we ever want to instrument these, we can use more indirection to
recover the addresses of these local variables. We can do it during
clang irgen or with the asan module pass.
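
As a rough illustration (not from the patch itself), MSVC-style SEH source along these lines is the kind of code that lowers to the localescape/localrecover pattern exercised by the new test below; the function names, variable names, and exception code here are made up:

// Illustrative sketch only. The filter expression is outlined into its own
// function, so `code` must sit at a statically known frame offset for the
// outlined filter to find it via llvm.localrecover.
#include <windows.h>
#include <cstdio>

static void may_throw(int *r) {
  *r = 2;
  RaiseException(0xE0000001, 0, 0, nullptr); // force the filter to run
}

int main() {
  int r = 0;
  unsigned long code = 0;
  __try {
    may_throw(&r);
  } __except (code = GetExceptionCode(), EXCEPTION_EXECUTE_HANDLER) {
    r = 1; // reached when the filter accepts the exception
  }
  std::printf("r=%d code=%lx\n", r, code);
  return r;
}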

Reviewers: eugenis

Subscribers: llvm-commits

Differential Revision: http://reviews.llvm.org/D11307

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@242726 91177308-0d34-0410-b5e6-96231b3b80d8
Author: Reid Kleckner
Date: 2015-07-20 22:49:44 +00:00
parent bb418bc23b
commit dfc9688bd9
2 changed files with 113 additions and 0 deletions


@@ -439,6 +439,7 @@ struct AddressSanitizer : public FunctionPass {
Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
bool runOnFunction(Function &F) override;
bool maybeInsertAsanInitAtFunctionEntry(Function &F);
void markEscapedLocalAllocas(Function &F);
bool doInitialization(Module &M) override;
static char ID; // Pass identification, replacement for typeid
@@ -548,6 +549,7 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
SmallVector<AllocaInst *, 1> DynamicAllocaVec;
SmallVector<IntrinsicInst *, 1> StackRestoreVec;
AllocaInst *DynamicAllocaLayout = nullptr;
IntrinsicInst *LocalEscapeCall = nullptr;
// Maps Value to an AllocaInst from which the Value is originated.
typedef DenseMap<Value *, AllocaInst *> AllocaForValueMapTy;
@@ -645,6 +647,7 @@ struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
void visitIntrinsicInst(IntrinsicInst &II) {
Intrinsic::ID ID = II.getIntrinsicID();
if (ID == Intrinsic::stackrestore) StackRestoreVec.push_back(&II);
if (ID == Intrinsic::localescape) LocalEscapeCall = &II;
if (!ClCheckLifetime) return;
if (ID != Intrinsic::lifetime_start && ID != Intrinsic::lifetime_end)
return;
@@ -1479,6 +1482,34 @@ bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
return false;
}
void AddressSanitizer::markEscapedLocalAllocas(Function &F) {
// Find the one possible call to llvm.localescape and pre-mark allocas passed
// to it as uninteresting. This assumes we haven't started processing allocas
// yet. This check is done up front because iterating the use list in
// isInterestingAlloca would be algorithmically slower.
assert(ProcessedAllocas.empty() && "must process localescape before allocas");
// Try to get the declaration of llvm.localescape. If it's not in the module,
// we can exit early.
if (!F.getParent()->getFunction("llvm.localescape")) return;
// Look for a call to llvm.localescape in the entry block. It can't be in any
// other block.
for (Instruction &I : F.getEntryBlock()) {
IntrinsicInst *II = dyn_cast<IntrinsicInst>(&I);
if (II && II->getIntrinsicID() == Intrinsic::localescape) {
// We found a call. Mark all the allocas passed in as uninteresting.
for (Value *Arg : II->arg_operands()) {
AllocaInst *AI = dyn_cast<AllocaInst>(Arg->stripPointerCasts());
assert(AI && AI->isStaticAlloca() &&
"non-static alloca arg to localescape");
ProcessedAllocas[AI] = false;
}
break;
}
}
}
bool AddressSanitizer::runOnFunction(Function &F) {
if (&F == AsanCtorFunction) return false;
if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) return false;
@@ -1494,6 +1525,10 @@ bool AddressSanitizer::runOnFunction(Function &F) {
if (!ClDebugFunc.empty() && ClDebugFunc != F.getName()) return false;
// We can't instrument allocas used with llvm.localescape. Only static allocas
// can be passed to that intrinsic.
markEscapedLocalAllocas(F);
// We want to instrument every address only once per basic block (unless there
// are calls between uses).
SmallSet<Value *, 16> TempsToInstrument;
@@ -1584,6 +1619,8 @@ bool AddressSanitizer::runOnFunction(Function &F) {
DEBUG(dbgs() << "ASAN done instrumenting: " << res << " " << F << "\n");
ProcessedAllocas.clear();
return res;
}
@@ -1745,6 +1782,9 @@ void FunctionStackPoisoner::poisonStack() {
// treated as regular stack slots.
for (auto *AI : NonInstrumentedStaticAllocaVec) AI->moveBefore(InsBefore);
// If we have a call to llvm.localescape, keep it in the entry block.
if (LocalEscapeCall) LocalEscapeCall->moveBefore(InsBefore);
SmallVector<ASanStackVariableDescription, 16> SVD;
SVD.reserve(AllocaVec.size());
for (AllocaInst *AI : AllocaVec) {

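The pre-marking in markEscapedLocalAllocas above relies on the alloca-interest query consulting the ProcessedAllocas cache before doing any real analysis, so an escaped alloca is answered with false without ever walking its use list. A minimal standalone C++ model of that interplay, assuming a cache-first isInterestingAlloca (a sketch of the idea, not LLVM's actual implementation):

// Standalone model: escaped allocas are pre-marked "not interesting", and the
// later interest query hits the cache first, so the real analysis never runs
// on them. All names here are stand-ins for illustration.
#include <cassert>
#include <iostream>
#include <unordered_map>
#include <vector>

struct Alloca {
  bool Static = true; // localescape operands must be static allocas
};

class Sanitizer {
  std::unordered_map<const Alloca *, bool> ProcessedAllocas;

public:
  // Mirrors the pre-marking step: run before any alloca has been queried.
  void markEscapedLocalAllocas(const std::vector<Alloca *> &EscapedArgs) {
    assert(ProcessedAllocas.empty() && "must pre-mark before any query");
    for (Alloca *A : EscapedArgs) {
      assert(A->Static && "non-static alloca arg to localescape");
      ProcessedAllocas[A] = false; // never instrument escaped allocas
    }
  }

  // Mirrors the cache-first query: a pre-marked alloca is answered from the
  // cache, so the expensive analysis below is skipped for it.
  bool isInterestingAlloca(const Alloca &A) {
    auto It = ProcessedAllocas.find(&A);
    if (It != ProcessedAllocas.end())
      return It->second;
    bool Interesting = A.Static; // placeholder for the real analysis
    ProcessedAllocas[&A] = Interesting;
    return Interesting;
  }
};

int main() {
  Alloca Escaped, Ordinary;
  Sanitizer S;
  S.markEscapedLocalAllocas({&Escaped});
  std::cout << S.isInterestingAlloca(Escaped) << ' '    // 0: pre-marked
            << S.isInterestingAlloca(Ordinary) << '\n'; // 1: instrumented
}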

@@ -0,0 +1,73 @@
; RUN: opt < %s -asan -asan-module -asan-use-after-return -asan-stack-dynamic-alloca -S | FileCheck %s
; RUN: opt < %s -asan -asan-module -asan-use-after-return=0 -asan-stack-dynamic-alloca=0 -S | FileCheck %s
target datalayout = "e-m:o-i64:64-f80:128-n8:16:32:64-S128"
target triple = "x86_64-apple-macosx10.10.0"
declare i32 @llvm.eh.typeid.for(i8*) #2
declare i8* @llvm.frameaddress(i32)
declare i8* @llvm.x86.seh.recoverfp(i8*, i8*)
declare i8* @llvm.localrecover(i8*, i8*, i32)
declare void @llvm.localescape(...) #1
declare i32 @_except_handler3(...)
declare void @may_throw(i32* %r)
define i32 @main() sanitize_address personality i8* bitcast (i32 (...)* @_except_handler3 to i8*) {
entry:
%r = alloca i32, align 4
%__exception_code = alloca i32, align 4
call void (...) @llvm.localescape(i32* nonnull %__exception_code)
%0 = bitcast i32* %r to i8*
store i32 0, i32* %r, align 4
invoke void @may_throw(i32* nonnull %r) #4
to label %__try.cont unwind label %lpad
lpad: ; preds = %entry
%1 = landingpad { i8*, i32 }
catch i8* bitcast (i32 ()* @"\01?filt$0@0@main@@" to i8*)
%2 = extractvalue { i8*, i32 } %1, 1
%3 = call i32 @llvm.eh.typeid.for(i8* bitcast (i32 ()* @"\01?filt$0@0@main@@" to i8*)) #1
%matches = icmp eq i32 %2, %3
br i1 %matches, label %__except, label %eh.resume
__except: ; preds = %lpad
store i32 1, i32* %r, align 4
br label %__try.cont
__try.cont: ; preds = %entry, %__except
%4 = load i32, i32* %r, align 4
ret i32 %4
eh.resume: ; preds = %lpad
resume { i8*, i32 } %1
}
; Check that the alloca remains static and the localescape call remains in the
; entry block.
; CHECK-LABEL: define i32 @main()
; CHECK-NOT: br {{.*}}label
; CHECK: %__exception_code = alloca i32, align 4
; CHECK-NOT: br {{.*}}label
; CHECK: call void (...) @llvm.localescape(i32* nonnull %__exception_code)
; Function Attrs: nounwind
define internal i32 @"\01?filt$0@0@main@@"() #1 {
entry:
%0 = tail call i8* @llvm.frameaddress(i32 1)
%1 = tail call i8* @llvm.x86.seh.recoverfp(i8* bitcast (i32 ()* @main to i8*), i8* %0)
%2 = tail call i8* @llvm.localrecover(i8* bitcast (i32 ()* @main to i8*), i8* %1, i32 0)
%__exception_code = bitcast i8* %2 to i32*
%3 = getelementptr inbounds i8, i8* %0, i32 -20
%4 = bitcast i8* %3 to { i32*, i8* }**
%5 = load { i32*, i8* }*, { i32*, i8* }** %4, align 4
%6 = getelementptr inbounds { i32*, i8* }, { i32*, i8* }* %5, i32 0, i32 0
%7 = load i32*, i32** %6, align 4
%8 = load i32, i32* %7, align 4
store i32 %8, i32* %__exception_code, align 4
ret i32 1
}
; CHECK-LABEL: define internal i32 @"\01?filt$0@0@main@@"()
; CHECK: tail call i8* @llvm.localrecover(i8* bitcast (i32 ()* @main to i8*), i8* {{.*}}, i32 0)