[msan] Do not store origin for clean values.
Instead of unconditionally storing origin with every application store, only do this when the shadow of the stored value is != 0.

This change also delays instrumentation of stores until after the walk over the function's instructions, because adding new basic blocks confuses InstVisitor.

We keep only one origin value per 4 bytes of application memory, so this change also fixes a bug where a store of a single clean byte wiped the origin for the whole 4-byte area.

Since stores of uninitialized values are relatively uncommon, this change improves the performance of track-origins mode by 5% (median) and by up to 47% on SPEC benchmarks.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@169490 91177308-0d34-0410-b5e6-96231b3b80d8
commit 4031b194ac
parent 6afe478e00
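The 4-byte origin granularity mentioned above can be illustrated with a small piece of hypothetical user code (this example is ours, not part of the commit):

// MSan keeps one 32-bit origin slot per 4 bytes of application memory,
// so every byte of 'x' below shares a single origin slot.
int f() {
  int x;            // all 4 bytes poisoned; the shared origin slot records
                    // where 'x' was allocated
  *(char *)&x = 0;  // a fully initialized (clean) 1-byte store into byte 0
  // Before this change the origin was stored unconditionally, so the clean
  // byte store overwrote the slot and the still-poisoned bytes 1..3 lost
  // their origin. Now the origin store is skipped when the stored value's
  // shadow is 0, leaving the old origin intact.
  return x;         // use of uninitialized memory (bytes 1..3)
}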
--- a/lib/Transforms/Instrumentation/MemorySanitizer.cpp
+++ b/lib/Transforms/Instrumentation/MemorySanitizer.cpp
@@ -102,6 +102,10 @@ static cl::opt<bool> ClHandleICmp("msan-handle-icmp",
        cl::desc("propagate shadow through ICmpEQ and ICmpNE"),
        cl::Hidden, cl::init(true));
 
+static cl::opt<bool> ClStoreCleanOrigin("msan-store-clean-origin",
+       cl::desc("store origin for clean (fully initialized) values"),
+       cl::Hidden, cl::init(false));
+
 // This flag controls whether we check the shadow of the address
 // operand of load or store. Such bugs are very rare, since load from
 // a garbage address typically results in SEGV, but still happen
@@ -180,6 +184,8 @@ private:
   uint64_t OriginOffset;
   /// \brief Branch weights for error reporting.
   MDNode *ColdCallWeights;
+  /// \brief Branch weights for origin store.
+  MDNode *OriginStoreWeights;
   /// \brief The blacklist.
   OwningPtr<BlackList> BL;
   /// \brief An empty volatile inline asm that prevents callback merge.
@@ -308,6 +314,7 @@ bool MemorySanitizer::doInitialization(Module &M) {
   OriginTy = IRB.getInt32Ty();
 
   ColdCallWeights = MDBuilder(*C).createBranchWeights(1, 1000);
+  OriginStoreWeights = MDBuilder(*C).createBranchWeights(1, 1000);
 
   // Insert a call to __msan_init/__msan_track_origins into the module's CTORs.
   appendToGlobalCtors(M, cast<Function>(M.getOrInsertFunction(
@@ -382,6 +389,7 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
     ShadowOriginAndInsertPoint() : Shadow(0), Origin(0), OrigIns(0) { }
   };
   SmallVector<ShadowOriginAndInsertPoint, 16> InstrumentationList;
+  SmallVector<Instruction*, 16> StoreList;
 
   MemorySanitizerVisitor(Function &F, MemorySanitizer &MS)
     : F(F), MS(MS), VAHelper(CreateVarArgHelper(F, MS, *this)) {
@@ -391,6 +399,49 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
                    << F.getName() << "'\n");
   }
 
+  void materializeStores() {
+    for (size_t i = 0, n = StoreList.size(); i < n; i++) {
+      StoreInst& I = *dyn_cast<StoreInst>(StoreList[i]);
+
+      IRBuilder<> IRB(&I);
+      Value *Val = I.getValueOperand();
+      Value *Addr = I.getPointerOperand();
+      Value *Shadow = getShadow(Val);
+      Value *ShadowPtr = getShadowPtr(Addr, Shadow->getType(), IRB);
+
+      StoreInst *NewSI = IRB.CreateAlignedStore(Shadow, ShadowPtr, I.getAlignment());
+      DEBUG(dbgs() << "  STORE: " << *NewSI << "\n");
+      // If the store is volatile, add a check.
+      if (I.isVolatile())
+        insertCheck(Val, &I);
+      if (ClCheckAccessAddress)
+        insertCheck(Addr, &I);
+
+      if (ClTrackOrigins) {
+        if (ClStoreCleanOrigin || isa<StructType>(Shadow->getType())) {
+          IRB.CreateAlignedStore(getOrigin(Val), getOriginPtr(Addr, IRB), I.getAlignment());
+        } else {
+          Value *ConvertedShadow = convertToShadowTyNoVec(Shadow, IRB);
+
+          Constant *Cst = dyn_cast_or_null<Constant>(ConvertedShadow);
+          // TODO(eugenis): handle non-zero constant shadow by inserting an
+          // unconditional check (can not simply fail compilation as this could
+          // be in the dead code).
+          if (Cst)
+            continue;
+
+          Value *Cmp = IRB.CreateICmpNE(ConvertedShadow,
+              getCleanShadow(ConvertedShadow), "_mscmp");
+          Instruction *CheckTerm =
+            SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false, MS.OriginStoreWeights);
+          IRBuilder<> IRBNewBlock(CheckTerm);
+          IRBNewBlock.CreateAlignedStore(getOrigin(Val),
+              getOriginPtr(Addr, IRBNewBlock), I.getAlignment());
+        }
+      }
+    }
+  }
+
   void materializeChecks() {
     for (size_t i = 0, n = InstrumentationList.size(); i < n; i++) {
       Instruction *Shadow = InstrumentationList[i].Shadow;
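In IR terms, the else-branch above emits an ICmpNE of the value's shadow against zero and a branch (weighted by OriginStoreWeights) around the origin store. A minimal C++ sketch of the runtime semantics this gives a 4-byte store; the Word struct is our illustration, not MSan's real address-based shadow mapping:

#include <cstdint>

// Toy model: a 4-byte application word together with its shadow and its
// one-per-4-bytes origin slot. Purely illustrative.
struct Word {
  uint32_t app, shadow, origin;
};

void instrumentedStore(Word &w, uint32_t val,
                       uint32_t valShadow, uint32_t valOrigin) {
  w.app = val;
  w.shadow = valShadow;    // the shadow store stays unconditional
  if (valShadow != 0)      // the ICmpNE + branch materializeStores() emits
    w.origin = valOrigin;  // origin is written only for poisoned values
}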
@@ -448,6 +499,11 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
 
     VAHelper->finalizeInstrumentation();
 
+    // Delayed instrumentation of StoreInst.
+    // This may add new checks to be inserted later.
+    materializeStores();
+
+    // Insert shadow value checks.
     materializeChecks();
 
     return true;
@@ -738,23 +794,7 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
   /// Optionally, checks that the store address is fully defined.
   /// Volatile stores check that the value being stored is fully defined.
   void visitStoreInst(StoreInst &I) {
-    IRBuilder<> IRB(&I);
-    Value *Val = I.getValueOperand();
-    Value *Addr = I.getPointerOperand();
-    Value *Shadow = getShadow(Val);
-    Value *ShadowPtr = getShadowPtr(Addr, Shadow->getType(), IRB);
-
-    StoreInst *NewSI = IRB.CreateAlignedStore(Shadow, ShadowPtr, I.getAlignment());
-    DEBUG(dbgs() << "  STORE: " << *NewSI << "\n");
-    (void)NewSI;
-    // If the store is volatile, add a check.
-    if (I.isVolatile())
-      insertCheck(Val, &I);
-    if (ClCheckAccessAddress)
-      insertCheck(Addr, &I);
-
-    if (ClTrackOrigins)
-      IRB.CreateAlignedStore(getOrigin(Val), getOriginPtr(Addr, IRB), I.getAlignment());
+    StoreList.push_back(&I);
   }
 
   // Vector manipulation.
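With this change visitStoreInst() only records the store and all IR mutation happens in materializeStores(). A generic sketch of that collect-then-mutate idiom, our own simplification rather than the LLVM types:

#include <vector>

// Record nodes during the traversal, rewrite them after it: mutating
// mid-walk (here, SplitBlockAndInsertIfThen creating new basic blocks)
// would confuse the in-progress InstVisitor traversal.
template <typename Node, typename RewriteFn>
class DelayedRewriter {
  std::vector<Node *> Worklist;

public:
  void visit(Node *N) { Worklist.push_back(N); }  // traversal: record only
  void finalize(RewriteFn Rewrite) {
    for (Node *N : Worklist)  // traversal done; mutation is now safe
      Rewrite(N);
  }
};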
--- a/test/Instrumentation/MemorySanitizer/msan_basic.ll
+++ b/test/Instrumentation/MemorySanitizer/msan_basic.ll
@@ -1,4 +1,5 @@
 ; RUN: opt < %s -msan -S | FileCheck %s
+; RUN: opt < %s -msan -msan-track-origins=1 -S | FileCheck -check-prefix=CHECK-ORIGINS %s
 target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
 
 ; Check the presence of __msan_init
@@ -7,6 +8,31 @@ target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
 ; Check the presence and the linkage type of __msan_track_origins
 ; CHECK: @__msan_track_origins = weak_odr constant i32 0
 
+; Check instrumentation of stores
+define void @Store(i32* nocapture %p, i32 %x) nounwind uwtable {
+entry:
+  store i32 %x, i32* %p, align 4
+  ret void
+}
+
+; CHECK: @Store
+; CHECK: load {{.*}} @__msan_param_tls
+; CHECK: store
+; CHECK: store
+; CHECK: ret void
+; CHECK-ORIGINS: @Store
+; CHECK-ORIGINS: load {{.*}} @__msan_param_tls
+; CHECK-ORIGINS: store
+; CHECK-ORIGINS: icmp
+; CHECK-ORIGINS: br i1
+; CHECK-ORIGINS: <label>
+; CHECK-ORIGINS: store
+; CHECK-ORIGINS: br label
+; CHECK-ORIGINS: <label>
+; CHECK-ORIGINS: store
+; CHECK-ORIGINS: ret void
+
+
 ; load followed by cmp: check that we load the shadow and call __msan_warning.
 define void @LoadAndCmp(i32* nocapture %a) nounwind uwtable {
 entry: