mirror of
https://github.com/c64scene-ar/llvm-6502.git
synced 2026-04-21 23:17:16 +00:00
refactor the MemoryBuiltin analysis:
- provide more extensive set of functions to detect library allocation functions (e.g., malloc, calloc, strdup, etc) - provide an API to compute the size and offset of an object pointed to by a pointer. Move a few clients (GVN, AA, instcombine, ...) to the new API. This implementation is a lot more aggressive than each of the custom implementations being replaced. Patch reviewed by Nick Lewycky and Chandler Carruth, thanks. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@158919 91177308-0d34-0410-b5e6-96231b3b80d8
This commit is contained in:
@@ -329,15 +329,8 @@ bool GlobalsModRef::AnalyzeIndirectGlobalMemory(GlobalValue *GV) {
|
||||
// Check the value being stored.
|
||||
Value *Ptr = GetUnderlyingObject(SI->getOperand(0));
|
||||
|
||||
if (isMalloc(Ptr)) {
|
||||
// Okay, easy case.
|
||||
} else if (CallInst *CI = dyn_cast<CallInst>(Ptr)) {
|
||||
Function *F = CI->getCalledFunction();
|
||||
if (!F || !F->isDeclaration()) return false; // Too hard to analyze.
|
||||
if (F->getName() != "calloc") return false; // Not calloc.
|
||||
} else {
|
||||
if (!isAllocLikeFn(Ptr))
|
||||
return false; // Too hard to analyze.
|
||||
}
|
||||
|
||||
// Analyze all uses of the allocation. If any of them are used in a
|
||||
// non-simple way (e.g. stored to another global) bail out.
|
||||
@@ -454,19 +447,18 @@ void GlobalsModRef::AnalyzeCallGraph(CallGraph &CG, Module &M) {
|
||||
for (inst_iterator II = inst_begin(SCC[i]->getFunction()),
|
||||
E = inst_end(SCC[i]->getFunction());
|
||||
II != E && FunctionEffect != ModRef; ++II)
|
||||
if (isa<LoadInst>(*II)) {
|
||||
if (LoadInst *LI = dyn_cast<LoadInst>(&*II)) {
|
||||
FunctionEffect |= Ref;
|
||||
if (cast<LoadInst>(*II).isVolatile())
|
||||
if (LI->isVolatile())
|
||||
// Volatile loads may have side-effects, so mark them as writing
|
||||
// memory (for example, a flag inside the processor).
|
||||
FunctionEffect |= Mod;
|
||||
} else if (isa<StoreInst>(*II)) {
|
||||
} else if (StoreInst *SI = dyn_cast<StoreInst>(&*II)) {
|
||||
FunctionEffect |= Mod;
|
||||
if (cast<StoreInst>(*II).isVolatile())
|
||||
if (SI->isVolatile())
|
||||
// Treat volatile stores as reading memory somewhere.
|
||||
FunctionEffect |= Ref;
|
||||
} else if (isMalloc(&cast<Instruction>(*II)) ||
|
||||
isFreeCall(&cast<Instruction>(*II))) {
|
||||
} else if (isAllocationFn(&*II) || isFreeCall(&*II)) {
|
||||
FunctionEffect |= ModRef;
|
||||
} else if (IntrinsicInst *Intrinsic = dyn_cast<IntrinsicInst>(&*II)) {
|
||||
// The callgraph doesn't include intrinsic calls.
|
||||
|
||||
Reference in New Issue
Block a user