Mirror of https://github.com/c64scene-ar/llvm-6502.git, synced 2024-12-13 04:30:23 +00:00
Analysis: Canonicalize access to function attributes, NFC
Canonicalize access to function attributes to use the simpler API.

getAttributes().getAttribute(AttributeSet::FunctionIndex, Kind)
  => getFnAttribute(Kind)

getAttributes().hasAttribute(AttributeSet::FunctionIndex, Kind)
  => hasFnAttribute(Kind)

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@229192 91177308-0d34-0410-b5e6-96231b3b80d8
parent c21d153388
commit efd49ea0d2
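To illustrate the commit message before the diff itself, here is a minimal sketch (not part of the patch): a hypothetical helper that puts the verbose AttributeSet query next to the canonical Function accessor this change switches to. Only the two spellings that appear in the diff are taken from the source; the helper name and surrounding scaffolding are assumptions, written against LLVM headers of this era (when AttributeSet still exposed a FunctionIndex slot).

// Hedged sketch, not from the patch: contrasts the old and new spellings.
#include <cassert>

#include "llvm/IR/Attributes.h"
#include "llvm/IR/Function.h"

using namespace llvm;

// Hypothetical helper: does F carry the returns_twice function attribute?
static bool hasReturnsTwiceAttr(const Function &F) {
  // Old spelling: pull the whole AttributeSet and name the per-function slot.
  bool Verbose = F.getAttributes().hasAttribute(AttributeSet::FunctionIndex,
                                                Attribute::ReturnsTwice);
  // Canonical spelling after this commit: ask the Function directly.
  bool Canonical = F.hasFnAttribute(Attribute::ReturnsTwice);
  assert(Verbose == Canonical && "both forms query the same function slot");
  (void)Verbose; // silence -Wunused-variable in NDEBUG builds
  return Canonical;
}

The change is NFC: both spellings answer the same question, the win is purely that callers no longer have to spell out the AttributeSet::FunctionIndex slot.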
@@ -719,8 +719,7 @@ bool CallAnalyzer::simplifyCallSite(Function *F, CallSite CS) {
 
 bool CallAnalyzer::visitCallSite(CallSite CS) {
   if (CS.hasFnAttr(Attribute::ReturnsTwice) &&
-      !F.getAttributes().hasAttribute(AttributeSet::FunctionIndex,
-                                      Attribute::ReturnsTwice)) {
+      !F.hasFnAttribute(Attribute::ReturnsTwice)) {
     // This aborts the entire analysis.
     ExposesReturnsTwice = true;
     return false;
@@ -1350,9 +1349,7 @@ InlineCost InlineCostAnalysis::getInlineCost(CallSite CS, Function *Callee,
 }
 
 bool InlineCostAnalysis::isInlineViable(Function &F) {
-  bool ReturnsTwice =
-    F.getAttributes().hasAttribute(AttributeSet::FunctionIndex,
-                                   Attribute::ReturnsTwice);
+  bool ReturnsTwice = F.hasFnAttribute(Attribute::ReturnsTwice);
   for (Function::iterator BI = F.begin(), BE = F.end(); BI != BE; ++BI) {
     // Disallow inlining of functions which contain indirect branches or
     // blockaddresses.
@@ -298,8 +298,7 @@ getLoadLoadClobberFullWidthSize(const Value *MemLocBase, int64_t MemLocOffs,
 
   // Load widening is hostile to ThreadSanitizer: it may cause false positives
   // or make the reports more cryptic (access sizes are wrong).
-  if (LI->getParent()->getParent()->getAttributes().
-      hasAttribute(AttributeSet::FunctionIndex, Attribute::SanitizeThread))
+  if (LI->getParent()->getParent()->hasFnAttribute(Attribute::SanitizeThread))
     return 0;
 
   // Get the base of this load.
@@ -344,9 +343,9 @@ getLoadLoadClobberFullWidthSize(const Value *MemLocBase, int64_t MemLocOffs,
         !DL.fitsInLegalInteger(NewLoadByteSize*8))
       return 0;
 
-    if (LIOffs+NewLoadByteSize > MemLocEnd &&
-        LI->getParent()->getParent()->getAttributes().
-        hasAttribute(AttributeSet::FunctionIndex, Attribute::SanitizeAddress))
+    if (LIOffs + NewLoadByteSize > MemLocEnd &&
+        LI->getParent()->getParent()->hasFnAttribute(
+            Attribute::SanitizeAddress))
       // We will be reading past the location accessed by the original program.
       // While this is safe in a regular build, Address Safety analysis tools
       // may start reporting false warnings. So, don't do widening.